HSEYMLO2DJCDGBO4F7T6NFMFSZ4TMSOBH5XGIU5NYOEFKUSV5UKAC
VSTBTRYEZOZPPEIBDFVF2WQKPOJBZTS77SOZVDBPR2IKFHH67U5AC
FXEDPLRI7PXLDXV634ZA6D5Q3ZWG3ESTKJTMRPJ4MAHI7PKU3M6AC
G7VOM2IMN4SOHOOCFLMNPKTU5DB5EEJMK7SSBA3DLWWXFA672RUAC
SXEYMYF7P4RZMZ46WPL4IZUTSQ2ATBWYZX7QNVMS3SGOYXYOHAGQC
L4JXJHWXYNCL4QGJXNKKTOKKTAXKKXBJUUY7HFZGEUZ5A2V5H34QC
5OGOE4VWS5AIG4U2UYLLIGA3HY6UB7SNQOSESHNXBLET3VQXFBZAC
YDKNUL6B4EFM5U2GG36SSEKXHS6XK4OLIWUVE4BUAJ5VYJFHAOIQC
KWAMD2KR5UYRHHPZWL7GY2KQKNXNVS4BYBVK3FXDI23NQMWA3U4QC
BZSC7VMYSFRXDHDDAMCDR6X67FN5VWIBOSE76BQLX7OCVOJFUA3AC
CCFJ7VO3I73FE3MZRS5RSDRYNZVW7AXC345P4BXS7JIL2TU3LQJQC
I52XSRUH5RVHQBFWVMAQPTUSPAJ4KNVID2RMI3UGCVKFLYUO6WZAC
5BB266P6HPUGYEVR7QNNOA62EFPYPUYJ3UMLE5J3LLYMSUWXANIQC
VO5OQW4W2656DIYYRNZ3PO7TQ4JOKQ3GVWE5ALUTYVMX3WMXJOYQC
4OCC6D42GZYRDLH3NSKXMJTRKXP7UZ6Z3YNGCNUT7NT6WBDBCBIAC
ZHABNS3S6FSINO74FOI5KHYXYDTBPO4FQTTYTUS7NNKEVVNLYC4AC
CCLLB7OIFNFYJZTG3UCI7536TOCWSCSXR67VELSB466R24WLJSDAC
VYHHOEYHO67JNJEODX5L3CQFIV3DAXZBBIQUOMCWJDYF3VWICDNQC
3J6IK4W2BA4RJJQYXZOQFU4AQ3WJIM3TUXUNNBIQ6M6TS2JSHT6QC
OJZWJUF2TCGZ7RFVY6FPKBS5P3C4BGHZDPVH775OHVNVFMJICKNQC
I24UEJQLCH2SOXA4UHIYWTRDCHSOPU7AFTRUOTX7HZIAV4AZKYEQC
V435QOJRFHNKW3NKJHMVMFOGO3KGAZVSURLSGFUHVKOMHMF4Q2ZQC
2RXOCWUWOGHEKHT5W73LAHJSOZVRTOGS7BWLSIGEEEBJGMCZBXQAC
4VWXL6KQGYGDUQRCVJCEVIV6CKJSEIYDX4YF33OX6EDNKJNEGD2AC
R3H7D42UZ446V5TO2574BMAQQAYYJPEIMSZVDPAGVIYU2COJSWBAC
// NOTE(review): corrupted fragment — the enclosing function header and its
// closing braces are not visible in this chunk, and the block immediately
// below (building `changes`) duplicates the `else` branch further down.
// The `for` loop opened here is never closed before the `if` at the
// `self.untracked` check; this region needs manual reconstruction.
//
// Intent (from the visible code): group the hunks of a change by file path
// into a BTreeMap<path, Vec<Status>>, labelling each hunk with a
// human-readable operation name and its line, then serialize as JSON.
let mut changes = BTreeMap::new();
for ch in change.changes.iter() {
changes
.entry(ch.path())
.or_insert_with(Vec::new)
.push(Status {
// Map each Hunk variant to a stable display string for JSON output.
operation: match ch {
Hunk::FileMove { .. } => "file move",
Hunk::FileDel { .. } => "file del",
Hunk::FileUndel { .. } => "file undel",
Hunk::SolveNameConflict { .. } => "solve name conflict",
Hunk::UnsolveNameConflict { .. } => "unsolve name conflict",
Hunk::FileAdd { .. } => "file add",
Hunk::Edit { .. } => "edit",
Hunk::Replacement { .. } => "replacement",
Hunk::SolveOrderConflict { .. } => "solve order conflict",
Hunk::UnsolveOrderConflict { .. } => "unsolve order conflict",
Hunk::ResurrectZombies { .. } => "resurrect zombies",
},
line: ch.line(),
});
// If only untracked files were requested, print them as a JSON array
// and skip the per-hunk status map entirely.
if self.untracked {
serde_json::to_writer_pretty(
&mut std::io::stdout(),
&untracked(&repo, &*txn)?.collect::<Vec<_>>(),
)?;
writeln!(stdout)?;
} else {
// Same grouping as above (apparent duplicate of lines 26-46 — TODO confirm
// which copy is the intended one when reconstructing this function).
let mut changes = BTreeMap::new();
for ch in change.changes.iter() {
changes
.entry(ch.path())
.or_insert_with(Vec::new)
.push(Status {
operation: match ch {
Hunk::FileMove { .. } => "file move",
Hunk::FileDel { .. } => "file del",
Hunk::FileUndel { .. } => "file undel",
Hunk::SolveNameConflict { .. } => "solve name conflict",
Hunk::UnsolveNameConflict { .. } => "unsolve name conflict",
Hunk::FileAdd { .. } => "file add",
Hunk::Edit { .. } => "edit",
Hunk::Replacement { .. } => "replacement",
Hunk::SolveOrderConflict { .. } => "solve order conflict",
Hunk::UnsolveOrderConflict { .. } => "unsolve order conflict",
Hunk::ResurrectZombies { .. } => "resurrect zombies",
},
line: ch.line(),
});
}
// Emit the path → statuses map as pretty JSON, followed by a newline.
serde_json::to_writer_pretty(&mut std::io::stdout(), &changes)?;
writeln!(stdout)?;
}
/// Returns an iterator over the paths under `repo`'s working copy that are
/// NOT tracked by the transaction `txn`.
///
/// Paths are yielded relative to the canonicalized repository root
/// (entries outside the root are silently skipped by the `strip_prefix`
/// check). Tracking is tested against the slash-normalized path string.
///
/// NOTE(review): the function's closing `}` is missing from this chunk —
/// the text runs straight into an unrelated enum fragment below.
fn untracked<'a, T: TxnTExt>(
repo: &Repository,
txn: &'a T,
) -> Result<impl Iterator<Item = PathBuf> + 'a, anyhow::Error> {
let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
// One walker thread per logical CPU.
let threads = num_cpus::get();
Ok(repo
.working_copy
.iterate_prefix_rec(repo_path.clone(), repo_path.clone(), threads)?
.filter_map(move |x| {
// NOTE(review): panics if the walker yielded an I/O error —
// consider propagating instead of `.unwrap()`.
let (path, _) = x.unwrap();
let path_ = if let Ok(path) = path.as_path().strip_prefix(&repo_path.as_path()) {
path
} else {
return None;
};
use path_slash::PathExt;
// Normalize to forward slashes so tracking lookups are
// platform-independent.
let path_str = path_.to_slash_lossy();
// NOTE(review): `.unwrap()` panics on a transaction error — TODO confirm
// whether this should surface the error to the caller instead.
if !txn.is_tracked(&path_str).unwrap() {
Some(path)
} else {
None
}
}))
// NOTE(review): tail of a thiserror-derived error enum whose header (and any
// earlier variants) is missing from this chunk. `#[error(transparent)]` +
// `#[from]` forward Display and conversion to the wrapped transaction error.
#[error(transparent)]
Txn(#[from] T),
}
/// Streaming result of a recursive working-copy walk.
///
/// Entries arrive on `receiver` from a background walker thread; `join` holds
/// that thread's handle so its terminal I/O error (if any) can be surfaced
/// once the channel is exhausted (see the `Iterator` impl).
pub struct Untracked {
// Taken (set to `None`) after the first join attempt.
join: Option<std::thread::JoinHandle<Result<(), std::io::Error>>>,
// Each item is (path, is_dir) as produced by the walker.
receiver: std::sync::mpsc::Receiver<(PathBuf, bool)>,
}
// NOTE(review): the closing `}` of this `impl` block is missing from this
// chunk — the text runs into an unrelated function-body fragment below.
impl Iterator for Untracked {
type Item = Result<(PathBuf, bool), std::io::Error>;
// Drain the channel until the walker thread drops its sender; then join
// the thread exactly once (via `take()`) and yield its error, if any,
// as the final item. After that, every call returns `None`.
fn next(&mut self) -> Option<Self::Item> {
if let Ok(x) = self.receiver.recv() {
return Some(Ok(x));
} else if let Some(j) = self.join.take() {
// `j.join()` errs only if the thread panicked; that case is
// silently ignored here and the iterator just ends.
if let Ok(Err(e)) = j.join() {
return Some(Err(e));
}
}
None
}
// NOTE(review): body-only fragment — the enclosing function's signature is
// missing from this chunk (`txn`, `repo_path`, `full`, `threads` and `salt`
// are not declared in the visible text). Intent: walk `full` recursively and
// register every yielded path with the transaction, treating
// "already in repo" as a no-op.
let mut txn = txn.write();
for p in self.iterate_prefix_rec(repo_path.clone(), full.clone(), threads)? {
let (path, is_dir) = p?;
info!("Adding {:?}", path);
use path_slash::PathExt;
// Slash-normalize before handing the path to the transaction.
let path_str = path.to_slash_lossy();
match txn.add(&path_str, is_dir, salt) {
Ok(()) => {}
// A path that is already tracked is not an error here.
Err(crate::fs::FsError::AlreadyInRepo(_)) => {}
Err(e) => return Err(e.into()),
}
}
Ok(())
}
/// Spawns a background walk of `full` under `repo_path` and returns an
/// [`Untracked`] iterator fed by the walker thread.
///
/// NOTE(review): this function's body is corrupted in this chunk — the
/// statements between the signature and the final `Ok(Untracked { .. })`
/// (the `txn.write()` / `receiver.recv()` loop and the `t.join()` block)
/// reference names (`txn`, `receiver`, `salt`, `t`) that are not in scope
/// from this signature, and appear to have been spliced in from a different
/// function. Needs manual reconstruction against the original source.
pub fn iterate_prefix_rec(
&self,
repo_path: CanonicalPathBuf,
full: CanonicalPathBuf,
threads: usize,
) -> Result<Untracked, std::io::Error> {
let mut txn = txn.write();
// Consume (path, is_dir) entries from the walker and register each one,
// ignoring paths that are already tracked.
while let Ok((path, is_dir)) = receiver.recv() {
info!("Adding {:?}", path);
use path_slash::PathExt;
let path_str = path.to_slash_lossy();
match txn.add(&path_str, is_dir, salt) {
Ok(()) => {}
Err(crate::fs::FsError::AlreadyInRepo(_)) => {}
Err(e) => return Err(e.into()),
}
}
// Surface the walker thread's terminal I/O error, if any.
if let Ok(t) = t.join() {
t?
}
Ok(())
// Hand the thread handle and channel to the caller as a lazy iterator.
Ok(Untracked {
join: Some(t),
receiver,
})
// NOTE(review): orphan fragment — no enclosing function is visible, and the
// two `add_prefix_rec` calls disagree on the first argument (`txn.clone()`
// vs `&txn`), suggesting two versions of the same call site were merged.
// Intent of the inner block: skip the recursive add when `full` is outside
// `repo_path` or already tracked. The trailing `target` token is stray
// residue from the corruption.
self.add_prefix_rec(txn.clone(), repo_path, full, threads, salt)?;
{
// Relativize `full` against the repo root; bail out (successfully)
// if it does not live under the root.
let path = if let Ok(path) = full.as_path().strip_prefix(&repo_path.as_path()) {
path
} else {
return Ok(());
};
use path_slash::PathExt;
let path_str = path.to_slash_lossy();
// Only recurse for paths the transaction does not already track.
if !txn.read().is_tracked(&path_str)? {
self.add_prefix_rec(&txn, repo_path, full, threads, salt)?;
}
}
target