TM6V7CPLNBBMWGHMPBA7YHWG2KJQSQZUJ7K557GTZHD7KQAQ6RHAC
# Flake compatibility shim for non-flake Nix: fetches a pinned
# flake-compat (by commit hash + sha256) and evaluates this
# repository's flake, exposing its `shellNix` output so plain
# `nix-shell` keeps working.
(import
  (fetchTarball {
    url = "https://github.com/edolstra/flake-compat/archive/c71d063a2fce96001c7ef99b301dfbd0c29ebde1.tar.gz";
    sha256 = "0vnbhqf0lc4mf2zmzqbfv6kqj9raijxz8xfaimxihz3c6s5gma2x";
  })
  { src = ./.; }).shellNix.default
edition = "2018"
#![recursion_limit = "256"]
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use proc_macro2::*;
use std::iter::FromIterator;
/// Returns `name` with its first character upper-cased; the remaining
/// characters are left untouched (e.g. `"graph"` -> `"Graph"`).
///
/// Only the first char of the Unicode uppercase mapping is kept, which
/// is sufficient for the ASCII identifiers this crate works with.
fn name_capital(name: &str) -> String {
    let mut chars = name.chars();
    match chars.next() {
        Some(first) => {
            let mut out = String::with_capacity(name.len());
            out.push(first.to_uppercase().next().unwrap());
            out.extend(chars);
            out
        }
        None => String::new(),
    }
}
/// `table!(name)`: trait-level declaration of the associated type for
/// table `name`, i.e. expands to `type Name;`.
#[proc_macro]
pub fn table(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    // The single argument is the (lowercase) table name.
    let name = match input_iter.next() {
        // Fixed diagnostic: this macro is `table!`, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("table: first argument not an identifier"),
    };
    // No further arguments are accepted.
    assert!(input_iter.next().is_none());
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    proc_macro::TokenStream::from(quote! {
        type #name_capital;
    })
}
/// `sanakirja_table_get!(name, key, value [, error [, pre [, post]]])`:
/// implements `get_name` on a Sanakirja transaction type, looking the
/// key up in the `self.name` table.
///
/// * `error` names the associated error type, defaulting to `Error`.
/// * `pre`, when given, is an expression evaluated as
///   `let (key, value) = pre;` before the lookup.
/// * `post`, when given, is a method/field access appended to the
///   successful lookup result (`Ok(x.post)`).
#[proc_macro]
pub fn sanakirja_table_get(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        // Fixed diagnostic: name the actual macro, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("sanakirja_table_get: first argument not an identifier"),
    };
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let name = syn::Ident::new(&name, Span::call_site());
    // Comma-separated argument groups, in order.
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    let pre_ = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre = if !pre_.is_empty() {
        quote! {
            let (key, value) = #pre_;
        }
    } else {
        quote! {}
    };
    let post_ = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    // Any error from the underlying `get` is reported as a corrupt pristine.
    let post = if post_.is_empty() {
        quote! {
            if let Ok(x) = self.txn.get(&self.#name, key, value) {
                Ok(x)
            } else {
                Err(TxnErr(SanakirjaError::PristineCorrupt))
            }
        }
    } else {
        quote! {
            if let Ok(x) = self.txn.get(&self.#name, key, value) {
                Ok(x . #post_)
            } else {
                Err(TxnErr(SanakirjaError::PristineCorrupt))
            }
        }
    };
    proc_macro::TokenStream::from(quote! {
        fn #name_get <'txn> (&'txn self, key: #key, value: Option<#value>) -> Result<Option<#value>, TxnErr<Self::#error>> {
            use ::sanakirja::Transaction;
            #pre
            #post
        }
    })
}
/// `sanakirja_get!(name, key, value [, error])`: implements `get_name`
/// against an explicitly passed database reference (`db`), rather than
/// a field of `self`.
#[proc_macro]
pub fn sanakirja_get(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        // Fixed diagnostic: name the actual macro, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("sanakirja_get: first argument not an identifier"),
    };
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        fn #name_get(&self, db: &Self::#name_capital, key: #key, value: Option<#value>) -> Result<Option<#value>, TxnErr<Self::#error>> {
            use ::sanakirja::Transaction;
            if let Ok(x) = self.txn.get(db, key, value) {
                Ok(x)
            } else {
                Err(TxnErr(SanakirjaError::PristineCorrupt))
            }
        }
    })
}
/// `table_get!(name, key, value [, error])`: trait-level declaration of
/// the `get_name` method for table `name`.
#[proc_macro]
pub fn table_get(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        // Fixed diagnostic: name the actual macro, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("table_get: first argument not an identifier"),
    };
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        fn #name_get<'txn>(&'txn self, key: #key, value: Option<#value>) -> Result<Option<#value>, TxnErr<Self::#error>>;
    })
}
/// `get!(name, key, value [, error])`: trait-level declaration of a
/// `get_name` method that takes the database as an explicit argument.
#[proc_macro]
pub fn get(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        // Fixed diagnostic: name the actual macro, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("get: first argument not an identifier"),
    };
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        fn #name_get<'txn>(&'txn self, db: &Self::#name_capital, key: #key, value: Option<#value>) -> Result<Option<#value>, TxnErr<Self::#error>>;
    })
}
/// Consumes tokens from `input_iter` up to (and including) the next
/// separating `,`, returning the tokens collected before it. A comma in
/// the very first position is skipped as an empty-group separator. An
/// exhausted iterator returns whatever was collected so far.
fn next(input_iter: &mut proc_macro2::token_stream::IntoIter) -> Vec<TokenTree> {
    let mut result = Vec::new();
    let mut is_first = true;
    while let Some(token) = input_iter.next() {
        match token {
            TokenTree::Punct(p) if p.as_char() == ',' => {
                if !is_first {
                    return result;
                }
                // Leading comma: swallow it and keep going.
            }
            other => result.push(other),
        }
        is_first = false;
    }
    result
}
/// `cursor!(table, key, value [, error])`: declares the forward cursor
/// (cursor type, `cursor_table`, `cursor_table_next`/`_prev`).
#[proc_macro]
pub fn cursor(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, false, false)
}
/// Like `cursor!`, but additionally declares `cursor_table_ref`,
/// generic over `Deref<Target = Self>` transactions.
#[proc_macro]
pub fn cursor_ref(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, false, true)
}
/// Like `cursor!`, but additionally declares the `iter_table`
/// convenience method.
#[proc_macro]
pub fn iter(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, true, false)
}
/// `rev_cursor!(table, key, value [, error])`: declares the reverse
/// cursor method `rev_cursor_table` (the cursor type itself is declared
/// by the forward `cursor!` invocation).
#[proc_macro]
pub fn rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, true, false, false)
}
/// Shared implementation for `cursor!`, `cursor_ref!`, `iter!` and
/// `rev_cursor!`: generates trait-level declarations for iterating over
/// a table.
///
/// Arguments (comma-separated in `input`): table name, key type, value
/// type, and an optional error type (defaulting to `GraphError`).
///
/// * `rev`: declare a reverse cursor returning `RevCursor`; the cursor
///   type and next/prev methods are assumed declared elsewhere.
/// * `iter`: additionally declare an `iter_<name>` convenience method.
/// * `borrow`: additionally declare a `cursor_<name>_ref` variant
///   generic over `std::ops::Deref<Target = Self>`.
fn cursor_(input: proc_macro::TokenStream, rev: bool, iter: bool, borrow: bool) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    // Identifiers derived from the table name.
    let capital = name_capital(&name);
    let cursor_name = syn::Ident::new(&format!("{}Cursor", capital,), Span::call_site());
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_iter = syn::Ident::new(&format!("iter_{}", name), Span::call_site());
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    let name_cursor = syn::Ident::new(
        &format!("{}cursor_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_cursor_ref = syn::Ident::new(
        &format!("{}cursor_{}_ref", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { GraphError }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    // Forward cursors are wrapped in `Cursor`, reverse ones in `RevCursor`.
    let cursor_type = if rev {
        quote! {
            Result<crate::pristine::RevCursor<Self, &'txn Self, Self::#cursor_name, #key, #value>, TxnErr<Self::#error>>
        }
    } else {
        quote! {
            Result<crate::pristine::Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value>, TxnErr<Self::#error>>
        }
    };
    // The cursor type and its next/prev methods are declared only by the
    // forward (non-`rev`) invocation, to avoid duplicate items.
    let def = if rev {
        quote! {}
    } else {
        quote! {
            type #cursor_name;
            fn #name_next <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Result<Option<(#key, #value)>, TxnErr<Self::#error>>;
            fn #name_prev <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Result<Option<(#key, #value)>, TxnErr<Self::#error>>;
        }
    };
    let borrow = if borrow {
        quote! {
            fn #name_cursor_ref<RT: std::ops::Deref<Target = Self>>(
                txn: RT,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> Result<crate::pristine::Cursor<Self, RT, Self::#cursor_name, #key, #value>, TxnErr<Self::#error>>;
        }
    } else {
        quote! {}
    };
    let iter = if !iter {
        quote! {}
    } else {
        quote! {
            fn #name_iter <'txn> (
                &'txn self,
                k: #key,
                v: Option<#value>
            ) -> #cursor_type;
        }
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #def
        fn #name_cursor<'txn>(
            &'txn self,
            db: &Self::#name_capital,
            pos: Option<(#key, Option<#value>)>,
        ) -> #cursor_type;
        #borrow
        #iter
    })
}
/// `sanakirja_cursor!(table, key, value [, pre [, post]])`: implements
/// the forward cursor methods on a Sanakirja-backed transaction.
#[proc_macro]
pub fn sanakirja_cursor(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, false, false)
}
/// Like `sanakirja_cursor!`, but also implements the
/// `cursor_table_ref` variant generic over `Deref<Target = Self>`.
#[proc_macro]
pub fn sanakirja_cursor_ref(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, false, true)
}
/// Like `sanakirja_cursor!`, but also implements the `iter_table`
/// convenience method.
#[proc_macro]
pub fn sanakirja_iter(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, true, false)
}
/// Implements the reverse cursor method `rev_cursor_table`.
#[proc_macro]
pub fn sanakirja_rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, true, false, false)
}
/// Shared implementation for the `sanakirja_cursor*` macros: generates
/// Sanakirja-backed implementations of the cursor methods declared by
/// `cursor!`/`iter!`/`rev_cursor!`.
///
/// Arguments (comma-separated in `input`): table name, key type, value
/// type, then two optional token groups:
/// * `pre`: an expression rebinding `pos` before the cursor is opened;
/// * `post`: a method/field access appended to each stepped entry.
fn sanakirja_cursor_(
    input: proc_macro::TokenStream,
    rev: bool,
    iter: bool,
    borrow: bool,
) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    // Identifiers derived from the table name.
    let cursor_name = syn::Ident::new(
        &format!("{}Cursor", name_capital(&name),),
        Span::call_site(),
    );
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    let name_cursor = syn::Ident::new(
        &format!("{}cursor_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_cursor_ref = syn::Ident::new(
        &format!("{}cursor_{}_ref", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_iter = syn::Ident::new(
        &format!("{}iter_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name = syn::Ident::new(&name, Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let post = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    // Optional rebinding of `pos` before opening the cursor.
    let pre_init = if !pre.is_empty() {
        quote! { let pos = #pre; }
    } else {
        quote! {}
    };
    let post = if !post.is_empty() {
        quote! { . #post }
    } else {
        quote! {}
    };
    let iter = if iter {
        quote! {
            fn #name_iter <'txn> (
                &'txn self,
                k: #key,
                v: Option<#value>
            ) -> Result<Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value>, TxnErr<SanakirjaError>> {
                self.#name_cursor(&self.#name, Some((k, v)))
            }
        }
    } else {
        quote! {}
    };
    let borrow = if borrow {
        quote! {
            fn #name_cursor_ref <RT: std::ops::Deref<Target = Self>> (
                txn: RT,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> Result<Cursor<Self, RT, Self::#cursor_name, #key, #value>, TxnErr<SanakirjaError>> {
                #pre_init
                if let Ok((cursor, _)) = txn.txn.set_cursors(&db, pos) {
                    Ok(Cursor {
                        cursor,
                        txn,
                        marker: std::marker::PhantomData,
                    })
                } else {
                    Err(TxnErr(SanakirjaError::PristineCorrupt))
                }
            }
        }
    } else {
        quote! {}
    };
    proc_macro::TokenStream::from(if rev {
        // Reverse cursors start from `pos` when given, otherwise from the
        // end of the table.
        quote! {
            fn #name_cursor<'txn>(
                &'txn self,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> Result<super::RevCursor<Self, &'txn Self, Self::#cursor_name, #key, #value>, TxnErr<SanakirjaError>> {
                #pre_init
                let mut cursor = if pos.is_some() {
                    if let Ok((x, _)) = self.txn.set_cursors(&db, pos) {
                        x
                    } else {
                        return Err(TxnErr(SanakirjaError::PristineCorrupt))
                    }
                } else if let Ok(x) = self.txn.set_cursors_last(&db) {
                    x
                } else {
                    return Err(TxnErr(SanakirjaError::PristineCorrupt))
                };
                Ok(super::RevCursor {
                    cursor,
                    txn: self,
                    marker: std::marker::PhantomData,
                })
            }
        }
    } else {
        // The forward invocation also defines the cursor type itself and
        // the next/prev stepping methods.
        quote! {
            type #cursor_name = ::sanakirja::Cursor;
            fn #name_cursor<'txn>(
                &'txn self,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> Result<Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value>, TxnErr<SanakirjaError>> {
                #pre_init
                let mut cursor = if let Ok((x, _)) = self.txn.set_cursors(&db, pos) {
                    x
                } else {
                    return Err(TxnErr(SanakirjaError::PristineCorrupt))
                };
                Ok(Cursor {
                    cursor,
                    txn: self,
                    marker: std::marker::PhantomData,
                })
            }
            #borrow
            fn #name_next <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Result<Option<(#key, #value)>, TxnErr<SanakirjaError>> {
                let x = if let Ok(x) = unsafe { ::sanakirja::next(&self.txn, cursor) } {
                    x
                } else {
                    return Err(TxnErr(SanakirjaError::PristineCorrupt))
                };
                Ok(x #post)
            }
            fn #name_prev <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Result<Option<(#key, #value)>, TxnErr<SanakirjaError>> {
                let x = if let Ok(x) = unsafe { ::sanakirja::prev(&self.txn, cursor) } {
                    x
                } else {
                    return Err(TxnErr(SanakirjaError::PristineCorrupt))
                };
                Ok(x #post)
            }
            #iter
        }
    })
}
/// `initialized_cursor!(table, key, value [, txnt [, error]])`:
/// implements `Iterator` (and a `prev` method) for the table's `Cursor`
/// wrapper type.
#[proc_macro]
pub fn initialized_cursor(input: proc_macro::TokenStream) -> TokenStream {
    initialized_cursor_(input, false)
}
/// Reverse variant: implements `Iterator` for the table's `RevCursor`
/// wrapper type (stepping backwards).
#[proc_macro]
pub fn initialized_rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    initialized_cursor_(input, true)
}
/// Shared implementation for `initialized_cursor!` and
/// `initialized_rev_cursor!`: implements `Iterator` (and, in the
/// forward case, a `prev` method) for the cursor wrapper types of table
/// `name`.
///
/// Arguments: table name, key type, value type, then optionally the
/// transaction trait (default `TxnT`) and the error type (default
/// `GraphError`).
fn initialized_cursor_(input: proc_macro::TokenStream, rev: bool) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let cursor_name = syn::Ident::new(
        &format!("{}Cursor", name_capital(&name),),
        Span::call_site(),
    );
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let txnt = next(&mut input_iter);
    let txnt: proc_macro2::TokenStream = if txnt.is_empty() {
        proc_macro2::TokenStream::from(quote! { TxnT })
    } else {
        proc_macro2::TokenStream::from_iter(txnt.into_iter())
    };
    let error = next(&mut input_iter);
    let error: proc_macro2::TokenStream = if error.is_empty() {
        proc_macro2::TokenStream::from(quote! { GraphError })
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    assert!(input_iter.next().is_none());
    if rev {
        // Reverse cursors iterate backwards via `cursor_<name>_prev`.
        proc_macro::TokenStream::from(quote! {
            impl<T: #txnt, RT: std::ops::Deref<Target = T>> Iterator for crate::pristine::RevCursor<T, RT, T::#cursor_name, #key, #value>
            {
                type Item = Result<(#key, #value), TxnErr<T::#error>>;
                fn next(&mut self) -> Option<Self::Item> {
                    match self.txn.#name_prev(&mut self.cursor) {
                        Ok(Some(x)) => Some(Ok(x)),
                        Ok(None) => None,
                        Err(e) => Some(Err(e)),
                    }
                }
            }
        })
    } else {
        // Forward cursors get both an `Iterator` impl and an inherent
        // `prev` for stepping back.
        proc_macro::TokenStream::from(quote! {
            impl<T: #txnt, RT: std::ops::Deref<Target = T>>
                crate::pristine::Cursor<T, RT, T::#cursor_name, #key, #value>
            {
                pub fn prev(&mut self) -> Option<Result<(#key, #value), TxnErr<T::#error>>> {
                    match self.txn.#name_prev(&mut self.cursor) {
                        Ok(Some(x)) => Some(Ok(x)),
                        Ok(None) => None,
                        Err(e) => Some(Err(e)),
                    }
                }
            }
            impl<T: #txnt, RT: std::ops::Deref<Target = T>> Iterator for crate::pristine::Cursor<T, RT, T::#cursor_name, #key, #value>
            {
                type Item = Result<(#key, #value), TxnErr<T::#error>>;
                fn next(&mut self) -> Option<Self::Item> {
                    match self.txn.#name_next(&mut self.cursor) {
                        Ok(Some(x)) => Some(Ok(x)),
                        Ok(None) => None,
                        Err(e) => Some(Err(e)),
                    }
                }
            }
        })
    }
}
/// `put_del!(name, key, value [, error])`: trait-level declarations of
/// `put_name` and `del_name` for table `name`; both return whether a
/// binding was actually inserted/removed.
#[proc_macro]
pub fn put_del(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        // Fixed diagnostic: name the actual macro, not `txn_table`.
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("put_del: first argument not an identifier"),
    };
    let put = syn::Ident::new(&format!("put_{}", name), Span::call_site());
    let del = syn::Ident::new(&format!("del_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        fn #put(
            &mut self,
            k: #key,
            e: #value,
        ) -> Result<bool, TxnErr<Self::#error>>;
        fn #del(
            &mut self,
            k: #key,
            e: Option<#value>,
        ) -> Result<bool, TxnErr<Self::#error>>;
    })
}
/// `sanakirja_put_del!(name, key, value [, error [, pre_key, pre_value]])`:
/// implements `put_name`/`del_name` on a mutable Sanakirja transaction,
/// operating on the `self.name` table.
///
/// When `pre_key`/`pre_value` are given, they are expressions mapping
/// the incoming `k`/`v` before they reach the database; for `del`, the
/// value mapping is applied inside `Option::map`.
#[proc_macro]
pub fn sanakirja_put_del(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let put = syn::Ident::new(&format!("put_{}", name), Span::call_site());
    let del = syn::Ident::new(&format!("del_{}", name), Span::call_site());
    let name = syn::Ident::new(&name, Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let error = next(&mut input_iter);
    let error = if error.is_empty() {
        quote! { Error }
    } else {
        proc_macro2::TokenStream::from_iter(error.into_iter())
    };
    let pre_key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre_value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    assert!(input_iter.next().is_none());
    if pre_key.is_empty() {
        // No key/value pre-processing: forward directly to Sanakirja.
        proc_macro::TokenStream::from(quote! {
            fn #put(
                &mut self,
                k: #key,
                v: #value,
            ) -> Result<bool, TxnErr<Self::#error>> {
                Ok(self.txn.put(&mut self.rng, &mut self.#name, k, v).map_err(TxnErr)?)
            }
            fn #del(
                &mut self,
                k: #key,
                v: Option<#value>,
            ) -> Result<bool, TxnErr<Self::#error>> {
                Ok(self.txn.del(&mut self.rng, &mut self.#name, k, v).map_err(TxnErr)?)
            }
        })
    } else {
        // Map `k` and `v` through the user-supplied expressions first.
        proc_macro::TokenStream::from(quote! {
            fn #put(
                &mut self,
                k: #key,
                v: #value,
            ) -> Result<bool, TxnErr<Self::#error>> {
                let k = #pre_key;
                let v = #pre_value;
                Ok(self.txn.put(&mut self.rng, &mut self.#name, k, v).map_err(TxnErr)?)
            }
            fn #del(
                &mut self,
                k: #key,
                v: Option<#value>,
            ) -> Result<bool, TxnErr<Self::#error>> {
                let k = #pre_key;
                let v = v.map(|v| #pre_value);
                Ok(self.txn.del(&mut self.rng, &mut self.#name, k, v).map_err(TxnErr)?)
            }
        })
    }
}
# Manifest for pijul-macros, the procedural-macro crate that generates
# the libpijul transaction-table boilerplate (tables, getters, cursors,
# put/del implementations).
[package]
name = "pijul-macros"
description = "Macros used to write libpijul."
version = "0.3.0"
authors = ["Pierre-Étienne Meunier <pmeunier@mailbox.org>"]
edition = "2018"
repository = "https://nest.pijul.com/pijul/pijul"
license = "GPL-2.0"
include = [ "Cargo.toml", "src/lib.rs" ]

[lib]
# This crate exports procedural macros only.
proc-macro = true

[dependencies]
syn = "1.0"
quote = "1.0"
proc-macro2 = "1.0"
# NOTE(review): `regex` is not referenced by the lib.rs shown here —
# confirm it is actually used before removing.
regex = "1.4"
use std::io::Write;
use std::path::PathBuf;
use anyhow::bail;
use libpijul::DOT_DIR;
use log::debug;
use crate::{config, current_dir};
/// An opened Pijul repository: its pristine database, change store,
/// working copy and on-disk configuration.
pub struct Repository {
    /// The Sanakirja-backed pristine (the repository database).
    pub pristine: libpijul::pristine::sanakirja::Pristine,
    /// On-disk store of change files.
    pub changes: libpijul::changestore::filesystem::FileSystem,
    /// The user's working copy on the filesystem.
    pub working_copy: libpijul::working_copy::filesystem::FileSystem,
    /// Configuration loaded from the repository's `config` file.
    pub config: config::Config,
    /// Root of the working copy (parent of the dot-directory).
    pub path: PathBuf,
    /// Directory where change files are stored.
    pub changes_dir: PathBuf,
}
/// Subdirectory of the dot-directory holding the pristine database.
pub const PRISTINE_DIR: &str = "pristine";
/// Subdirectory of the dot-directory holding change files.
pub const CHANGES_DIR: &str = "changes";
/// Name of the repository configuration file.
pub const CONFIG_FILE: &str = "config";
impl Repository {
    /// Serializes `self.config` as TOML and writes it to the `config`
    /// file inside the repository's dot-directory.
    pub fn save_config(&self) -> Result<(), anyhow::Error> {
        let config = toml::to_string(&self.config)?;
        let mut file = std::fs::File::create(&self.path.join(DOT_DIR).join(CONFIG_FILE))?;
        file.write_all(config.as_bytes())?;
        Ok(())
    }
    /// Walks up from `cur` (or the current directory when `None`) until
    /// a `dot_dir` subdirectory exists, returning the path to that
    /// dot-directory.
    fn find_root_(cur: Option<PathBuf>, dot_dir: &str) -> Result<PathBuf, anyhow::Error> {
        let mut cur = if let Some(cur) = cur {
            cur
        } else {
            current_dir()?
        };
        cur.push(dot_dir);
        loop {
            debug!("{:?}", cur);
            if std::fs::metadata(&cur).is_err() {
                // No dot-directory here: drop the dot-dir component,
                // step to the parent directory and try again there.
                cur.pop();
                if cur.pop() {
                    cur.push(DOT_DIR);
                } else {
                    // Reached the filesystem root without finding one.
                    bail!("No Pijul repository found")
                }
            } else {
                break;
            }
        }
        Ok(cur)
    }
    /// Opens the repository containing `cur` (or the current directory),
    /// using the default dot-directory name.
    pub fn find_root(cur: Option<PathBuf>) -> Result<Self, anyhow::Error> {
        Self::find_root_with_dot_dir(cur, DOT_DIR)
    }
    /// Opens the repository containing `cur`, searching for the given
    /// `dot_dir` name, and loads its pristine, stores and configuration.
    pub fn find_root_with_dot_dir(
        cur: Option<PathBuf>,
        dot_dir: &str,
    ) -> Result<Self, anyhow::Error> {
        let cur = Self::find_root_(cur, dot_dir)?;
        let mut pristine_dir = cur.clone();
        pristine_dir.push(PRISTINE_DIR);
        let mut changes_dir = cur.clone();
        changes_dir.push(CHANGES_DIR);
        // The working copy is the parent of the dot-directory.
        let mut working_copy_dir = cur.clone();
        working_copy_dir.pop();
        let config_path = cur.join(CONFIG_FILE);
        // A missing config file falls back to defaults; an unparsable
        // one is an error.
        let config = if let Ok(config) = std::fs::read(&config_path) {
            if let Ok(toml) = toml::from_slice(&config) {
                toml
            } else {
                bail!("Could not read configuration file at {:?}", config_path)
            }
        } else {
            config::Config::default()
        };
        Ok(Repository {
            pristine: libpijul::pristine::sanakirja::Pristine::new(&pristine_dir)?,
            working_copy: libpijul::working_copy::filesystem::FileSystem::from_root(
                &working_copy_dir,
            ),
            changes: libpijul::changestore::filesystem::FileSystem::from_root(&working_copy_dir),
            config,
            path: working_copy_dir,
            changes_dir,
        })
    }
    /// Creates a new repository at `path` (or the current directory),
    /// failing if a pristine directory already exists there.
    pub fn init(path: Option<std::path::PathBuf>) -> Result<Self, anyhow::Error> {
        let cur = if let Some(path) = path {
            path
        } else {
            current_dir()?
        };
        let mut pristine_dir = cur.clone();
        pristine_dir.push(DOT_DIR);
        pristine_dir.push(PRISTINE_DIR);
        if std::fs::metadata(&pristine_dir).is_err() {
            std::fs::create_dir_all(&pristine_dir)?;
            let mut changes_dir = cur.clone();
            changes_dir.push(DOT_DIR);
            changes_dir.push(CHANGES_DIR);
            Ok(Repository {
                pristine: libpijul::pristine::sanakirja::Pristine::new(&pristine_dir)?,
                working_copy: libpijul::working_copy::filesystem::FileSystem::from_root(&cur),
                changes: libpijul::changestore::filesystem::FileSystem::from_root(&cur),
                config: config::Config::default(),
                path: cur,
                changes_dir,
            })
        } else {
            bail!("Already in a repository")
        }
    }
}
use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::Arc;
use std::time::{Duration, SystemTime};
use anyhow::bail;
use byteorder::{BigEndian, ReadBytesExt};
use indicatif::ProgressBar;
use lazy_static::lazy_static;
use libpijul::pristine::Position;
use libpijul::{Base32, ChannelRef, Hash, Merkle, MutTxnT, MutTxnTExt, TxnTExt};
use log::{debug, error};
use regex::Regex;
use thrussh::client::Session;
use tokio::sync::Mutex;
use super::{parse_line, RemoteRef};
use crate::repository::Repository;
/// A live SSH connection to a remote `pijul` process.
pub struct Ssh {
    /// Client-side session handle.
    pub h: thrussh::client::Handle<SshClient>,
    /// The channel on which the remote protocol runs.
    pub c: thrussh::client::Channel,
    /// Name of the remote Pijul channel being exchanged with.
    pub channel: String,
    /// Command used to start Pijul on the remote (see `REMOTE_PIJUL`).
    pub remote_cmd: String,
    /// Repository path on the remote host.
    pub path: String,
    // NOTE(review): presumably whether the remote command has been
    // started; set to `false` on connect — confirm against callers.
    pub is_running: bool,
    /// Name of this remote.
    pub name: String,
    /// Current protocol state, shared with the handler callbacks.
    state: Arc<Mutex<State>>,
    /// Set when the remote reported an error (stderr data/exit status).
    has_errors: Arc<Mutex<bool>>,
}
lazy_static! {
    /// Parses remote addresses of the form
    /// `[ssh://][user@]host[:port][/]path`, where `host` may be a
    /// bracketed IPv6 literal; captures `user`, `host`, `port`, `path`.
    static ref ADDRESS: Regex = Regex::new(
        r#"(ssh://)?((?P<user>[^@]+)@)?((?P<host>(\[([^\]]+)\])|([^:/]+)))((:(?P<port>\d+)/)|:|/)(?P<path>.+)"#
    )
    .unwrap();
    /// Progress bar shared with the SSH handler callbacks (cleared on
    /// remote errors).
    static ref PROGRESS: Arc<Mutex<Option<ProgressBar>>> = Arc::new(Mutex::new(None));
}
/// A parsed SSH remote address, together with the ssh-config entry that
/// applies to its host.
#[derive(Debug)]
pub struct Remote<'a> {
    /// The full address string as given by the user.
    addr: &'a str,
    /// Host part captured from the address.
    host: &'a str,
    /// Repository path part captured from the address.
    path: &'a str,
    /// Host configuration (port, user, …) resolved for `host`.
    config: thrussh_config::Config,
}
/// Parses `addr` as an SSH remote (`[ssh://][user@]host[:port][/]path`)
/// and resolves the host's `~/.ssh/config` entry, falling back to a
/// default configuration for that host. The address's explicit port and
/// user, when present, override the ssh-config values. Returns `None`
/// if `addr` does not match the remote-address syntax.
pub fn ssh_remote(addr: &str) -> Option<Remote> {
    let cap = ADDRESS.captures(addr)?;
    debug!("ssh_remote: {:?}", cap);
    let host = cap.name("host").unwrap().as_str();
    // `unwrap_or_else` so the fallback Config is only built when the
    // ssh-config lookup actually fails (clippy: or_fun_call).
    let mut config = thrussh_config::parse_home(&host)
        .unwrap_or_else(|_| thrussh_config::Config::default(host));
    if let Some(port) = cap.name("port").map(|x| x.as_str().parse().unwrap()) {
        config.port = port
    }
    if let Some(u) = cap.name("user") {
        config.user.clear();
        config.user.push_str(u.as_str());
    }
    let path = cap.name("path").unwrap().as_str();
    Some(Remote {
        addr,
        host,
        path,
        config,
    })
}
impl<'a> Remote<'a> {
    /// Opens an SSH session to this remote and a channel on it, trying
    /// agent, public-key and finally password authentication (in that
    /// order). `name` is the remote's name, `channel` the Pijul channel
    /// to exchange with.
    pub async fn connect(&mut self, name: &str, channel: &str) -> Result<Ssh, anyhow::Error> {
        let mut home = dirs_next::home_dir().unwrap();
        home.push(".ssh");
        home.push("known_hosts");
        // State shared between this handle and the handler callbacks.
        let state = Arc::new(Mutex::new(State::None));
        let has_errors = Arc::new(Mutex::new(false));
        let client = SshClient {
            addr: self.config.host_name.clone(),
            known_hosts: home,
            last_window_adjustment: SystemTime::now(),
            state: state.clone(),
            has_errors: has_errors.clone(),
        };
        let stream = self.config.stream().await?;
        let config = Arc::new(thrussh::client::Config::default());
        let mut h = thrussh::client::connect_stream(config, stream, client).await?;
        let mut key_path = dirs_next::home_dir().unwrap().join(".ssh");
        // First try agent auth
        let authenticated = self
            .auth_agent(&mut h, &mut key_path)
            .await
            .unwrap_or(false)
            || self.auth_pk(&mut h, &mut key_path).await
            || self.auth_password(&mut h).await?;
        if !authenticated {
            bail!("Not authenticated")
        }
        let c = h.channel_open_session().await?;
        // The remote command can be overridden, e.g. when `pijul` is
        // not in the default PATH on the server.
        let remote_cmd = if let Ok(cmd) = std::env::var("REMOTE_PIJUL") {
            cmd
        } else {
            "pijul".to_string()
        };
        Ok(Ssh {
            h,
            c,
            channel: channel.to_string(),
            remote_cmd,
            path: self.path.to_string(),
            is_running: false,
            name: name.to_string(),
            state,
            has_errors,
        })
    }
    /// Tries to authenticate with the keys offered by the SSH agent
    /// (or, when an identity file is configured, only with that key's
    /// public half). Returns `Ok(true)` on success.
    async fn auth_agent(
        &self,
        h: &mut thrussh::client::Handle<SshClient>,
        key_path: &mut PathBuf,
    ) -> Result<bool, anyhow::Error> {
        let mut authenticated = false;
        let mut agent = thrussh_keys::agent::client::AgentClient::connect_env().await?;
        let identities = if let Some(ref file) = self.config.identity_file {
            key_path.push(file);
            key_path.set_extension("pub");
            let k = thrussh_keys::load_public_key(&key_path);
            // Restore `key_path` before returning or continuing.
            key_path.pop();
            if let Ok(k) = k {
                vec![k]
            } else {
                return Ok(false);
            }
        } else {
            agent.request_identities().await?
        };
        debug!("identities = {:?}", identities);
        // `authenticate_future` consumes the agent and hands it back, so
        // thread it through the loop via an `Option`.
        let mut agent = Some(agent);
        for key in identities {
            debug!("Trying key {:?}", key);
            debug!("fingerprint = {:?}", key.fingerprint());
            if let Some(a) = agent.take() {
                debug!("authenticate future");
                match h.authenticate_future(&self.config.user, key, a).await {
                    (a, Ok(auth)) => {
                        authenticated = auth;
                        agent = Some(a);
                    }
                    (a, Err(e)) => {
                        agent = Some(a);
                        debug!("not auth {:?}", e);
                        if let thrussh::AgentAuthError::Key(e) = e {
                            debug!("error: {:?}", e);
                            writeln!(std::io::stderr(), "Failed to sign with agent")?;
                        }
                    }
                }
            }
            if authenticated {
                return Ok(true);
            }
        }
        Ok(false)
    }
    /// Tries public-key authentication with the configured identity
    /// file, or with the default `id_ed25519`/`id_rsa` key files.
    async fn auth_pk(
        &self,
        h: &mut thrussh::client::Handle<SshClient>,
        key_path: &mut PathBuf,
    ) -> bool {
        let mut authenticated = false;
        let mut keys = Vec::new();
        if let Some(ref file) = self.config.identity_file {
            keys.push(file.as_str())
        } else {
            keys.push("id_ed25519");
            keys.push("id_rsa");
        }
        for k in keys.iter() {
            key_path.push(k);
            // Keys that fail to load (missing, wrong password) are skipped.
            let k = if let Some(k) = load_secret_key(&key_path, k) {
                k
            } else {
                key_path.pop();
                continue;
            };
            if let Ok(auth) = h
                .authenticate_publickey(&self.config.user, Arc::new(k))
                .await
            {
                authenticated = auth
            }
            key_path.pop();
            if authenticated {
                return true;
            }
        }
        false
    }
    /// Prompts for a password on the controlling terminal and attempts
    /// password authentication with it.
    async fn auth_password(
        &self,
        h: &mut thrussh::client::Handle<SshClient>,
    ) -> Result<bool, thrussh::Error> {
        let pass = rpassword::read_password_from_tty(Some(&format!(
            "Password for {}@{}: ",
            self.config.user, self.config.host_name
        )))?;
        h.authenticate_password(self.config.user.to_string(), &pass)
            .await
    }
}
pub fn load_secret_key(key_path: &Path, k: &str) -> Option<thrussh_keys::key::KeyPair> {
match thrussh_keys::load_secret_key(&key_path, None) {
Ok(k) => Some(k),
Err(e) => {
if let thrussh_keys::Error::KeyIsEncrypted = e {
let pass = if let Ok(pass) =
rpassword::read_password_from_tty(Some(&format!("Password for key {:?}: ", k)))
{
pass
} else {
return None;
};
if pass.is_empty() {
return None;
}
if let Ok(k) = thrussh_keys::load_secret_key(&key_path, Some(pass.as_bytes())) {
return Some(k);
}
}
None
}
}
}
/// `thrussh` event handler: checks host keys, tunes the flow-control
/// window, and routes incoming data according to the shared `State`.
pub struct SshClient {
    /// Address (`host` or `host:port`) used for known-hosts checks.
    addr: String,
    /// Path to the user's `known_hosts` file.
    known_hosts: PathBuf,
    /// Time of the last flow-control window adjustment.
    last_window_adjustment: SystemTime,
    /// Protocol state shared with `Ssh`.
    state: Arc<Mutex<State>>,
    /// Set when the remote reports errors (stderr data/exit status).
    has_errors: Arc<Mutex<bool>>,
}
/// Protocol state of the exchange with the remote `pijul` process: what
/// kind of response is currently expected on the channel, and where to
/// deliver it.
enum State {
    /// Not expecting any data.
    None,
    /// Waiting for a `state` reply (a position and a Merkle hash).
    State {
        sender: Option<tokio::sync::oneshot::Sender<Option<(u64, Merkle)>>>,
    },
    /// Receiving length-prefixed change files, one per hash in `hashes`.
    Changes {
        /// Notified with each hash once its change file is complete.
        sender: Option<tokio::sync::mpsc::Sender<Hash>>,
        /// Bytes still missing for the change file currently downloaded.
        remaining_len: usize,
        /// The change file currently being written (`.tmp` until done).
        file: std::fs::File,
        path: PathBuf,
        hashes: Vec<libpijul::pristine::Hash>,
        /// Index into `hashes` of the change currently being received.
        current: usize,
    },
    /// Receiving a changelist, one entry per line.
    Changelist {
        sender: tokio::sync::mpsc::Sender<Option<super::ListLine>>,
    },
    /// Receiving raw channel data.
    Channel {
        sender: tokio::sync::mpsc::Sender<Vec<u8>>,
    },
    /// Receiving an archive and writing it to `w`.
    // NOTE(review): the semantics of `len`, `conflicts` and `len_n` are
    // not visible in this chunk — confirm against the archive handler.
    Archive {
        sender: Option<tokio::sync::oneshot::Sender<u64>>,
        len: u64,
        conflicts: u64,
        len_n: u64,
        w: Box<dyn Write + Send>,
    },
}
/// Owned boxed future type used by the `thrussh` handler callbacks.
type BoxFuture<T> = Pin<Box<dyn futures::future::Future<Output = T> + Send>>;
impl thrussh::client::Handler for SshClient {
type Error = anyhow::Error;
type FutureBool = futures::future::Ready<Result<(Self, bool), anyhow::Error>>;
type FutureUnit = BoxFuture<Result<(Self, Session), anyhow::Error>>;
/// Completes a boolean handler callback immediately.
fn finished_bool(self, b: bool) -> Self::FutureBool {
    futures::future::ready(Ok((self, b)))
}
/// Completes a unit handler callback immediately.
fn finished(self, session: Session) -> Self::FutureUnit {
    Box::pin(async move { Ok((self, session)) })
}
/// Verifies the server's host key against the known-hosts file,
/// offering to learn it when unknown and refusing the connection when
/// the recorded key has changed.
fn check_server_key(
    self,
    server_public_key: &thrussh_keys::key::PublicKey,
) -> Self::FutureBool {
    // `addr` may carry an explicit port as `host:port`; default to 22.
    let mut it = self.addr.split(':');
    let addr = it.next().unwrap();
    let port = it.next().unwrap_or("22").parse().unwrap();
    match thrussh_keys::check_known_hosts_path(addr, port, server_public_key, &self.known_hosts)
    {
        Ok(e) => {
            if e {
                // Key known and matching.
                futures::future::ready(Ok((self, true)))
            } else {
                // Unknown host: `learn` decides whether to accept and
                // record the key.
                match learn(addr, port, server_public_key) {
                    Ok(x) => futures::future::ready(Ok((self, x))),
                    Err(e) => futures::future::ready(Err(e)),
                }
            }
        }
        Err(e) => {
            // Key mismatch (or unreadable known_hosts): hard failure.
            error!("Key changed for {:?}", self.addr);
            futures::future::ready(Err(e.into()))
        }
    }
}
/// Adapts the SSH flow-control window target to the observed transfer
/// rate: doubled when the previous window was consumed in under 2s,
/// halved when it took more than 8s, capped at 10 MB.
fn adjust_window(&mut self, _channel: thrussh::ChannelId, target: u32) -> u32 {
    let elapsed = self.last_window_adjustment.elapsed().unwrap();
    self.last_window_adjustment = SystemTime::now();
    if target >= 10_000_000 {
        // Already at the cap: leave it alone.
        return target;
    }
    if elapsed < Duration::from_secs(2) {
        // Window consumed quickly: grow it.
        target * 2
    } else if elapsed > Duration::from_secs(8) {
        // Window consumed slowly: shrink it.
        target / 2
    } else {
        target
    }
}
/// Remote closed its end of the channel: reset the protocol state
/// (dropping any pending senders held in it).
fn channel_eof(
    self,
    _channel: thrussh::ChannelId,
    session: thrussh::client::Session,
) -> Self::FutureUnit {
    Box::pin(async move {
        *self.state.lock().await = State::None;
        Ok((self, session))
    })
}
/// Remote process exited: reset the protocol state and flag the
/// session as errored.
// NOTE(review): `_status` is ignored, so even a zero (success) exit
// status sets `has_errors` — confirm this is intended.
fn exit_status(
    self,
    _channel: thrussh::ChannelId,
    _status: u32,
    session: thrussh::client::Session,
) -> Self::FutureUnit {
    Box::pin(async move {
        *self.state.lock().await = State::None;
        *self.has_errors.lock().await = true;
        Ok((self, session))
    })
}
/// Extended (out-of-band) data from the remote. Stream 0 is handled as
/// ordinary data; anything else is remote stderr: clear the progress
/// indicators, flag the session as errored, and forward the bytes to
/// our own stderr.
fn extended_data(
    self,
    channel: thrussh::ChannelId,
    ext: u32,
    data: &[u8],
    session: thrussh::client::Session,
) -> Self::FutureUnit {
    debug!("extended data {:?}, {:?}", std::str::from_utf8(data), ext);
    if ext == 0 {
        self.data(channel, data, session)
    } else {
        let data = data.to_vec();
        Box::pin(async move {
            *PROGRESS.lock().await = None;
            *super::SPINNER.lock().await = None;
            *self.has_errors.lock().await = true;
            let stderr = std::io::stderr();
            let mut handle = stderr.lock();
            handle.write_all(&data)?;
            Ok((self, session))
        })
    }
}
/// Main dispatcher for incoming channel bytes: feeds `data` to
/// whichever protocol conversation is currently recorded in
/// `self.state` (state query, change download, changelist, channel
/// dump, or archive transfer).
fn data(
    self,
    channel: thrussh::ChannelId,
    data: &[u8],
    session: thrussh::client::Session,
) -> Self::FutureUnit {
    debug!("data {:?} {:?}", channel, data.len());
    // Own the bytes so they can move into the async block below.
    let data = data.to_vec();
    Box::pin(async move {
        match *self.state.lock().await {
            State::State { ref mut sender } => {
                debug!("state: State");
                if let Some(sender) = sender.take() {
                    // If we can't parse `data` (for example if the
                    // remote returns the standard "-\n"), this
                    // returns None.
                    let mut s = std::str::from_utf8(&data).unwrap().split(' ');
                    debug!("s = {:?}", s);
                    if let (Some(n), Some(m)) = (s.next(), s.next()) {
                        let n = n.parse().unwrap();
                        sender
                            .send(Some((n, Merkle::from_base32(m.trim().as_bytes()).unwrap())))
                            .unwrap_or(());
                    } else {
                        sender.send(None).unwrap_or(());
                    }
                }
            }
            State::Changes {
                ref mut sender,
                ref mut remaining_len,
                ref mut file,
                ref mut path,
                ref hashes,
                ref mut current,
            } => {
                debug!("state changes");
                // `data` may hold several complete and/or partial change
                // files; each file is preceded by its length as a
                // big-endian u64.
                let mut p = 0;
                while p < data.len() {
                    if *remaining_len == 0 {
                        // NOTE(review): assumes the 8-byte length header
                        // never straddles a packet boundary (`read_u64`
                        // would fail on a short slice) — confirm the
                        // server always sends it in one piece.
                        *remaining_len = (&data[p..]).read_u64::<BigEndian>().unwrap() as usize;
                        p += 8;
                        debug!("remaining_len = {:?}", remaining_len);
                    }
                    if data.len() >= p + *remaining_len {
                        file.write_all(&data[p..p + *remaining_len])?;
                        // We have enough data to write the
                        // file, write it and move to the next
                        // file.
                        p += *remaining_len;
                        *remaining_len = 0;
                        file.flush()?;
                        // Publish the finished download by renaming the
                        // ".tmp" file to ".change".
                        let mut final_path = path.clone();
                        final_path.set_extension("change");
                        debug!("moving {:?} to {:?}", path, final_path);
                        std::fs::rename(&path, &final_path)?;
                        debug!("sending");
                        if let Some(ref mut sender) = sender {
                            if sender.send(hashes[*current]).await.is_err() {
                                break;
                            }
                        }
                        debug!("sent");
                        *current += 1;
                        if *current < hashes.len() {
                            // If we're still waiting for
                            // another change.
                            libpijul::changestore::filesystem::pop_filename(path);
                            libpijul::changestore::filesystem::push_filename(
                                path,
                                &hashes[*current],
                            );
                            std::fs::create_dir_all(&path.parent().unwrap())?;
                            path.set_extension("tmp");
                            debug!("creating file {:?}", path);
                            *file = std::fs::File::create(&path)?;
                        } else {
                            // Else, just finish.
                            debug!("dropping channel");
                            std::mem::drop(sender.take());
                            break;
                        }
                    } else {
                        // not enough data, we need more.
                        file.write_all(&data[p..])?;
                        file.flush()?;
                        *remaining_len -= data.len() - p;
                        debug!("need more data");
                        break;
                    }
                }
                debug!("finished, {:?} {:?}", p, data.len());
            }
            State::Changelist { ref mut sender } => {
                debug!("state changelist");
                // A lone "\n" terminates the changelist.
                if &data[..] == b"\n" {
                    debug!("log done");
                    sender.send(None).await.unwrap_or(())
                } else if let Ok(data) = std::str::from_utf8(&data) {
                    for l in data.lines() {
                        if !l.is_empty() {
                            debug!("line = {:?}", l);
                            sender.send(parse_line(l).ok()).await.unwrap_or(())
                        } else {
                            sender.send(None).await.unwrap_or(());
                        }
                    }
                }
            }
            State::Channel { ref mut sender } => {
                debug!("state channel");
                sender.send(data).await?
            }
            State::Archive {
                ref mut sender,
                ref mut w,
                ref mut len,
                ref mut len_n,
                ref mut conflicts,
            } => {
                debug!("state archive");
                // The first 16 bytes form two big-endian u64s — the
                // payload length, then the conflict count; `len_n`
                // tracks how many header bytes were consumed so far.
                let mut off = 0;
                while *len_n < 16 && off < data.len() {
                    if *len_n < 8 {
                        *len = (*len << 8) | (data[off] as u64);
                    } else {
                        *conflicts = (*conflicts << 8) | (data[off] as u64);
                    }
                    *len_n += 1;
                    off += 1;
                }
                if *len_n >= 16 {
                    w.write_all(&data[off..])?;
                    // NOTE(review): underflows if the remote sends more
                    // than `len` payload bytes — assumes a well-behaved
                    // server.
                    *len -= (data.len() - off) as u64;
                    if *len == 0 {
                        if let Some(sender) = sender.take() {
                            sender.send(*conflicts).unwrap_or(())
                        }
                    }
                }
            }
            State::None => {
                debug!("None state");
            }
        }
        Ok((self, session))
    })
}
}
/// Interactively ask the user whether to trust an unknown host key;
/// on a "y"/"Y" answer the key is recorded in known_hosts.
fn learn(addr: &str, port: u16, pk: &thrussh_keys::key::PublicKey) -> Result<bool, anyhow::Error> {
    // Only show the port when it is not the SSH default.
    let host = if port == 22 {
        format!("{:?}", addr)
    } else {
        format!("{:?}:{}", addr, port)
    };
    print!(
        "Unknown key for {}, fingerprint {:?}. Learn it (y/N)? ",
        host,
        pk.fingerprint()
    );
    std::io::stdout().flush()?;
    let mut buffer = String::new();
    std::io::stdin().read_line(&mut buffer)?;
    let answer = buffer.trim();
    if answer.eq_ignore_ascii_case("y") {
        thrussh_keys::learn_known_hosts(addr, port, pk)?;
        Ok(true)
    } else {
        Ok(false)
    }
}
impl Ssh {
/// Send EOF and drain the remaining channel messages, failing if the
/// remote exited with a nonzero status.
pub async fn finish(&mut self) -> Result<(), anyhow::Error> {
    self.c.eof().await?;
    while let Some(msg) = self.c.wait().await {
        debug!("msg = {:?}", msg);
        match msg {
            thrussh::ChannelMsg::WindowAdjusted { .. } | thrussh::ChannelMsg::Eof => {}
            thrussh::ChannelMsg::ExitStatus { exit_status } if exit_status != 0 => {
                bail!("Remote exited with status {:?}", exit_status)
            }
            thrussh::ChannelMsg::ExitStatus { .. } => {}
            msg => error!("wrong message {:?}", msg),
        }
    }
    Ok(())
}
/// Ask the remote for its state at position `mid` (or its head state
/// when `mid` is `None`); the reply arrives via the oneshot channel
/// installed in `self.state`.
pub async fn get_state(
    &mut self,
    mid: Option<u64>,
) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
    debug!("get_state");
    let (sender, receiver) = tokio::sync::oneshot::channel();
    *self.state.lock().await = State::State {
        sender: Some(sender),
    };
    self.run_protocol().await?;
    let command = match mid {
        Some(mid) => format!("state {} {}\n", self.channel, mid),
        None => format!("state {}\n", self.channel),
    };
    self.c.data(command.as_bytes()).await?;
    Ok(receiver.await?)
}
/// Request a tarball of the remote channel into `w`, optionally pinned
/// to `state` (plus extra changes) and restricted to `prefix`.
/// Returns the remote-reported number of conflicts.
pub async fn archive<W: std::io::Write + Send + 'static>(
    &mut self,
    prefix: Option<String>,
    state: Option<(Merkle, &[Hash])>,
    w: W,
) -> Result<u64, anyhow::Error> {
    debug!("archive");
    let (sender, receiver) = tokio::sync::oneshot::channel();
    *self.state.lock().await = State::Archive {
        sender: Some(sender),
        len: 0,
        conflicts: 0,
        len_n: 0,
        w: Box::new(w),
    };
    self.run_protocol().await?;
    // Build the protocol command, then send it in one write.
    let cmd = match state {
        Some((ref state, ref extra)) => {
            let mut cmd = format!("archive {} {}", self.channel, state.to_base32(),);
            for e in extra.iter() {
                cmd.push_str(&format!(" {}", e.to_base32()));
            }
            if let Some(ref p) = prefix {
                cmd.push_str(" :");
                cmd.push_str(p)
            }
            cmd.push('\n');
            cmd
        }
        None => format!(
            "archive {}{}{}\n",
            self.channel,
            if prefix.is_some() { " :" } else { "" },
            prefix.unwrap_or_else(String::new)
        ),
    };
    self.c.data(cmd.as_bytes()).await?;
    Ok(receiver.await.unwrap_or(0))
}
/// Start the remote `pijul protocol` process once per session; later
/// calls are no-ops. Waits for the exec to be acknowledged.
pub async fn run_protocol(&mut self) -> Result<(), anyhow::Error> {
    if self.is_running {
        return Ok(());
    }
    self.is_running = true;
    debug!("run_protocol");
    self.c
        .exec(
            true,
            format!(
                "{} protocol --version {} --repository {}",
                self.remote_cmd,
                crate::PROTOCOL_VERSION,
                self.path
            ),
        )
        .await?;
    // Wait for the exec request to succeed (or the remote to die).
    while let Some(msg) = self.c.wait().await {
        debug!("msg = {:?}", msg);
        match msg {
            thrussh::ChannelMsg::Success => break,
            thrussh::ChannelMsg::ExitStatus { exit_status } => {
                if exit_status != 0 {
                    bail!("Remote exited with status {:?}", exit_status)
                }
            }
            _ => {}
        }
    }
    debug!("run_protocol done");
    Ok(())
}
pub async fn download_changelist<T: MutTxnT>(
&mut self,
txn: &mut T,
remote: &mut RemoteRef<T>,
from: u64,
paths: &[String],
) -> Result<HashSet<Position<Hash>>, anyhow::Error> {
let (sender, mut receiver) = tokio::sync::mpsc::channel(10);
*self.state.lock().await = State::Changelist { sender };
self.run_protocol().await?;
debug!("download_changelist");
let mut command = Vec::new();
write!(command, "changelist {} {}", self.channel, from).unwrap();
for p in paths {
write!(command, " {:?}", p).unwrap()
}
command.push(b'\n');
self.c.data(&command[..]).await?;
debug!("waiting ssh, command: {:?}", std::str::from_utf8(&command));
let mut result = HashSet::new();
while let Some(Some(m)) = receiver.recv().await {
match m {
super::ListLine::Change { n, h, m } => {
txn.put_remote(remote, n, (h, m))?;
}
super::ListLine::Position(pos) => {
result.insert(pos);
}
super::ListLine::Error(err) => {
bail!(err)
}
}
}
if *self.has_errors.lock().await {
bail!("Remote sent an error")
}
debug!("no msg, result = {:?}", result);
Ok(result)
}
/// Upload each change file to the remote with an `apply` command,
/// updating a progress bar as files are sent.
pub async fn upload_changes(
    &mut self,
    mut local: PathBuf,
    to_channel: Option<&str>,
    changes: &[Hash],
) -> Result<(), anyhow::Error> {
    use std::io::Read;
    self.run_protocol().await?;
    debug!("upload_changes");
    let bar = ProgressBar::new(changes.len() as u64);
    bar.set_style(
        indicatif::ProgressStyle::default_bar()
            .template(" Uploading changes {wide_bar} {pos:>5}/{len}"),
    );
    let bar = super::PROGRESS.add(bar);
    *PROGRESS.lock().await = Some(bar);
    // The target channel is the same for every change; resolve once.
    let to_channel = to_channel.unwrap_or(self.channel.as_str());
    for c in changes {
        debug!("{:?}", c);
        libpijul::changestore::filesystem::push_filename(&mut local, &c);
        let mut change_file = std::fs::File::open(&local)?;
        let change_len = change_file.metadata()?.len();
        let mut contents = thrussh::CryptoVec::new_zeroed(change_len as usize);
        change_file.read_exact(&mut contents[..])?;
        self.c
            .data(format!("apply {} {} {}\n", to_channel, c.to_base32(), change_len).as_bytes())
            .await?;
        self.c.data(&contents[..]).await?;
        if let Some(ref mut bar) = *PROGRESS.lock().await {
            bar.inc(1);
        }
        libpijul::changestore::filesystem::pop_filename(&mut local);
    }
    if let Some(ref mut bar) = *PROGRESS.lock().await {
        bar.set_style(
            indicatif::ProgressStyle::default_bar()
                .template("✓ Uploading changes {pos:>5}/{len}"),
        );
        bar.finish();
    }
    Ok(())
}
/// Download the given changes, reporting each completed hash on
/// `sender`. Thin wrapper around `download_changes_` with a mandatory
/// progress sender.
pub async fn download_changes(
    &mut self,
    c: &[libpijul::pristine::Hash],
    sender: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
    changes_dir: &mut PathBuf,
    full: bool,
) -> Result<(), anyhow::Error> {
    let sender = Some(sender);
    self.download_changes_(c, sender, changes_dir, full).await
}
/// Shared download implementation: requests every change in `c`
/// (`change` when `full`, `partial` otherwise); the `data` handler
/// writes the incoming files and reports each completed hash on the
/// channel installed in `self.state`, which this function forwards to
/// `sender` while driving a progress bar.
async fn download_changes_(
    &mut self,
    c: &[libpijul::pristine::Hash],
    mut sender: Option<&mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>>,
    changes_dir: &mut PathBuf,
    full: bool,
) -> Result<(), anyhow::Error> {
    if c.is_empty() {
        return Ok(());
    }
    let (sender_, mut recv) = tokio::sync::mpsc::channel(100);
    // First download target: <changes_dir>/<hash>.tmp; the `data`
    // handler renames it to ".change" once complete.
    let mut path = changes_dir.clone();
    libpijul::changestore::filesystem::push_filename(&mut path, &c[0]);
    std::fs::create_dir_all(&path.parent().unwrap())?;
    path.set_extension("tmp");
    let file = std::fs::File::create(&path)?;
    *self.state.lock().await = State::Changes {
        sender: Some(sender_),
        remaining_len: 0,
        path,
        file,
        hashes: c.to_vec(),
        current: 0,
    };
    self.run_protocol().await?;
    for c in c {
        debug!("download_change {:?} {:?}", c, full);
        if full {
            self.c
                .data(format!("change {}\n", c.to_base32()).as_bytes())
                .await?;
        } else {
            self.c
                .data(format!("partial {}\n", c.to_base32()).as_bytes())
                .await?;
        }
    }
    let progress = ProgressBar::new(c.len() as u64);
    progress.set_style(
        indicatif::ProgressStyle::default_bar()
            .template(" Downloading changes {wide_bar} {pos:>5}/{len}"),
    );
    *PROGRESS.lock().await = Some(progress);
    // Each received hash is one fully-downloaded change.
    while let Some(_hash) = recv.recv().await {
        debug!("received hash {:?}", _hash);
        if let Some(ref mut progress) = *PROGRESS.lock().await {
            progress.inc(1);
        }
        if let Some(ref mut sender) = sender {
            if sender.send(_hash).await.is_err() {
                // The receiver hung up: stop forwarding.
                if let Some(ref mut progress) = *PROGRESS.lock().await {
                    progress.abandon();
                }
                break;
            }
        }
    }
    if let Some(ref mut progress) = *PROGRESS.lock().await {
        if !progress.is_finished() {
            progress.set_style(
                indicatif::ProgressStyle::default_bar()
                    .template("✓ Downloading changes {pos:>5}/{len}"),
            );
            progress.finish();
        }
    }
    debug!("done downloading");
    Ok(())
}
/// Clone the remote channel wholesale by streaming a channel dump into
/// `channel`, then download the corresponding changes (only the alive
/// ones when `lazy`) and write out the working copy.
pub async fn clone_channel<T: TxnTExt + MutTxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    lazy: bool,
) -> Result<(), anyhow::Error> {
    let (sender, mut recv) = tokio::sync::mpsc::channel(10);
    *self.state.lock().await = State::Channel { sender };
    self.run_protocol().await?;
    debug!("clone channel");
    self.c
        .data(format!("channel {}\n", self.channel).as_bytes())
        .await?;
    let progress = indicatif::ProgressBar::new_spinner();
    progress.set_style(
        indicatif::ProgressStyle::default_spinner().template("{spinner} Cloning channel"),
    );
    progress.enable_steady_tick(100);
    *PROGRESS.lock().await = Some(progress);
    // Feed the dump into the transaction until `read` signals the end;
    // `alive` collects the channel's live internal change ids.
    let from_dump_alive = {
        let mut from_dump =
            libpijul::pristine::channel_dump::ChannelFromDump::new(txn, channel.clone());
        debug!("receiving channel");
        while let Some(msg) = recv.recv().await {
            debug!("msg = {:?}", msg.len());
            if from_dump.read(&msg)? {
                debug!("break");
                break;
            }
        }
        debug!("from dump done");
        from_dump.alive
    };
    if let Some(ref mut progress) = *PROGRESS.lock().await {
        progress.set_style(
            indicatif::ProgressStyle::default_spinner().template("✓ Cloning channel"),
        );
        progress.finish();
    }
    let channel_ = channel.borrow();
    debug!("cloned, now downloading changes");
    let mut hashes = Vec::new();
    if lazy {
        // Lazy clone: only fetch changes still alive in the graph.
        for &ch in from_dump_alive.iter() {
            let h = txn.get_external(ch)?.unwrap();
            hashes.push(h);
        }
    } else {
        // Full clone: fetch the channel's entire log.
        for h in txn.log(&channel_, 0)? {
            hashes.push((h?.1).0);
        }
    }
    std::mem::drop(channel_);
    debug!("hashes = {:#?}", hashes);
    self.download_changes_(&hashes, None, &mut repo.changes_dir, true)
        .await?;
    txn.output_repository_no_pending(
        &mut repo.working_copy,
        &repo.changes,
        channel,
        &mut HashMap::new(),
        "",
        true,
        None,
    )?;
    Ok(())
}
}
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::bail;
use lazy_static::lazy_static;
use libpijul::pristine::{Base32, ChannelRef, Hash, Merkle, MutTxnT, RemoteRef, TxnT};
use libpijul::DOT_DIR;
use libpijul::{MutTxnTExt, TxnTExt};
use log::{debug, info};
use tokio::sync::Mutex;
use crate::repository::*;
pub mod ssh;
use ssh::*;
pub mod local;
use local::*;
pub mod http;
use http::*;
/// A handle on a repository we can exchange changes with.
pub enum RemoteRepo {
    // Another pijul repository on the local filesystem.
    Local(Local),
    // A repository reached over SSH.
    Ssh(Ssh),
    // A repository served over HTTP(S).
    Http(Http),
    // Another channel of this same repository.
    LocalChannel(String),
    // Placeholder used while the remote is moved into a spawned
    // download task (see `pull`, `clone_tag`, `complete_changes`);
    // never a usable remote by itself.
    None,
}
lazy_static! {
    // Process-wide registry of progress bars shared by all remotes.
    static ref PROGRESS: Arc<indicatif::MultiProgress> = Arc::new(indicatif::MultiProgress::new());
    // The currently displayed spinner, if any (e.g. while a remote
    // changelist update is in flight).
    static ref SPINNER: Arc<Mutex<Option<indicatif::ProgressBar>>> = Arc::new(Mutex::new(None));
}
impl Repository {
    /// Open a connection to `name`, first resolving it against the
    /// remotes configured for this repository; unconfigured names are
    /// used verbatim (URL, path, or SSH spec).
    pub async fn remote(
        &self,
        self_path: Option<&Path>,
        name: &str,
        channel: &str,
        no_cert_check: bool,
    ) -> Result<RemoteRepo, anyhow::Error> {
        match self.config.remotes.get(name) {
            Some(remote) => unknown_remote(self_path, remote, channel, no_cert_check).await,
            None => unknown_remote(self_path, name, channel, no_cert_check).await,
        }
    }
}
/// Open a remote from a raw name: an HTTP(S) URL, an `ssh://` spec, a
/// local filesystem path (possibly the repository itself), or — as a
/// last resort — a bare `user@host:path` SSH spec.
pub async fn unknown_remote(
    self_path: Option<&Path>,
    name: &str,
    channel: &str,
    no_cert_check: bool,
) -> Result<RemoteRepo, anyhow::Error> {
    if name.starts_with("http://") || name.starts_with("https://") {
        debug!("unknown_remote, http = {:?}", name);
        let client = reqwest::ClientBuilder::new()
            .danger_accept_invalid_certs(no_cert_check)
            .build()?;
        return Ok(RemoteRepo::Http(Http {
            url: name.to_string(),
            channel: channel.to_string(),
            client,
            name: name.to_string(),
        }));
    }
    if name.starts_with("ssh://") {
        return match ssh_remote(name) {
            Some(mut ssh) => {
                debug!("unknown_remote, ssh = {:?}", ssh);
                Ok(RemoteRepo::Ssh(ssh.connect(name, channel).await?))
            }
            None => bail!("Remote not found: {:?}", name),
        };
    }
    if let Ok(root) = std::fs::canonicalize(name) {
        // The "remote" may actually be this very repository, in which
        // case it is just another local channel.
        if let Some(path) = self_path {
            let path = std::fs::canonicalize(path)?;
            if path == root {
                return Ok(RemoteRepo::LocalChannel(channel.to_string()));
            }
        }
        let mut dot_dir = root.join(DOT_DIR);
        let changes_dir = dot_dir.join(CHANGES_DIR);
        dot_dir.push(PRISTINE_DIR);
        debug!("dot_dir = {:?}", dot_dir);
        if let Ok(pristine) = libpijul::pristine::sanakirja::Pristine::new(&dot_dir) {
            debug!("pristine done");
            return Ok(RemoteRepo::Local(Local {
                root: Path::new(name).to_path_buf(),
                channel: channel.to_string(),
                changes_dir,
                pristine: Arc::new(pristine),
                name: name.to_string(),
            }));
        }
    }
    // Fall back to interpreting `name` as an SSH remote spec.
    match ssh_remote(name) {
        Some(mut ssh) => {
            debug!("unknown_remote, ssh = {:?}", ssh);
            Ok(RemoteRepo::Ssh(ssh.connect(name, channel).await?))
        }
        None => bail!("Remote not found: {:?}", name),
    }
}
impl RemoteRepo {
/// The name or URL this remote was opened under, when it has one.
fn name(&self) -> Option<&str> {
    match self {
        RemoteRepo::Ssh(s) => Some(s.name.as_str()),
        RemoteRepo::Local(l) => Some(l.name.as_str()),
        RemoteRepo::Http(h) => Some(h.name.as_str()),
        RemoteRepo::LocalChannel(_) => None,
        RemoteRepo::None => unreachable!(),
    }
}
/// A short repository name derived from this remote: the last path
/// component of the SSH spec, the directory name of a local remote, or
/// the URL's file name (falling back to its host) for HTTP.
pub fn repo_name(&self) -> Option<String> {
    match self {
        RemoteRepo::Ssh(s) => {
            // Everything after the last ':' or '/' is the repo name;
            // with neither separator, the whole spec is the name.
            let start = s
                .name
                .rfind(|c| c == ':' || c == '/')
                .map(|sep| sep + 1)
                .unwrap_or(0);
            Some(s.name[start..].to_string())
        }
        RemoteRepo::Local(l) => l.root.file_name().map(|f| f.to_str().unwrap().to_string()),
        RemoteRepo::Http(h) => {
            let url = reqwest::Url::parse(&h.url).unwrap();
            match libpijul::path::file_name(url.path()) {
                Some(name) => Some(name.to_string()),
                None => url.host().map(|h| h.to_string()),
            }
        }
        RemoteRepo::LocalChannel(_) => None,
        RemoteRepo::None => unreachable!(),
    }
}
/// Close the connection cleanly; only SSH remotes need an explicit
/// shutdown handshake.
pub async fn finish(&mut self) -> Result<(), anyhow::Error> {
    match self {
        RemoteRepo::Ssh(s) => s.finish().await?,
        _ => {}
    }
    Ok(())
}
/// Bring the local cache of the remote's changelist up to date,
/// returning the positions matched by the `path` filters together with
/// the refreshed remote table (`None` when the remote is ourselves).
pub async fn update_changelist<T: MutTxnTExt + TxnTExt>(
    &mut self,
    txn: &mut T,
    path: &[String],
) -> Result<Option<(HashSet<Position<Hash>>, RemoteRef<T>)>, anyhow::Error> {
    let progress = indicatif::ProgressBar::new_spinner();
    progress.set_style(
        indicatif::ProgressStyle::default_spinner()
            .template("{spinner} Updating remote changelist"),
    );
    progress.enable_steady_tick(100);
    *SPINNER.lock().await = Some(progress);
    debug!("update_changelist");
    let name = if let Some(name) = self.name() {
        name
    } else {
        // A LocalChannel remote has no separate changelist to update.
        return Ok(None);
    };
    let mut remote = txn.open_or_create_remote(name).unwrap();
    // Find the first index where our cache diverges from the remote.
    let n = self
        .dichotomy_changelist(txn, &remote.borrow().remote)
        .await?;
    debug!("update changelist {:?}", n);
    // Delete every cached entry from the divergence point onwards...
    let v: Vec<_> = txn
        .iter_remote(&remote.borrow().remote, n)?
        .filter_map(|k| {
            let k = k.unwrap().0;
            if k >= n {
                Some(k)
            } else {
                None
            }
        })
        .collect();
    for k in v {
        debug!("deleting {:?}", k);
        txn.del_remote(&mut remote, k)?;
    }
    // ...and re-download that suffix from the remote.
    let paths = self.download_changelist(txn, &mut remote, n, path).await?;
    if let Some(progress) = SPINNER.lock().await.take() {
        progress.set_style(
            indicatif::ProgressStyle::default_spinner()
                .template("✓ Updating remote changelist"),
        );
        progress.finish();
    }
    Ok(Some((paths, remote)))
}
/// Binary-search for the longest prefix of our cached remote
/// changelist that still agrees with the actual remote, returning the
/// first index that needs re-downloading.
async fn dichotomy_changelist<T: MutTxnT + TxnTExt>(
    &mut self,
    txn: &T,
    remote: &T::Remote,
) -> Result<u64, anyhow::Error> {
    let mut a = 0;
    let (mut b, (_, state)) = if let Some(last) = txn.last_remote(remote)? {
        last
    } else {
        debug!("the local copy of the remote has no changes");
        return Ok(0);
    };
    if let Some((_, s)) = self.get_state(txn, Some(b)).await? {
        if s == state {
            // The local list is already up to date.
            return Ok(b + 1);
        }
    }
    // Else, find the last state we have in common with the
    // remote, it might be older than the last known state (if
    // changes were unrecorded on the remote).
    while a < b {
        let mid = (a + b) / 2;
        let (mid, (_, state)) = txn.get_remote_state(remote, mid)?.unwrap();
        let remote_state = self.get_state(txn, Some(mid)).await?;
        debug!("dichotomy {:?} {:?} {:?}", mid, state, remote_state);
        if let Some((_, remote_state)) = remote_state {
            if remote_state == state {
                // Still in agreement at `mid`: search the upper half.
                if a == mid {
                    return Ok(a + 1);
                } else {
                    a = mid;
                    continue;
                }
            }
        }
        // Divergent (or unknown) at `mid`: search the lower half.
        if b == mid {
            break;
        } else {
            b = mid
        }
    }
    Ok(a)
}
/// The remote's state (index and Merkle) at position `mid`, or its
/// head state when `mid` is `None`.
async fn get_state<T: libpijul::TxnTExt>(
    &mut self,
    txn: &T,
    mid: Option<u64>,
) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
    match *self {
        RemoteRepo::Local(ref mut local) => local.get_state(mid),
        RemoteRepo::Ssh(ref mut ssh) => ssh.get_state(mid).await,
        RemoteRepo::Http(ref mut http) => http.get_state(mid).await,
        RemoteRepo::LocalChannel(ref channel) => match txn.load_channel(&channel)? {
            Some(channel) => local::get_state(txn, &channel, mid),
            None => Ok(None),
        },
        RemoteRepo::None => unreachable!(),
    }
}
/// Produce a tarball of the remote channel into `w`, optionally
/// restricted to `prefix` and pinned to a given `state` (with extra
/// changes applied on top). Returns the number of conflicts.
pub async fn archive<W: std::io::Write + Send + 'static>(
    &mut self,
    prefix: Option<String>,
    state: Option<(Merkle, &[Hash])>,
    w: W,
) -> Result<u64, anyhow::Error> {
    match *self {
        RemoteRepo::Local(ref mut l) => {
            debug!("archiving local repo");
            let changes = libpijul::changestore::filesystem::FileSystem::from_root(&l.root);
            let mut tarball = libpijul::output::Tarball::new(w, prefix);
            let conflicts = if let Some((state, extra)) = state {
                // Pinning to a state needs a mutable transaction.
                let mut txn = l.pristine.mut_txn_begin();
                let mut channel = txn.load_channel(&l.channel)?.unwrap();
                txn.archive_with_state(&changes, &mut channel, state, extra, &mut tarball)?
            } else {
                let txn = l.pristine.txn_begin()?;
                let channel = txn.load_channel(&l.channel)?.unwrap();
                txn.archive(&changes, &channel, &mut tarball)?
            };
            Ok(conflicts.len() as u64)
        }
        RemoteRepo::Ssh(ref mut s) => s.archive(prefix, state, w).await,
        RemoteRepo::Http(ref mut h) => h.archive(prefix, state, w).await,
        RemoteRepo::LocalChannel(_) => unreachable!(),
        RemoteRepo::None => unreachable!(),
    }
}
/// Dispatch a changelist download to the backend; a LocalChannel
/// remote shares our own log, so there is nothing to fetch.
async fn download_changelist<T: MutTxnTExt>(
    &mut self,
    txn: &mut T,
    remote: &mut RemoteRef<T>,
    from: u64,
    paths: &[String],
) -> Result<HashSet<Position<Hash>>, anyhow::Error> {
    match self {
        RemoteRepo::Local(l) => l.download_changelist(txn, remote, from, paths),
        RemoteRepo::Ssh(s) => s.download_changelist(txn, remote, from, paths).await,
        RemoteRepo::Http(h) => h.download_changelist(txn, remote, from, paths).await,
        RemoteRepo::LocalChannel(_) => Ok(HashSet::new()),
        RemoteRepo::None => unreachable!(),
    }
}
/// Upload `changes` to the remote, applying them to `to_channel` (or
/// the remote's configured channel when `None`).
pub async fn upload_changes<T: MutTxnTExt>(
    &mut self,
    txn: &mut T,
    local: PathBuf,
    to_channel: Option<&str>,
    changes: &[Hash],
) -> Result<(), anyhow::Error> {
    match self {
        RemoteRepo::Local(l) => l.upload_changes(local, to_channel, changes),
        RemoteRepo::Ssh(s) => s.upload_changes(local, to_channel, changes).await,
        RemoteRepo::Http(h) => h.upload_changes(local, to_channel, changes).await,
        RemoteRepo::LocalChannel(channel) => {
            // "Uploading" to another local channel is just applying
            // the changes to it in place.
            let mut channel = txn.open_or_create_channel(channel)?;
            let store = libpijul::changestore::filesystem::FileSystem::from_changes(local);
            local::upload_changes(&store, txn, &mut channel, changes)
        }
        RemoteRepo::None => unreachable!(),
    }
}
/// Start (and possibly complete) the download of a change.
///
/// Dispatches to the backend downloader; LocalChannel remotes have
/// nothing to download.
pub async fn download_changes(
    &mut self,
    hashes: &[libpijul::pristine::Hash],
    send: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
    path: &mut PathBuf,
    full: bool,
) -> Result<bool, anyhow::Error> {
    debug!("download_changes");
    match self {
        RemoteRepo::Local(l) => l.download_changes(hashes, send, path).await?,
        RemoteRepo::Ssh(s) => s.download_changes(hashes, send, path, full).await?,
        RemoteRepo::Http(h) => h.download_changes(hashes, send, path, full).await?,
        RemoteRepo::LocalChannel(_) => {}
        RemoteRepo::None => unreachable!(),
    }
    // NOTE(review): unconditionally pops the last component of `path`
    // on the way out — confirm every backend leaves a pushed filename
    // on it.
    libpijul::changestore::filesystem::pop_filename(path);
    Ok(true)
}
/// Download the changes from `to_apply` that are missing locally and,
/// when `do_apply`, apply those touching `inodes` to `channel` as they
/// arrive. Returns the subset of changes that touched `inodes`.
pub async fn pull<T: MutTxnTExt + TxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    to_apply: &[Hash],
    inodes: &HashSet<Position<Hash>>,
    do_apply: bool,
) -> Result<Vec<Hash>, anyhow::Error> {
    let (mut send, mut recv) = tokio::sync::mpsc::channel(100);
    let mut change_path_ = repo.changes_dir.clone();
    // Only fetch the changes not already present on disk.
    let mut to_download_ = Vec::new();
    for h in to_apply {
        libpijul::changestore::filesystem::push_filename(&mut change_path_, h);
        if std::fs::metadata(&change_path_).is_err() {
            to_download_.push(*h)
        }
        libpijul::changestore::filesystem::pop_filename(&mut change_path_);
    }
    let to_download = to_download_.clone();
    // Hand `self` to the downloader task; it is restored below from
    // the task's return value.
    let mut self_ = std::mem::replace(self, RemoteRepo::None);
    let t = tokio::spawn(async move {
        self_
            .download_changes(&to_download_, &mut send, &mut change_path_, false)
            .await?;
        Ok::<_, anyhow::Error>(self_)
    });
    let mut ws = libpijul::ApplyWorkspace::new();
    let mut change_path = repo.changes_dir.clone();
    let progress = if do_apply {
        let p = indicatif::ProgressBar::new(to_download.len() as u64);
        p.set_style(
            indicatif::ProgressStyle::default_bar()
                .template(" Applying changes {wide_bar} {pos:>5}/{len}"),
        );
        Some(PROGRESS.add(p))
    } else {
        None
    };
    let t_progress = std::thread::spawn(|| {
        PROGRESS.join().unwrap_or(());
    });
    let mut to_apply_inodes = Vec::new();
    for h in to_apply {
        libpijul::changestore::filesystem::push_filename(&mut change_path, &h);
        debug!("change_path = {:?}", change_path);
        // Wait until the downloader has materialized this change file.
        while std::fs::metadata(&change_path).is_err() {
            debug!("waiting");
            let r = recv.recv().await;
            debug!("r = {:?}", r);
            if r.is_none() {
                // Downloader is gone; stop waiting for this file.
                if let Some(ref progress) = progress {
                    progress.abandon();
                }
                break;
            }
        }
        libpijul::changestore::filesystem::pop_filename(&mut change_path);
        // A change is relevant when no inode filter was given, when one
        // of its hunks touches a filtered inode, or when it introduced
        // one of the filtered inodes itself.
        let touches_inodes = inodes.is_empty()
            || {
                debug!("inodes = {:?}", inodes);
                use libpijul::changestore::ChangeStore;
                let changes = repo.changes.get_changes(h)?;
                changes.iter().any(|c| {
                    c.iter().any(|c| {
                        let inode = c.inode();
                        debug!("inode = {:?}", inode);
                        if let Some(h) = inode.change {
                            inodes.contains(&Position {
                                change: h,
                                pos: inode.pos,
                            })
                        } else {
                            false
                        }
                    })
                })
            }
            || { inodes.iter().any(|i| i.change == *h) };
        if touches_inodes {
            to_apply_inodes.push(*h);
        } else {
            continue;
        }
        if let Some(ref progress) = progress {
            info!("Applying {:?}", h);
            progress.inc(1);
            txn.apply_change_ws(&repo.changes, channel, *h, &mut ws)?;
        } else {
            debug!("not applying {:?}", h)
        }
    }
    if let Some(progress) = progress {
        if !progress.is_finished() {
            progress.set_style(
                indicatif::ProgressStyle::default_bar()
                    .template("✓ Applying changes {pos:>5}/{len}"),
            );
            progress.finish()
        }
    }
    std::mem::drop(recv);
    debug!("waiting for spawned process");
    // Take the remote back from the downloader task.
    *self = t.await??;
    t_progress.join().unwrap();
    Ok(to_apply_inodes)
}
/// Download the changes of a tag together with their transitive
/// dependencies, then apply them all to `channel`.
pub async fn clone_tag<T: MutTxnTExt + TxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    tag: &[Hash],
) -> Result<(), anyhow::Error> {
    let (mut send_signal, mut recv_signal) = tokio::sync::mpsc::channel(100);
    let (send_hash, mut recv_hash) = tokio::sync::mpsc::channel(100);
    let mut change_path_ = repo.changes_dir.clone();
    // Downloader task: fetches each hash queued on `send_hash` and
    // reports completions on `send_signal`.
    let mut self_ = std::mem::replace(self, RemoteRepo::None);
    let t = tokio::spawn(async move {
        while let Some(hash) = recv_hash.recv().await {
            self_
                .download_changes(&[hash], &mut send_signal, &mut change_path_, false)
                .await?;
        }
        Ok(self_)
    });
    for &h in tag.iter() {
        send_hash.send(h).await?;
    }
    let mut change_path = repo.changes_dir.clone();
    let mut hashes = Vec::new();
    // As each change arrives, queue its dependencies for download too.
    // NOTE(review): both channels are bounded (100) and `send_hash`
    // stays open during this loop — confirm the loop terminates once
    // the downloader has drained its queue, and that deep dependency
    // graphs cannot fill both channels simultaneously.
    while let Some(hash) = recv_signal.recv().await {
        libpijul::changestore::filesystem::push_filename(&mut change_path, &hash);
        std::fs::create_dir_all(change_path.parent().unwrap())?;
        use libpijul::changestore::ChangeStore;
        hashes.push(hash);
        for dep in repo.changes.get_dependencies(&hash)? {
            let dep: libpijul::pristine::Hash = dep;
            send_hash.send(dep).await?;
        }
        libpijul::changestore::filesystem::pop_filename(&mut change_path);
    }
    std::mem::drop(recv_signal);
    std::mem::drop(send_hash);
    // Apply in reverse arrival order. NOTE(review): `pop` applies the
    // most recently received change first — confirm this respects
    // dependency order.
    let mut ws = libpijul::ApplyWorkspace::new();
    while let Some(hash) = hashes.pop() {
        txn.apply_change_ws(&repo.changes, channel, hash, &mut ws)?;
    }
    let r: Result<_, anyhow::Error> = t.await?;
    *self = r?;
    Ok(())
}
/// Clone up to a specific `state` (a Merkle). Over SSH, the whole
/// channel is cloned and the changes after `state` are unrecorded;
/// other backends pull changes forwards until `state` is reached.
pub async fn clone_state<T: MutTxnTExt + TxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    state: Merkle,
    lazy: bool,
) -> Result<(), anyhow::Error> {
    self.update_changelist(txn, &[]).await?;
    let name = self.name().unwrap();
    let remote = txn.open_or_create_remote(name).unwrap();
    if let RemoteRepo::Ssh(ref mut s) = self {
        s.clone_channel(repo, txn, channel, lazy).await?;
        // Walk the remote log backwards to find `state`, collecting
        // the changes recorded after it so they can be unrecorded.
        let mut to_unrecord = Vec::new();
        let mut found = false;
        for x in txn.iter_rev_remote(&remote.borrow().remote, None)? {
            let (n, (h, s)) = x?;
            debug!("{:?} {:?} {:?}", n, h, s);
            if s == state {
                found = true;
                break;
            }
            to_unrecord.push(h);
        }
        if !found {
            bail!("State not found: {:?}", state)
        }
        // Make sure the changes to unrecord are present locally before
        // unrecording them (pull without applying).
        self.pull(repo, txn, channel, &to_unrecord, &HashSet::new(), false)
            .await?;
        for unrec in to_unrecord.iter() {
            txn.unrecord(&repo.changes, channel, unrec)?;
        }
        return Ok(());
    }
    // Non-SSH: pull forwards until the requested state is reached.
    let mut to_pull = Vec::new();
    let mut found = false;
    for x in txn.iter_remote(&remote.borrow().remote, 0)? {
        let (n, (h, s)) = x?;
        debug!("{:?} {:?} {:?}", n, h, s);
        to_pull.push(h);
        if s == state {
            found = true;
            break;
        }
    }
    if !found {
        bail!("State not found: {:?}", state)
    }
    self.pull(repo, txn, channel, &to_pull, &HashSet::new(), true)
        .await?;
    Ok(())
}
/// Download the full contents of changes that are only partially
/// present locally: all of `changes` when `full`, otherwise only those
/// with at least one vertex still alive in `local_channel`.
pub async fn complete_changes<T: MutTxnT + TxnTExt>(
    &mut self,
    repo: &crate::repository::Repository,
    txn: &T,
    local_channel: &mut ChannelRef<T>,
    changes: &[Hash],
    full: bool,
) -> Result<(), anyhow::Error> {
    use libpijul::changestore::ChangeStore;
    let (send_hash, mut recv_hash) = tokio::sync::mpsc::channel(100);
    let (mut send_sig, mut recv_sig) = tokio::sync::mpsc::channel(100);
    // Hand `self` to the downloader task; restored below.
    let mut self_ = std::mem::replace(self, RemoteRepo::None);
    let mut changes_dir = repo.changes_dir.clone();
    let t = tokio::spawn(async move {
        while let Some(h) = recv_hash.recv().await {
            debug!("downloading full patch: {:?}", h);
            self_
                .download_changes(&[h], &mut send_sig, &mut changes_dir, true)
                .await?;
        }
        let result: Result<_, anyhow::Error> = Ok(self_);
        result
    });
    for c in changes {
        // Already complete on disk: nothing to do.
        if repo.changes.has_contents(*c, txn.get_internal(*c)?) {
            debug!("has contents {:?}", c);
            continue;
        }
        if full {
            debug!("sending send_hash");
            send_hash.send(*c).await?;
            debug!("sent");
            continue;
        }
        let change = if let Some(i) = txn.get_internal(*c)? {
            i
        } else {
            continue;
        };
        // Check if at least one non-empty vertex from c is still alive.
        let v = libpijul::pristine::Vertex {
            change,
            start: libpijul::pristine::ChangePosition(0),
            end: libpijul::pristine::ChangePosition(0),
        };
        let channel = local_channel.borrow();
        if txn.is_alive(&channel, v)? {
            send_hash.send(*c).await?;
        }
    }
    debug!("dropping send_hash");
    // Closing the hash queue lets the downloader task terminate.
    std::mem::drop(send_hash);
    while recv_sig.recv().await.is_some() {}
    *self = t.await??;
    Ok(())
}
/// Clone the remote channel into `local_channel`: the SSH fast path
/// (full channel dump) when no path filters are given, otherwise pull
/// every change listed by the remote.
pub async fn clone_channel<T: MutTxnTExt + TxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    local_channel: &mut ChannelRef<T>,
    lazy: bool,
    path: &[String],
) -> Result<(), anyhow::Error> {
    if path.is_empty() {
        if let RemoteRepo::Ssh(s) = self {
            return s.clone_channel(repo, txn, local_channel, lazy).await;
        }
    }
    let (inodes, remote_changes) = self
        .update_changelist(txn, path)
        .await?
        .expect("Remote is not self");
    let pullable = txn
        .iter_remote(&remote_changes.borrow().remote, 0)?
        .map(|x| x.map(|(_, (h, _))| h))
        .collect::<Result<Vec<_>, _>>()?;
    self.pull(repo, txn, local_channel, &pullable, &inodes, true)
        .await?;
    Ok(())
}
}
use libpijul::pristine::{ChangePosition, Position};
use regex::Regex;
lazy_static! {
    // One change entry of the `changelist` output:
    // "<num>.<hash>.<merkle>" (base32 hash and merkle).
    static ref CHANGELIST_LINE: Regex =
        Regex::new(r#"(?P<num>[0-9]+)\.(?P<hash>[A-Za-z0-9]+)\.(?P<merkle>[A-Za-z0-9]+)"#).unwrap();
    // A touched-file position entry: "<hash>.<num>".
    static ref PATHS_LINE: Regex =
        Regex::new(r#"(?P<hash>[A-Za-z0-9]+)\.(?P<num>[0-9]+)"#).unwrap();
}
/// One parsed line of the remote `changelist` protocol output.
enum ListLine {
    // A change entry: index, hash, and the resulting Merkle state.
    Change { n: u64, h: Hash, m: Merkle },
    // A file position matched by a path filter.
    Position(Position<Hash>),
    // An error message reported by the remote.
    Error(String),
}
fn parse_line(data: &str) -> Result<ListLine, anyhow::Error> {
debug!("data = {:?}", data);
if let Some(caps) = CHANGELIST_LINE.captures(data) {
if let (Some(h), Some(m)) = (
Hash::from_base32(caps.name("hash").unwrap().as_str().as_bytes()),
Merkle::from_base32(caps.name("merkle").unwrap().as_str().as_bytes()),
) {
return Ok(ListLine::Change {
n: caps.name("num").unwrap().as_str().parse().unwrap(),
h,
m,
});
}
}
if data.starts_with("error:") {
return Ok(ListLine::Error(data.split_at(6).1.to_string()));
}
if let Some(caps) = PATHS_LINE.captures(data) {
return Ok(ListLine::Position(Position {
change: Hash::from_base32(caps.name("hash").unwrap().as_str().as_bytes()).unwrap(),
pos: ChangePosition(caps.name("num").unwrap().as_str().parse().unwrap()),
}));
}
debug!("offending line: {:?}", data);
bail!("Protocol error")
}
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use std::sync::Arc;
use libpijul::pristine::{Hash, Merkle, MutTxnT, Position, TxnT};
use libpijul::*;
use log::debug;
use super::RemoteRef;
/// A remote that is another pijul repository on the local filesystem.
#[derive(Clone)]
pub struct Local {
    // Name of the channel to exchange with.
    pub channel: String,
    // Root directory of the remote repository.
    pub root: std::path::PathBuf,
    // The remote repository's change store directory.
    pub changes_dir: std::path::PathBuf,
    // Handle on the remote repository's pristine (database).
    pub pristine: Arc<libpijul::pristine::sanakirja::Pristine>,
    // The name (path) this remote was opened under.
    pub name: String,
}
/// State of a local channel: the (index, Merkle) pair at `mid` when
/// given, otherwise the most recent entry of the channel's log.
pub fn get_state<T: TxnTExt>(
    txn: &T,
    channel: &libpijul::pristine::ChannelRef<T>,
    mid: Option<u64>,
) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
    match mid {
        Some(mid) => Ok(txn.get_changes(&channel, mid)?.map(|(_, m)| (mid, m))),
        None => Ok(txn.reverse_log(&channel.borrow(), None)?.next().map(|n| {
            let (n, (_, m)) = n.unwrap();
            (n, m)
        })),
    }
}
impl Local {
/// State of this local remote's channel (see `local::get_state`).
pub fn get_state(&mut self, mid: Option<u64>) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
    let txn = self.pristine.txn_begin()?;
    let channel = txn
        .load_channel(&self.channel)?
        .unwrap();
    get_state(&txn, &channel, mid)
}
/// Copy the remote repository's changelist (from index `from`) into
/// `remote`'s cache, restricted to changes touching `paths` when
/// filters are given. Returns the positions the filters resolve to.
pub fn download_changelist<T: MutTxnT>(
    &mut self,
    txn: &mut T,
    remote: &mut RemoteRef<T>,
    from: u64,
    paths: &[String],
) -> Result<HashSet<Position<Hash>>, anyhow::Error> {
    let store = libpijul::changestore::filesystem::FileSystem::from_root(&self.root);
    let remote_txn = self.pristine.txn_begin()?;
    let remote_channel = if let Some(channel) = remote_txn.load_channel(&self.channel)? {
        channel
    } else {
        debug!("no remote channel named {:?}", self.channel);
        return Ok(HashSet::new());
    };
    let mut paths_ = HashSet::new();
    let mut result = HashSet::new();
    // Resolve each path filter to a graph position, and include its
    // descendants so everything below the path is covered.
    for s in paths {
        if let Ok((p, _ambiguous)) = remote_txn.follow_oldest_path(&store, &remote_channel, s) {
            debug!("p = {:?}", p);
            result.insert(Position {
                change: remote_txn.get_external(p.change)?.unwrap(),
                pos: p.pos,
            });
            paths_.insert(p);
            paths_.extend(
                libpijul::fs::iter_graph_descendants(
                    &remote_txn,
                    &remote_channel.borrow().graph,
                    p,
                )?
                .map(|x| x.unwrap()),
            );
        }
    }
    debug!("paths_ = {:?}", paths_);
    for x in remote_txn.log(&remote_channel.borrow(), from)? {
        let (n, (h, m)) = x?;
        if n >= from {
            debug!(
                "downloading changelist item {:?} {:?} {:?}",
                n,
                h.to_base32(),
                m.to_base32()
            );
            let h_int = remote_txn.get_internal(h)?.unwrap();
            // Keep the entry when unfiltered, or when the change
            // touches one of the filtered positions.
            if paths_.is_empty()
                || paths_.iter().any(|x| {
                    remote_txn
                        .get_touched_files(*x, Some(h_int))
                        .unwrap()
                        .is_some()
                })
            {
                txn.put_remote(remote, n, (h, m))?;
            }
        }
    }
    Ok(result)
}
pub fn upload_changes(
&mut self,
mut local: PathBuf,
to_channel: Option<&str>,
changes: &[Hash],
) -> Result<(), anyhow::Error> {
let store = libpijul::changestore::filesystem::FileSystem::from_root(&self.root);
let mut txn = self.pristine.mut_txn_begin();
let mut channel = txn.open_or_create_channel(to_channel.unwrap_or(&self.channel))?;
for c in changes {
libpijul::changestore::filesystem::push_filename(&mut local, &c);
libpijul::changestore::filesystem::push_filename(&mut self.changes_dir, &c);
std::fs::create_dir_all(&self.changes_dir.parent().unwrap())?;
debug!("hard link {:?} {:?}", local, self.changes_dir);
if std::fs::metadata(&self.changes_dir).is_err() {
if std::fs::hard_link(&local, &self.changes_dir).is_err() {
std::fs::copy(&local, &self.changes_dir)?;
}
}
debug!("hard link done");
libpijul::changestore::filesystem::pop_filename(&mut local);
libpijul::changestore::filesystem::pop_filename(&mut self.changes_dir);
}
let mut repo = libpijul::working_copy::filesystem::FileSystem::from_root(&self.root);
upload_changes(&store, &mut txn, &mut channel, changes)?;
txn.output_repository_no_pending(
&mut repo,
&store,
&mut channel,
&mut HashMap::new(),
"",
true,
None,
)?;
txn.commit()?;
Ok(())
}
pub async fn download_changes(
&mut self,
c: &[libpijul::pristine::Hash],
send: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
mut path: &mut PathBuf,
) -> Result<(), anyhow::Error> {
for c in c {
libpijul::changestore::filesystem::push_filename(&mut self.changes_dir, c);
libpijul::changestore::filesystem::push_filename(&mut path, c);
if std::fs::metadata(&path).is_ok() {
debug!("metadata {:?} ok", path);
libpijul::changestore::filesystem::pop_filename(&mut path);
continue;
}
std::fs::create_dir_all(&path.parent().unwrap())?;
if std::fs::hard_link(&self.changes_dir, &path).is_err() {
std::fs::copy(&self.changes_dir, &path)?;
}
debug!("hard link done");
libpijul::changestore::filesystem::pop_filename(&mut self.changes_dir);
libpijul::changestore::filesystem::pop_filename(&mut path);
debug!("sent");
send.send(*c).await.unwrap();
}
Ok(())
}
}
/// Applies `changes`, in order, to `channel` using `store` as the
/// change store. The change files must already be present in the store.
pub fn upload_changes<T: MutTxnTExt, C: libpijul::changestore::ChangeStore>(
    store: &C,
    txn: &mut T,
    channel: &mut libpijul::pristine::ChannelRef<T>,
    changes: &[Hash],
) -> Result<(), anyhow::Error> {
    // One workspace is shared across all applications to avoid
    // re-allocating its internal buffers for every change.
    let mut workspace = libpijul::ApplyWorkspace::new();
    for hash in changes.iter() {
        txn.apply_change_ws(store, channel, *hash, &mut workspace)?;
    }
    Ok(())
}
use std::collections::HashSet;
use std::io::Write;
use std::path::PathBuf;
use anyhow::bail;
use indicatif::ProgressBar;
use libpijul::pristine::{Base32, MutTxnT, Position};
use libpijul::{Hash, RemoteRef};
use log::{debug, error};
/// A remote repository reached over HTTP(S), driven by a `reqwest` client.
pub struct Http {
// Base URL of the remote repository.
pub url: String,
// Name of the channel to synchronise with.
pub channel: String,
// HTTP client, cloned into each spawned download task.
pub client: reqwest::Client,
// Display name of this remote.
pub name: String,
}
/// Downloads one change `c` from `url` into the change store rooted at
/// `path`, retrying transport failures with exponential backoff.
///
/// The body is streamed into a `.tmp` file, which is atomically renamed
/// to its final `.change` name only once fully downloaded, so readers
/// never observe a partial change file.
async fn download_change(
    client: reqwest::Client,
    url: String,
    mut path: PathBuf,
    c: libpijul::pristine::Hash,
) -> Result<libpijul::pristine::Hash, anyhow::Error> {
    libpijul::changestore::filesystem::push_filename(&mut path, &c);
    std::fs::create_dir_all(&path.parent().unwrap())?;
    let path_ = path.with_extension("tmp");
    libpijul::changestore::filesystem::pop_filename(&mut path);
    let c32 = c.to_base32();
    let url = format!("{}/{}", url, super::DOT_DIR);
    let mut delay = 1f64;
    loop {
        let mut res = if let Ok(res) = client.get(&url).query(&[("change", &c32)]).send().await {
            // A request went through: reset the backoff.
            delay = 1f64;
            res
        } else {
            error!("HTTP error, retrying in {} seconds", delay.round());
            tokio::time::sleep(std::time::Duration::from_secs_f64(delay)).await;
            delay *= 2.;
            continue;
        };
        debug!("response {:?}", res);
        if !res.status().is_success() {
            bail!("HTTP error {:?}", res.status())
        }
        // Bug fix: (re)create the temporary file on every attempt.
        // `File::create` truncates, so bytes written by a previously
        // interrupted attempt are discarded instead of being silently
        // kept in front of the retried download, which corrupted the
        // resulting change file.
        let mut f = std::fs::File::create(&path_)?;
        let done = loop {
            match res.chunk().await {
                Ok(Some(chunk)) => {
                    debug!("writing {:?}", chunk.len());
                    f.write_all(&chunk)?;
                }
                // End of body: the whole change was received.
                Ok(None) => break true,
                Err(_) => {
                    error!("Error while downloading {:?}, retrying", url);
                    tokio::time::sleep(std::time::Duration::from_secs_f64(delay)).await;
                    delay *= 2.;
                    // Abandon this attempt and re-request from scratch.
                    break false;
                }
            }
        };
        if done {
            // Atomically publish the fully-downloaded change.
            std::fs::rename(&path_, &path_.with_extension("change"))?;
            break;
        }
    }
    Ok(c)
}
// Maximum number of change downloads kept in flight at once.
const POOL_SIZE: usize = 20;
impl Http {
/// Downloads `hashes` from the remote into the change store at `path`,
/// reporting each completed hash on `send`.
///
/// Up to `POOL_SIZE` downloads run concurrently: the pool is a fixed
/// ring of optional task handles, awaited in insertion order as slots
/// are reused.
pub async fn download_changes(
&mut self,
hashes: &[libpijul::pristine::Hash],
send: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
path: &PathBuf,
_full: bool,
) -> Result<(), anyhow::Error> {
let progress = ProgressBar::new(hashes.len() as u64);
progress.set_style(
indicatif::ProgressStyle::default_bar()
.template(" Downloading changes {wide_bar} {pos:>5}/{len}"),
);
let progress = super::PROGRESS.add(progress);
// Ring buffer of `Option<JoinHandle<_>>`, all initially `None`.
let mut pool = <[_; POOL_SIZE]>::default();
let mut cur = 0;
for c in hashes {
debug!("downloading {:?}", c);
progress.inc(1);
// Spawn the new download into the current slot; if the slot was
// occupied, that task is the oldest in flight — await it first.
let t = std::mem::replace(
&mut pool[cur],
Some(tokio::spawn(download_change(
self.client.clone(),
self.url.clone(),
path.clone(),
*c,
))),
);
if let Some(t) = t {
let c = t.await??;
// A closed receiver means the consumer is gone: stop early.
if send.send(c).await.is_err() {
debug!("err for {:?}", c);
progress.abandon();
break;
}
}
cur = (cur + 1) % POOL_SIZE;
}
// Drain the remaining in-flight downloads, oldest first.
for f in 0..POOL_SIZE {
if let Some(t) = pool[(cur + f) % POOL_SIZE].take() {
let c = t.await??;
if send.send(c).await.is_err() {
debug!("err for {:?}", c);
progress.abandon();
break;
}
}
}
if !progress.is_finished() {
progress.set_style(
indicatif::ProgressStyle::default_bar()
.template("✓ Downloading changes {pos:>5}/{len}"),
);
progress.finish();
}
Ok(())
}
/// Uploads the change files for `changes` (read from `local`) to the
/// remote, asking it to apply each one to `to_channel` when given.
pub async fn upload_changes(
&self,
mut local: PathBuf,
to_channel: Option<&str>,
changes: &[libpijul::Hash],
) -> Result<(), anyhow::Error> {
let progress = ProgressBar::new(changes.len() as u64);
progress.set_style(
indicatif::ProgressStyle::default_bar()
.template(" Uploading changes {wide_bar} {pos:>5}/{len}"),
);
for c in changes {
libpijul::changestore::filesystem::push_filename(&mut local, &c);
let url = self.url.clone() + "/" + super::DOT_DIR;
let change = std::fs::read(&local)?;
let mut to_channel = if let Some(ch) = to_channel {
vec![("to_channel", ch)]
} else {
Vec::new()
};
let c = c.to_base32();
to_channel.push(("apply", &c));
debug!("url {:?} {:?}", url, to_channel);
// NOTE(review): `send().await?` only fails on transport errors;
// a non-2xx HTTP status is not checked here, unlike the other
// requests in this impl — confirm this is intended.
self.client
.post(&url)
.query(&to_channel)
.body(change)
.send()
.await?;
progress.inc(1);
libpijul::changestore::filesystem::pop_filename(&mut local);
}
progress.set_style(
indicatif::ProgressStyle::default_bar().template("✓ Uploading changes {pos:>5}/{len}"),
);
progress.finish();
Ok(())
}
/// Fetches the remote changelist starting at `from` (optionally filtered
/// by `paths`), stores the entries into `remote`, and returns the
/// positions the remote reported for the requested paths.
///
/// The response is a plain-text, line-based format parsed by
/// `super::parse_line`; an empty line terminates the list.
pub async fn download_changelist<T: MutTxnT>(
&self,
txn: &mut T,
remote: &mut RemoteRef<T>,
from: u64,
paths: &[String],
) -> Result<HashSet<Position<Hash>>, anyhow::Error> {
let url = self.url.clone() + "/" + super::DOT_DIR;
let from_ = from.to_string();
let mut query = vec![("changelist", &from_), ("channel", &self.channel)];
for p in paths.iter() {
query.push(("path", p));
}
let res = self.client.get(&url).query(&query).send().await?;
if !res.status().is_success() {
bail!("HTTP error {:?}", res.status())
}
let resp = res.bytes().await?;
let mut result = HashSet::new();
if let Ok(data) = std::str::from_utf8(&resp) {
for l in data.lines() {
if !l.is_empty() {
match super::parse_line(l)? {
super::ListLine::Change { n, m, h } => {
txn.put_remote(remote, n, (h, m))?;
}
super::ListLine::Position(pos) => {
result.insert(pos);
}
// Server-reported errors are surfaced to the user but
// do not abort the whole changelist.
super::ListLine::Error(e) => {
let mut stderr = std::io::stderr();
writeln!(stderr, "{}", e)?;
}
}
} else {
break;
}
}
}
Ok(result)
}
/// Queries the remote for the state of its channel, at position `mid`
/// when given. The response is expected to be `"<n> <merkle>"`;
/// anything unparsable yields `Ok(None)`.
pub async fn get_state(
&mut self,
mid: Option<u64>,
) -> Result<Option<(u64, libpijul::Merkle)>, anyhow::Error> {
debug!("get_state {:?}", self.url);
let url = format!("{}/{}", self.url, super::DOT_DIR);
let q = if let Some(mid) = mid {
[
("state", format!("{}", mid)),
("channel", self.channel.clone()),
]
} else {
[("state", String::new()), ("channel", self.channel.clone())]
};
let res = self.client.get(&url).query(&q).send().await?;
if !res.status().is_success() {
bail!("HTTP error {:?}", res.status())
}
let resp = res.bytes().await?;
let resp = std::str::from_utf8(&resp)?;
debug!("resp = {:?}", resp);
let mut s = resp.split(' ');
if let (Some(n), Some(m)) = (
s.next().and_then(|s| s.parse().ok()),
s.next()
.and_then(|m| libpijul::Merkle::from_base32(m.as_bytes())),
) {
Ok(Some((n, m)))
} else {
Ok(None)
}
}
/// Asks the remote to produce an archive of the channel (optionally at
/// a specific `state` with extra changes applied, and under an output
/// prefix), streaming the body into `w`.
///
/// Returns the conflict count, which the server encodes in the first 8
/// bytes of the stream (big-endian); the remaining bytes are the
/// archive itself.
pub async fn archive<W: std::io::Write + Send + 'static>(
&mut self,
prefix: Option<String>,
state: Option<(libpijul::Merkle, &[Hash])>,
mut w: W,
) -> Result<u64, anyhow::Error> {
let url = self.url.clone() + "/" + super::DOT_DIR;
let res = self.client.get(&url).query(&[("channel", &self.channel)]);
let res = if let Some((ref state, ref extra)) = state {
let mut q = vec![("archive".to_string(), state.to_base32())];
if let Some(pre) = prefix {
q.push(("outputPrefix".to_string(), pre));
}
for e in extra.iter() {
q.push(("change".to_string(), e.to_base32()))
}
res.query(&q)
} else {
res
};
let res = res.send().await?;
if !res.status().is_success() {
bail!("HTTP error {:?}", res.status())
}
use futures_util::StreamExt;
let mut stream = res.bytes_stream();
let mut conflicts = 0;
let mut n = 0;
while let Some(item) = stream.next().await {
let item = item?;
let mut off = 0;
// Consume the first 8 bytes of the whole stream (which may span
// chunk boundaries) as the big-endian conflict count.
while n < 8 && off < item.len() {
conflicts = (conflicts << 8) | (item[off] as u64);
off += 1;
n += 1
}
w.write_all(&item[off..])?;
}
Ok(conflicts as u64)
}
}
mod commands;
mod config;
mod remote;
mod repository;
use std::ffi::OsString;
use std::io::Write;
use std::path::PathBuf;
use anyhow::bail;
use clap::{AppSettings, Clap};
use human_panic::setup_panic;
use crate::commands::*;
// Channel used when neither the CLI nor the repository config names one.
const DEFAULT_CHANNEL: &str = "main";
// Version of the pijul wire protocol spoken by this binary.
const PROTOCOL_VERSION: usize = 3;
// Top-level command-line interface. Plain `//` comments are used here on
// purpose: `///` doc comments on clap-derived items become user-visible
// help text, which must not change.
#[derive(Clap, Debug)]
#[clap(
version,
author,
global_setting(AppSettings::ColoredHelp),
setting(AppSettings::InferSubcommands)
)]
pub struct Opts {
// The single required subcommand; see `SubCommand`.
#[clap(subcommand)]
pub subcmd: SubCommand,
}
// All pijul subcommands. New variants need a matching arm in `run` below.
// (`//` comments are used for annotations here because `///` on clap
// variants would change the generated help text.)
#[derive(Clap, Debug)]
pub enum SubCommand {
/// Initializes an empty pijul repository
Init(Init),
/// Clones an existing pijul repository
Clone(Clone),
/// Creates a new change
Record(Record),
/// Shows difference between two channels/changes
Diff(Diff),
/// Show the entire log of changes
Log(Log),
/// Pushes changes to a remote upstream
Push(Push),
/// Pulls changes from a remote upstream
Pull(Pull),
/// Shows information about a particular change
Change(Change),
/// Manages different channels
Channel(Channel),
// Hidden from help: server side of the ssh/pipe protocol, not meant
// to be invoked by users directly.
#[clap(setting = AppSettings::Hidden)]
Protocol(Protocol),
#[cfg(feature = "git")]
/// Imports a git repository into pijul
Git(Git),
/// Moves a file in the working copy and the tree
Mv(Mv),
/// Lists files tracked by pijul
Ls(Ls),
/// Adds a path to the tree.
///
/// Pijul has an internal tree to represent the files currently
/// tracked. This command adds files and directories to that tree.
Add(Add),
/// Removes a file from the tree and pristine
Remove(Remove),
/// Resets the working copy to the last recorded change.
///
/// In other words, discards all unrecorded changes.
Reset(Reset),
// Only available in debug builds.
#[cfg(debug_assertions)]
Debug(Debug),
/// Create a new channel
Fork(Fork),
/// Unrecords a list of changes.
///
/// The changes will be removed from your log, but your working
/// copy will stay exactly the same, unless the
/// `--reset` flag was passed. A change can only be unrecorded
/// if all changes that depend on it are also unrecorded in the
/// same operation. There are two ways to call `pijul-unrecord`:
///
/// * With a list of <change-id>s. The given changes will be
/// unrecorded, if possible.
///
/// * Without listing any <change-id>s. You will be
/// presented with a list of changes to choose from.
/// The length of the list is determined by the `unrecord_changes`
/// setting in your global config or the `--show-changes` option,
/// with the latter taking precedence.
Unrecord(Unrecord),
/// Applies changes to a channel
Apply(Apply),
/// Manages remote repositories
Remote(Remote),
/// Creates an archive of the repository
Archive(Archive),
/// Shows which patch last affected each line of the every file
Credit(Credit),
// Any unrecognized subcommand is forwarded to an external
// `pijul-<name>` executable; see `run_external_command`.
#[clap(external_subcommand)]
ExternalSubcommand(Vec<OsString>),
}
/// Entry point: parses the command line, runs the selected subcommand,
/// and maps the outcome to the process exit code (0 on success, 1 on
/// error, with the error printed to stderr).
#[tokio::main]
async fn main() {
    setup_panic!();
    env_logger::init();
    let opts = Opts::parse();
    match run(opts).await {
        Ok(()) => std::process::exit(0),
        Err(e) => {
            writeln!(std::io::stderr(), "Error: {}", e).unwrap_or(());
            std::process::exit(1);
        }
    }
}
/// Replaces the current process with the external executable
/// `pijul-<name>`, passing along the remaining arguments. On Unix,
/// `exec` only returns on failure, which is then reported (and the
/// process exits), so this function never actually returns `Ok`.
#[cfg(unix)]
fn run_external_command(mut command: Vec<OsString>) -> Result<(), std::io::Error> {
    use std::os::unix::process::CommandExt;
    let rest = command.split_off(1);
    let mut executable: OsString = "pijul-".into();
    executable.push(&command[0]);
    let err = std::process::Command::new(&executable).args(rest).exec();
    report_external_command_error(&command[0], err);
}
/// Runs the external executable `pijul-<name>` as a child process
/// (Windows has no `exec`), waits for it, and exits with the child's
/// status code. Spawn failures are reported and exit the process.
#[cfg(windows)]
fn run_external_command(mut command: Vec<OsString>) -> Result<(), std::io::Error> {
    let rest = command.split_off(1);
    let mut executable: OsString = "pijul-".into();
    executable.push(&command[0]);
    let mut child = match std::process::Command::new(&executable).args(rest).spawn() {
        Ok(child) => child,
        Err(e) => report_external_command_error(&command[0], e),
    };
    let status = child.wait()?;
    std::process::exit(status.code().unwrap_or(1))
}
/// Prints a diagnostic for a failed external subcommand invocation and
/// terminates the process with exit code 1. Never returns.
fn report_external_command_error(cmd: &OsString, err: std::io::Error) -> ! {
    let message = if err.kind() == std::io::ErrorKind::NotFound {
        format!("No such subcommand: {:?}", cmd)
    } else {
        format!("Error while running {:?}: {}", cmd, err)
    };
    writeln!(std::io::stderr(), "{}", message).unwrap_or(());
    std::process::exit(1)
}
/// Dispatches a parsed command line to the matching subcommand handler.
async fn run(opts: Opts) -> Result<(), anyhow::Error> {
    match opts.subcmd {
        SubCommand::Log(cmd) => cmd.run(),
        SubCommand::Init(cmd) => cmd.run(),
        SubCommand::Clone(cmd) => cmd.run().await,
        SubCommand::Record(cmd) => cmd.run().await,
        SubCommand::Diff(cmd) => cmd.run(),
        SubCommand::Push(cmd) => cmd.run().await,
        SubCommand::Pull(cmd) => cmd.run().await,
        SubCommand::Change(cmd) => cmd.run(),
        SubCommand::Channel(cmd) => cmd.run(),
        SubCommand::Protocol(cmd) => cmd.run(),
        #[cfg(feature = "git")]
        SubCommand::Git(cmd) => cmd.run(),
        SubCommand::Mv(cmd) => cmd.run(),
        SubCommand::Ls(cmd) => cmd.run(),
        SubCommand::Add(cmd) => cmd.run(),
        SubCommand::Remove(cmd) => cmd.run(),
        SubCommand::Reset(cmd) => cmd.run(),
        #[cfg(debug_assertions)]
        SubCommand::Debug(cmd) => cmd.run(),
        SubCommand::Fork(cmd) => cmd.run(),
        SubCommand::Unrecord(cmd) => cmd.run(),
        SubCommand::Apply(cmd) => cmd.run(),
        SubCommand::Remote(cmd) => cmd.run(),
        SubCommand::Archive(cmd) => cmd.run().await,
        SubCommand::Credit(cmd) => cmd.run(),
        SubCommand::ExternalSubcommand(args) => Ok(run_external_command(args)?),
    }
}
/// Returns the current working directory, converting any OS-level
/// failure into a user-facing error message.
pub fn current_dir() -> Result<PathBuf, anyhow::Error> {
    match std::env::current_dir() {
        Ok(cur) => Ok(cur),
        Err(_) => bail!("Cannot access working directory"),
    }
}
use std::collections::HashMap;
use anyhow::bail;
use log::debug;
use serde_derive::{Deserialize, Serialize};
/// User-level (per-machine) configuration, loaded from the global
/// `config.toml`; see `Global::load` for the search order.
#[derive(Debug, Serialize, Deserialize)]
pub struct Global {
// Default author recorded on new changes.
pub author: libpijul::change::Author,
// How many changes `pijul unrecord` lists when no change id is given.
pub unrecord_changes: Option<usize>,
}
// Name of pijul's directory inside the platform configuration directory.
const CONFIG_DIR: &str = "pijul";
impl Global {
/// Loads the global configuration, trying in order:
/// 1. `<platform config dir>/pijul/config.toml`,
/// 2. `$HOME/.config/pijul/config.toml`,
/// 3. `$HOME/.pijulconfig`.
pub fn load() -> Result<Global, anyhow::Error> {
if let Some(mut dir) = dirs_next::config_dir() {
dir.push(CONFIG_DIR);
dir.push("config.toml");
let s = std::fs::read(&dir)
.or_else(|e| {
// Read from `$HOME/.config/pijul` dir
// (the closure shadows `dir`; the outer `dir` keeps the
// primary path).
if let Some(mut dir) = dirs_next::home_dir() {
dir.push(".config");
dir.push(CONFIG_DIR);
dir.push("config.toml");
std::fs::read(&dir)
} else {
Err(e.into())
}
})
.or_else(|e| {
// Read from `$HOME/.pijulconfig`
if let Some(mut dir) = dirs_next::home_dir() {
dir.push(".pijulconfig");
std::fs::read(&dir)
} else {
Err(e.into())
}
})?;
debug!("s = {:?}", s);
if let Ok(t) = toml::from_slice(&s) {
Ok(t)
} else {
// NOTE(review): `dir` is always the primary path from step 1
// here, even when the bytes were read from a fallback path,
// so this message can cite the wrong file — confirm intended.
bail!("Could not read configuration file at {:?}", dir)
}
} else {
bail!("Global configuration file missing")
}
}
}
/// Per-repository configuration, stored inside the repository itself.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Config {
// Channel the working copy currently tracks, if any.
pub current_channel: Option<String>,
// Remote used when push/pull is given no explicit remote.
pub default_remote: Option<String>,
// Named remotes: alias -> URL/path.
#[serde(default)]
pub remotes: HashMap<String, String>,
// Hook scripts; see `Hooks`.
#[serde(default)]
pub hooks: Hooks,
}
/// Shell commands run at specific points of pijul's operation.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Hooks {
// Commands run (via `bash -c`) before `pijul record` starts recording.
#[serde(default)]
pub record: Vec<String>,
}
impl Config {
pub fn get_current_channel<'a>(&'a self, alt: Option<&'a String>) -> &'a str {
if let Some(channel) = alt {
channel.as_ref()
} else if let Some(ref channel) = self.current_channel {
channel.as_str()
} else {
crate::DEFAULT_CHANNEL
}
}
pub fn current_channel(&self) -> Option<&str> {
if let Some(ref channel) = self.current_channel {
Some(channel.as_str())
} else {
None
}
}
}
/// On-disk (TOML) representation of a remote: exactly one of the
/// optional fields is expected to be set. Converted to/from the
/// `Remote` enum by the manual serde impls below.
#[derive(Debug, Serialize, Deserialize)]
struct Remote_ {
ssh: Option<SshRemote>,
local: Option<String>,
url: Option<String>,
}
/// A configured remote repository, by transport.
#[derive(Debug)]
pub enum Remote {
// Reached over SSH.
Ssh(SshRemote),
// A repository on the local filesystem.
Local { local: String },
// Reached over HTTP(S).
Http { url: String },
// No usable remote configured.
None,
}
/// Address of an SSH remote (e.g. `user@host:path`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SshRemote {
pub addr: String,
}
impl<'de> serde::Deserialize<'de> for Remote {
fn deserialize<D>(deserializer: D) -> Result<Remote, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let r = Remote_::deserialize(deserializer)?;
if let Some(ssh) = r.ssh {
Ok(Remote::Ssh(ssh))
} else if let Some(local) = r.local {
Ok(Remote::Local { local })
} else if let Some(url) = r.url {
Ok(Remote::Http { url })
} else {
Ok(Remote::None)
}
}
}
impl serde::Serialize for Remote {
    /// Serializes through the raw `Remote_` mirror: exactly one of its
    /// fields is set, matching the variant (none for `Remote::None`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        let mut repr = Remote_ {
            ssh: None,
            local: None,
            url: None,
        };
        match *self {
            Remote::Ssh(ref ssh) => repr.ssh = Some(ssh.clone()),
            Remote::Local { ref local } => repr.local = Some(local.to_string()),
            Remote::Http { ref url } => repr.url = Some(url.to_string()),
            Remote::None => {}
        }
        repr.serialize(serializer)
    }
}
use std::collections::HashMap;
use std::path::PathBuf;
use super::{make_changelist, parse_changelist};
use anyhow::{anyhow, bail};
use clap::Clap;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::sanakirja::Txn;
use libpijul::*;
use log::debug;
use crate::repository::Repository;
// Arguments of `pijul unrecord`. (`//` comments only: `///` on clap
// fields becomes user-visible help text, which must not change.)
#[derive(Clap, Debug)]
pub struct Unrecord {
/// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
#[clap(long = "repository")]
repo_path: Option<PathBuf>,
/// Unrecord changes from this channel instead of the current channel
#[clap(long = "channel")]
channel: Option<String>,
/// Also undo the changes in the working copy (preserving unrecorded changes if there are any)
#[clap(long = "reset")]
reset: bool,
/// Show N changes in a text editor if no <change-id>s were given.
/// Defaults to the value
/// of `unrecord_changes` in your global configuration.
#[clap(long = "show-changes", value_name = "N", conflicts_with("change-id"))]
show_changes: Option<usize>,
/// The hash of a change (unambiguous prefixes are accepted)
#[clap(multiple = true)]
change_id: Vec<String>,
}
impl Unrecord {
/// Unrecords the selected changes from the channel: resolves the change
/// ids (from the command line or an interactive list), rejects any
/// change that something still depends on, then unrecords them in
/// descending log order. With `--reset`, unrecorded working-copy edits
/// are first captured as a temporary "pending" change, the working copy
/// is re-output, and the pending change is unrecorded again at the end.
pub fn run(self) -> Result<(), anyhow::Error> {
let mut repo = Repository::find_root(self.repo_path)?;
debug!("{:?}", repo.config);
let channel_name = repo.config.get_current_channel(self.channel.as_ref());
let mut txn = repo.pristine.mut_txn_begin();
if let Some(mut channel) = txn.load_channel(channel_name)? {
// With `--reset`, record current working-copy edits as a
// temporary change so they survive the repository re-output.
let pending_hash = if self.reset {
super::pending(&mut txn, &mut channel, &mut repo)?
} else {
None
};
let mut hashes = Vec::new();
if self.change_id.is_empty() {
// No change ids were given, present a list for choosing
// The number can be set in the global config or passed as a command-line option
let number_of_changes = if let Some(n) = self.show_changes {
n
} else {
let cfg = crate::config::Global::load()?;
cfg.unrecord_changes.ok_or_else(|| {
anyhow!(
"Can't determine how many changes to show. \
Please set the `unrecord_changes` option in \
your global config or run `pijul unrecord` \
with the `--show-changes` option."
)
})?
};
let hashes_ = txn
.reverse_log(&channel.borrow(), None)?
.map(|h| (h.unwrap().1).0)
.take(number_of_changes)
.collect::<Vec<_>>();
let o = make_changelist(&repo.changes, &hashes_, "unrecord")?;
// The user edits the generated list; lines left in it select
// the changes to unrecord.
for h in parse_changelist(&edit::edit_bytes(&o[..])?).iter() {
hashes.push((*h, txn.get_internal(*h)?.unwrap()))
}
} else {
// Resolve each (possibly abbreviated) hash prefix.
for c in self.change_id.iter() {
let (hash, cid) = txn.hash_from_prefix(c)?;
hashes.push((hash, cid))
}
};
let channel_ = channel.borrow();
// Pair each change with its position in the channel's log.
let mut changes = Vec::new();
for (hash, change_id) in hashes {
let n = txn
.get_changeset(Txn::changes(&channel_), change_id)
.unwrap();
changes.push((hash, change_id, n));
}
std::mem::drop(channel_);
// Unrecord newest-first, so earlier changes never lose a
// still-recorded dependent mid-way.
changes.sort_by(|a, b| b.2.cmp(&a.2));
for (hash, change_id, _) in changes {
let channel_ = channel.borrow();
// Refuse to unrecord a change that a still-present change
// (or the pending change) depends on.
for p in txn.iter_revdep(change_id)? {
let (p, d) = p?;
// The iterator is keyed by change id; skip/stop outside
// the range of `change_id`'s own entries.
if p < change_id {
continue;
} else if p > change_id {
break;
}
if txn.get_changeset(Txn::changes(&channel_), d)?.is_some() {
let dep = txn.get_external(d)?.unwrap();
if Some(dep) == pending_hash {
bail!(
"Cannot unrecord change {} because unrecorded changes depend on it",
hash.to_base32()
);
} else {
bail!(
"Cannot unrecord change {} because {} depend on it",
hash.to_base32(),
dep.to_base32()
);
}
}
}
std::mem::drop(channel_);
txn.unrecord(&repo.changes, &mut channel, &hash)?;
}
if self.reset {
// Restore the working copy from the channel's new state.
txn.output_repository_no_pending(
&mut repo.working_copy,
&repo.changes,
&mut channel,
&mut HashMap::new(),
"",
true,
None,
)?;
}
if let Some(h) = pending_hash {
txn.unrecord(&repo.changes, &mut channel, &h)?;
// NOTE(review): deleting the pending change file when the
// `keep-changes` feature is *enabled* looks inverted
// (one would expect `!cfg!(...)`) — confirm intended.
if cfg!(feature = "keep-changes") {
repo.changes.del_change(&h)?;
}
}
}
txn.commit()?;
Ok(())
}
}
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use anyhow::bail;
use canonical_path::CanonicalPathBuf;
use clap::Clap;
use libpijul::pristine::{ChangeId, ChannelMutTxnT, Position};
use libpijul::{ChannelTxnT, DepsTxnT, MutTxnT, MutTxnTExt, TxnT, TxnTExt};
use log::debug;
use crate::repository::Repository;
// Arguments of `pijul reset` (also reused by channel switching; see
// `Reset::switch`). `//` comments only: `///` on clap fields becomes
// user-visible help text.
#[derive(Clap, Debug)]
pub struct Reset {
/// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
#[clap(long = "repository")]
pub repo_path: Option<PathBuf>,
/// Reset the working copy to this channel, and change the current channel to this channel.
#[clap(long = "channel")]
pub channel: Option<String>,
/// Print this file to the standard output, without modifying the repository (works for a single file only).
#[clap(long = "dry-run")]
pub dry_run: bool,
/// Only reset these files
pub files: Vec<PathBuf>,
}
impl Reset {
/// `pijul reset`: always re-outputs, even on the current channel.
pub fn run(self) -> Result<(), anyhow::Error> {
self.reset(true)
}
/// Channel switch: a no-op when already on the requested channel.
pub fn switch(self) -> Result<(), anyhow::Error> {
self.reset(false)
}
/// Shared implementation of reset/switch. `overwrite_changes` controls
/// whether resetting to the *current* channel still rewrites the
/// working copy (`reset`) or returns immediately (`switch`).
fn reset(self, overwrite_changes: bool) -> Result<(), anyhow::Error> {
let has_repo_path = self.repo_path.is_some();
let mut repo = Repository::find_root(self.repo_path)?;
let mut txn = repo.pristine.mut_txn_begin();
let channel_name = repo.config.get_current_channel(self.channel.as_ref());
let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
let mut channel = if let Some(channel) = txn.load_channel(&channel_name)? {
channel
} else {
bail!("No such channel: {:?}", channel_name)
};
if self.dry_run {
// Dry run: print the single requested file to stdout without
// touching the repository.
if self.files.len() != 1 {
bail!("reset --dry-run needs exactly one file");
}
// Resolve the file relative to the repository root (explicit
// `--repository`) or to the current directory.
let (pos, _ambiguous) = if has_repo_path {
let root = std::fs::canonicalize(repo.path.join(&self.files[0]))?;
let path = root.strip_prefix(&repo_path)?.to_str().unwrap();
txn.follow_oldest_path(&repo.changes, &channel, &path)?
} else {
let mut root = crate::current_dir()?;
root.push(&self.files[0]);
let root = std::fs::canonicalize(&root)?;
let path = root.strip_prefix(&repo_path)?.to_str().unwrap();
txn.follow_oldest_path(&repo.changes, &channel, &path)?
};
txn.output_file(
&repo.changes,
&channel,
pos,
&mut libpijul::vertex_buffer::Writer::new(std::io::stdout()),
)?;
} else {
let current_channel = repo.config.get_current_channel(None);
if self.channel.as_deref() == Some(current_channel) {
// Switching to the channel we are already on: nothing to do
// unless this is a real `reset`.
if !overwrite_changes {
return Ok(());
}
} else if self.channel.is_some() {
// Changing channel: refuse if the working copy has
// unrecorded changes, which the switch would destroy.
if let Some(mut channel) = txn.load_channel(current_channel)? {
let mut state = libpijul::RecordBuilder::new();
txn.record(
&mut state,
libpijul::Algorithm::default(),
&mut channel,
&mut repo.working_copy,
&repo.changes,
"",
)?;
let rec = state.finish();
debug!("actions = {:?}", rec.actions);
if !rec.actions.is_empty() {
bail!("Cannot change channel, as there are unrecorded changes.")
}
}
}
let now = std::time::Instant::now();
if self.files.is_empty() {
if self.channel.is_none() || self.channel.as_deref() == Some(current_channel) {
// Plain reset on the current channel: re-output
// everything modified since the channel's last change.
let last_modified = last_modified(&txn, &channel.borrow());
txn.output_repository_no_pending(
&mut repo.working_copy,
&repo.changes,
&mut channel,
&mut HashMap::new(),
"",
true,
Some(last_modified),
)?;
txn.touch_channel(&mut channel.borrow_mut(), None);
txn.commit()?;
return Ok(());
}
// Channel switch: find every inode touched by changes on
// either side since the two channels' last common state,
// and re-output only those.
let mut inodes = HashSet::new();
if let Some(cur) = txn.load_channel(current_channel)? {
let mut changediff = HashSet::new();
let (a, b, s) = libpijul::pristine::last_common_state(
&txn,
&cur.borrow(),
&channel.borrow(),
)?;
debug!("last common state {:?}", s);
changes_after(&txn, &cur.borrow(), a, &mut changediff, &mut inodes)?;
changes_after(&txn, &channel.borrow(), b, &mut changediff, &mut inodes)?;
}
if self.channel.is_some() {
// Persist the switch before rewriting files.
repo.config.current_channel = self.channel;
repo.save_config()?;
}
for pos in inodes.iter() {
let (path, _) = libpijul::fs::find_path(
&repo.changes,
&txn,
&channel.borrow(),
false,
*pos,
)?;
debug!("resetting {:?}", path);
txn.output_repository_no_pending(
&mut repo.working_copy,
&repo.changes,
&mut channel,
&mut HashMap::new(),
&path,
true,
None,
)?;
}
} else {
// Partial reset: only the listed files, sharing one `done`
// map so overlapping prefixes are output once.
let mut done = HashMap::new();
for root in self.files.iter() {
let root = std::fs::canonicalize(&root)?;
let path = root.strip_prefix(&repo_path)?.to_str().unwrap();
txn.output_repository_no_pending(
&mut repo.working_copy,
&repo.changes,
&mut channel,
&mut done,
&path,
true,
None,
)?;
}
}
txn.commit()?;
debug!("now = {:?}", now.elapsed());
}
let locks = libpijul::TIMERS.lock().unwrap();
debug!(
"retrieve: {:?}, graph: {:?}, output: {:?}",
locks.alive_retrieve, locks.alive_graph, locks.alive_output,
);
Ok(())
}
}
/// Collects into `changediff` every change of `chan` logged strictly
/// after position `from`, and into `inodes` the file positions those
/// changes touched. Changes already present in `changediff` are skipped.
fn changes_after<T: ChannelTxnT + DepsTxnT>(
    txn: &T,
    chan: &T::Channel,
    from: u64,
    changediff: &mut HashSet<ChangeId>,
    inodes: &mut HashSet<Position<ChangeId>>,
) -> Result<(), anyhow::Error> {
    for entry in libpijul::pristine::changeid_log(txn, chan, from)? {
        let (pos, (change_id, _)) = entry?;
        debug!("{:?} {:?} {:?}", pos, change_id, from);
        // The log iterator may yield the `from` entry itself; only
        // strictly later entries count.
        if pos <= from {
            continue;
        }
        // `insert` returning false means this change was already seen.
        if !changediff.insert(change_id) {
            continue;
        }
        for touched in txn.iter_rev_touched_files(change_id, None)? {
            let (cid, file_pos) = touched?;
            // The iterator is keyed by change id; skip entries before
            // ours and stop at the first entry past it.
            if cid < change_id {
                continue;
            }
            if cid > change_id {
                break;
            }
            inodes.insert(file_pos);
        }
    }
    Ok(())
}
/// Converts the channel's last-modified timestamp (seconds since the
/// Unix epoch) into a `SystemTime`. The transaction argument is unused,
/// kept for call-site symmetry with other channel helpers.
fn last_modified<T: ChannelTxnT>(_: &T, channel: &T::Channel) -> std::time::SystemTime {
    let secs = std::time::Duration::from_secs(T::last_modified(channel));
    std::time::SystemTime::UNIX_EPOCH + secs
}
use std::collections::HashMap;
use std::io::Write;
use std::path::PathBuf;
use anyhow::bail;
use canonical_path::CanonicalPathBuf;
use chrono::Utc;
use clap::Clap;
use libpijul::change::*;
use libpijul::changestore::*;
use libpijul::pristine::ChannelMutTxnT;
use libpijul::{Base32, ChannelRef, MutTxnT, MutTxnTExt, TxnT, TxnTExt};
use log::{debug, error};
use serde_derive::{Deserialize, Serialize};
use thrussh_keys::PublicKeyBase64;
use crate::repository::*;
// Arguments of `pijul record`. `//` comments only: `///` on clap fields
// becomes user-visible help text, which must not change.
#[derive(Clap, Debug)]
pub struct Record {
/// Record all paths that have changed
#[clap(short = 'a', long = "all")]
pub all: bool,
/// Set the change message
#[clap(short = 'm', long = "message")]
pub message: Option<String>,
/// Set the author field
#[clap(long = "author")]
pub author: Option<String>,
/// Record the change in this channel instead of the current channel
#[clap(long = "channel")]
pub channel: Option<String>,
/// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
#[clap(long = "repository")]
pub repo_path: Option<PathBuf>,
/// Set the timestamp field
#[clap(long = "timestamp")]
pub timestamp: Option<i64>,
/// Sign the change with the default key
#[clap(short = 'S')]
pub sign: bool,
// NOTE(review): no help text, and this flag is not read anywhere in
// this file — confirm its intended use.
#[clap(long = "tag")]
pub tag: bool,
/// Amend this change instead of creating a new change
#[clap(long = "amend")]
#[allow(clippy::option_option)]
pub amend: Option<Option<String>>,
/// Paths in which to record the changes
pub prefixes: Vec<PathBuf>,
}
impl Record {
/// Runs `pijul record`: executes the configured record hooks, records
/// the working-copy changes (optionally amending an existing change),
/// optionally signs the result, applies it to the channel and prints
/// its hash.
pub async fn run(self) -> Result<(), anyhow::Error> {
let mut repo = Repository::find_root(self.repo_path.clone())?;
let mut stdout = std::io::stdout();
let mut stderr = std::io::stderr();
// Run the configured `record` hooks; any failing hook aborts the
// record with the hook's exit code.
for h in repo.config.hooks.record.iter() {
let mut proc = std::process::Command::new("bash")
.current_dir(&repo.path)
.args(&["-c", &h])
.spawn()?;
let status = proc.wait()?;
if !status.success() {
writeln!(stderr, "Hook {:?} exited with code {:?}", h, status)?;
std::process::exit(status.code().unwrap_or(1))
}
}
let mut txn = repo.pristine.mut_txn_begin();
let mut channel =
txn.open_or_create_channel(repo.config.get_current_channel(self.channel.as_ref()))?;
let sign = self.sign;
// When amending: reuse (and possibly re-message) the header of the
// amended change, and unrecord it so it can be re-recorded merged
// with the current working-copy state.
let header = if let Some(ref amend) = self.amend {
let h = if let Some(ref hash) = amend {
txn.hash_from_prefix(hash)?.0
} else if let Some(h) = txn.reverse_log(&channel.borrow(), None)?.next() {
(h?.1).0
} else {
return Ok(());
};
let header = if let Some(message) = self.message.clone() {
ChangeHeader {
message,
..repo.changes.get_header(&h)?
}
} else {
repo.changes.get_header(&h)?
};
txn.unrecord(&repo.changes, &mut channel, &h)?;
header
} else {
self.header()
};
let no_prefixes = self.prefixes.is_empty();
let result = self.record(
&mut txn,
&mut channel,
&mut repo.working_copy,
&repo.changes,
CanonicalPathBuf::canonicalize(&repo.path)?,
header,
)?;
if let Some((mut change, updates, hash, oldest)) = result {
let hash = hash.unwrap();
if sign {
let mut key_path = dirs_next::home_dir().unwrap().join(".ssh");
if let Some((pk, signature)) = sign_hash(&mut key_path, hash).await? {
// The signature is stored in the change's *unhashed*
// section, so re-saving must not alter the hash
// (asserted below).
let sig = toml::Value::try_from(vec![Signature {
public_key: pk,
timestamp: change.header.timestamp,
signature,
}])?;
let mut toml = toml::map::Map::new();
toml.insert("signatures".to_string(), sig);
change.unhashed = Some(toml.into());
let hash2 = repo.changes.save_change(&change).unwrap();
assert_eq!(hash2, hash);
}
}
txn.apply_local_change(&mut channel, &change, hash, &updates)?;
writeln!(stdout, "Hash: {}", hash.to_base32())?;
let oldest = if let Ok(t) = oldest.duration_since(std::time::SystemTime::UNIX_EPOCH) {
t.as_secs() as u64
} else {
0
};
// A full (non-prefixed) record establishes a new baseline
// timestamp for the channel.
if no_prefixes {
txn.touch_channel(&mut channel.borrow_mut(), Some(oldest));
}
txn.commit()?;
} else {
if no_prefixes {
txn.touch_channel(&mut channel.borrow_mut(), None);
txn.commit()?;
}
writeln!(stderr, "Nothing to record")?;
}
Ok(())
}
/// Builds the header of a fresh change from the command-line flags,
/// falling back to the global config for the author and to "now" for
/// the timestamp.
fn header(&self) -> ChangeHeader {
let authors = if let Some(ref a) = self.author {
vec![libpijul::change::Author {
name: a.clone(),
full_name: None,
email: None,
}]
} else if let Ok(global) = crate::config::Global::load() {
vec![global.author]
} else {
Vec::new()
};
ChangeHeader {
message: self.message.clone().unwrap_or_else(String::new),
authors,
description: None,
timestamp: if let Some(t) = self.timestamp {
chrono::DateTime::from_utc(chrono::NaiveDateTime::from_timestamp(t, 0), chrono::Utc)
} else {
Utc::now()
},
}
}
/// Makes every relative prefix absolute, relative to the current
/// working directory.
fn fill_relative_prefixes(&mut self) -> Result<(), anyhow::Error> {
let cwd = std::env::current_dir()?;
for p in self.prefixes.iter_mut() {
if p.is_relative() {
*p = cwd.join(&p);
}
}
Ok(())
}
/// Records the working-copy changes into a new `Change`. Unless `-a`
/// was passed, the change is offered to the user in a text editor for
/// interactive selection. Returns `None` when there is nothing to
/// record or the user cleared the edited change.
fn record<T: TxnTExt + MutTxnTExt, C: ChangeStore>(
mut self,
txn: &mut T,
channel: &mut ChannelRef<T>,
working_copy: &mut libpijul::working_copy::FileSystem,
changes: &C,
repo_path: CanonicalPathBuf,
header: ChangeHeader,
) -> Result<
Option<(
Change,
HashMap<usize, libpijul::InodeUpdate>,
Option<libpijul::Hash>,
std::time::SystemTime,
)>,
anyhow::Error,
> {
let mut state = libpijul::RecordBuilder::new();
if self.prefixes.is_empty() {
txn.record(
&mut state,
libpijul::Algorithm::default(),
channel,
working_copy,
changes,
"",
)?
} else {
self.fill_relative_prefixes()?;
working_copy.record_prefixes(
txn,
channel,
changes,
&mut state,
repo_path,
&self.prefixes,
num_cpus::get(),
)?;
}
let mut rec = state.finish();
if rec.actions.is_empty() {
return Ok(None);
}
let actions = rec
.actions
.into_iter()
.map(|rec| rec.globalize(txn).unwrap())
.collect();
let change =
LocalChange::make_change(txn, channel, actions, rec.contents, header, Vec::new())?;
// `Local` here is `libpijul::change::Local` (a file location), not
// the remote type of the same name elsewhere in this crate.
let file_name = |local: &Local, _| -> String { format!("{}:{}", local.path, local.line) };
debug!("has_binary = {:?}", rec.has_binary_files);
let change = if self.all {
change
} else if rec.has_binary_files {
bail!("Cannot record a binary change interactively. Please use -a.")
} else {
let mut o = Vec::new();
change.write(changes, None, file_name, true, &mut o)?;
let mut with_errors: Option<Vec<u8>> = None;
// Editor loop: re-open the editor until the change parses, each
// time prepending a syntax-error banner to the user's text.
let change = loop {
let mut bytes = if let Some(ref o) = with_errors {
// Note the shadowing: this `o` is the error-annotated
// buffer, not the pristine `o` used in the else branch.
edit::edit_bytes(&o[..])?
} else {
edit::edit_bytes(&o[..])?
};
// An all-whitespace buffer means the user aborted.
if bytes.iter().all(|c| (*c as char).is_whitespace()) {
return Ok(None);
}
let mut change = std::io::BufReader::new(std::io::Cursor::new(&bytes));
if let Ok(change) =
Change::read_and_deps(&mut change, &mut rec.updatables, txn, channel)
{
break change;
}
let mut err = SYNTAX_ERROR.as_bytes().to_vec();
err.append(&mut bytes);
with_errors = Some(err)
};
if change.changes.is_empty() {
return Ok(None);
}
change
};
if change.header.message.trim().is_empty() {
bail!("No change message")
}
debug!("saving change");
let hash = changes.save_change(&change).unwrap();
debug!("saved");
Ok(Some((
change,
rec.updatables,
Some(hash),
rec.oldest_change,
)))
}
}
// A detached signature over a change hash, stored in the change's
// unhashed TOML section under the "signatures" key.
#[derive(Debug, Serialize, Deserialize)]
struct Signature {
    // Base64-encoded public key (from `public_key_base64`).
    public_key: String,
    // Copied from the change header's timestamp.
    timestamp: chrono::DateTime<chrono::Utc>,
    // Base64-encoded signature over the change hash.
    signature: String,
}
/// Sign `hash` with one of the user's SSH keys.
///
/// First tries the SSH agent with the public keys found under
/// `key_path` (`id_ed25519.pub`, then `id_rsa.pub`); if no agent is
/// reachable or agent signing fails, falls back to loading the secret
/// keys directly from disk. Returns `(public_key_base64, signature)`
/// on success, `None` if no usable key was found.
///
/// `key_path` is used as a scratch buffer: file names are pushed and
/// popped, so it is restored on return.
async fn sign_hash(
    key_path: &mut PathBuf,
    hash: libpijul::Hash,
) -> Result<Option<(String, String)>, anyhow::Error> {
    let to_sign = hash.to_bytes();
    match thrussh_keys::agent::client::AgentClient::connect_env().await {
        Ok(agent) => {
            let mut agent = Some(agent);
            for k in &["id_ed25519.pub", "id_rsa.pub"] {
                key_path.push(k);
                if let Ok(key) = thrussh_keys::load_public_key(&key_path) {
                    debug!("key");
                    // `sign_request_base64` consumes the agent, and the
                    // returned client is discarded, so only the first
                    // loadable key is ever tried against the agent.
                    // NOTE(review): if agent signing fails for that key,
                    // later keys skip the agent entirely — confirm
                    // this is intended.
                    if let Some(a) = agent.take() {
                        debug!("authenticate future");
                        if let (_, Ok(sig)) = a.sign_request_base64(&key, &to_sign).await {
                            key_path.pop();
                            let key = key.public_key_base64();
                            return Ok(Some((key, sig)));
                        }
                    }
                }
                key_path.pop();
            }
        }
        Err(e) => {
            // No agent available; fall through to on-disk secret keys.
            error!("{:?}", e);
        }
    }
    // Fallback: sign with a secret key loaded from disk.
    for k in &["id_ed25519", "id_rsa"] {
        key_path.push(k);
        if let Some(k) = crate::remote::ssh::load_secret_key(&key_path, k) {
            key_path.pop();
            let pk = k.public_key_base64();
            return Ok(Some((pk, k.sign_detached(&to_sign)?.to_base64())));
        } else {
            key_path.pop();
        }
    }
    Ok(None)
}
/// Banner prepended to the user's text when an interactively edited
/// change fails to parse. The misplaced parenthesis of the original
/// message ("(including this\n# comment to abort)") is fixed here.
const SYNTAX_ERROR: &str = "# Syntax errors, please try again.
# Alternatively, you may delete the entire file (including this
# comment) to abort.
";
use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::path::PathBuf;
use super::{make_changelist, parse_changelist};
use anyhow::bail;
use clap::Clap;
use lazy_static::lazy_static;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::sanakirja::Txn;
use libpijul::*;
use log::debug;
use regex::Regex;
use crate::repository::Repository;
// CLI arguments of the `pijul remote` command. The `///` doc comments
// double as clap help text, so they are left untouched.
#[derive(Clap, Debug)]
pub struct Remote {
    // `None` means "list the remotes".
    #[clap(subcommand)]
    subcmd: Option<SubRemote>,
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
}
// Subcommands of `pijul remote`; currently only deletion.
#[derive(Clap, Debug)]
pub enum SubRemote {
    /// Deletes the remote
    #[clap(name = "delete")]
    Delete { remote: String },
}
impl Remote {
    /// Entry point of the `remote` command: with no subcommand, print
    /// the name of every saved remote; with `delete`, drop one by name.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let mut stdout = std::io::stdout();
        if let Some(SubRemote::Delete { remote }) = self.subcmd {
            // Deletion needs a mutable transaction; only commit when
            // something was actually removed.
            let mut txn = repo.pristine.mut_txn_begin();
            if txn.drop_named_remote(&remote)? {
                txn.commit()?;
            } else {
                writeln!(std::io::stderr(), "Remote not found: {:?}", remote)?
            }
        } else {
            // Listing is read-only.
            let txn = repo.pristine.txn_begin()?;
            for r in txn.iter_remotes("")? {
                let r = r?;
                writeln!(stdout, " {}", r.name())?;
            }
        }
        Ok(())
    }
}
// CLI arguments of `pijul push`. The `///` doc comments double as clap
// help text, so they are left untouched.
#[derive(Clap, Debug)]
pub struct Push {
    /// Path to the repository. Uses the current repository if the argument is omitted
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Push from this channel instead of the default channel
    #[clap(long = "from-channel")]
    from_channel: Option<String>,
    /// Push all changes
    #[clap(long = "all", short = 'a', conflicts_with = "changes")]
    all: bool,
    /// Do not check certificates (HTTPS remotes only, this option might be dangerous)
    #[clap(short = 'k')]
    no_cert_check: bool,
    /// Push changes only relating to these paths
    #[clap(long = "path")]
    path: Vec<String>,
    /// Push to this remote
    to: Option<String>,
    // May be "channel" or "channel:push_channel"; split against the
    // CHANNEL regex in `Push::run`.
    /// Push to this remote channel instead of the remote's default channel
    #[clap(long = "to-channel")]
    to_channel: Option<String>,
    /// Push only these changes
    #[clap(last = true)]
    changes: Vec<String>,
}
// CLI arguments of `pijul pull`. The `///` doc comments double as clap
// help text. Two help-text defects fixed: the `all` flag said "Push
// all changes" (copy-paste from `Push`), and "necessory" was a typo.
#[derive(Clap, Debug)]
pub struct Pull {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Pull into this channel instead of the current channel
    #[clap(long = "to-channel")]
    to_channel: Option<String>,
    /// Pull all changes
    #[clap(long = "all", short = 'a', conflicts_with = "changes")]
    all: bool,
    /// Do not check certificates (HTTPS remotes only, this option might be dangerous)
    #[clap(short = 'k')]
    no_cert_check: bool,
    /// Download full changes, even when not necessary
    #[clap(long = "full")]
    full: bool, // This can't be symmetric with push
    /// Only pull to these paths
    #[clap(long = "path")]
    path: Vec<String>,
    /// Pull from this remote
    from: Option<String>,
    /// Pull from this remote channel
    #[clap(long = "from-channel")]
    from_channel: Option<String>,
    /// Pull changes from the local repository, not necessarily from a channel
    #[clap(last = true)]
    changes: Vec<String>, // For local changes only, can't be symmetric.
}
lazy_static! {
    // Splits a "--to-channel" argument of the form "channel:remote".
    // Group 1 is the channel part (possibly empty), group 3 the part
    // after the colon, if any; see its use in `Push::run`.
    static ref CHANNEL: Regex = Regex::new(r#"([^:]*)(:(.*))?"#).unwrap();
}
impl Push {
    /// Entry point of `pijul push`: compute the changes the remote is
    /// missing, optionally let the user pick a subset, and upload them.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut stderr = std::io::stderr();
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let channel_name = repo.config.get_current_channel(self.from_channel.as_ref());
        // Remote: the --to argument, falling back to the configured default.
        let remote_name = if let Some(ref rem) = self.to {
            rem
        } else if let Some(ref def) = repo.config.default_remote {
            def
        } else {
            bail!("Missing remote");
        };
        // `--to-channel` may be "channel:push_channel"; an empty channel
        // part means "same as the local channel".
        let mut push_channel = None;
        let remote_channel = if let Some(ref c) = self.to_channel {
            let c = CHANNEL.captures(c).unwrap();
            push_channel = c.get(3).map(|x| x.as_str());
            let c = c.get(1).unwrap().as_str();
            if c.is_empty() {
                channel_name
            } else {
                c
            }
        } else {
            channel_name
        };
        debug!("remote_channel = {:?} {:?}", remote_channel, push_channel);
        let mut remote = repo
            .remote(
                Some(&repo.path),
                &remote_name,
                remote_channel,
                self.no_cert_check,
            )
            .await?;
        let mut txn = repo.pristine.mut_txn_begin();
        let remote_changes = remote.update_changelist(&mut txn, &self.path).await?;
        let channel = txn.open_or_create_channel(channel_name)?;
        // Resolve --path filters to graph positions, plus all of their
        // descendants in the file graph.
        let mut paths = HashSet::new();
        for path in self.path.iter() {
            let (p, ambiguous) = txn.follow_oldest_path(&repo.changes, &channel, path)?;
            if ambiguous {
                bail!("Ambiguous path: {:?}", path)
            }
            paths.insert(p);
            paths.extend(
                libpijul::fs::iter_graph_descendants(&txn, &channel.borrow().graph, p)?
                    .map(|x| x.unwrap()),
            );
        }
        // Walk the local log newest-first, collecting changes the remote
        // does not have; stop at the first state the remote knows.
        let mut to_upload = Vec::new();
        for x in txn.reverse_log(&channel.borrow(), None)? {
            let (_, (h, m)) = x?;
            if let Some((_, ref remote_changes)) = remote_changes {
                if txn.remote_has_state(remote_changes, m)? {
                    break;
                }
                let h_int = txn.get_internal(h)?.unwrap();
                if !txn.remote_has_change(&remote_changes, h)? {
                    if paths.is_empty() {
                        to_upload.push(h)
                    } else {
                        // With --path filters, only push changes that
                        // touch one of the selected files.
                        for p in paths.iter() {
                            if txn.get_touched_files(*p, Some(h_int))?.is_some() {
                                to_upload.push(h);
                                break;
                            }
                        }
                    }
                }
            } else if let crate::remote::RemoteRepo::LocalChannel(ref remote_channel) = remote {
                // Local remote: compare against the target channel of
                // the same pristine directly.
                if let Some(channel) = txn.load_channel(remote_channel)? {
                    let channel = channel.borrow();
                    let h_int = txn.get_internal(h)?.unwrap();
                    if txn.get_changeset(Txn::changes(&channel), h_int)?.is_none() {
                        if paths.is_empty() {
                            to_upload.push(h)
                        } else {
                            for p in paths.iter() {
                                if txn.get_touched_files(*p, Some(h_int))?.is_some() {
                                    to_upload.push(h);
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
        debug!("to_upload = {:?}", to_upload);
        if to_upload.is_empty() {
            writeln!(stderr, "Nothing to push")?;
            return Ok(());
        }
        // reverse_log yielded newest-first; uploads go oldest-first.
        to_upload.reverse();
        let to_upload = if !self.changes.is_empty() {
            // Explicit hash prefixes given on the command line.
            let mut u = Vec::new();
            let mut not_found = Vec::new();
            for change in self.changes.iter() {
                match txn.hash_from_prefix(change) {
                    Ok((hash, _)) => {
                        if to_upload.contains(&hash) {
                            u.push(hash);
                        }
                    }
                    Err(_) => {
                        if !not_found.contains(change) {
                            not_found.push(change.to_string());
                        }
                    }
                }
            }
            if !not_found.is_empty() {
                bail!("Changes not found: {:?}", not_found)
            }
            // Refuse a selection missing some of its dependencies.
            check_deps(&repo.changes, &to_upload, &u)?;
            u
        } else if self.all {
            to_upload
        } else {
            // Interactive selection: re-edit until the selection is
            // closed under dependencies.
            let mut o = make_changelist(&repo.changes, &to_upload, "push")?;
            let remote_changes = remote_changes.map(|x| x.1);
            loop {
                let d = parse_changelist(&edit::edit_bytes(&o[..])?);
                let comp = complete_deps(&txn, &remote_changes, &repo.changes, &to_upload, &d)?;
                if comp.len() == d.len() {
                    break comp;
                }
                o = make_changelist(&repo.changes, &comp, "push")?
            }
        };
        debug!("to_upload = {:?}", to_upload);
        if to_upload.is_empty() {
            writeln!(stderr, "Nothing to push")?;
            return Ok(());
        }
        remote
            .upload_changes(&mut txn, repo.changes_dir.clone(), push_channel, &to_upload)
            .await?;
        txn.commit()?;
        remote.finish().await?;
        Ok(())
    }
}
impl Pull {
    /// Entry point of `pijul pull`: compute (or take from the command
    /// line) the changes to download, apply them, and output the
    /// touched files to the working copy.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let channel_name = repo.config.get_current_channel(self.to_channel.as_ref());
        let mut channel = txn.open_or_create_channel(channel_name)?;
        debug!("{:?}", repo.config);
        // Remote: the --from argument, falling back to the configured default.
        let remote_name = if let Some(ref rem) = self.from {
            rem
        } else if let Some(ref def) = repo.config.default_remote {
            def
        } else {
            bail!("Missing remote")
        };
        let from_channel = if let Some(ref c) = self.from_channel {
            c
        } else {
            crate::DEFAULT_CHANNEL
        };
        let mut remote = repo
            .remote(
                Some(&repo.path),
                &remote_name,
                from_channel,
                self.no_cert_check,
            )
            .await?;
        debug!("downloading");
        // Positions corresponding to the --path filters; used at the end
        // to restrict output to the touched files.
        let mut inodes: HashSet<libpijul::pristine::Position<libpijul::Hash>> = HashSet::new();
        let mut to_download = if self.changes.is_empty() {
            // No explicit changes: diff our channel against the remote's
            // changelist.
            let remote_changes = remote.update_changelist(&mut txn, &self.path).await?;
            debug!("changelist done");
            let mut to_download = Vec::new();
            if let Some((inodes_, remote_changes)) = remote_changes {
                inodes.extend(inodes_.into_iter());
                for x in txn.iter_remote(&remote_changes.borrow().remote, 0)? {
                    let (h, m) = x?.1;
                    // Stop at the first remote state we already have.
                    if txn.channel_has_state(&channel.borrow(), m)?.is_some() {
                        break;
                    } else if txn.get_revchanges(&channel, h)?.is_none() {
                        to_download.push(h)
                    }
                }
            } else if let crate::remote::RemoteRepo::LocalChannel(ref remote_channel) = remote {
                // Local remote: diff two channels of the same pristine.
                let mut inodes_ = HashSet::new();
                for path in self.path.iter() {
                    let (p, ambiguous) = txn.follow_oldest_path(&repo.changes, &channel, path)?;
                    if ambiguous {
                        bail!("Ambiguous path: {:?}", path)
                    }
                    inodes_.insert(p);
                    inodes_.extend(
                        libpijul::fs::iter_graph_descendants(&txn, &channel.borrow().graph, p)?
                            .map(|x| x.unwrap()),
                    );
                }
                // Translate internal positions to external (hash-based)
                // ones for the `inodes` filter.
                inodes.extend(inodes_.iter().map(|x| libpijul::pristine::Position {
                    change: txn.get_external(x.change).unwrap().unwrap(),
                    pos: x.pos,
                }));
                if let Some(remote_channel) = txn.load_channel(remote_channel)? {
                    let remote_channel = remote_channel.borrow();
                    for x in txn.reverse_log(&remote_channel, None)? {
                        let (h, m) = x?.1;
                        if txn.channel_has_state(&channel.borrow(), m)?.is_some() {
                            break;
                        }
                        let h_int = txn.get_internal(h)?.unwrap();
                        if txn
                            .get_changeset(Txn::changes(&channel.borrow()), h_int)?
                            .is_none()
                        {
                            // Keep the change if there is no path filter,
                            // or if it touches one of the filtered inodes.
                            if inodes_.is_empty()
                                || inodes_.iter().any(|&inode| {
                                    txn.get_rev_touched_files(h_int, Some(inode))
                                        .unwrap()
                                        .is_some()
                                })
                            {
                                to_download.push(h)
                            }
                        }
                    }
                }
            }
            to_download
        } else {
            // Explicit hash prefixes given on the command line.
            let r: Result<Vec<libpijul::Hash>, anyhow::Error> = self
                .changes
                .iter()
                .map(|h| Ok(txn.hash_from_prefix(h)?.0))
                .collect();
            r?
        };
        debug!("recording");
        // Record any pending working-copy modifications so they can be
        // unrecorded again after the pull (see the end of this function).
        let hash = super::pending(&mut txn, &mut channel, &mut repo)?;
        let to_download = remote
            .pull(
                &mut repo,
                &mut txn,
                &mut channel,
                &mut to_download,
                &inodes,
                self.all,
            )
            .await?;
        if to_download.is_empty() {
            let mut stderr = std::io::stderr();
            writeln!(stderr, "Nothing to pull")?;
            return Ok(());
        }
        if !self.all {
            // Interactive selection: re-edit until the selection is
            // closed under dependencies, then apply the chosen changes.
            let mut o = make_changelist(&repo.changes, &to_download, "pull")?;
            let d = loop {
                let d = parse_changelist(&edit::edit_bytes(&o[..])?);
                let comp = complete_deps(&txn, &None, &repo.changes, &to_download, &d)?;
                if comp.len() == d.len() {
                    break comp;
                }
                o = make_changelist(&repo.changes, &comp, "pull")?
            };
            let mut ws = libpijul::ApplyWorkspace::new();
            debug!("to_download = {:?}", to_download);
            let progress = indicatif::ProgressBar::new(d.len() as u64);
            progress.set_style(
                indicatif::ProgressStyle::default_spinner()
                    .template(" Applying changes {wide_bar} {pos}/{len}"),
            );
            for h in d.iter() {
                // Recursive apply: dependencies are applied first.
                txn.apply_change_rec_ws(&repo.changes, &mut channel, *h, &mut ws)?;
                progress.inc(1);
            }
            progress.set_style(
                indicatif::ProgressStyle::default_bar()
                    .template("✓ Applying changes {wide_bar} {pos}/{len}"),
            );
            progress.finish();
        }
        debug!("completing changes");
        // After a partial pull, download the full contents where needed.
        remote
            .complete_changes(&repo, &txn, &mut channel, &to_download, self.full)
            .await?;
        remote.finish().await?;
        let progress = indicatif::ProgressBar::new_spinner();
        progress.set_style(
            indicatif::ProgressStyle::default_spinner().template("{spinner} Outputting repository"),
        );
        progress.enable_steady_tick(100);
        debug!("inodes = {:?}", inodes);
        // Collect the inodes touched by the downloaded changes, filtered
        // by the --path inodes when any were given.
        let mut touched = HashSet::new();
        for d in to_download.iter() {
            if let Some(int) = txn.get_internal(*d)? {
                for inode in txn.iter_rev_touched(int)? {
                    let (int_, inode) = inode?;
                    // Range scan: skip keys before `int`, stop after it.
                    if int_ < int {
                        continue;
                    } else if int_ > int {
                        break;
                    }
                    let ext = libpijul::pristine::Position {
                        change: txn.get_external(inode.change)?.unwrap(),
                        pos: inode.pos,
                    };
                    if inodes.is_empty() || inodes.contains(&ext) {
                        touched.insert(inode);
                    }
                }
            }
        }
        // Output only the touched paths to the working copy.
        let mut done = HashMap::new();
        for i in touched {
            let (path, _) =
                libpijul::fs::find_path(&repo.changes, &txn, &channel.borrow(), false, i)?;
            debug!("path = {:?}", path);
            txn.output_repository_no_pending(
                &mut repo.working_copy,
                &repo.changes,
                &mut channel,
                &mut done,
                &path,
                true,
                None,
            )?;
        }
        progress.set_style(
            indicatif::ProgressStyle::default_spinner().template("✓ Outputting repository"),
        );
        progress.finish();
        // Undo the temporary "pending" change recorded above.
        if let Some(h) = hash {
            txn.unrecord(&repo.changes, &mut channel, &h)?;
            repo.changes.del_change(&h)?;
        }
        txn.commit()?;
        Ok(())
    }
}
/// Close the user's selection `now` under dependencies.
///
/// `original` is the full list of candidate changes; `now` is the
/// subset the user kept in the editor. Any dependency of a selected
/// change that is also a candidate (and, when `remote_changes` is
/// known, not already present on the remote) is added back. The caller
/// loops until the returned list stops growing.
fn complete_deps<T: TxnT, C: ChangeStore>(
    txn: &T,
    remote_changes: &Option<libpijul::RemoteRef<T>>,
    c: &C,
    original: &[libpijul::Hash],
    now: &[libpijul::Hash],
) -> Result<Vec<libpijul::Hash>, anyhow::Error> {
    let original_: HashSet<_> = original.iter().collect();
    let mut now_ = HashSet::with_capacity(original.len());
    let mut result = Vec::with_capacity(original.len());
    for &h in now {
        now_.insert(h);
        result.push(h);
    }
    // Depth-first over the selected changes, pulling missing deps in.
    let mut stack = now.to_vec();
    stack.reverse();
    while let Some(n) = stack.pop() {
        // check that all of `now`'s deps are in now or not in original
        for d in c.get_dependencies(&n)? {
            if let Some(ref rem) = remote_changes {
                // The remote already has this dependency; skip it.
                if txn.remote_has_change(rem, d)? {
                    continue;
                }
            }
            if original_.get(&d).is_some() && now_.get(&d).is_none() {
                result.push(d);
                now_.insert(d);
                stack.push(d);
            }
        }
        if now_.insert(n) {
            result.push(n)
        }
    }
    Ok(result)
}
/// Check that, for every change in `now`, each of its dependencies is
/// either also in `now` or was not among the `original` candidates
/// (meaning the other side already has it). Errors on the first
/// missing dependency.
fn check_deps<C: ChangeStore>(
    c: &C,
    original: &[libpijul::Hash],
    now: &[libpijul::Hash],
) -> Result<(), anyhow::Error> {
    let original_: HashSet<_> = original.iter().collect();
    let now_: HashSet<_> = now.iter().collect();
    for n in now {
        // check that all of `now`'s deps are in now or not in original
        for d in c.get_dependencies(n)? {
            if original_.get(&d).is_some() && now_.get(&d).is_none() {
                // Report the dependency that is missing, not the change
                // that requires it (the original reported `n` here,
                // which made the message misleading).
                bail!("Missing dependency: {:?}", d)
            }
        }
    }
    Ok(())
}
use std::collections::{HashMap, HashSet};
use std::io::BufWriter;
use std::io::{BufRead, Read, Write};
use std::path::PathBuf;
use crate::repository::Repository;
use anyhow::bail;
use byteorder::{BigEndian, WriteBytesExt};
use clap::Clap;
use lazy_static::lazy_static;
use libpijul::*;
use log::{debug, error};
use regex::Regex;
/// This command is not meant to be run by the user,
/// instead it is called over SSH
#[derive(Clap, Debug)]
pub struct Protocol {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Use this protocol version
    #[clap(long = "version")]
    // NOTE(review): parsed but never read in this file — presumably
    // validated by the invoking side; confirm.
    version: usize,
}
lazy_static! {
    // One regex per wire-protocol command, matched against each input
    // line in `Protocol::run`:
    // `state <channel> [n]` — report the state at position n (or head).
    static ref STATE: Regex = Regex::new(r#"state\s+(\S+)(\s+([0-9]+)?)\s+"#).unwrap();
    // `changelist <channel> <from> [paths…]` — log since position `from`.
    static ref CHANGELIST: Regex = Regex::new(r#"changelist\s+(\S+)\s+([0-9]+)(.*)\s+"#).unwrap();
    // Double-quoted paths inside a changelist command ('\"' escapes).
    static ref CHANGELIST_PATHS: Regex = Regex::new(r#""(((\\")|[^"])+)""#).unwrap();
    // `change <hash>` / `partial <hash>` — stream a change file.
    static ref CHANGE: Regex = Regex::new(r#"((change)|(partial))\s+([^ ]*)\s+"#).unwrap();
    // `apply <channel> <hash> <len>` — receive and apply a change.
    static ref APPLY: Regex = Regex::new(r#"apply\s+(\S+)\s+([^ ]*) ([0-9]+)\s+"#).unwrap();
    // `channel <channel>` — dump a whole channel.
    static ref CHANNEL: Regex = Regex::new(r#"channel\s+(\S+)\s+"#).unwrap();
    // `archive <channel> [state extras…] [ :prefix]` — build a tarball.
    static ref ARCHIVE: Regex =
        Regex::new(r#"archive\s+(\S+)\s*(( ([^:]+))*)( :(.*))?\n"#).unwrap();
}
/// Look up a channel by name, turning a missing channel into an error.
fn load_channel<T: MutTxnTExt>(txn: &T, name: &str) -> Result<ChannelRef<T>, anyhow::Error> {
    match txn.load_channel(name)? {
        Some(channel) => Ok(channel),
        None => bail!("No such channel: {:?}", name),
    }
}
// Threshold (1 MiB) above which a `partial` request sends a change
// without its contents (see the CHANGE handler in `Protocol::run`).
const PARTIAL_CHANGE_SIZE: u64 = 1 << 20;
impl Protocol {
    /// Serve the wire protocol on stdin/stdout: read one command per
    /// line, answer on stdout, until EOF. Channels modified by `apply`
    /// are output to the working copy and committed at the end.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut ws = libpijul::ApplyWorkspace::new();
        let mut buf = String::new();
        // Reusable binary buffer for streaming change files.
        let mut buf2 = vec![0; 4096 * 10];
        let s = std::io::stdin();
        let mut s = s.lock();
        let o = std::io::stdout();
        let mut o = BufWriter::new(o.lock());
        // Channels that received changes via `apply`, keyed by name.
        let mut applied = HashMap::new();
        debug!("reading");
        while s.read_line(&mut buf)? > 0 {
            debug!("{:?}", buf);
            if let Some(cap) = STATE.captures(&buf) {
                // `state <channel> [n]`: print "<n> <merkle>" at
                // position n (or at the head), or "-" when absent.
                let channel = load_channel(&txn, &cap[1])?;
                let init = if let Some(u) = cap.get(3) {
                    u.as_str().parse().ok()
                } else {
                    None
                };
                if let Some(pos) = init {
                    for x in txn.log(&channel.borrow(), pos)? {
                        let (n, (_, m)) = x?;
                        match n.cmp(&pos) {
                            std::cmp::Ordering::Less => continue,
                            std::cmp::Ordering::Greater => {
                                // Position `pos` does not exist.
                                writeln!(o, "-")?;
                                break;
                            }
                            std::cmp::Ordering::Equal => {
                                writeln!(o, "{} {}", n, m.to_base32())?;
                                break;
                            }
                        }
                    }
                } else if let Some(x) = txn.reverse_log(&channel.borrow(), None)?.next() {
                    let (n, (_, m)) = x?;
                    writeln!(o, "{} {}", n, m.to_base32())?
                } else {
                    writeln!(o, "-")?;
                }
                o.flush()?;
            } else if let Some(cap) = CHANGELIST.captures(&buf) {
                // `changelist <channel> <from> [paths]`: list changes
                // since position `from`, filtered by the quoted paths.
                let channel = load_channel(&txn, &cap[1])?;
                let from: u64 = cap[2].parse().unwrap();
                let mut paths = HashSet::new();
                debug!("cap[3] = {:?}", &cap[3]);
                for r in CHANGELIST_PATHS.captures_iter(&cap[3]) {
                    let s: String = r[1].replace("\\\"", "\"");
                    if let Ok((p, ambiguous)) = txn.follow_oldest_path(&repo.changes, &channel, &s)
                    {
                        if ambiguous {
                            bail!("Ambiguous path")
                        }
                        let h = txn.get_external(p.change)?.unwrap();
                        writeln!(o, "{}.{}", h.to_base32(), p.pos.0)?;
                        paths.insert(p);
                        paths.extend(
                            libpijul::fs::iter_graph_descendants(&txn, &channel.borrow().graph, p)?
                                .map(|x| x.unwrap()),
                        );
                    } else {
                        debug!("protocol line: {:?}", buf);
                        bail!("Protocol error")
                    }
                }
                debug!("paths = {:?}", paths);
                for x in txn.log(&channel.borrow(), from)? {
                    let (n, (h, m)) = x?;
                    let h_int = txn.get_internal(h)?.unwrap();
                    if paths.is_empty()
                        || paths.iter().any(|x| {
                            x.change == h_int
                                || txn.get_touched_files(*x, Some(h_int)).unwrap().is_some()
                        })
                    {
                        writeln!(o, "{}.{}.{}", n, h.to_base32(), m.to_base32())?
                    }
                }
                // An empty line terminates the changelist.
                writeln!(o)?;
                o.flush()?;
            } else if let Some(cap) = CHANGE.captures(&buf) {
                // `change <hash>` / `partial <hash>`: stream the change
                // file, prefixed with its length as a big-endian u64.
                // `partial` omits the contents of large changes.
                let h_ = &cap[4];
                let h = if let Some(h) = Hash::from_base32(h_.as_bytes()) {
                    h
                } else {
                    debug!("protocol error: {:?}", buf);
                    bail!("Protocol error")
                };
                libpijul::changestore::filesystem::push_filename(&mut repo.changes_dir, &h);
                debug!("repo = {:?}", repo.changes_dir);
                let mut f = std::fs::File::open(&repo.changes_dir)?;
                let size = std::fs::metadata(&repo.changes_dir)?.len();
                let size = if &cap[1] == "change" || size <= PARTIAL_CHANGE_SIZE {
                    size
                } else {
                    libpijul::change::Change::size_no_contents(&mut f)?
                };
                o.write_u64::<BigEndian>(size)?;
                // Copy exactly `size` bytes, shrinking the buffer for
                // the final partial block.
                let mut size = size as usize;
                while size > 0 {
                    if size < buf2.len() {
                        buf2.truncate(size as usize);
                    }
                    let n = f.read(&mut buf2[..])?;
                    if n == 0 {
                        break;
                    }
                    size -= n;
                    o.write_all(&buf2[..n])?;
                }
                o.flush()?;
                libpijul::changestore::filesystem::pop_filename(&mut repo.changes_dir);
            } else if let Some(cap) = APPLY.captures(&buf) {
                // `apply <channel> <hash> <len>`: read `len` bytes of
                // change data from stdin, store, validate and apply it.
                let h = if let Some(h) = Hash::from_base32(cap[2].as_bytes()) {
                    h
                } else {
                    debug!("protocol error {:?}", buf);
                    bail!("Protocol error");
                };
                let mut path = repo.changes_dir.clone();
                libpijul::changestore::filesystem::push_filename(&mut path, &h);
                std::fs::create_dir_all(path.parent().unwrap())?;
                let size: usize = cap[3].parse().unwrap();
                buf2.resize(size, 0);
                s.read_exact(&mut buf2)?;
                std::fs::write(&path, &buf2)?;
                // Deserializing checks the file against the stated hash.
                libpijul::change::Change::deserialize(&path.to_string_lossy(), Some(&h))?;
                let mut channel = load_channel(&txn, &cap[1])?;
                txn.apply_change_ws(&repo.changes, &mut channel, h, &mut ws)?;
                // Remember the channel so its working copy is output at
                // the end of the session.
                applied.insert(cap[1].to_string(), channel);
            } else if let Some(cap) = CHANNEL.captures(&buf) {
                // `channel <channel>`: dump the whole channel.
                let channel = load_channel(&txn, &cap[1])?;
                let channel = channel.borrow();
                for d in libpijul::pristine::channel_dump::dump_channel(&txn, channel)? {
                    o.write_all(&d?)?;
                }
                o.flush()?;
            } else if let Some(cap) = ARCHIVE.captures(&buf) {
                // `archive <channel> [state extras…] [ :prefix]`: build
                // a tarball, optionally at a given state.
                let mut w = Vec::new();
                let mut tarball = libpijul::output::Tarball::new(
                    &mut w,
                    cap.get(6).map(|x| x.as_str().to_string()),
                );
                let channel = load_channel(&txn, &cap[1])?;
                let conflicts = if let Some(caps) = cap.get(2) {
                    debug!("caps = {:?}", caps.as_str());
                    let mut hashes = caps.as_str().split(' ').filter(|x| !x.is_empty());
                    let state: libpijul::Merkle = hashes.next().unwrap().parse().unwrap();
                    let extra: Vec<libpijul::Hash> = hashes.map(|x| x.parse().unwrap()).collect();
                    debug!("state = {:?}, extra = {:?}", state, extra);
                    if txn.current_state(&channel.borrow())? == state && extra.is_empty() {
                        txn.archive(&repo.changes, &channel, &mut tarball)?
                    } else {
                        // Archiving at a different state: work on a
                        // throwaway random fork so the channel itself
                        // is left untouched.
                        use rand::Rng;
                        let fork_name: String = rand::thread_rng()
                            .sample_iter(&rand::distributions::Alphanumeric)
                            .take(30)
                            .map(|x| x as char)
                            .collect();
                        let mut fork = txn.fork(&channel, &fork_name)?;
                        let conflicts = txn.archive_with_state(
                            &repo.changes,
                            &mut fork,
                            state,
                            &extra,
                            &mut tarball,
                        )?;
                        txn.drop_channel(&fork_name)?;
                        conflicts
                    }
                } else {
                    txn.archive(&repo.changes, &channel, &mut tarball)?
                };
                std::mem::drop(tarball);
                // Reply: tarball length, conflict count, tarball bytes.
                let mut o = std::io::stdout();
                o.write_u64::<BigEndian>(w.len() as u64)?;
                o.write_u64::<BigEndian>(conflicts.len() as u64)?;
                o.write_all(&w)?;
                o.flush()?;
            } else {
                error!("unmatched")
            }
            buf.clear();
        }
        // Output the working copies of the channels touched by `apply`,
        // then commit once.
        let applied_nonempty = !applied.is_empty();
        for (_, mut channel) in applied {
            txn.output_repository_no_pending(
                &mut repo.working_copy,
                &repo.changes,
                &mut channel,
                &mut HashMap::new(),
                "",
                true,
                None,
            )?;
        }
        if applied_nonempty {
            txn.commit()?;
        }
        Ok(())
    }
}
// Subcommand modules of the CLI, one per command, each re-exporting
// its entry-point type(s).
mod init;
pub use init::Init;
mod clone;
pub use clone::Clone;
mod pushpull;
pub use pushpull::*;
mod log;
pub use self::log::Log;
mod record;
pub use record::Record;
mod diff;
pub use diff::Diff;
mod change;
pub use change::Change;
mod protocol;
pub use protocol::Protocol;
// Git import is an optional feature.
#[cfg(feature = "git")]
mod git;
#[cfg(feature = "git")]
pub use git::Git;
mod channel;
pub use channel::*;
mod reset;
pub use reset::*;
mod fork;
pub use fork::*;
mod unrecord;
pub use unrecord::*;
mod file_operations;
pub use file_operations::*;
mod apply;
pub use apply::*;
mod archive;
pub use archive::*;
mod credit;
pub use credit::*;
// Debug helpers only exist in debug builds.
#[cfg(debug_assertions)]
mod debug;
#[cfg(debug_assertions)]
pub use debug::*;
/// Record the pending change (i.e. any unrecorded modifications in
/// the working copy), returning its hash.
///
/// The change gets a default (empty) header; callers such as `Pull`
/// unrecord and delete it again once they are done.
fn pending<T: libpijul::MutTxnTExt + libpijul::TxnT>(
    txn: &mut T,
    channel: &mut libpijul::ChannelRef<T>,
    repo: &mut crate::repository::Repository,
) -> Result<Option<libpijul::Hash>, anyhow::Error> {
    use libpijul::changestore::ChangeStore;
    // Record the entire working copy against the channel.
    let recorded = txn.record_all(
        libpijul::Algorithm::default(),
        channel,
        &mut repo.working_copy,
        &repo.changes,
        "",
    )?;
    if recorded.actions.is_empty() {
        // Working copy is clean: nothing pending.
        return Ok(None);
    }
    let actions = recorded
        .actions
        .into_iter()
        .map(|rec| rec.globalize(txn).unwrap())
        .collect();
    let mut pending_change = libpijul::change::Change::make_change(
        txn,
        channel,
        actions,
        recorded.contents,
        libpijul::change::ChangeHeader::default(),
        Vec::new(),
    )?;
    // Compute dependencies from the channel's current contents.
    let (dependencies, extra_known) =
        libpijul::change::dependencies(txn, &channel.borrow(), pending_change.changes.iter())?;
    pending_change.dependencies = dependencies;
    pending_change.extra_known = extra_known;
    let hash = repo.changes.save_change(&pending_change).unwrap();
    txn.apply_local_change(channel, &pending_change, hash, &recorded.updatables)?;
    Ok(Some(hash))
}
#[cfg(unix)]
/// Pipe our output through `less` when a recent enough version is
/// installed; silently do nothing otherwise.
fn pager() {
    // Ask `less` for its version; if it isn't installed, skip paging.
    let less = match std::process::Command::new("less")
        .args(&["--version"])
        .output()
    {
        Ok(output) => output,
        Err(_) => return,
    };
    let regex = regex::bytes::Regex::new("less ([0-9]+)").unwrap();
    let caps = match regex.captures(&less.stdout) {
        Some(caps) => caps,
        None => return,
    };
    let version: usize = std::str::from_utf8(&caps[1]).unwrap().parse().unwrap();
    // Versions >= 530 get the flags we prefer (-R raw colours,
    // -F quit if one screen); older ones get the default pager.
    if version >= 530 {
        pager::Pager::with_pager("less -RF").setup()
    } else {
        pager::Pager::new().setup()
    }
}
// No pager support outside Unix: a deliberate no-op.
#[cfg(not(unix))]
fn pager() {}
/// Make a "changelist", i.e. a list of patches that can be edited in
/// a text editor.
///
/// `verb` ("push"/"pull") is interpolated into the instructions at the
/// top. Writes into a `Vec` cannot fail, hence the `unwrap`s.
fn make_changelist<S: libpijul::changestore::ChangeStore>(
    changes: &S,
    pullable: &[libpijul::Hash],
    verb: &str,
) -> Result<Vec<u8>, anyhow::Error> {
    use libpijul::Base32;
    use std::io::Write;
    let mut v = Vec::new();
    // TODO: This message should probably be customizable
    writeln!(
        v,
        "# Please select the changes to {}. The lines that contain just a
# valid hash, and no other character (except possibly a newline), will
# be {}ed.\n",
        verb, verb,
    )
    .unwrap();
    let mut first_p = true;
    for p in pullable {
        // Blank line between entries (was `writeln!(v, "")`).
        if !first_p {
            writeln!(v).unwrap();
        }
        first_p = false;
        writeln!(v, "{}\n", p.to_base32()).unwrap();
        // `p` is already a reference; the original passed `&p`
        // (a double reference) and relied on auto-deref.
        let deps = changes.get_dependencies(p)?;
        if !deps.is_empty() {
            write!(v, " Dependencies:").unwrap();
            for d in deps {
                write!(v, " {}", d.to_base32()).unwrap();
            }
            writeln!(v).unwrap();
        }
        let change = changes.get_header(p)?;
        write!(v, " Author: [").unwrap();
        let mut first = true;
        for a in change.authors.iter() {
            if !first {
                write!(v, ", ").unwrap();
            }
            first = false;
            write!(v, "{}", a).unwrap();
        }
        writeln!(v, "]").unwrap();
        writeln!(v, " Date: {}\n", change.timestamp).unwrap();
        for l in change.message.lines() {
            writeln!(v, " {}", l).unwrap();
        }
        if let Some(desc) = change.description {
            writeln!(v).unwrap();
            for l in desc.lines() {
                writeln!(v, " {}", l).unwrap();
            }
        }
    }
    Ok(v)
}
/// Parses a list of hashes from a slice of bytes.
/// Everything that is not a line consisting of a
/// valid hash and nothing else will be ignored.
fn parse_changelist(o: &[u8]) -> Vec<libpijul::Hash> {
    use libpijul::Base32;
    // Invalid UTF-8 yields an empty selection.
    std::str::from_utf8(o)
        .map(|text| {
            text.lines()
                .filter_map(|l| libpijul::Hash::from_base32(l.as_bytes()))
                .collect()
        })
        .unwrap_or_else(|_| Vec::new())
}
use std::io::Write;
use std::path::PathBuf;
use anyhow::bail;
use clap::Clap;
use libpijul::changestore::*;
use libpijul::{Base32, TxnT, TxnTExt};
use crate::repository::Repository;
// CLI arguments of `pijul log`. The `///` doc comments double as clap
// help text, so they are left untouched.
#[derive(Clap, Debug)]
pub struct Log {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Show logs for this channel instead of the current channel
    #[clap(long = "channel")]
    channel: Option<String>,
    /// Only show the change hashes
    #[clap(long = "hash-only")]
    hash_only: bool,
    /// Include state identifiers in the output
    #[clap(long = "state")]
    states: bool,
    /// Include full change description in the output
    #[clap(long = "description")]
    descriptions: bool,
}
impl Log {
    /// Entry point of `pijul log`: print the channel's changes, newest
    /// first, either as bare hashes or with full headers.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = match txn.load_channel(channel_name)? {
            Some(channel) => channel,
            None => bail!("No such channel: {:?}", channel_name),
        };
        super::pager();
        let changes = repo.changes;
        let mut stdout = std::io::stdout();
        if self.hash_only {
            // Terse mode: one base32 hash per line.
            for entry in txn.reverse_log(&channel.borrow(), None)? {
                let (hash, _) = entry?.1;
                writeln!(stdout, "{}", hash.to_base32())?
            }
            return Ok(());
        }
        let states = self.states;
        for entry in txn.reverse_log(&channel.borrow(), None)? {
            let (hash, merkle) = entry?.1;
            let header = changes.get_header(&hash)?;
            writeln!(stdout, "Change {}", hash.to_base32())?;
            writeln!(stdout, "Author: {:?}", header.authors)?;
            writeln!(stdout, "Date: {}", header.timestamp)?;
            if states {
                writeln!(stdout, "State: {}", merkle.to_base32())?;
            }
            writeln!(stdout, "\n {}\n", header.message)?;
            if self.descriptions {
                if let Some(ref descr) = header.description {
                    writeln!(stdout, "\n {}\n", descr)?;
                }
            }
        }
        Ok(())
    }
}
use std::path::PathBuf;
use clap::Clap;
use libpijul::MutTxnT;
use crate::repository::*;
// CLI arguments of `pijul init`. The `///` doc comments double as clap
// help text, so they are left untouched.
#[derive(Clap, Debug)]
pub struct Init {
    /// Set the name of the current channel (defaults to "main").
    #[clap(long = "channel")]
    channel: Option<String>,
    /// Path where the repository should be initalized
    path: Option<PathBuf>,
}
impl Init {
    /// Create a new repository at `path` (or the current directory),
    /// create its initial channel, and save the configuration.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::init(self.path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let channel_name = match self.channel {
            Some(name) => name,
            None => crate::DEFAULT_CHANNEL.to_string(),
        };
        txn.open_or_create_channel(&channel_name)?;
        repo.config.current_channel = Some(channel_name);
        repo.save_config()?;
        txn.commit()?;
        Ok(())
    }
}
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use anyhow::bail;
use canonical_path::CanonicalPathBuf;
use clap::{ArgSettings, Clap};
use libpijul::pristine::*;
use libpijul::*;
use log::{debug, error, info, trace};
use crate::repository::*;
// CLI arguments of `pijul git` (Git history import). The `///` doc
// comments double as clap help text, so they are left untouched.
#[derive(Clap, Debug)]
pub struct Git {
    /// Process this path instead of the current directory, creating a Pijul repository if necessary.
    pub repo_path: Option<PathBuf>,
    /// Time the import, and output values to this file.
    #[clap(long = "stats", setting = ArgSettings::Hidden)]
    stats: Option<PathBuf>,
    /// Check only the first n commits processed.
    #[clap(default_value = "0", setting = ArgSettings::Hidden)]
    check: usize,
}
// Mutable state threaded through the Git import.
struct OpenRepo {
    // The Pijul repository being written to.
    repo: Repository,
    // Stats output file when --stats was given (creation errors are
    // silently dropped; see `Git::run`).
    stats: Option<std::fs::File>,
    // Number of commits processed so far.
    n: usize,
    // Check only the first `check` commits (0 from the CLI default).
    check: usize,
    // The Git commit currently being imported, if any.
    current_commit: Option<git2::Oid>,
}
impl Git {
    /// Import the history of the Git repository at `repo_path` into a
    /// Pijul repository, creating the latter if it does not exist.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = if let Ok(repo) = Repository::find_root(self.repo_path.clone()) {
            repo
        } else {
            Repository::init(self.repo_path.clone())?
        };
        let git = git2::Repository::open(&repo.path)?;
        let head = git.head()?;
        info!("Loading history…");
        let oid = head.target().unwrap();
        // A small sanakirja env under .pijul/git remembers which commits
        // were imported in previous runs (commit id -> Pijul state).
        let mut path_git = repo.path.join(libpijul::DOT_DIR);
        path_git.push("git");
        std::fs::create_dir_all(&path_git)?;
        let mut env_git = ::sanakirja::Env::new(&path_git, 1 << 15)?;
        let dag = Dag::dfs(&git, oid, &mut env_git)?;
        trace!(target: "dag", "{:?}", dag);
        info!("Done");
        let mut pristine = repo.path.join(DOT_DIR);
        pristine.push(PRISTINE_DIR);
        std::fs::create_dir_all(&pristine)?;
        let mut repo = OpenRepo {
            repo,
            stats: self.stats.and_then(|f| std::fs::File::create(f).ok()),
            n: 0,
            check: self.check,
            current_commit: None,
        };
        import(&git, &mut env_git, &mut repo, &dag)?;
        Ok(())
    }
}
// The commit graph of a Git repository, with edges in both directions.
#[derive(Debug)]
struct Dag {
    // Commit -> the commits that list it as a parent.
    children: BTreeMap<git2::Oid, Vec<git2::Oid>>,
    // Commit -> its parents.
    parents: BTreeMap<git2::Oid, Vec<git2::Oid>>,
    // Starting points of the import: parentless commits (`None`), or
    // commits already imported in a previous run (`Some(state)`).
    root: Vec<(git2::Oid, Option<libpijul::Merkle>)>,
}
impl Dag {
    /// Load a Git repository in memory. The main reason this is
    /// needed is to compute the *backward* relations from a commit to
    /// its parents.
    ///
    /// Also opens (creating it if needed) the on-disk table mapping
    /// already-imported commits to the Pijul state they produced, so
    /// that a previous import can be resumed: the traversal stops at
    /// any commit found in that table and records it as a root
    /// carrying its saved state.
    fn dfs(
        git: &git2::Repository,
        oid: git2::Oid,
        env_git: &mut ::sanakirja::Env<::sanakirja::Exclusive>,
    ) -> Result<Self, anyhow::Error> {
        use ::sanakirja::Transaction;
        let mut stack = vec![git.find_commit(oid)?];
        let mut oids_set = BTreeSet::new();
        let mut dag = Dag {
            children: BTreeMap::new(),
            parents: BTreeMap::new(),
            root: Vec::new(),
        };
        // `git2::Oid` is `Copy`; no clone needed.
        oids_set.insert(oid);
        let mut txn_git = ::sanakirja::Env::mut_txn_begin(env_git)?;
        let db: ::sanakirja::Db<git2::Oid, libpijul::Merkle> = if let Some(db) = txn_git.root(0) {
            db
        } else {
            txn_git.create_db()?
        };
        // Load the previously saved commit -> Merkle state map.
        let mut state = BTreeMap::new();
        for x in txn_git.iter(&db, None)? {
            let (commit, merk) = x?;
            state.insert(commit, merk);
        }
        debug!("state = {:?}", state);
        while let Some(commit) = stack.pop() {
            // Already imported in a previous run: treat as a root with
            // its known state, and don't traverse past it.
            if let Some(state) = state.get(&commit.id()) {
                dag.root.push((commit.id(), Some(*state)));
                continue;
            }
            let mut has_parents = false;
            for p in commit.parents() {
                trace!("parent {:?}", p);
                dag.children
                    .entry(p.id())
                    .or_insert_with(Vec::new)
                    .push(commit.id());
                dag.parents
                    .entry(commit.id())
                    .or_insert_with(Vec::new)
                    .push(p.id());
                // Only visit each commit once.
                if oids_set.insert(p.id()) {
                    stack.push(p);
                }
                has_parents = true
            }
            // A parentless commit is a genuine root of the history.
            if !has_parents {
                dag.root.push((commit.id(), None))
            }
        }
        txn_git.set_root(0, db);
        ::sanakirja::Commit::commit(txn_git)?;
        Ok(dag)
    }
    /// Decrement the pending-children counter of each parent of `oid`.
    /// A parent whose counter reaches zero has had all of its children
    /// imported, so its temporary Pijul channel can be dropped.
    fn collect_dead_parents<T: MutTxnTExt>(
        &self,
        oid: &git2::Oid,
        todo: &mut Todo,
        txn: &mut T,
    ) -> Result<(), anyhow::Error> {
        if let Some(parents) = self.parents.get(oid) {
            debug!("parents {:?}", parents);
            for p in parents {
                let rc = todo.refs.get_mut(p).unwrap();
                *rc -= 1;
                if *rc == 0 {
                    let p_name = format!("{}", p);
                    debug!("dropping channel {:?}", p_name);
                    txn.drop_channel(&p_name)?;
                }
            }
        }
        Ok(())
    }
    /// Schedule the children of `oid` for the next wave, counting one
    /// pending reference on `oid` per child.
    fn insert_children_in_todo(&self, oid: &git2::Oid, todo: &mut Todo) {
        if let Some(c) = self.children.get(&oid) {
            for child in c {
                // Log the individual child (previously this logged the
                // whole child list on every iteration).
                debug!("child = {:?}", child);
                if todo.next_todo_set.insert(*child) {
                    todo.next_todo.push(*child);
                }
                *todo.refs.entry(*oid).or_insert(0) += 1;
            }
        } else {
            debug!("no children")
        }
    }
}
/// Two-phase work queue driving the topological import: the current
/// wave (`todo`) and the next one (`next_todo`), each with a companion
/// set for de-duplication.
#[derive(Debug)]
struct Todo {
    // Commits to process in the current wave, in insertion order.
    todo: Vec<git2::Oid>,
    todo_set: BTreeSet<git2::Oid>,
    // Commits scheduled for the following wave.
    next_todo: Vec<git2::Oid>,
    next_todo_set: BTreeSet<git2::Oid>,
    // For each key k, number of items in the union of todo and
    // next_todo that have k as a parent. Moreover, all commits that
    // were imported are in this map.
    refs: BTreeMap<git2::Oid, usize>,
}
impl Todo {
    /// An empty queue with no pending references.
    fn new() -> Self {
        Todo {
            refs: BTreeMap::new(),
            todo: Vec::new(),
            todo_set: BTreeSet::new(),
            next_todo: Vec::new(),
            next_todo_set: BTreeSet::new(),
        }
    }
    /// Promote the "next" wave to the current one. The vector passed
    /// in (typically a drained, empty one) becomes the new "next"
    /// queue, recycling its allocation.
    fn swap_next(&mut self, todo: Vec<git2::Oid>) {
        self.todo = std::mem::replace(&mut self.next_todo, todo);
        self.todo_set = std::mem::take(&mut self.next_todo_set);
    }
    /// Schedule `oid` for the next wave, unless it already is.
    fn insert_next(&mut self, oid: git2::Oid) {
        let first_time = self.next_todo_set.insert(oid);
        if first_time {
            self.next_todo.push(oid)
        }
    }
    /// Is the current wave exhausted?
    fn is_empty(&self) -> bool {
        self.todo.is_empty()
    }
    /// Have all of `parents` already been imported (i.e. appear in
    /// `refs`)?
    fn all_processed(&self, parents: &[git2::Oid]) -> bool {
        parents.iter().all(|p| self.refs.contains_key(p))
    }
}
/// Import the entire Git DAG into Pijul.
///
/// Commits are processed in topological waves: a commit is imported
/// only after all of its parents, onto a Pijul channel named after its
/// Git OID. After each commit, the resulting Pijul state is persisted
/// so an interrupted import can resume, and channels with no pending
/// children left are dropped.
fn import(
    git: &git2::Repository,
    env_git: &mut ::sanakirja::Env<::sanakirja::Exclusive>,
    repo: &mut OpenRepo,
    dag: &Dag,
) -> Result<(), anyhow::Error> {
    let mut ws = libpijul::ApplyWorkspace::new();
    let mut todo = Todo::new();
    let txn = repo.repo.pristine.mut_txn_begin();
    // Seed the queue from the DAG roots. A root carrying a Merkle
    // state was already imported by a previous run: verify that the
    // channel still matches that state and enqueue its children
    // instead of re-importing it.
    for &(oid, merkle) in dag.root.iter() {
        if let Some(merkle) = merkle {
            let oid_ = format!("{}", oid);
            let channel = txn.load_channel(&oid_)?.unwrap();
            let (_, (_, merkle_)) = txn
                .changeid_reverse_log(&channel.borrow(), None)?
                .next()
                .unwrap()?;
            if merkle != merkle_ {
                bail!("Pijul channel changed since last import. Please unrecord channel {} to state {}", oid_, merkle.to_base32())
            }
            if let Some(children) = dag.children.get(&oid) {
                *todo.refs.entry(oid).or_insert(0) += children.len();
                for c in children.iter() {
                    todo.insert_next(*c);
                }
            }
        } else {
            // Fresh root: import it. The parent bookkeeping below is
            // defensive — genuine roots have no parents.
            todo.insert_next(oid);
            if let Some(parents) = dag.parents.get(&oid) {
                for p in parents.iter() {
                    *todo.refs.entry(*p).or_insert(0) += 1;
                }
            }
        }
    }
    std::mem::drop(txn);
    todo.swap_next(Vec::new());
    // One pristine transaction per wave.
    while !todo.is_empty() {
        debug!("TODO: {:?}", todo);
        let mut todo_ = std::mem::replace(&mut todo.todo, Vec::new());
        {
            let mut txn = repo.repo.pristine.mut_txn_begin();
            let mut draining = todo_.drain(..);
            while let Some(oid) = draining.next() {
                let mut channel = if let Some(parents) = dag.parents.get(&oid) {
                    // If we don't have all the parents, continue.
                    if !todo.all_processed(&parents) {
                        todo.insert_next(oid);
                        continue;
                    }
                    // Work on a fork of the first parent's channel.
                    let first_parent = parents.iter().next().unwrap();
                    let parent_name = format!("{}", first_parent);
                    let parent_channel = txn.load_channel(&parent_name)?.unwrap();
                    let name = format!("{}", oid);
                    let channel = txn.fork(&parent_channel, &name)?;
                    channel
                } else {
                    // Create a new channel for this commit.
                    let name = format!("{}", oid);
                    let channel = txn.open_or_create_channel(&name)?;
                    channel
                };
                let mut stats = Stats::new(oid);
                // Bring the channel up to date with the remaining
                // parents, then record this commit itself.
                import_commit_parents(
                    repo,
                    dag,
                    &mut txn,
                    &mut channel,
                    &oid,
                    &mut ws,
                    &mut stats,
                )?;
                let state = import_commit(git, repo, &mut txn, &mut channel, &oid, &mut stats)?;
                // Persist the resulting state so the import can resume.
                save_state(env_git, &oid, state)?;
                dag.collect_dead_parents(&oid, &mut todo, &mut txn)?;
                dag.insert_children_in_todo(&oid, &mut todo);
                if let Some(ref mut f) = repo.stats {
                    stats.write(repo.n, &repo.repo.path, f)?
                }
                // Just add the remaining commits to the todo list,
                // because we prefer to move each channel as far as
                // possible before switching channels.
                while let Some(oid) = draining.next() {
                    todo.insert_next(oid)
                }
            }
            txn.commit()?;
        }
        todo.swap_next(todo_)
    }
    Ok(())
}
/// Persist the Pijul state reached after importing Git commit `oid`,
/// so that a later run can resume from it.
fn save_state(
    git: &mut ::sanakirja::Env<::sanakirja::Exclusive>,
    oid: &git2::Oid,
    state: libpijul::Merkle,
) -> Result<(), anyhow::Error> {
    use ::sanakirja::{Commit, Transaction};
    let mut txn = ::sanakirja::Env::mut_txn_begin(git)?;
    // Open the commit -> state table, creating it on first use.
    let mut table: ::sanakirja::Db<git2::Oid, libpijul::Merkle> = match txn.root(0) {
        Some(db) => db,
        None => txn.create_db()?,
    };
    txn.put(&mut rand::thread_rng(), &mut table, *oid, state)?;
    txn.set_root(0, table);
    txn.commit()?;
    Ok(())
}
/// Compute the list of changes to apply to `channel` so that it
/// contains the logs of all of `oid`'s parents, de-duplicated and
/// sorted by change number.
///
/// The returned boolean says whether the whole repository must be
/// output afterwards: true when `oid` has no parents, or when one of
/// its parents is not the repository's current commit (a merge or a
/// branch switch).
fn make_apply_plan<T: TxnTExt>(
    repo: &OpenRepo,
    txn: &T,
    channel: &ChannelRef<T>,
    dag: &Dag,
    oid: &git2::Oid,
) -> Result<(bool, Vec<(libpijul::Hash, u64)>), anyhow::Error> {
    let mut to_apply = Vec::new();
    let mut to_apply_set = BTreeSet::new();
    let mut needs_output = false;
    if let Some(parents) = dag.parents.get(&oid) {
        for p in parents {
            // If one of the parents is not the repo's current commit,
            // then we're doing either a merge or a checkout of
            // another branch. If that is the case, we need to output
            // the entire repository to update the
            // tree/revtree/inodes/revinodes tables.
            if let Some(current_commit) = repo.current_commit {
                if current_commit != *p {
                    needs_output = true
                }
            }
            let p_name = format!("{}", p);
            let p_channel = txn.load_channel(&p_name)?.unwrap();
            for x in txn.log(&p_channel.borrow(), 0)? {
                let (n, (h, _)) = x?;
                // Only schedule changes not yet on `channel`, once
                // each (`insert` short-circuits on duplicates).
                if txn.has_change(&channel, h)?.is_none() && to_apply_set.insert(h) {
                    to_apply.push((h, n));
                }
            }
        }
    } else {
        needs_output = true
    }
    // Since we're pulling from multiple channels, the change numbers
    // are not necessarily in order (especially since we've
    // de-duplicated using `to_apply_set`).
    to_apply.sort_unstable_by_key(|&(_, n)| n);
    Ok((needs_output, to_apply))
}
/// Apply the changes corresponding to a commit's parents to `channel`.
///
/// Outputs the working copy afterwards when the plan requires it, runs
/// the optional periodic consistency checks (`--check`), and fills in
/// the timing fields of `stats`.
fn import_commit_parents<T: TxnTExt + MutTxnTExt + GraphIter>(
    repo: &mut OpenRepo,
    dag: &Dag,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    oid: &git2::Oid,
    ws: &mut libpijul::ApplyWorkspace,
    stats: &mut Stats,
) -> Result<(), anyhow::Error> {
    // Apply all the parent's logs to `channel`
    let (needs_output, to_apply) = make_apply_plan(repo, txn, channel, dag, oid)?;
    let parent_application_time = std::time::Instant::now();
    for h in to_apply.iter() {
        debug!("to_apply {:?}", h)
    }
    for (h, _) in to_apply.iter() {
        info!("applying {:?} to {:?}", h, T::name(&channel.borrow()));
        txn.apply_change_ws(&repo.repo.changes, channel, *h, ws)?;
        // Optional periodic consistency check.
        if repo.check > 0 && repo.n % repo.check == 0 {
            check_alive_debug(&repo.repo.changes, txn, &channel.borrow(), line!())?;
        }
    }
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_alive_debug(&repo.repo.changes, txn, &channel.borrow(), line!())?;
    }
    stats.parent_application_time = if to_apply.is_empty() {
        std::time::Duration::from_secs(0)
    } else {
        parent_application_time.elapsed()
    };
    debug!(
        "last_recorded {:?}, name {:?}",
        repo.repo.config.current_channel,
        T::name(&channel.borrow()),
    );
    // Output the working copy if anything was applied, or if the plan
    // demands a full output (merge/branch-switch case).
    stats.output_time = if !to_apply.is_empty() || needs_output {
        debug!("outputting");
        let output_time = std::time::Instant::now();
        txn.output_repository_no_pending(
            &mut repo.repo.working_copy,
            &repo.repo.changes,
            channel,
            &mut HashMap::new(),
            "",
            false,
        )?;
        let t = output_time.elapsed();
        if repo.check > 0 && repo.n % repo.check == 0 {
            check_alive_debug(&repo.repo.changes, txn, &channel.borrow(), line!())?;
        }
        t
    } else {
        std::time::Duration::from_secs(0)
    };
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_tree_inodes(txn, T::graph(&channel.borrow()));
    }
    Ok(())
}
/// Reset to the Git commit specified by `child`, telling Pijul which
/// files were moved in the reset.
///
/// Returns the checked-out object and the set of repository paths
/// touched by the commit — the prefixes that need recording next.
fn git_reset<'a, T: TxnTExt + MutTxnTExt>(
    git: &'a git2::Repository,
    repo: &mut OpenRepo,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    child: &git2::Oid,
    stats: &mut Stats,
) -> Result<(git2::Object<'a>, BTreeSet<PathBuf>), anyhow::Error> {
    // Reset the Git branch.
    debug!("resetting the git branch to {:?}", child);
    let reset_time = std::time::Instant::now();
    let object = git.find_object(*child, None)?;
    // Set when the checkout touches at least one regular (non-symlink)
    // file. NOTE(review): the flag is written below but never read in
    // this function — confirm it is still needed.
    let reset_was_useful = Rc::new(RefCell::new(false));
    let mut builder = git2::build::CheckoutBuilder::new();
    let repo_path = repo.repo.path.clone();
    let reset_was_useful_ = reset_was_useful.clone();
    builder
        .force()
        .remove_untracked(true)
        .remove_ignored(true)
        .progress(move |file, a, b| {
            debug!("Git progress: {:?} {:?} {:?}", file, a, b);
            if let Some(file) = file {
                let file = repo_path.join(file);
                if let Ok(meta) = std::fs::metadata(&file) {
                    if !meta.file_type().is_symlink() {
                        *reset_was_useful_.borrow_mut() = true
                    }
                }
            }
        });
    builder.notify(|notif, file, _, _, _| {
        info!("Git reset: {:?} {:?}", notif, file);
        true
    });
    git.reset(&object, git2::ResetType::Hard, Some(&mut builder))?;
    repo.current_commit = Some(*child);
    stats.reset_time = reset_time.elapsed();
    debug!("reset done");
    let mut prefixes = BTreeSet::new();
    {
        let commit = object.as_commit().unwrap();
        let new_tree = commit.tree().unwrap();
        debug!("inspecting commit");
        let git_diff_time = std::time::Instant::now();
        // Diff against each parent, with rename detection, to collect
        // the touched paths and replay renames in the pristine.
        for parent in commit.parents() {
            let old_tree = parent.tree().unwrap();
            let mut diff = git
                .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None)
                .unwrap();
            diff.find_similar(None).unwrap();
            let mut moves = Vec::new();
            for delta in diff.deltas() {
                let old_path = delta.old_file().path().unwrap();
                let new_path = delta.new_file().path().unwrap();
                match delta.status() {
                    git2::Delta::Renamed => {
                        info!(
                            "mv {:?} {:?}",
                            old_path.to_string_lossy(),
                            new_path.to_string_lossy()
                        );
                        // Replay the rename in the pristine if the old
                        // path is known there.
                        if let Ok((vertex, _)) = txn.follow_oldest_path(
                            &repo.repo.changes,
                            &channel,
                            &old_path.to_string_lossy(),
                        ) {
                            if let Some(inode) = txn.get_revinodes(vertex, None)? {
                                if let Some(old_path) = libpijul::fs::inode_filename(txn, inode)? {
                                    debug!(
                                        "moving {:?} ({:?}) from {:?} to {:?}",
                                        inode, vertex, old_path, new_path
                                    );
                                    // Move through a random temporary
                                    // name first; the final renames run
                                    // after the whole diff is scanned —
                                    // presumably to avoid collisions
                                    // between overlapping renames.
                                    let mut tmp_path = new_path.to_path_buf();
                                    tmp_path.pop();
                                    use rand::Rng;
                                    let s: String = rand::thread_rng()
                                        .sample_iter(&rand::distributions::Alphanumeric)
                                        .take(30)
                                        .map(|x| x as char)
                                        .collect();
                                    tmp_path.push(&s);
                                    // A failed pristine move is logged,
                                    // not fatal.
                                    if let Err(e) =
                                        txn.move_file(&old_path, &tmp_path.to_string_lossy())
                                    {
                                        error!("{}", e);
                                    } else {
                                        moves.push((tmp_path, new_path));
                                    }
                                }
                            }
                        }
                        let new_path = new_path.to_path_buf();
                        prefixes.insert(new_path);
                    }
                    git2::Delta::Deleted => {
                        let old_path = old_path.to_path_buf();
                        prefixes.insert(old_path);
                    }
                    _ => {
                        // Deltas whose new file is a symlink are not
                        // added to the record prefixes.
                        if delta.new_file().mode() != git2::FileMode::Link {
                            debug!("delta old = {:?} new = {:?}", old_path, new_path);
                            let old_path = old_path.to_path_buf();
                            let new_path = new_path.to_path_buf();
                            prefixes.insert(old_path);
                            prefixes.insert(new_path);
                        }
                    }
                }
            }
            debug!("moves = {:?}", moves);
            // Second half of each rename: temporary name -> final name.
            for (a, b) in moves.drain(..) {
                if let Err(e) = txn.move_file(&a.to_string_lossy(), &b.to_string_lossy()) {
                    error!("{}", e);
                }
            }
        }
        stats.git_diff_time = git_diff_time.elapsed();
        debug!("done inspecting commit");
        // An empty path prefix would cover everything; normalise to
        // the "record the whole repository" case below.
        if prefixes.contains(std::path::Path::new("")) {
            prefixes.clear();
        }
        // Root has no parents so record everything
        if prefixes.is_empty() {
            prefixes.insert(repo.repo.path.clone());
        }
        debug!("record prefixes {:?}", prefixes);
    }
    Ok((object, prefixes))
}
/// Reset to the Git commit specified as `child`, and record the
/// corresponding change in Pijul.
///
/// Returns the channel's Merkle state after recording. Commits whose
/// message or author name is absent or not valid UTF-8 are recorded
/// with an empty string instead of panicking, consistent with the
/// logging below which already anticipates message-less commits.
fn import_commit<T: TxnTExt + MutTxnTExt + GraphIter>(
    git: &git2::Repository,
    repo: &mut OpenRepo,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    child: &git2::Oid,
    stats: &mut Stats,
) -> Result<libpijul::Merkle, anyhow::Error> {
    let (object, prefixes) = git_reset(git, repo, txn, channel, child, stats)?;
    // Make sure every touched path is tracked (best-effort).
    for p in prefixes.iter() {
        if let Ok(m) = std::fs::metadata(&p) {
            if m.is_dir() {
                txn.add_dir(p.to_str().unwrap()).unwrap_or(());
            } else {
                txn.add_file(p.to_str().unwrap()).unwrap_or(());
            }
        }
    }
    let commit = object.as_commit().unwrap();
    let signature = commit.author();
    // Record+Apply
    debug!("recording on channel {:?}", T::name(&channel.borrow()));
    let record_time = std::time::Instant::now();
    let prefix_vec: Vec<_> = prefixes.into_iter().collect();
    if let Some(msg) = commit.message() {
        info!("Importing commit {:?}: {}", child, msg);
    } else {
        info!("Importing commit {:?} (no message)", child);
    }
    let rec = record_apply(
        txn,
        channel,
        &mut repo.repo.working_copy,
        &repo.repo.changes,
        &CanonicalPathBuf::canonicalize(&repo.repo.path)?,
        &prefix_vec,
        libpijul::change::ChangeHeader {
            // `message()`/`name()` return `None` for non-UTF-8 data;
            // fall back to an empty string rather than panicking.
            message: commit.message().unwrap_or("").to_string(),
            authors: vec![libpijul::change::Author {
                name: signature.name().unwrap_or("").to_string(),
                email: signature.email().map(|e| e.to_string()),
                full_name: None,
            }],
            description: None,
            timestamp: chrono::DateTime::from_utc(
                chrono::NaiveDateTime::from_timestamp(signature.when().seconds(), 0),
                chrono::Utc,
            ),
        },
    );
    let (n_actions, hash, state) = match rec {
        Ok(x) => x,
        // Not fatal: the channel already contains this change.
        Err(libpijul::LocalApplyError::ChangeAlreadyOnChannel { hash }) => {
            error!("change already on channel: {:?}", hash);
            return Ok(txn.current_state(&channel.borrow())?);
        }
        Err(e) => return Err(e.into()),
    };
    stats.record_time = record_time.elapsed();
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_alive_debug(&repo.repo.changes, txn, &channel.borrow(), line!())?;
    }
    stats.n_actions = n_actions;
    stats.hash = hash;
    // Keep the repository config's current channel in sync with the
    // channel we just recorded on.
    if let Some(ref mut cur) = repo.repo.config.current_channel {
        cur.clear();
        cur.push_str(T::name(&channel.borrow()));
    } else {
        repo.repo.config.current_channel = Some(T::name(&channel.borrow()).to_string())
    }
    repo.repo.save_config()?;
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_tree_inodes(txn, T::graph(&channel.borrow()));
    }
    repo.n += 1;
    Ok(state)
}
/// Record the changes found under `prefixes` in the working copy, and
/// apply the resulting change to `channel`.
///
/// Returns the number of actions, the hash of the new change (`None`
/// when nothing changed), and the channel's resulting Merkle state.
fn record_apply<T: TxnT + TxnTExt + MutTxnTExt, C: libpijul::changestore::ChangeStore>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    working_copy: &mut libpijul::working_copy::FileSystem,
    changes: &C,
    repo_path: &CanonicalPathBuf,
    prefixes: &[PathBuf],
    header: libpijul::change::ChangeHeader,
) -> Result<
    (usize, Option<libpijul::Hash>, libpijul::Merkle),
    libpijul::LocalApplyError<T::GraphError>,
> {
    let mut state = libpijul::RecordBuilder::new();
    let num_cpus = num_cpus::get();
    for p in prefixes.iter() {
        let result = working_copy.record_prefix(
            txn,
            channel,
            changes,
            &mut state,
            repo_path.clone(),
            p,
            num_cpus,
        );
        use libpijul::working_copy::filesystem::*;
        match result {
            Ok(_) => {}
            // During an import, paths that vanished or are already
            // tracked are expected; other errors are only logged.
            Err(Error::Add(AddError::Fs(FsError::NotFound(_)))) => {}
            Err(Error::Add(AddError::Fs(FsError::AlreadyInRepo(_)))) => {}
            Err(e) => {
                error!("While adding {:?}: {}", p, e);
            }
        }
    }
    let rec = state.finish();
    // Nothing to record: return the current state unchanged.
    if rec.actions.is_empty() {
        return Ok((
            0,
            None,
            txn.current_state(&channel.borrow()).map_err(TxnErr)?,
        ));
    }
    let actions: Vec<_> = rec
        .actions
        .into_iter()
        .map(|rec| rec.globalize(txn).unwrap())
        .collect();
    let n = actions.len();
    let (dependencies, extra_known) =
        libpijul::change::dependencies(txn, &channel.borrow(), actions.iter())?;
    let mut change = libpijul::change::LocalChange::make_change(
        txn,
        channel,
        actions,
        rec.contents,
        header,
        Vec::new(),
    )?;
    change.dependencies = dependencies;
    change.extra_known = extra_known;
    debug!("saving change");
    let hash = changes.save_change(&change).unwrap();
    debug!("saved");
    let (_, m) = txn.apply_local_change(channel, &change, hash, &rec.updatables)?;
    Ok((n, Some(hash), m))
}
/// Per-commit import statistics, emitted as one CSV row by
/// `Stats::write`.
struct Stats {
    // The Git commit these numbers refer to.
    child: git2::Oid,
    // Number of files in the change store.
    n_changes: usize,
    // Time applying the parents' logs.
    parent_application_time: std::time::Duration,
    // Time outputting the working copy.
    output_time: std::time::Duration,
    // Time spent in the hard `git reset`.
    reset_time: std::time::Duration,
    // Time diffing the commit against its parents.
    git_diff_time: std::time::Duration,
    // Time recording the resulting change.
    record_time: std::time::Duration,
    // Number of actions in the recorded change.
    n_actions: usize,
    // Working-copy contents: files, directories, total bytes.
    n_files: usize,
    n_dirs: usize,
    total_size: u64,
    // On-disk sizes of the change store and of the pristine.
    changes_size: u64,
    pristine_size: u64,
    // Hash of the recorded change, if one was produced.
    hash: Option<libpijul::Hash>,
}
impl Stats {
    /// Fresh, zeroed statistics for one commit.
    fn new(child: git2::Oid) -> Self {
        let z = std::time::Duration::from_secs(0);
        Stats {
            child,
            n_changes: 0,
            parent_application_time: z,
            output_time: z,
            reset_time: z,
            git_diff_time: z,
            record_time: z,
            n_actions: 0,
            n_files: 0,
            n_dirs: 0,
            total_size: 0,
            changes_size: 0,
            pristine_size: 0,
            hash: None,
        }
    }
    /// Append one CSV row for commit number `n` of the import to `f`.
    ///
    /// Walks the working copy (skipping `DOT_DIR`) to count files,
    /// directories and bytes, measures the pristine and change-store
    /// sizes on disk, then writes the row including libpijul's global
    /// timers, which are reset afterwards.
    fn write(
        &mut self,
        n: usize,
        repo_path: &Path,
        f: &mut std::fs::File,
    ) -> Result<(), anyhow::Error> {
        // Count files.
        let mut walk = ignore::WalkBuilder::new(&repo_path);
        walk.add_ignore(DOT_DIR).unwrap();
        for f in walk.build() {
            let meta = f?.metadata()?;
            if meta.is_dir() {
                self.n_dirs += 1
            } else {
                self.n_files += 1;
                self.total_size += meta.len();
            }
        }
        // Measure the on-disk size of the pristine and change store.
        let dot_dir = repo_path.join(DOT_DIR);
        let pristine_dir = dot_dir.join(PRISTINE_DIR);
        let changes_dir = dot_dir.join(CHANGES_DIR);
        if let Ok(walk) = std::fs::read_dir(&pristine_dir) {
            for f in walk {
                let meta = f?.metadata()?;
                self.pristine_size += meta.len();
            }
        }
        if let Ok(walk) = std::fs::read_dir(&changes_dir) {
            for f in walk {
                let meta = f?.metadata()?;
                self.changes_size += meta.len();
                self.n_changes += 1
            }
        }
        let timers = libpijul::get_timers();
        writeln!(
            f, "{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}",
            self.child,
            n,
            self.parent_application_time.as_secs_f64(),
            timers.alive_output.as_secs_f64(),
            timers.alive_retrieve.as_secs_f64(),
            timers.alive_graph.as_secs_f64(),
            timers.alive_contents.as_secs_f64(),
            timers.alive_write.as_secs_f64(),
            timers.apply.as_secs_f64(),
            timers.record.as_secs_f64(),
            timers.repair_context.as_secs_f64(),
            timers.check_cyclic_paths.as_secs_f64(),
            timers.find_alive.as_secs_f64(),
            self.output_time.as_secs_f64(),
            self.reset_time.as_secs_f64(),
            self.git_diff_time.as_secs_f64(),
            self.record_time.as_secs_f64(),
            self.n_actions,
            self.n_files,
            self.n_dirs,
            self.total_size,
            self.changes_size,
            self.pristine_size,
            if let Some(ref h) = self.hash { h.to_base32() } else { String::new() },
        )?;
        libpijul::reset_timers();
        Ok(())
    }
}
use std::path::PathBuf;
use clap::Clap;
use libpijul::{MutTxnT, MutTxnTExt, TxnT};
use log::debug;
use crate::repository::Repository;
// `pijul fork`: create a new channel, either by forking an existing
// channel or by applying a single change to a fresh channel.
#[derive(Clap, Debug)]
pub struct Fork {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Make the new channel from this channel instead of the current channel
    #[clap(long = "channel", conflicts_with = "change")]
    channel: Option<String>,
    /// Apply this change after creating the channel
    #[clap(long = "change", conflicts_with = "channel")]
    change: Option<String>,
    /// The name of the new channel
    to: String,
}
impl Fork {
    /// Create the channel named `self.to`, either by applying a change
    /// (when `--change` is given) or by forking an existing channel.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let mut txn = repo.pristine.mut_txn_begin();
        match self.change {
            Some(ref change_prefix) => {
                // --change: new channel containing just this change
                // (and its dependencies).
                let (hash, _) = txn.hash_from_prefix(change_prefix)?;
                let mut new_channel = txn.open_or_create_channel(&self.to)?;
                txn.apply_change_rec(&repo.changes, &mut new_channel, hash)?
            }
            None => {
                // Fork the requested (or current) channel, if it exists.
                let base_name = repo.config.get_current_channel(self.channel.as_ref());
                if let Some(base) = txn.load_channel(base_name)? {
                    txn.fork(&base, &self.to)?;
                }
            }
        }
        txn.commit()?;
        Ok(())
    }
}
use std::io::Write;
use std::path::PathBuf;
use canonical_path::CanonicalPathBuf;
use clap::Clap;
use libpijul::{MutTxnT, MutTxnTExt, TxnTExt};
use log::{debug, info};
use crate::repository::Repository;
// `pijul mv`: move or rename tracked files.
#[derive(Clap, Debug)]
pub struct Mv {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Paths which need to be moved
    ///
    /// The last argument to this option is considered the
    /// destination
    paths: Vec<PathBuf>,
}
impl Mv {
    /// Move/rename files on disk and record the moves in the pristine.
    ///
    /// The last element of `paths` is the destination; when it is an
    /// existing directory each other path is moved into it, otherwise
    /// a single source is renamed to it. Degenerate invocations (no
    /// destination; several sources with a non-directory destination)
    /// return `Ok(())` without doing anything.
    pub fn run(mut self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        // The destination is the last path given.
        let to = if let Some(to) = self.paths.pop() {
            to
        } else {
            return Ok(());
        };
        let to = path(&self.repo_path, to);
        let is_dir = if let Ok(m) = std::fs::metadata(&to) {
            m.is_dir()
        } else {
            false
        };
        // Multiple sources require a directory destination.
        if !is_dir && self.paths.len() > 1 {
            return Ok(());
        }
        let mut txn = repo.pristine.mut_txn_begin();
        let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
        for p in self.paths {
            debug!("p = {:?}", p);
            let source = std::fs::canonicalize(&path(&self.repo_path, p.clone()))?;
            let target = if is_dir { to.join(p) } else { to.clone() };
            debug!("target = {:?}", target);
            // Rename on disk first, then record the move in the
            // pristine using repository-relative paths.
            std::fs::rename(&source, &target)?;
            let target = std::fs::canonicalize(&target)?;
            let source = source.strip_prefix(&repo_path)?;
            let target = target.strip_prefix(&repo_path)?;
            debug!("moving {:?} -> {:?}", source, target);
            txn.move_file(&source.to_string_lossy(), &target.to_string_lossy())?
        }
        txn.commit()?;
        Ok(())
    }
}
/// Join `path` onto the repository root, when a root was given;
/// otherwise return `path` unchanged.
fn path(root: &Option<PathBuf>, path: PathBuf) -> PathBuf {
    match root {
        Some(base) => base.join(path),
        None => path,
    }
}
// `pijul ls`: list the files tracked in the pristine.
#[derive(Clap, Debug)]
pub struct Ls {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
}
impl Ls {
    /// Print every tracked path in the working copy, one per line.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        let mut out = std::io::stdout();
        for entry in txn.iter_working_copy() {
            let tracked_path = entry?.1;
            writeln!(out, "{}", tracked_path)?;
        }
        Ok(())
    }
}
// `pijul add`: start tracking files in the pristine.
#[derive(Clap, Debug)]
pub struct Add {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Descend into directories.
    #[clap(short = 'r', long = "recursive")]
    recursive: bool,
    // Add paths even if they match the ignore rules.
    #[clap(short = 'f', long = "force")]
    force: bool,
    /// Paths to add to the internal tree.
    paths: Vec<PathBuf>,
}
impl Add {
    /// Start tracking the given paths, honouring ignore rules unless
    /// `--force` is set, and whole directory trees with `--recursive`.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        let threads = num_cpus::get();
        let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
        let mut stderr = std::io::stderr();
        for path in self.paths.iter() {
            info!("Adding {:?}", path);
            let path = CanonicalPathBuf::canonicalize(&path)?;
            debug!("{:?}", path);
            let meta = std::fs::metadata(&path)?;
            debug!("{:?}", meta);
            // Skip ignored paths unless --force was given.
            if !self.force
                && !libpijul::working_copy::filesystem::filter_ignore(
                    repo_path.as_ref(),
                    path.as_ref(),
                    meta.is_dir(),
                )
            {
                continue;
            }
            if self.recursive {
                use libpijul::working_copy::filesystem::*;
                if let Ok((full, prefix)) = get_prefix(Some(repo_path.as_ref()), path.as_path()) {
                    repo.working_copy.add_prefix_rec(
                        &mut txn,
                        repo_path.clone(),
                        full.clone(),
                        &prefix,
                        threads,
                    )?
                }
            } else {
                // Paths outside the repository are silently skipped.
                let path = if let Ok(path) = path.as_path().strip_prefix(&repo_path.as_path()) {
                    path
                } else {
                    continue;
                };
                let path_str = path.to_str().unwrap();
                // Report (but don't abort on) individual add failures.
                if !txn.is_tracked(&path_str)? {
                    if let Err(e) = txn.add(&path_str, meta.is_dir()) {
                        writeln!(stderr, "{}", e)?;
                    }
                }
            }
        }
        txn.commit()?;
        Ok(())
    }
}
// `pijul remove`: stop tracking files in the pristine.
#[derive(Clap, Debug)]
pub struct Remove {
    #[clap(long = "repository")]
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    repo_path: Option<PathBuf>,
    /// The paths need to be removed
    paths: Vec<PathBuf>,
}
impl Remove {
    /// Untrack the given paths. Editor backup files and paths outside
    /// the repository are skipped silently.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
        for path in self.paths.iter() {
            debug!("{:?}", path);
            // Skip editor backup files ("foo~" and "#foo#").
            let is_backup = path
                .file_name()
                .and_then(|name| name.to_str())
                .map(|name| {
                    name.ends_with('~') || (name.starts_with('#') && name.ends_with('#'))
                })
                .unwrap_or(false);
            if is_backup {
                continue;
            }
            let canonical = path.canonicalize()?;
            // Only paths inside the repository can be untracked.
            let relative = match canonical.strip_prefix(&repo_path.as_path()) {
                Ok(p) => p,
                Err(_) => continue,
            };
            let path_str = relative.to_str().unwrap();
            if txn.is_tracked(&path_str)? {
                txn.remove_file(&path_str)?;
            }
        }
        txn.commit()?;
        Ok(())
    }
}
use std::collections::BTreeMap;
use std::io::Write;
use std::path::PathBuf;
use canonical_path::CanonicalPathBuf;
use clap::Clap;
use libpijul::change::*;
use libpijul::{MutTxnT, MutTxnTExt};
use serde_derive::Serialize;
use crate::repository::*;
// `pijul diff`: show the unrecorded changes in the working copy.
#[derive(Clap, Debug)]
pub struct Diff {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    pub repo_path: Option<PathBuf>,
    /// Output the diff in JSON format instead of the default change text format.
    #[clap(long = "json")]
    pub json: bool,
    /// Compare with this channel.
    #[clap(long = "channel")]
    pub channel: Option<String>,
    /// Add all the changes of this channel as dependencies (except changes implied transitively), instead of the minimal dependencies.
    #[clap(long = "tag")]
    pub tag: bool,
    /// Show a short version of the diff.
    #[clap(long = "short")]
    pub short: bool,
    /// Only diff those paths (files or directories). If missing, diff the entire repository.
    pub prefixes: Vec<PathBuf>,
}
impl Diff {
    /// Record the difference between the working copy and the chosen
    /// channel (without saving it), and print it as full change text,
    /// JSON, or a short status listing.
    pub fn run(mut self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut stdout = std::io::stdout();
        let mut channel =
            txn.open_or_create_channel(repo.config.get_current_channel(self.channel.as_ref()))?;
        // Record either the whole repository or only the requested
        // prefixes.
        let mut state = libpijul::RecordBuilder::new();
        if self.prefixes.is_empty() {
            txn.record(
                &mut state,
                libpijul::Algorithm::default(),
                &mut channel,
                &mut repo.working_copy,
                &repo.changes,
                "",
            )?
        } else {
            self.fill_relative_prefixes()?;
            repo.working_copy.record_prefixes(
                &mut txn,
                &mut channel,
                &repo.changes,
                &mut state,
                CanonicalPathBuf::canonicalize(&repo.path)?,
                &self.prefixes,
                num_cpus::get(),
            )?;
        }
        let rec = state.finish();
        // No difference: print nothing.
        if rec.actions.is_empty() {
            return Ok(());
        }
        let actions = rec
            .actions
            .into_iter()
            .map(|rec| rec.globalize(&txn).unwrap())
            .collect();
        let mut change = LocalChange::make_change(
            &txn,
            &channel,
            actions,
            rec.contents,
            ChangeHeader::default(),
            Vec::new(),
        )?;
        // --tag uses the channel's full dependencies instead of the
        // minimal set.
        let (dependencies, extra_known) = if self.tag {
            full_dependencies(&txn, &channel)?
        } else {
            dependencies(&txn, &channel.borrow(), change.changes.iter())?
        };
        change.dependencies = dependencies;
        change.extra_known = extra_known;
        super::pager();
        if self.json {
            // JSON: group hunks by path as operation/line pairs.
            let mut changes = BTreeMap::new();
            for ch in change.changes.iter() {
                changes
                    .entry(ch.path())
                    .or_insert_with(Vec::new)
                    .push(Status {
                        operation: match ch {
                            Hunk::FileMove { .. } => "file move",
                            Hunk::FileDel { .. } => "file del",
                            Hunk::FileUndel { .. } => "file undel",
                            Hunk::SolveNameConflict { .. } => "solve name conflict",
                            Hunk::UnsolveNameConflict { .. } => "unsolve name conflict",
                            Hunk::FileAdd { .. } => "file add",
                            Hunk::Edit { .. } => "edit",
                            Hunk::Replacement { .. } => "replacement",
                            Hunk::SolveOrderConflict { .. } => "solve order conflict",
                            Hunk::UnsolveOrderConflict { .. } => "unsolve order conflict",
                            Hunk::ResurrectZombies { .. } => "resurrect zombies",
                        },
                        line: ch.line(),
                    });
            }
            serde_json::to_writer_pretty(&mut std::io::stdout(), &changes)?;
            writeln!(stdout)?;
        } else if self.short {
            // --short: one status line per affected path, sorted and
            // de-duplicated.
            let mut changes = Vec::new();
            for ch in change.changes.iter() {
                changes.push(match ch {
                    Hunk::FileMove { path, .. } => format!("MV {}\n", path),
                    Hunk::FileDel { path, .. } => format!("D {}\n", path),
                    Hunk::FileUndel { path, .. } => format!("UD {}\n", path),
                    Hunk::FileAdd { path, .. } => format!("A {}", path),
                    Hunk::SolveNameConflict { path, .. } => format!("SC {}", path),
                    Hunk::UnsolveNameConflict { path, .. } => format!("UC {}", path),
                    Hunk::Edit {
                        local: Local { path, .. },
                        ..
                    } => format!("M {}", path),
                    Hunk::Replacement {
                        local: Local { path, .. },
                        ..
                    } => format!("R {}", path),
                    Hunk::SolveOrderConflict {
                        local: Local { path, .. },
                        ..
                    } => format!("SC {}", path),
                    Hunk::UnsolveOrderConflict {
                        local: Local { path, .. },
                        ..
                    } => format!("UC {}", path),
                    Hunk::ResurrectZombies {
                        local: Local { path, .. },
                        ..
                    } => format!("RZ {}", path),
                });
            }
            changes.sort_unstable();
            changes.dedup();
            for ch in changes {
                println!("{}", ch);
            }
        } else {
            // Default: the full change text, with file:line markers.
            change.write(
                &repo.changes,
                None,
                |local: &libpijul::change::Local, _| -> String {
                    format!("{}:{}", local.path, local.line)
                },
                true,
                &mut std::io::stdout(),
            )?
        }
        Ok(())
    }
    /// Make relative prefixes absolute with respect to the current
    /// working directory.
    fn fill_relative_prefixes(&mut self) -> Result<(), anyhow::Error> {
        let cwd = std::env::current_dir()?;
        for p in self.prefixes.iter_mut() {
            if p.is_relative() {
                *p = cwd.join(&p);
            }
        }
        Ok(())
    }
}
/// One entry of the `--json` diff output: an operation name and the
/// line it applies to, if any.
#[derive(Debug, Serialize)]
struct Status {
    operation: &'static str,
    line: Option<usize>,
}
use std::path::PathBuf;
use crate::repository::Repository;
use anyhow::bail;
use clap::Clap;
use libpijul::{TxnT, TxnTExt};
// Low-level inspection of the pristine's internal tables.
#[derive(Clap, Debug)]
pub struct Debug {
    #[clap(long = "repository")]
    // Run in this repository instead of the current directory's.
    repo_path: Option<PathBuf>,
    #[clap(long = "channel")]
    // Inspect this channel instead of the current one.
    channel: Option<String>,
    // Optional path: restrict the graph dump to this file's subgraph.
    root: Option<String>,
}
impl Debug {
    /// Dump the pristine's internal tables (inodes, tree, graph) for
    /// debugging, then run the database consistency checks.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        libpijul::pristine::debug_inodes(&txn);
        libpijul::pristine::debug_revinodes(&txn);
        libpijul::pristine::debug_tree_print(&txn);
        libpijul::pristine::debug_revtree_print(&txn);
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = if let Some(channel) = txn.load_channel(&channel_name)? {
            channel
        } else {
            bail!("No such channel: {:?}", channel_name)
        };
        // With a root path, dump only the subgraph rooted at that
        // file; otherwise dump the whole channel graph.
        if let Some(root) = self.root {
            let (pos, _) = txn
                .follow_oldest_path(&repo.changes, &channel, &root)
                .unwrap();
            libpijul::pristine::debug_root(
                &txn,
                &channel.borrow().graph,
                pos.inode_vertex(),
                std::io::stdout(),
                true,
            )?;
        } else {
            let channel = channel.borrow();
            libpijul::pristine::debug(&txn, &channel.graph, std::io::stdout())?;
        }
        libpijul::pristine::check_alive_debug(&repo.changes, &txn, &channel.borrow(), 0)?;
        eprintln!("{:#?}", txn.check_database());
        Ok(())
    }
}
use std::collections::HashSet;
use std::path::PathBuf;
use anyhow::bail;
use canonical_path::CanonicalPathBuf;
use clap::Clap;
use libpijul::vertex_buffer::VertexBuffer;
use libpijul::*;
use log::debug;
use crate::repository::Repository;
// `pijul credit`: annotate a file with the changes that introduced
// each line (akin to `git blame`).
#[derive(Clap, Debug)]
pub struct Credit {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Use this channel instead of the current channel
    #[clap(long = "channel")]
    channel: Option<String>,
    /// The file to annotate
    file: PathBuf,
}
impl Credit {
    /// Output the file through a [`Creditor`], annotating each line
    /// with the changes that introduced it.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let has_repo_path = self.repo_path.is_some();
        let mut repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = if let Some(channel) = txn.load_channel(&channel_name)? {
            channel
        } else {
            bail!("No such channel: {:?}", channel_name)
        };
        // An explicitly selected channel becomes the current one.
        if self.channel.is_some() {
            repo.config.current_channel = self.channel;
            repo.save_config()?;
        }
        let repo_path = CanonicalPathBuf::canonicalize(&repo.path)?;
        // Resolve the file to a repository-relative path: against the
        // given repository root if --repository was passed, otherwise
        // against the current directory.
        let (pos, _ambiguous) = if has_repo_path {
            let root = std::fs::canonicalize(repo.path.join(&self.file))?;
            let path = root.strip_prefix(&repo_path.as_path())?.to_str().unwrap();
            txn.follow_oldest_path(&repo.changes, &channel, &path)?
        } else {
            let mut root = crate::current_dir()?;
            root.push(&self.file);
            let root = std::fs::canonicalize(&root)?;
            let path = root.strip_prefix(&repo_path.as_path())?.to_str().unwrap();
            txn.follow_oldest_path(&repo.changes, &channel, &path)?
        };
        let channel_ = channel.borrow();
        super::pager();
        txn.output_file(
            &repo.changes,
            &channel,
            pos,
            &mut Creditor::new(std::io::stdout(), &txn, &channel_),
        )?;
        Ok(())
    }
}
/// A `VertexBuffer` that writes each vertex's contents prefixed by the set
/// of changes that introduced it (used by `pijul credit`).
pub struct Creditor<'a, W: std::io::Write, T: ChannelTxnT> {
    // Output sink.
    w: W,
    // Scratch buffer holding the current vertex's contents.
    buf: Vec<u8>,
    // True while the output is at the beginning of a line.
    new_line: bool,
    // Deduplicated set of changes introducing the current vertex.
    changes: HashSet<ChangeId>,
    txn: &'a T,
    channel: &'a T::Channel,
}
impl<'a, W: std::io::Write, T: ChannelTxnT> Creditor<'a, W, T> {
    /// Build a `Creditor` writing annotated output to `w`, using
    /// `txn`/`channel` to look up which changes introduced each vertex.
    pub fn new(w: W, txn: &'a T, channel: &'a T::Channel) -> Self {
        Self {
            w,
            buf: Vec::new(),
            // The output starts at the beginning of a line.
            new_line: true,
            changes: HashSet::new(),
            txn,
            channel,
        }
    }
}
impl<'a, W: std::io::Write, T: TxnTExt> VertexBuffer for Creditor<'a, W, T> {
    /// Write one vertex: first a blank-line-separated, comma-joined list of
    /// the changes that introduced it, then its contents, each line quoted
    /// with "> ".
    fn output_line<E, C: FnOnce(&mut Vec<u8>) -> Result<(), E>>(
        &mut self,
        v: Vertex<ChangeId>,
        c: C,
    ) -> Result<(), E>
    where
        E: From<std::io::Error>,
    {
        debug!("outputting vertex {:?}", v);
        self.buf.clear();
        c(&mut self.buf)?;
        if !v.change.is_root() {
            // Collect the changes introducing the PARENT edges of `v`,
            // skipping the root change.
            self.changes.clear();
            for e in self
                .txn
                .iter_adjacent(self.channel, v, EdgeFlags::PARENT, EdgeFlags::all())
                .unwrap()
            {
                let e = e.unwrap();
                if e.introduced_by.is_root() {
                    continue;
                }
                self.changes.insert(e.introduced_by);
            }
            // Finish any partial line before separating from the previous
            // block with an empty line.
            if !self.new_line {
                writeln!(self.w)?;
            }
            writeln!(self.w)?;
            // Print the change hashes as a comma-separated base32 list.
            let mut is_first = true;
            for c in self.changes.drain() {
                write!(
                    self.w,
                    "{}{}",
                    if is_first { "" } else { ", " },
                    c.to_base32()
                )?;
                is_first = false;
            }
            writeln!(self.w, "\n")?;
        }
        let ends_with_newline = self.buf.ends_with(b"\n");
        // Quote every line of the vertex contents (non-UTF-8 contents are
        // silently skipped here).
        if let Ok(s) = std::str::from_utf8(&self.buf[..]) {
            for l in s.lines() {
                self.w.write_all(b"> ")?;
                self.w.write_all(l.as_bytes())?;
                self.w.write_all(b"\n")?;
            }
        }
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    /// Write a conflict marker. Markers start with '\n' (see the marker
    /// constants), which is skipped when already at the start of a line.
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), std::io::Error> {
        if !self.new_line {
            self.w.write_all(s.as_bytes())?;
        } else {
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        Ok(())
    }
}
use std::collections::HashMap;
use std::path::PathBuf;
use crate::repository::*;
use anyhow::bail;
use clap::Clap;
use libpijul::{MutTxnT, MutTxnTExt};
use log::debug;
// CLI arguments for `pijul clone`.
// NB: the `///` doc comments below double as clap help text.
#[derive(Clap, Debug)]
pub struct Clone {
    /// Only download changes with alive contents
    #[clap(long = "lazy")]
    lazy: bool,
    /// Set the remote channel
    #[clap(long = "channel", default_value = crate::DEFAULT_CHANNEL)]
    channel: String,
    /// Clone this change and its dependencies
    #[clap(long = "change", conflicts_with = "state")]
    change: Option<String>,
    /// Clone this state
    #[clap(long = "state", conflicts_with = "change")]
    state: Option<String>,
    /// Clone this path only
    #[clap(long = "path", multiple(true))]
    partial_paths: Vec<String>,
    /// Do not check certificates (HTTPS remotes only, this option might be dangerous)
    #[clap(short = 'k')]
    no_cert_check: bool,
    /// Clone this remote
    remote: String,
    /// Path where to clone the repository.
    /// If missing, the inferred name of the remote repository is used.
    path: Option<PathBuf>,
}
impl Clone {
    /// Clone a remote repository into a local directory, then output the
    /// working copy and save the channel/remote in the configuration.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut remote =
            crate::remote::unknown_remote(None, &self.remote, &self.channel, self.no_cert_check)
                .await?;
        // Destination: explicit path (made absolute), else a directory named
        // after the remote repository, in the current directory.
        let path = if let Some(path) = self.path {
            if path.is_relative() {
                let mut p = std::env::current_dir()?;
                p.push(path);
                p
            } else {
                path
            }
        } else if let Some(path) = remote.repo_name() {
            let mut p = std::env::current_dir()?;
            p.push(path);
            p
        } else {
            bail!("Could not infer repository name from {:?}", self.remote)
        };
        debug!("path = {:?}", path);
        let path_ = path.clone();
        // On Ctrl-C, remove the partially-cloned directory before exiting.
        ctrlc::set_handler(move || {
            std::fs::remove_dir_all(&path_).unwrap_or(());
            std::process::exit(130)
        })
        .unwrap_or(());
        // `RepoPath` is a drop guard that deletes the directory; it is
        // `mem::forget`-ed below once the clone has fully succeeded.
        let repo_path = RepoPath(path);
        let mut repo = Repository::init(Some(repo_path.0.clone()))?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut channel = txn.open_or_create_channel(&self.channel)?;
        if let Some(ref change) = self.change {
            // Clone a single change (and its dependencies).
            let h = change.parse()?;
            remote
                .clone_tag(&mut repo, &mut txn, &mut channel, &[h])
                .await?
        } else if let Some(ref state) = self.state {
            // Clone up to a given state.
            let h = state.parse()?;
            remote
                .clone_state(&mut repo, &mut txn, &mut channel, h, self.lazy)
                .await?
        } else {
            // Clone the whole channel, possibly restricted to some paths.
            remote
                .clone_channel(
                    &mut repo,
                    &mut txn,
                    &mut channel,
                    self.lazy,
                    &self.partial_paths,
                )
                .await?;
        }
        let progress = indicatif::ProgressBar::new_spinner();
        progress.set_style(
            indicatif::ProgressStyle::default_spinner().template("{spinner} Outputting repository"),
        );
        progress.enable_steady_tick(100);
        // Materialize the working copy from the cloned pristine.
        txn.output_repository_no_pending(
            &mut repo.working_copy,
            &repo.changes,
            &mut channel,
            &mut HashMap::new(),
            "",
            true,
            None,
        )?;
        remote.finish().await?;
        txn.commit()?;
        progress.set_style(
            indicatif::ProgressStyle::default_spinner().template("✓ Outputting repository"),
        );
        progress.finish();
        // Persist the channel and the default remote in the configuration.
        repo.config.current_channel = Some(self.channel);
        if let crate::remote::RemoteRepo::Local(ref l) = remote {
            repo.config.default_remote = std::fs::canonicalize(&l.root)?
                .to_str()
                .map(|x| x.to_string());
        } else {
            repo.config.default_remote = Some(self.remote);
        }
        repo.save_config()?;
        std::mem::drop(repo);
        // Success: disarm the cleanup guard so the clone is kept on disk.
        std::mem::forget(repo_path);
        Ok(())
    }
}
// Drop guard for a clone in progress: deletes the target directory unless
// `std::mem::forget` is called on it after a successful clone.
struct RepoPath(PathBuf);
impl Drop for RepoPath {
    fn drop(&mut self) {
        // Best effort: errors while cleaning up are ignored.
        std::fs::remove_dir_all(&self.0).unwrap_or(());
    }
}
use std::io::Write;
use std::path::PathBuf;
use crate::repository::Repository;
use anyhow::anyhow;
use anyhow::bail;
use clap::Clap;
use libpijul::{ChannelTxnT, MutTxnT, TxnT};
// CLI arguments for `pijul channel`: with no subcommand, lists the
// channels; otherwise delegates to `SubCommand`.
#[derive(Clap, Debug)]
pub struct Channel {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Optional action; `None` lists the channels.
    #[clap(subcommand)]
    subcmd: Option<SubCommand>,
}
// Subcommands of `pijul channel`.
// NB: the `///` doc comments below double as clap help text.
#[derive(Clap, Debug)]
pub enum SubCommand {
    /// Delete a channel.
    /// The channel must not be the current channel.
    #[clap(name = "delete")]
    Delete { delete: String },
    /// Rename a channel.
    // With `to` missing, renames the current channel to `from`.
    #[clap(name = "rename")]
    Rename { from: String, to: Option<String> },
    /// Switch to a channel.
    /// There must not be unrecorded changes in the working copy.
    #[clap(name = "switch")]
    Switch { to: Option<String> },
}
impl Channel {
    /// List, delete, rename, or switch channels.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut stdout = std::io::stdout();
        match self.subcmd {
            // No subcommand: list all channels, marking the current one
            // with '*'.
            None => {
                let repo = Repository::find_root(self.repo_path)?;
                let current = repo.config.current_channel();
                let txn = repo.pristine.txn_begin()?;
                for channel in txn.iter_channels("")? {
                    let channel = channel?;
                    let channel = channel.borrow();
                    let name = libpijul::pristine::sanakirja::Txn::name(&channel);
                    if current == Some(name) {
                        writeln!(stdout, "* {}", name)?;
                    } else {
                        writeln!(stdout, " {}", name)?;
                    }
                }
            }
            // Delete a channel, refusing to delete the current one.
            Some(SubCommand::Delete { ref delete }) => {
                let repo = Repository::find_root(self.repo_path)?;
                let current = repo.config.current_channel();
                if Some(delete.as_str()) == current {
                    bail!("Cannot delete current channel")
                }
                let mut txn = repo.pristine.mut_txn_begin();
                if !txn.drop_channel(delete)? {
                    return Err(anyhow!("Channel {} not found", delete));
                }
                txn.commit()?;
            }
            // Switch: delegate to `pijul reset`, which refuses to run with
            // unrecorded changes.
            Some(SubCommand::Switch { to }) => {
                (crate::commands::reset::Reset {
                    repo_path: self.repo_path,
                    channel: to,
                    dry_run: false,
                    files: Vec::new(),
                })
                .switch()?;
            }
            // Rename: two names rename `from` into `to`; a single name
            // renames the current channel into `from`.
            Some(SubCommand::Rename { ref from, ref to }) => {
                let repo = Repository::find_root(self.repo_path)?;
                let current = repo.config.current_channel();
                let mut txn = repo.pristine.mut_txn_begin();
                let (from, to) = if let Some(to) = to {
                    (from.as_str(), to.as_str())
                } else if let Some(current) = current {
                    (current, from.as_str())
                } else {
                    bail!("No current channel")
                };
                let mut channel = if let Some(channel) = txn.load_channel(from)? {
                    channel
                } else {
                    bail!("No such channel: {:?}", from)
                };
                txn.rename_channel(&mut channel, to)?;
                txn.commit()?;
            }
        }
        Ok(())
    }
}
use std::path::PathBuf;
use clap::Clap;
use libpijul::change::Local;
use libpijul::changestore::ChangeStore;
use libpijul::*;
use crate::repository::*;
// CLI arguments for `pijul change`: print one change in text format.
// NB: the `///` doc comments below double as clap help text.
#[derive(Clap, Debug)]
pub struct Change {
    /// Use the repository at PATH instead of the current directory
    #[clap(long = "repository", value_name = "PATH")]
    repo_path: Option<PathBuf>,
    /// The hash of the change to show, or an unambiguous prefix thereof
    #[clap(value_name = "HASH")]
    hash: Option<String>,
}
impl Change {
    /// Print a change in text format through the pager. Without a hash
    /// argument, prints the latest change of the current channel.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let txn = repo.pristine.txn_begin()?;
        let changes = repo.changes;
        let hash = if let Some(hash) = self.hash {
            // Resolve a (possibly partial) hash.
            txn.hash_from_prefix(&hash)?.0
        } else {
            // No hash given: take the head of the current channel; exit
            // silently if there is no channel or no change.
            let channel_name = repo.config.get_current_channel(None);
            let channel = if let Some(channel) = txn.load_channel(channel_name)? {
                channel
            } else {
                return Ok(());
            };
            let channel = channel.borrow();
            if let Some(h) = txn.reverse_log(&channel, None)?.next() {
                (h?.1).0
            } else {
                return Ok(());
            }
        };
        // NOTE(review): this `unwrap()` panics if the change is missing
        // from the change store — consider propagating the error instead.
        let change = changes.get_change(&hash).unwrap();
        let file_name = |l: &Local, _| format!("{}:{}", l.path, l.line);
        let o = std::io::stdout();
        let mut o = o.lock();
        super::pager();
        change.write(&changes, Some(hash), file_name, true, &mut o)?;
        Ok(())
    }
}
use std::io::Write;
use std::path::PathBuf;
use clap::Clap;
use libpijul::{Hash, Merkle, MutTxnTExt, TxnT, TxnTExt};
use log::debug;
use crate::repository::Repository;
// CLI arguments for `pijul archive`: export a channel as a tarball.
// NB: the `///` doc comments below double as clap help text.
#[derive(Clap, Debug)]
pub struct Archive {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Use this channel, instead of the current channel
    #[clap(long = "channel")]
    channel: Option<String>,
    /// Ask the remote to send an archive
    #[clap(long = "remote")]
    remote: Option<String>,
    /// Do not check certificates (HTTPS remotes only, this option might be dangerous)
    #[clap(short = 'k')]
    no_cert_check: bool,
    /// Archive in this state
    #[clap(long = "state")]
    state: Option<String>,
    /// Apply these changes after switching to the channel
    #[clap(long = "change", multiple = true)]
    change: Vec<String>,
    /// Append this path in front of each path inside the archive
    #[clap(long = "prefix")]
    prefix: Option<String>,
    /// Name of the output file
    #[clap(short = 'o')]
    name: String,
}
impl Archive {
    /// Produce a `.tar.gz` archive of a channel, either by asking a remote
    /// to build it or by archiving the local repository (optionally at a
    /// given state, with extra changes applied).
    pub async fn run(mut self) -> Result<(), anyhow::Error> {
        let state: Option<Merkle> = if let Some(ref state) = self.state {
            Some(state.parse()?)
        } else {
            None
        };
        // Extra changes to apply on top of `state`.
        let mut extra: Vec<Hash> = Vec::new();
        for h in self.change.iter() {
            extra.push(h.parse()?);
        }
        // Normalise the prefix so it always ends with a slash.
        if let Some(ref mut p) = self.prefix {
            if !p.is_empty() && !p.ends_with("/") {
                p.push('/');
            }
        }
        if let Some(ref rem) = self.remote {
            debug!("unknown");
            let mut remote = crate::remote::unknown_remote(
                None,
                rem,
                if let Some(ref channel) = self.channel {
                    channel
                } else {
                    crate::DEFAULT_CHANNEL
                },
                self.no_cert_check,
            )
            .await?;
            if let crate::remote::RemoteRepo::LocalChannel(_) = remote {
                // A local "remote": treat it as the repository to archive
                // in the local code path below.
                if let Some(ref mut path) = self.repo_path {
                    path.clear();
                    path.push(rem);
                }
            } else {
                // A true remote: ask it for the archive and write it out.
                let mut p = std::path::Path::new(&self.name).to_path_buf();
                if !self.name.ends_with(".tar.gz") {
                    p.set_extension("tar.gz");
                }
                let f = std::fs::File::create(&p)?;
                remote
                    .archive(self.prefix, state.map(|x| (x, &extra[..])), f)
                    .await?;
                return Ok(());
            }
        }
        // Local code path: archive the repository found from `repo_path`.
        if let Ok(repo) = Repository::find_root(self.repo_path.clone()) {
            let channel_name = repo.config.get_current_channel(self.channel.as_ref());
            let mut p = std::path::Path::new(&self.name).to_path_buf();
            if !self.name.ends_with(".tar.gz") {
                p.set_extension("tar.gz");
            }
            let mut f = std::fs::File::create(&p)?;
            let mut tarball = libpijul::output::Tarball::new(&mut f, self.prefix);
            let conflicts = if let Some(state) = state {
                let mut txn = repo.pristine.mut_txn_begin();
                // Report a proper error instead of panicking (was
                // `.unwrap()`) when the channel does not exist, matching
                // the other commands in this crate.
                let mut channel = if let Some(channel) = txn.load_channel(&channel_name)? {
                    channel
                } else {
                    anyhow::bail!("No such channel: {:?}", channel_name)
                };
                txn.archive_with_state(
                    &repo.changes,
                    &mut channel,
                    state,
                    &extra[..],
                    &mut tarball,
                )?
            } else {
                let txn = repo.pristine.txn_begin()?;
                // Same fix as above: no panic on a missing channel.
                let channel = if let Some(channel) = txn.load_channel(&channel_name)? {
                    channel
                } else {
                    anyhow::bail!("No such channel: {:?}", channel_name)
                };
                txn.archive(&repo.changes, &channel, &mut tarball)?
            };
            if !conflicts.is_empty() {
                writeln!(
                    std::io::stderr(),
                    "There were {} conflicts",
                    conflicts.len()
                )?
            }
        }
        Ok(())
    }
}
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use anyhow::bail;
use clap::Clap;
use libpijul::changestore::ChangeStore;
use libpijul::{DepsTxnT, GraphTxnT, MutTxnT, MutTxnTExt, TxnT};
use crate::repository::Repository;
// CLI arguments for `pijul apply`.
// NB: the `///` doc comments below double as clap help text.
#[derive(Clap, Debug)]
pub struct Apply {
    /// Set the repository where this command should run. Defaults to the first ancestor of the current directory that contains a `.pijul` directory.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Apply change to this channel
    #[clap(long = "channel")]
    channel: Option<String>,
    /// Only apply the dependencies of the change, not the change itself. Only applicable for a single change.
    #[clap(long = "deps-only")]
    deps_only: bool,
    /// The change that need to be applied. If this value is missing, read the change in text format on the standard input.
    change: Vec<String>,
}
impl Apply {
    /// Apply changes to a channel, then output the files they touch.
    ///
    /// Each argument is resolved as a hash prefix of a change already in
    /// the store, or else deserialized as a change file; with no argument,
    /// a change in text format is read from standard input.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let mut channel = if let Some(channel) = txn.load_channel(&channel_name)? {
            channel
        } else {
            txn.open_or_create_channel(&channel_name)?
        };
        // Resolve the command-line arguments into hashes.
        let mut hashes = Vec::new();
        for ch in self.change.iter() {
            hashes.push(if let Ok(h) = txn.hash_from_prefix(ch) {
                h.0
            } else {
                let change = libpijul::change::Change::deserialize(&ch, None)?;
                repo.changes.save_change(&change)?
            })
        }
        if hashes.is_empty() {
            // No argument: read a change in text format from stdin.
            let mut change = std::io::BufReader::new(std::io::stdin());
            let change = libpijul::change::Change::read(&mut change, &mut HashMap::new())?;
            hashes.push(repo.changes.save_change(&change)?)
        }
        if self.deps_only {
            if hashes.len() > 1 {
                bail!("--deps-only is only applicable to a single change")
            }
            txn.apply_deps_rec(&repo.changes, &mut channel, hashes.pop().unwrap())?;
        } else {
            // Bug fix: iterate by reference instead of `drain(..)`. Draining
            // emptied `hashes` before the touched-files pass below, so the
            // working copy of the touched files was never refreshed.
            for hash in hashes.iter() {
                txn.apply_change_rec(&repo.changes, &mut channel, *hash)?;
            }
        }
        // Collect the inodes touched by the applied changes.
        let mut touched = HashSet::new();
        for d in hashes.iter() {
            if let Some(int) = txn.get_internal(*d)? {
                for inode in txn.iter_rev_touched(int)? {
                    let (int_, inode) = inode?;
                    // `iter_rev_touched` is ordered by change: skip entries
                    // before `int`, stop after it.
                    if int_ < int {
                        continue;
                    } else if int_ > int {
                        break;
                    }
                    touched.insert(inode);
                }
            }
        }
        // Refresh the working copy of every touched file.
        let mut done = HashMap::new();
        for i in touched {
            let (path, _) =
                libpijul::fs::find_path(&repo.changes, &txn, &channel.borrow(), false, i)?;
            txn.output_repository_no_pending(
                &mut repo.working_copy,
                &repo.changes,
                &mut channel,
                &mut done,
                &path,
                true,
                None,
            )?;
        }
        txn.commit()?;
        Ok(())
    }
}
[package]
name = "pijul"
description = "The sound distributed version control system."
version = "1.0.0-alpha.31"
authors = ["Pierre-Étienne Meunier <pe@pijul.org>"]
edition = "2018"
repository = "https://nest.pijul.com/pijul/pijul"
license = "GPL-2.0-or-later"
include = [
"Cargo.toml",
"src",
"src/commands",
"src/commands/log.rs",
"src/commands/protocol.rs",
"src/commands/apply.rs",
"src/commands/debug.rs",
"src/commands/checkout.rs",
"src/commands/file_operations.rs",
"src/commands/clone.rs",
"src/commands/git.rs",
"src/commands/record.rs",
"src/commands/change.rs",
"src/commands/diff.rs",
"src/commands/unrecord.rs",
"src/commands/channel.rs",
"src/commands/init.rs",
"src/commands/mod.rs",
"src/commands/archive.rs",
"src/commands/reset.rs",
"src/commands/fork.rs",
"src/commands/pushpull.rs",
"src/config.rs",
"src/repository.rs",
"src/main.rs",
"src/remote",
"src/remote/local.rs",
"src/remote/ssh.rs",
"src/remote/mod.rs",
]
[features]
git = [ "git2", "sanakirja/git2" ]
keep-changes = []
default = [ "keep-changes" ]
[dependencies]
human-panic = "1.0"
clap = "3.0.0-beta.2"
anyhow = "1.0"
libpijul = { path = "../libpijul", version = "1.0.0-alpha.27", features = [ "tarball" ] }
chrono = { version = "0.4" }
ignore = "0.4"
env_logger = "0.8"
log = "0.4"
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
toml = "0.5"
tokio = { version = "1.0", features = [ "rt-multi-thread", "macros", "sync" ] }
thrussh = "0.32.1"
thrussh-keys = "0.20.1"
thrussh-config = "0.5"
reqwest = { version = "0.11", features = [ "stream" ] }
byteorder = "1.3"
sanakirja = { version = "0.15.8", features = ["crc32"] }
futures = "0.3"
dirs-next = "2.0"
lazy_static = "1.4"
regex = "1.4"
whoami = "0.9"
rpassword = "5.0"
git2 = { version = "0.13", optional = true }
rand = "0.8"
edit = "0.1"
data-encoding = "2.3"
futures-util = "0.3"
indicatif = "0.15"
num_cpus = "1.13"
canonical-path = "2.0"
ctrlc = "3.1"
[target.'cfg(target_os = "linux")'.dependencies]
pager = "0.16.0"
use crate::pristine::InodeMetadata;
#[cfg(feature = "ondisk-repos")]
pub mod filesystem;
#[cfg(feature = "ondisk-repos")]
pub use filesystem::FileSystem;
pub mod memory;
pub use memory::Memory;
/// Error returned by [`WorkingCopy::write_file`]: either an error from the
/// caller-supplied writer closure, or an I/O error from the working copy.
#[derive(Debug, Error)]
pub enum WriteError<E: std::error::Error + 'static> {
    /// Error produced by the closure writing the file contents.
    #[error(transparent)]
    E(E),
    /// I/O error while creating or writing the file.
    #[error(transparent)]
    Io(#[from] std::io::Error),
}
/// Abstraction over a working copy: how files are created, read, written,
/// renamed and removed (implemented on disk and in memory).
pub trait WorkingCopy {
    type Error: std::error::Error;
    /// Create `path` as a directory, together with all missing parents.
    fn create_dir_all(&mut self, path: &str) -> Result<(), Self::Error>;
    /// Return the permissions and kind (file/directory) of `file`.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, Self::Error>;
    /// Append the contents of `file` to `buffer`.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), Self::Error>;
    /// Return the last modification time of `file`.
    fn modified_time(&self, file: &str) -> Result<std::time::SystemTime, Self::Error>;
    /// Remove `name` (recursively for directories, in the implementations
    /// in this module).
    fn remove_path(&mut self, name: &str) -> Result<(), Self::Error>;
    /// Rename `former` to `new`.
    fn rename(&mut self, former: &str, new: &str) -> Result<(), Self::Error>;
    /// Set the (Unix-style) permission bits of `name`.
    fn set_permissions(&mut self, name: &str, permissions: u16) -> Result<(), Self::Error>;
    /// Replace the contents of `file` with whatever `writer` produces.
    fn write_file<A, E: std::error::Error, F: FnOnce(&mut dyn std::io::Write) -> Result<A, E>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, WriteError<E>>;
}
use super::*;
use crate::pristine::InodeMetadata;
use std::collections::HashMap;
use std::time::SystemTime;
/// An in-memory working copy: a tree of files and directories held in
/// nested `HashMap`s.
#[derive(Debug)]
pub struct Memory {
    // Root of the file tree.
    pub files: FileTree,
    // Time of the last mutation of the tree.
    pub last_modified: SystemTime,
}
/// One directory level of the in-memory tree: children indexed by name.
#[derive(Debug, Default)]
pub struct FileTree {
    children: HashMap<String, Inode>,
}
/// A node of the in-memory tree: a regular file with its contents, or a
/// directory with its children.
#[derive(Debug)]
enum Inode {
    File {
        meta: InodeMetadata,
        last_modified: SystemTime,
        contents: Vec<u8>,
    },
    Directory {
        meta: InodeMetadata,
        last_modified: SystemTime,
        children: FileTree,
    },
}
impl Default for Memory {
fn default() -> Self {
Self {
files: FileTree::default(),
last_modified: SystemTime::now(),
}
}
}
impl Memory {
    /// An empty in-memory working copy.
    pub fn new() -> Self {
        Self::default()
    }
    /// List every file and directory in the tree, breadth first, as
    /// `/`-separated paths.
    pub fn list_files(&self) -> Vec<String> {
        let mut result = Vec::new();
        // Two frontiers: the current BFS level and the next one.
        let mut current_files = vec![(String::new(), &self.files)];
        let mut next_files = Vec::new();
        loop {
            if current_files.is_empty() {
                break;
            }
            for (path, tree) in current_files.iter() {
                for (name, inode) in tree.children.iter() {
                    let mut path = path.clone();
                    crate::path::push(&mut path, name);
                    match inode {
                        Inode::File { .. } => {
                            result.push(path);
                        }
                        Inode::Directory { ref children, .. } => {
                            result.push(path.clone());
                            next_files.push((path, children))
                        }
                    }
                }
            }
            std::mem::swap(&mut current_files, &mut next_files);
            next_files.clear();
        }
        result
    }
    /// Add a regular file (mode 0o644) at `file` with the given contents.
    pub fn add_file(&mut self, file: &str, file_contents: Vec<u8>) {
        let file_meta = InodeMetadata::new(0o644, false);
        let last = SystemTime::now();
        self.add_inode(
            file,
            Inode::File {
                meta: file_meta,
                last_modified: last,
                contents: file_contents,
            },
        )
    }
    /// Add an empty directory (mode 0o755) at `file`.
    pub fn add_dir(&mut self, file: &str) {
        let file_meta = InodeMetadata::new(0o755, true);
        let last = SystemTime::now();
        self.add_inode(
            file,
            Inode::Directory {
                meta: file_meta,
                last_modified: last,
                children: FileTree {
                    children: HashMap::new(),
                },
            },
        )
    }
    /// Insert `inode` at path `file`, creating intermediate directories
    /// (mode 0o755) as needed, and bump `last_modified`.
    fn add_inode(&mut self, file: &str, inode: Inode) {
        let mut file_tree = &mut self.files;
        let last = SystemTime::now();
        self.last_modified = last;
        // Walk the path components; the final component receives `inode`.
        let file = file.split('/').filter(|c| !c.is_empty());
        let mut p = file.peekable();
        while let Some(f) = p.next() {
            if p.peek().is_some() {
                let entry = file_tree
                    .children
                    .entry(f.to_string())
                    .or_insert(Inode::Directory {
                        meta: InodeMetadata::new(0o755, true),
                        children: FileTree {
                            children: HashMap::new(),
                        },
                        last_modified: last,
                    });
                match *entry {
                    Inode::Directory {
                        ref mut children, ..
                    } => file_tree = children,
                    // A non-final path component resolved to a file.
                    _ => panic!("Not a directory"),
                }
            } else {
                file_tree.children.insert(f.to_string(), inode);
                break;
            }
        }
    }
    /// Look up the inode at path `file`, if any.
    // NOTE(review): when a non-final component is a file, the loop breaks
    // and returns that file's inode instead of `None` — confirm callers
    // never pass such paths.
    fn get_file(&self, file: &str) -> Option<&Inode> {
        debug!("get_file {:?}", file);
        debug!("repo = {:?}", self);
        let mut t = Some(&self.files);
        let mut inode = None;
        let it = file.split('/').filter(|c| !c.is_empty());
        for c in it {
            debug!("c = {:?}", c);
            inode = t.take().unwrap().children.get(c);
            debug!("inode = {:?}", inode);
            match inode {
                Some(Inode::Directory { ref children, .. }) => t = Some(children),
                _ => break,
            }
        }
        inode
    }
    /// Mutable lookup of the inode at path `file`. Also bumps
    /// `last_modified`, since callers use this to modify the tree.
    fn get_file_mut<'a>(&'a mut self, file: &str) -> Option<&'a mut Inode> {
        debug!("get_file_mut {:?}", file);
        debug!("repo = {:?}", self);
        let mut t = Some(&mut self.files);
        let mut it = file.split('/').filter(|c| !c.is_empty()).peekable();
        self.last_modified = SystemTime::now();
        while let Some(c) = it.next() {
            debug!("c = {:?}", c);
            let inode_ = t.take().unwrap().children.get_mut(c);
            debug!("inode = {:?}", inode_);
            // Last component: this is the requested inode.
            if it.peek().is_none() {
                return inode_;
            }
            match inode_ {
                Some(Inode::Directory {
                    ref mut children, ..
                }) => t = Some(children),
                _ => return None,
            }
        }
        None
    }
    /// Remove and return the inode at `path`, if any; bumps `last_modified`.
    // NOTE(review): panics on an empty path (`it.next().unwrap()`) —
    // confirm callers never pass "".
    fn remove_path_(&mut self, path: &str) -> Option<Inode> {
        debug!("remove_path {:?}", path);
        debug!("repo = {:?}", self);
        let mut t = Some(&mut self.files);
        let mut it = path.split('/').filter(|c| !c.is_empty());
        let mut c = it.next().unwrap();
        self.last_modified = SystemTime::now();
        loop {
            debug!("c = {:?}", c);
            let next_c = it.next();
            let t_ = t.take().unwrap();
            let next_c = if let Some(next_c) = next_c {
                next_c
            } else {
                // `c` is the last component: remove it from its parent.
                return t_.children.remove(c);
            };
            let inode = t_.children.get_mut(c);
            c = next_c;
            debug!("inode = {:?}", inode);
            match inode {
                Some(Inode::Directory {
                    ref mut children, ..
                }) => t = Some(children),
                _ => return None,
            }
        }
    }
}
/// Errors returned by the in-memory working copy.
#[derive(Debug, Error)]
pub enum Error {
    #[error("Path not found: {path}")]
    NotFound { path: String },
}
impl WorkingCopy for Memory {
    type Error = Error;
    /// Create `file` (and missing intermediate directories) as a directory;
    /// does nothing if the path already exists.
    fn create_dir_all(&mut self, file: &str) -> Result<(), Self::Error> {
        if self.get_file(file).is_none() {
            let last = SystemTime::now();
            self.add_inode(
                file,
                Inode::Directory {
                    meta: InodeMetadata::new(0o755, true),
                    children: FileTree {
                        children: HashMap::new(),
                    },
                    last_modified: last,
                },
            );
        }
        Ok(())
    }
    /// Return the stored metadata of `file`, or `NotFound`.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, Self::Error> {
        match self.get_file(file) {
            Some(Inode::Directory { meta, .. }) => Ok(*meta),
            Some(Inode::File { meta, .. }) => Ok(*meta),
            None => Err(Error::NotFound {
                path: file.to_string(),
            }),
        }
    }
    /// Append the contents of `file` to `buffer`; panics if `file` is a
    /// directory.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), Self::Error> {
        match self.get_file(file) {
            Some(Inode::Directory { .. }) => panic!("Not a file: {:?}", file),
            Some(Inode::File { ref contents, .. }) => {
                buffer.extend(contents);
                Ok(())
            }
            None => Err(Error::NotFound {
                path: file.to_string(),
            }),
        }
    }
    /// The whole tree shares one modification time: the last mutation.
    fn modified_time(&self, _file: &str) -> Result<std::time::SystemTime, Self::Error> {
        Ok(self.last_modified)
    }
    /// Remove `path` if it exists; missing paths are silently ignored.
    fn remove_path(&mut self, path: &str) -> Result<(), Self::Error> {
        self.remove_path_(path);
        Ok(())
    }
    /// Move the inode at `old` to `new`; does nothing if `old` is missing.
    fn rename(&mut self, old: &str, new: &str) -> Result<(), Self::Error> {
        debug!("rename {:?} to {:?}", old, new);
        if let Some(inode) = self.remove_path_(old) {
            self.add_inode(new, inode)
        }
        Ok(())
    }
    /// Replace the permission bits of `file`; panics if it does not exist.
    fn set_permissions(&mut self, file: &str, permissions: u16) -> Result<(), Self::Error> {
        debug!("set_permissions {:?}", file);
        match self.get_file_mut(file) {
            Some(Inode::File { ref mut meta, .. }) => {
                *meta = InodeMetadata::new(permissions as usize, false);
            }
            Some(Inode::Directory { ref mut meta, .. }) => {
                *meta = InodeMetadata::new(permissions as usize, true);
            }
            None => panic!("file not found: {:?}", file),
        }
        Ok(())
    }
    /// Overwrite `file` (creating it with mode 0o644 if missing) with the
    /// output of `writer`; panics if `file` is a directory.
    fn write_file<A, E: std::error::Error, F: FnOnce(&mut dyn std::io::Write) -> Result<A, E>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, WriteError<E>> {
        match self.get_file_mut(file) {
            Some(Inode::File {
                ref mut contents, ..
            }) => {
                contents.clear();
                writer(contents).map_err(WriteError::E)
            }
            None => {
                let mut contents = Vec::new();
                let last_modified = SystemTime::now();
                let a = writer(&mut contents).map_err(WriteError::E)?;
                self.add_inode(
                    file,
                    Inode::File {
                        meta: InodeMetadata::new(0o644, false),
                        contents,
                        last_modified,
                    },
                );
                Ok(a)
            }
            // `Some(Inode::Directory { .. })`.
            _ => panic!("not a file: {:?}", file),
        }
    }
}
use super::*;
use crate::pristine::InodeMetadata;
use canonical_path::{CanonicalPath, CanonicalPathBuf};
use ignore::WalkBuilder;
use std::borrow::Cow;
use std::path::{Path, PathBuf};
/// A working copy stored on the local filesystem, rooted at `root`.
pub struct FileSystem {
    root: PathBuf,
}
/// Return `true` if `path`, inside the repository rooted at `root_`, is NOT
/// ignored. Rules come from the `.ignore` and `.gitignore` files of every
/// directory from the root down to `path`, plus the `.pijul` directory
/// itself. Returns `false` when `path` is not under `root_` or the rules
/// fail to build.
pub fn filter_ignore(root_: &CanonicalPath, path: &CanonicalPath, is_dir: bool) -> bool {
    debug!("path = {:?} root = {:?}", path, root_);
    if let Ok(suffix) = path.as_path().strip_prefix(root_.as_path()) {
        debug!("suffix = {:?}", suffix);
        let mut root = root_.as_path().to_path_buf();
        let mut ignore = ignore::gitignore::GitignoreBuilder::new(&root);
        // Add one directory's ignore files to the builder (best effort:
        // `add` errors for missing files are ignored).
        let mut add_root = |root: &mut PathBuf| {
            ignore.add_line(None, crate::DOT_DIR).unwrap();
            root.push(".ignore");
            ignore.add(&root);
            root.pop();
            root.push(".gitignore");
            ignore.add(&root);
            root.pop();
        };
        add_root(&mut root);
        // Walk from the root towards `path`, collecting rules on the way.
        for c in suffix.components() {
            root.push(c);
            add_root(&mut root);
        }
        if let Ok(ig) = ignore.build() {
            let m = ig.matched(suffix, is_dir);
            debug!("m = {:?}", m);
            return !m.is_ignore();
        }
    }
    false
}
/// From a path on the filesystem, return the canonical path (a `PathBuf`), and a
/// prefix relative to the root of the repository (a `String`).
pub fn get_prefix(
    repo_path: Option<&CanonicalPath>,
    prefix: &Path,
) -> Result<(canonical_path::CanonicalPathBuf, String), std::io::Error> {
    let mut p = String::new();
    // Default to the current directory when no repository root is given.
    let repo = if let Some(repo) = repo_path {
        Cow::Borrowed(repo)
    } else {
        Cow::Owned(canonical_path::CanonicalPathBuf::canonicalize(
            std::env::current_dir()?,
        )?)
    };
    debug!("get prefix {:?}", repo);
    let prefix_ = repo.join(&prefix)?;
    debug!("get prefix {:?}", prefix_);
    // Rebuild the repo-relative prefix using '/' separators regardless of
    // the platform's path separator.
    if let Ok(prefix) = prefix_.as_path().strip_prefix(repo.as_path()) {
        for c in prefix.components() {
            if !p.is_empty() {
                p.push('/');
            }
            let c: &std::path::Path = c.as_ref();
            p.push_str(&c.to_string_lossy())
        }
    }
    Ok((prefix_, p))
}
/// Errors while adding paths to the tree tables (walking, I/O, or the
/// transaction's own file-system errors).
#[derive(Debug, Error)]
pub enum AddError<T: std::error::Error + 'static> {
    #[error(transparent)]
    Ignore(#[from] ignore::Error),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Fs(#[from] crate::fs::FsError<T>),
}
/// Errors of the add-then-record operations: either the add phase
/// ([`AddError`]) or the record phase failed.
#[derive(Debug, Error)]
pub enum Error<C: std::error::Error + 'static, T: std::error::Error + 'static> {
    #[error(transparent)]
    Add(#[from] AddError<T>),
    #[error(transparent)]
    Record(#[from] crate::record::RecordError<C, std::io::Error, T>),
}
impl FileSystem {
    /// Build a working copy rooted at `root`.
    pub fn from_root<P: AsRef<Path>>(root: P) -> Self {
        FileSystem {
            root: root.as_ref().to_path_buf(),
        }
    }
    /// Add and record each of `prefixes`, or the whole repository when
    /// `prefixes` is empty.
    pub fn record_prefixes<
        T: crate::MutTxnTExt + crate::TxnTExt,
        C: crate::changestore::ChangeStore,
        P: AsRef<Path>,
    >(
        &mut self,
        txn: &mut T,
        channel: &mut crate::pristine::ChannelRef<T>,
        changes: &C,
        state: &mut crate::RecordBuilder,
        repo_path: CanonicalPathBuf,
        prefixes: &[P],
        threads: usize,
    ) -> Result<(), Error<C::Error, T::GraphError>> {
        for prefix in prefixes.iter() {
            self.record_prefix(
                txn,
                channel,
                changes,
                state,
                repo_path.clone(),
                prefix.as_ref(),
                threads,
            )?
        }
        if prefixes.is_empty() {
            // No prefix given: record from the repository root.
            self.record_prefix(
                txn,
                channel,
                changes,
                state,
                repo_path.clone(),
                Path::new(""),
                threads,
            )?
        }
        Ok(())
    }
    /// Recursively add `full` (a path under `repo_path`) to the tree
    /// tables, walking directories in parallel and honouring ignore files.
    pub fn add_prefix_rec<T: crate::MutTxnTExt + crate::TxnTExt>(
        &self,
        txn: &mut T,
        repo_path: CanonicalPathBuf,
        full: CanonicalPathBuf,
        prefix: &str,
        threads: usize,
    ) -> Result<(), AddError<T::GraphError>> {
        debug!("record_prefix {:?}", prefix);
        debug!("full = {:?}", full);
        let meta = std::fs::metadata(&full);
        debug!("meta = {:?}", meta);
        // The directory walker runs on its own thread(s) and streams
        // (path, is_dir) pairs over a bounded channel; this thread consumes
        // them and updates the transaction.
        let (sender, receiver) = std::sync::mpsc::sync_channel(100);
        debug!("{:?}", full.as_path().strip_prefix(repo_path.as_path()));
        let t = std::thread::spawn(move || -> Result<(), AddError<T::GraphError>> {
            if let Ok(meta) = meta {
                if meta.is_dir() {
                    let mut walk = WalkBuilder::new(&full);
                    // NOTE(review): `threads - 1` underflows when
                    // `threads == 0` — confirm callers always pass >= 1.
                    walk.ignore(true)
                        .git_ignore(true)
                        .filter_entry(|p| p.file_name() != crate::DOT_DIR)
                        .threads(threads - 1);
                    walk.build_parallel().run(|| {
                        Box::new(|entry| {
                            let entry: ignore::DirEntry = if let Ok(entry) = entry {
                                entry
                            } else {
                                return ignore::WalkState::Quit;
                            };
                            let p = entry.path();
                            // Skip editor backup files: "foo~" and "#foo#".
                            if let Some(p) = p.file_name() {
                                if let Some(p) = p.to_str() {
                                    if p.ends_with("~") || (p.starts_with("#") && p.ends_with("#"))
                                    {
                                        return ignore::WalkState::Skip;
                                    }
                                }
                            }
                            debug!("entry path = {:?} {:?}", entry.path(), repo_path);
                            if let Ok(entry_path) = CanonicalPathBuf::canonicalize(entry.path()) {
                                if let Ok(path) = entry_path.as_path().strip_prefix(&repo_path) {
                                    let is_dir = entry.file_type().unwrap().is_dir();
                                    // A send error means the receiver is
                                    // gone: stop walking.
                                    if sender.send((path.to_path_buf(), is_dir)).is_err() {
                                        return ignore::WalkState::Quit;
                                    }
                                } else {
                                    debug!("entry = {:?}", entry.path());
                                }
                            }
                            ignore::WalkState::Continue
                        })
                    })
                } else if let Ok(path) = full.as_path().strip_prefix(&repo_path.as_path()) {
                    // A single file: send it directly.
                    sender.send((path.to_path_buf(), false)).unwrap();
                }
            }
            Ok(())
        });
        // Consume paths until the walker drops the sender.
        while let Ok((path, is_dir)) = receiver.recv() {
            info!("Adding {:?}", path);
            let path_str = path.to_str().unwrap();
            match txn.add(path_str, is_dir) {
                Ok(()) => {}
                // Already tracked: not an error for a recursive add.
                Err(crate::fs::FsError::AlreadyInRepo(_)) => {}
                Err(e) => return Err(e.into()),
            }
        }
        if let Ok(t) = t.join() {
            t?
        }
        Ok(())
    }
    /// Add `prefix` to the tree tables, then record the resulting changes
    /// into `state`.
    pub fn record_prefix<
        T: crate::MutTxnTExt + crate::TxnTExt,
        C: crate::changestore::ChangeStore,
    >(
        &mut self,
        txn: &mut T,
        channel: &mut crate::pristine::ChannelRef<T>,
        changes: &C,
        state: &mut crate::RecordBuilder,
        repo_path: CanonicalPathBuf,
        prefix: &Path,
        threads: usize,
    ) -> Result<(), Error<C::Error, T::GraphError>> {
        if let Ok((full, prefix)) = get_prefix(Some(repo_path.as_ref()), prefix) {
            self.add_prefix_rec(txn, repo_path, full, &prefix, threads)?;
            debug!("recording from prefix {:?}", prefix);
            txn.record(
                state,
                crate::Algorithm::default(),
                channel,
                self,
                changes,
                &prefix,
            )?;
            debug!("recorded");
        }
        Ok(())
    }
    /// Turn a repository-relative path into an absolute path under `root`.
    fn path(&self, file: &str) -> PathBuf {
        let mut path = self.root.clone();
        path.extend(crate::path::components(file));
        path
    }
}
impl WorkingCopy for FileSystem {
    type Error = std::io::Error;
    /// Create `file` as a directory with all missing parents.
    fn create_dir_all(&mut self, file: &str) -> Result<(), Self::Error> {
        Ok(std::fs::create_dir_all(&self.path(file))?)
    }
    /// Return the kind and permission bits (mode & 0o777) of `file`.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, Self::Error> {
        let attr = std::fs::metadata(&self.path(file))?;
        // Platforms without Unix permissions default to 0o755.
        let permissions = permissions(&attr).unwrap_or(0o755);
        debug!("permissions = {:?}", permissions);
        Ok(InodeMetadata::new(permissions & 0o777, attr.is_dir()))
    }
    /// Append the contents of `file` to `buffer`.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), Self::Error> {
        use std::io::Read;
        let mut f = std::fs::File::open(&self.path(file))?;
        f.read_to_end(buffer)?;
        Ok(())
    }
    /// Return the mtime of `file`.
    fn modified_time(&self, file: &str) -> Result<std::time::SystemTime, Self::Error> {
        let attr = std::fs::metadata(&self.path(file))?;
        Ok(attr.modified()?)
    }
    /// Delete `path` (recursively for directories). Deletion failures are
    /// logged and otherwise ignored (best effort).
    fn remove_path(&mut self, path: &str) -> Result<(), Self::Error> {
        let path = self.path(path);
        if let Ok(meta) = std::fs::metadata(&path) {
            if let Err(e) = if meta.is_dir() {
                std::fs::remove_dir_all(&path)
            } else {
                std::fs::remove_file(&path)
            } {
                error!("while deleting {:?}: {:?}", path, e);
            }
        }
        Ok(())
    }
    /// Rename `former` to `new`, creating `new`'s parent directories when
    /// needed. Rename failures are logged and otherwise ignored.
    fn rename(&mut self, former: &str, new: &str) -> Result<(), Self::Error> {
        let former = self.path(former);
        let new = self.path(new);
        if let Some(p) = new.parent() {
            std::fs::create_dir_all(p)?
        }
        if let Err(e) = std::fs::rename(&former, &new) {
            error!("while renaming {:?} to {:?}: {:?}", former, new, e)
        }
        Ok(())
    }
    /// Set the Unix mode bits of `name`, preserving the other permission
    /// flags.
    #[cfg(not(windows))]
    fn set_permissions(&mut self, name: &str, permissions: u16) -> Result<(), Self::Error> {
        use std::os::unix::fs::PermissionsExt;
        let name = self.path(name);
        debug!("set_permissions: {:?}", name);
        let metadata = std::fs::metadata(&name)?;
        let mut current = metadata.permissions();
        debug!(
            "setting mode for {:?} to {:?} (currently {:?})",
            name, permissions, current
        );
        current.set_mode(permissions as u32);
        std::fs::set_permissions(name, current)?;
        Ok(())
    }
    /// Windows has no Unix mode bits: no-op.
    #[cfg(windows)]
    fn set_permissions(&mut self, _name: &str, _permissions: u16) -> Result<(), Self::Error> {
        Ok(())
    }
    /// Replace `file`: remove it, recreate it, and let `writer` produce
    /// the new contents through a buffered writer.
    fn write_file<A, E: std::error::Error, F: FnOnce(&mut dyn std::io::Write) -> Result<A, E>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, WriteError<E>> {
        let path = self.path(file);
        if let Some(p) = path.parent() {
            std::fs::create_dir_all(p).map_err(WriteError::Io)?
        }
        std::fs::remove_file(&path).unwrap_or(());
        let mut file =
            std::io::BufWriter::new(std::fs::File::create(&path).map_err(WriteError::Io)?);
        let result = writer(&mut file).map_err(WriteError::E)?;
        // Bug fix: flush explicitly. `BufWriter`'s `Drop` flushes the
        // buffer but silently discards any I/O error, so a failed write
        // could previously be reported as a success.
        std::io::Write::flush(&mut file).map_err(WriteError::Io)?;
        Ok(result)
    }
}
/// The full Unix `st_mode` bits of `attr` (type bits included).
#[cfg(not(windows))]
fn permissions(attr: &std::fs::Metadata) -> Option<usize> {
    use std::os::unix::fs::PermissionsExt;
    let mode = attr.permissions().mode();
    Some(mode as usize)
}
/// Windows has no Unix mode bits; callers fall back to a default.
#[cfg(windows)]
fn permissions(_: &std::fs::Metadata) -> Option<usize> {
    None
}
use crate::pristine::*;
/// Marker written at the beginning of a conflict in an output file.
pub const START_MARKER: &str = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n";
/// Marker written between the sides of a conflict.
pub const SEPARATOR: &str = "\n================================\n";
/// Marker written at the end of a conflict.
pub const END_MARKER: &str = "\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n";
/// A trait for outputting keys and their contents. This trait allows
/// to retain more information about conflicts than directly
/// outputting as bytes to a `Write`. The diff algorithm uses that
/// information, for example.
pub trait VertexBuffer {
    /// Output one vertex: `contents` renders the vertex's bytes into
    /// the buffer it is given, and the implementation forwards them.
    fn output_line<E, F>(&mut self, key: Vertex<ChangeId>, contents: F) -> Result<(), E>
    where
        E: From<std::io::Error>,
        F: FnOnce(&mut Vec<u8>) -> Result<(), E>;
    /// Output one conflict-marker line `s` (one of `START_MARKER`,
    /// `SEPARATOR`, `END_MARKER`).
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), std::io::Error>;
    /// Called when an ordinary (order) conflict starts.
    fn begin_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    /// Called when a zombie conflict starts; defaults to `begin_conflict`.
    fn begin_zombie_conflict(&mut self) -> Result<(), std::io::Error> {
        self.begin_conflict()
    }
    /// Called when a cyclic conflict starts; defaults to `begin_conflict`.
    fn begin_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        self.begin_conflict()
    }
    /// Called between two sides of a conflict.
    fn conflict_next(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(SEPARATOR)
    }
    /// Called when an ordinary conflict ends.
    fn end_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(END_MARKER)
    }
    /// Called when a zombie conflict ends.
    fn end_zombie_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(END_MARKER)
    }
    /// Called when a cyclic conflict ends.
    fn end_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(END_MARKER)
    }
}
/// A vertex buffer that writes to `w` while recording the position
/// (path and line number) of every conflict it outputs.
pub(crate) struct ConflictsWriter<'a, 'b, W: std::io::Write> {
    // The underlying writer.
    pub w: W,
    // Current 1-based line number in the output.
    pub lines: usize,
    // Whether the output currently sits at the beginning of a line.
    pub new_line: bool,
    // Path of the file being output, copied into conflict records.
    pub path: &'b str,
    // Conflicts recorded so far.
    pub conflicts: &'a mut Vec<crate::output::Conflict>,
    // Scratch buffer for rendering a single vertex.
    pub buf: Vec<u8>,
}
impl<'a, 'b, W: std::io::Write> ConflictsWriter<'a, 'b, W> {
    /// Wraps `w` for file `path`, recording conflicts into `conflicts`.
    pub fn new(w: W, path: &'b str, conflicts: &'a mut Vec<crate::output::Conflict>) -> Self {
        let buf = Vec::new();
        ConflictsWriter {
            w,
            lines: 1,
            new_line: true,
            path,
            conflicts,
            buf,
        }
    }
}
// Deref to the underlying writer, so callers can use `Write` methods
// directly on a `ConflictsWriter`.
impl<'a, 'b, W: std::io::Write> std::ops::Deref for ConflictsWriter<'a, 'b, W> {
    type Target = W;
    fn deref(&self) -> &Self::Target {
        &self.w
    }
}
impl<'a, 'b, W: std::io::Write> std::ops::DerefMut for ConflictsWriter<'a, 'b, W> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.w
    }
}
impl<'a, 'b, W: std::io::Write> VertexBuffer for ConflictsWriter<'a, 'b, W> {
    fn output_line<E, C>(&mut self, v: Vertex<ChangeId>, c: C) -> Result<(), E>
    where
        E: From<std::io::Error>,
        C: FnOnce(&mut Vec<u8>) -> Result<(), E>,
    {
        // Render the vertex into the scratch buffer, count its
        // newlines to keep `self.lines` up to date, then forward the
        // bytes to the underlying writer.
        self.buf.clear();
        c(&mut self.buf)?;
        debug!("vbuf {:?} {:?}", v, std::str::from_utf8(&self.buf));
        let ends_with_newline = self.buf.ends_with(b"\n");
        self.lines += self.buf.iter().filter(|c| **c == b'\n').count();
        self.w.write_all(&self.buf)?;
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), std::io::Error> {
        debug!("output_conflict_marker {:?}", self.new_line);
        if !self.new_line {
            // Markers start with '\n': writing `s` in full both
            // terminates the current line and adds the marker line.
            self.lines += 2;
            self.w.write_all(s.as_bytes())?;
        } else {
            // Already at the start of a line: skip the marker's
            // leading '\n' to avoid an empty line.
            self.lines += 1;
            debug!("{:?}", &s.as_bytes()[1..]);
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        // Markers always end with '\n'.
        self.new_line = true;
        Ok(())
    }
    // The three `begin_*` methods record a conflict (with the current
    // path and line) before writing the start marker.
    fn begin_conflict(&mut self) -> Result<(), std::io::Error> {
        self.conflicts.push(crate::output::Conflict::Order {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_zombie_conflict(&mut self) -> Result<(), std::io::Error> {
        self.conflicts.push(crate::output::Conflict::Zombie {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        self.conflicts.push(crate::output::Conflict::Cyclic {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
}
/// A plain vertex buffer writing to `w`, tracking only whether the
/// output currently sits at the beginning of a line.
pub struct Writer<W: std::io::Write> {
    // The underlying writer.
    w: W,
    // Scratch buffer for rendering a single vertex.
    buf: Vec<u8>,
    // Whether the output currently sits at the beginning of a line.
    new_line: bool,
}
impl<W: std::io::Write> Writer<W> {
    /// Wraps `w`; the output starts at the beginning of a line.
    pub fn new(w: W) -> Self {
        let buf = Vec::new();
        Writer {
            w,
            buf,
            new_line: true,
        }
    }
    /// Consumes this wrapper, returning the underlying writer.
    pub fn into_inner(self) -> W {
        self.w
    }
}
// Deref to the underlying writer, so callers can use `Write` methods
// directly on a `Writer`.
impl<W: std::io::Write> std::ops::Deref for Writer<W> {
    type Target = W;
    fn deref(&self) -> &Self::Target {
        &self.w
    }
}
impl<W: std::io::Write> std::ops::DerefMut for Writer<W> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.w
    }
}
impl<W: std::io::Write> VertexBuffer for Writer<W> {
    fn output_line<E, C>(&mut self, v: Vertex<ChangeId>, c: C) -> Result<(), E>
    where
        E: From<std::io::Error>,
        C: FnOnce(&mut Vec<u8>) -> Result<(), E>,
    {
        // Render the vertex into the scratch buffer, forward it, and
        // remember whether the output ended on a newline.
        self.buf.clear();
        c(&mut self.buf)?;
        debug!("vbuf {:?} {:?}", v, std::str::from_utf8(&self.buf));
        let ends_with_newline = self.buf.ends_with(b"\n");
        self.w.write_all(&self.buf[..])?;
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), std::io::Error> {
        debug!("output_conflict_marker {:?}", self.new_line);
        if !self.new_line {
            // Markers start with '\n': writing `s` in full terminates
            // the current (unfinished) line first.
            self.w.write_all(s.as_bytes())?;
        } else {
            // Already at the start of a line: skip the marker's
            // leading '\n' to avoid an empty line.
            debug!("{:?}", &s.as_bytes()[1..]);
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        // Fix: markers always end with '\n', so the output is now at
        // the start of a line. The `ConflictsWriter` implementation of
        // this method updates the flag; this one previously did not,
        // so a marker written right after another would incorrectly
        // emit an extra empty line.
        self.new_line = true;
        Ok(())
    }
    fn begin_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_zombie_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        self.output_conflict_marker(START_MARKER)
    }
}
/// A jagged two-dimensional vector stored as one flat buffer plus row
/// boundaries: row `i` spans `v[bounds[i]..bounds[i + 1]]`.
pub(crate) struct Vector2<A> {
    // Flat element storage.
    v: Vec<A>,
    // Row boundaries; always starts with 0, and its last entry always
    // equals `v.len()`.
    bounds: Vec<usize>,
}
impl<A> Vector2<A> {
    /// An empty `Vector2` with no rows.
    pub(crate) fn new() -> Self {
        Vector2 {
            v: Vec::new(),
            bounds: vec![0],
        }
    }
    /// Number of rows.
    pub(crate) fn len(&self) -> usize {
        self.bounds.len() - 1
    }
    /// An empty `Vector2` with room reserved for `total` elements
    /// overall and `n` row boundaries.
    pub(crate) fn with_capacities(total: usize, n: usize) -> Self {
        let mut bounds = Vec::with_capacity(n);
        bounds.push(0);
        Vector2 {
            v: Vec::with_capacity(total),
            bounds,
        }
    }
    /// Appends `a` to the most recently started row.
    ///
    /// Panics if no row has been started with `push`.
    pub(crate) fn push_to_last(&mut self, a: A) {
        assert!(self.bounds.len() > 1);
        self.v.push(a);
        // Maintain the invariant: the last boundary is `v.len()`.
        *self.bounds.last_mut().unwrap() = self.v.len();
    }
    /// Starts a new, empty row.
    pub(crate) fn push(&mut self) {
        let end = self.v.len();
        self.bounds.push(end)
    }
    /// The last row, as a mutable slice, or `None` if there are no rows.
    pub(crate) fn last_mut(&mut self) -> Option<&mut [A]> {
        if self.bounds.len() < 2 {
            return None;
        }
        let i = self.bounds.len() - 2;
        Some(&mut self.v[self.bounds[i]..self.bounds[i + 1]])
    }
}
impl<A> std::ops::Index<usize> for Vector2<A> {
    type Output = [A];
    /// Row `i` as a slice; panics if `i >= self.len()`.
    fn index(&self, i: usize) -> &[A] {
        let (start, end) = (self.bounds[i], self.bounds[i + 1]);
        &self.v[start..end]
    }
}
impl<A> std::ops::IndexMut<usize> for Vector2<A> {
    /// Row `i` as a mutable slice; panics if `i >= self.len()`.
    fn index_mut(&mut self, i: usize) -> &mut [A] {
        let (start, end) = (self.bounds[i], self.bounds[i + 1]);
        &mut self.v[start..end]
    }
}
impl<A: std::fmt::Debug> std::fmt::Debug for Vector2<A> {
    /// Formats like a `Vec` of rows, e.g. `[[0, 1], [2]]`.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "[")?;
        for i in 0..self.len() {
            if i > 0 {
                write!(fmt, ", ")?;
            }
            write!(fmt, "{:?}", &self[i])?;
        }
        write!(fmt, "]")
    }
}
/// Elements pushed after each `push()` land in the right row.
#[test]
fn test_v2() {
    let mut v: Vector2<usize> = Vector2::new();
    v.push();
    v.push_to_last(0);
    v.push_to_last(1);
    v.push_to_last(2);
    v.push();
    v.push_to_last(4);
    v.push_to_last(5);
    v.push_to_last(6);
    assert_eq!(&v[0], &[0, 1, 2][..]);
    assert_eq!(&v[1], &[4, 5, 6][..]);
}
/// Indexing an empty `Vector2` panics.
#[test]
#[should_panic]
fn test_v2_() {
    let w: Vector2<usize> = Vector2::new();
    println!("{:?}", &w[0]);
}
use crate::change::*;
use crate::changestore::*;
use crate::pristine::*;
use crate::small_string::*;
/// Undo the working-copy side of a file addition: if `new_vertex` is a
/// zero-width vertex (which marks an inode position), delete the
/// inode <-> position bindings at that position, if any.
pub fn undo_file_addition<
    T: GraphMutTxnT + TreeMutTxnT<TreeError = <T as GraphTxnT>::GraphError>,
>(
    txn: &mut T,
    change_id: ChangeId,
    new_vertex: &NewVertex<Option<Hash>>,
) -> Result<(), TxnErr<T::GraphError>> {
    // Only zero-width vertices denote inode positions.
    if new_vertex.start != new_vertex.end {
        return Ok(());
    }
    let pos = Position {
        change: change_id,
        pos: new_vertex.start,
    };
    if let Some(inode) = txn.get_revinodes(pos, None)? {
        del_inodes_with_rev(txn, inode, pos)?
    }
    Ok(())
}
/// Undo a file deletion recorded by `change_id`: for each FOLDER edge
/// in `newedges` whose target is a zero-width vertex (an inode
/// position), restore the inode bindings at that position.
pub fn undo_file_deletion<
    T: ChannelTxnT + TreeMutTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
    P: ChangeStore,
>(
    txn: &mut T,
    changes: &P,
    channel: &T::Channel,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), super::UnrecordError<P::Error, T::TreeError>> {
    for e in newedges.edges.iter().rev() {
        // Edges stored in a change are in their forward orientation.
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        let source =
            txn.find_block_end(T::graph(channel), internal_pos(txn, &e.from, change_id)?)?;
        // Zero-width FOLDER targets mark inode positions (cf. the
        // `start == end` test in `undo_file_addition`).
        if e.flag.contains(EdgeFlags::FOLDER) && e.to.start_pos() == e.to.end_pos() {
            let dest = internal_pos(txn, &e.to.start_pos(), change_id)?;
            restore(txn, changes, channel, source, dest)?
        }
    }
    Ok(())
}
/// Restore the inode bindings for position `dest`, whose name vertex
/// is `source`, restoring bindings for deleted ancestors first.
///
/// Iterative version of a recursion: when an entry's parent has no
/// inode binding yet, the entry is re-pushed beneath its youngest
/// parent; `return_value` carries the inode produced by the entry
/// popped just before (i.e. the parent) down to its child.
fn restore<
    T: ChannelTxnT + TreeMutTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
    P: ChangeStore,
>(
    txn: &mut T,
    changes: &P,
    channel: &T::Channel,
    source: Vertex<ChangeId>,
    dest: Position<ChangeId>,
) -> Result<(), super::UnrecordError<P::Error, T::TreeError>> {
    let mut stack = vec![(source, dest)];
    let mut return_value = None;
    while let Some((source, dest)) = stack.pop() {
        if let Some(parent_inode) = return_value {
            // The previous iteration produced our parent's inode;
            // bind this entry under it.
            return_value = Some(restore_inode(txn, changes, source, dest, parent_inode)?);
            continue;
        }
        // Find the FOLDER parent of `source` in the graph.
        let mut source_parent = None;
        for e in iter_adjacent(
            txn,
            T::graph(channel),
            source,
            EdgeFlags::PARENT | EdgeFlags::FOLDER,
            EdgeFlags::all(),
        )? {
            let e = e?;
            if e.flag.contains(EdgeFlags::PARENT | EdgeFlags::FOLDER) {
                source_parent = Some(e.dest);
                break;
            }
        }
        // NOTE(review): assumes every name vertex has a FOLDER|PARENT
        // edge; panics otherwise.
        let source_parent = source_parent.unwrap();
        if source_parent.change.is_root() {
            return_value = Some(restore_inode(txn, changes, source, dest, Inode::ROOT)?);
        } else if let Some(inode) = txn.get_revinodes(source_parent, None)? {
            // The parent is already bound to an inode: bind directly.
            return_value = Some(restore_inode(txn, changes, source, dest, inode)?);
        } else {
            // The parent must be restored first: re-push ourselves,
            // then the parent (which will be popped before us).
            let grandparent = find_youngest_parent(txn, channel, source_parent.inode_vertex())?;
            stack.push((source, dest));
            stack.push((grandparent, source_parent));
        }
    }
    Ok(())
}
/// Return the inode bound to `dest`, creating a fresh one if needed.
///
/// A fresh inode is registered in the tree tables under
/// `parent_inode`, with the basename read (via the changestore) from
/// the change that introduced `source`.
fn restore_inode<
    T: TreeMutTxnT + GraphTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
    P: ChangeStore,
>(
    txn: &mut T,
    changes: &P,
    source: Vertex<ChangeId>,
    dest: Position<ChangeId>,
    parent_inode: Inode,
) -> Result<Inode, super::UnrecordError<P::Error, T::TreeError>> {
    let mut name = Vec::new();
    let (_, basename) = changes
        .get_file_name(|h| txn.get_external(h).unwrap(), source, &mut name)
        .map_err(super::UnrecordError::Changestore)?;
    let basename = SmallString::from_str(basename);
    let file_id = OwnedPathId {
        parent_inode,
        basename,
    };
    if let Some(inode) = txn.get_revinodes(dest, None)? {
        // `dest` is already bound: nothing to create.
        Ok(inode)
    } else {
        let inode = crate::fs::create_new_inode(txn)?;
        put_tree_with_rev(txn, file_id.as_file_id(), inode)?;
        put_inodes_with_rev(txn, inode, dest)?;
        Ok(inode)
    }
}
/// Follow the FOLDER | PARENT edges of `current` upwards: return a
/// live (non-deleted) parent if one exists (the scan stops at the
/// first one); otherwise the deleted parent whose deleting change is
/// the youngest according to `get_changeset` (its entry in the
/// channel's change log).
///
/// NOTE(review): panics (`next.unwrap()`) if `current` has no
/// FOLDER | PARENT edge, and (`.unwrap()` on `get_changeset`) if a
/// deleting change is missing from the channel.
fn find_youngest_parent<T: ChannelTxnT>(
    txn: &T,
    channel: &T::Channel,
    current: Vertex<ChangeId>,
) -> Result<Vertex<ChangeId>, BlockError<T::GraphError>> {
    let mut next = None;
    for e in iter_adjacent(
        txn,
        T::graph(channel),
        current,
        EdgeFlags::FOLDER | EdgeFlags::PARENT,
        EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK,
    )? {
        let e = e?;
        if !e.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT) {
            continue;
        }
        if e.flag.contains(EdgeFlags::DELETED) {
            debug!("deleted: {:?}", e.introduced_by);
            // Keep the parent deleted by the youngest change.
            let age = txn
                .get_changeset(T::changes(channel), e.introduced_by)?
                .unwrap();
            if let Some((ref mut age0, ref mut v)) = next {
                if age > *age0 {
                    *age0 = age;
                    *v = e.dest
                }
            } else {
                next = Some((age, e.dest))
            }
        } else {
            // A live parent wins outright.
            next = Some((0, e.dest));
            break;
        }
    }
    txn.find_block_end(T::graph(channel), next.unwrap().1)
}
/// Undo a file re-insertion recorded by `change_id`: for each edge in
/// `newedges` targeting a zero-width vertex (an inode position),
/// delete the inode <-> position bindings at that position, if any.
pub fn undo_file_reinsertion<
    P: ChangeStore,
    T: GraphTxnT + TreeMutTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
>(
    txn: &mut T,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), super::UnrecordError<P::Error, T::TreeError>> {
    for e in newedges.edges.iter() {
        // Edges stored in a change are in their forward orientation.
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        // Only zero-width targets denote inode positions.
        if e.to.start_pos() != e.to.end_pos() {
            continue;
        }
        let position = internal_pos(txn, &e.to.start_pos(), change_id)?;
        if let Some(inode) = txn.get_revinodes(position, None)? {
            del_inodes_with_rev(txn, inode, position)?
        }
    }
    Ok(())
}
use crate::apply;
use crate::change::*;
use crate::changestore::*;
use crate::missing_context::*;
use crate::pristine::*;
use std::collections::{HashMap, HashSet};
mod working_copy;
#[derive(Debug, Error)]
pub enum UnrecordError<
ChangestoreError: std::error::Error + 'static,
TxnError: std::error::Error + 'static,
> {
#[error("Changestore error: {0}")]
Changestore(ChangestoreError),
#[error(transparent)]
Txn(TxnError),
#[error(transparent)]
Block(#[from] crate::pristine::BlockError<TxnError>),
#[error(transparent)]
InconsistentChange(#[from] crate::pristine::InconsistentChange<TxnError>),
#[error("Change not in channel: {}", hash.to_base32())]
ChangeNotInChannel { hash: ChangeId },
#[error("Change not in channel: {}", change_id.to_base32())]
ChangeIsDependedUpon { change_id: ChangeId },
#[error(transparent)]
Missing(#[from] crate::missing_context::MissingError<TxnError>),
#[error(transparent)]
LocalApply(#[from] crate::apply::LocalApplyError<TxnError>),
#[error(transparent)]
Apply(#[from] crate::apply::ApplyError<ChangestoreError, TxnError>),
}
// Lift a bare transaction error into `UnrecordError::Txn`, so `?`
// works on table accesses.
impl<T: std::error::Error + 'static, C: std::error::Error + 'static> std::convert::From<TxnErr<T>>
    for UnrecordError<C, T>
{
    fn from(t: TxnErr<T>) -> Self {
        UnrecordError::Txn(t.0)
    }
}
/// Unrecord change `hash` from `channel`.
///
/// Returns `Ok(false)` when `hash` has no internal id (it was never
/// applied), or when it was just removed from its last channel — in
/// that case its internal/external id bindings and dependency records
/// are deleted too. Returns `Ok(true)` when the change remains in use
/// by another channel.
pub fn unrecord<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    changes: &P,
    hash: &Hash,
) -> Result<bool, UnrecordError<P::Error, T::GraphError>> {
    let change = changes
        .get_change(hash)
        .map_err(UnrecordError::Changestore)?;
    let change_id = if let Some(h) = txn.get_internal(*hash)? {
        h
    } else {
        // Never applied anywhere: nothing to do.
        return Ok(false);
    };
    let unused = unused_in_other_channels(txn, &channel, change_id)?;
    let mut channel = channel.r.borrow_mut();
    del_channel_changes::<T, P>(txn, &mut channel, change_id)?;
    unapply(txn, &mut channel, changes, change_id, &change)?;
    if unused {
        // No other channel uses this change: forget it entirely.
        assert!(txn.get_revdep(change_id, None)?.is_none());
        while txn.del_dep(change_id, None)? {}
        txn.del_external(change_id, None)?;
        txn.del_internal(*hash, None)?;
        for dep in change.dependencies.iter() {
            let dep = txn.get_internal(*dep)?.unwrap();
            txn.del_revdep(dep, Some(change_id))?;
        }
        Ok(false)
    } else {
        Ok(true)
    }
}
/// Remove `change_id` from the channel's change log, after checking
/// that no change still present in the channel depends on it.
///
/// Errors with `ChangeNotInChannel` if the change is not in the log,
/// or `ChangeIsDependedUpon` if a dependent is still applied.
fn del_channel_changes<
    T: ChannelMutTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    P: ChangeStore,
>(
    txn: &mut T,
    channel: &mut T::Channel,
    change_id: ChangeId,
) -> Result<(), UnrecordError<P::Error, T::GraphError>> {
    let timestamp = if let Some(ts) = txn.get_changeset(T::changes(channel), change_id)? {
        ts
    } else {
        return Err(UnrecordError::ChangeNotInChannel { hash: change_id });
    };
    // `iter_revdep` yields (change, dependent) pairs ordered by
    // change id; skip entries before `change_id`'s and stop after them.
    for x in txn.iter_revdep(change_id)? {
        let (p, d) = x?;
        if p < change_id {
            continue;
        } else if p > change_id {
            break;
        }
        if txn.get_changeset(T::changes(channel), d)?.is_some() {
            return Err(UnrecordError::ChangeIsDependedUpon { change_id });
        }
    }
    txn.del_changes(channel, change_id, timestamp)?;
    Ok(())
}
/// Returns `Ok(true)` iff no channel other than `channel` contains
/// `change_id` in its change log.
fn unused_in_other_channels<T: TxnT>(
    txn: &mut T,
    channel: &ChannelRef<T>,
    change_id: ChangeId,
) -> Result<bool, TxnErr<T::GraphError>> {
    let channel = channel.borrow();
    for br in txn.iter_channels("")? {
        let br = br?;
        let br = br.borrow();
        // Skip the channel we are unrecording from.
        if T::name(&br) == T::name(&channel) {
            continue;
        }
        if txn.get_changeset(T::changes(&br), change_id)?.is_some() {
            return Ok(false);
        }
    }
    Ok(true)
}
/// Undo `change` (with internal id `change_id`) on `channel`, in
/// several passes over the change's atoms, each in reverse order:
/// (1) unapply vertices and edge maps, (2) repair the contexts of
/// removed vertices, (3) remove zombies and repair edge contexts,
/// (4) undo the tree/working-copy side of FOLDER atoms, then clean
/// obsolete pseudo-edges and cyclic paths and touch the channel.
fn unapply<
    T: ChannelMutTxnT + TreeMutTxnT<TreeError = <T as GraphTxnT>::GraphError>,
    C: ChangeStore,
>(
    txn: &mut T,
    channel: &mut T::Channel,
    changes: &C,
    change_id: ChangeId,
    change: &Change,
) -> Result<(), UnrecordError<C::Error, T::GraphError>> {
    // Inodes whose forward edges have already been removed in this call.
    let mut clean_inodes = HashSet::new();
    let mut ws = Workspace::default();
    // Pass 1: undo each atom.
    for change_ in change.changes.iter().rev().flat_map(|r| r.rev_iter()) {
        match *change_ {
            Atom::EdgeMap(ref newedges) => unapply_edges(
                changes,
                txn,
                T::graph_mut(channel),
                change_id,
                newedges,
                &mut ws,
            )?,
            Atom::NewVertex(ref newvertex) => {
                // Remove forward edges once per inode.
                if clean_inodes.insert(newvertex.inode) {
                    crate::alive::remove_forward_edges(
                        txn,
                        T::graph_mut(channel),
                        internal_pos(txn, &newvertex.inode, change_id)?,
                    )?
                }
                unapply_newvertex::<T, C>(
                    txn,
                    T::graph_mut(channel),
                    change_id,
                    &mut ws,
                    newvertex,
                )?
            }
        }
    }
    // Pass 2: repair the contexts around the removed vertices.
    repair_newvertex_contexts::<T, C>(txn, T::graph_mut(channel), &mut ws, change_id)?;
    // Pass 3: remove zombie edges and repair edge contexts.
    for change in change.changes.iter().rev().flat_map(|r| r.rev_iter()) {
        if let Atom::EdgeMap(ref n) = *change {
            remove_zombies::<_, C>(txn, T::graph_mut(channel), &mut ws, change_id, n)?;
            repair_edges_context(
                changes,
                txn,
                T::graph_mut(channel),
                &mut ws.apply.missing_context,
                change_id,
                n,
            )?
        }
    }
    // Pass 4: undo the working-copy (tree table) side of FOLDER atoms.
    // NOTE(review): `newedges.edges[0]` assumes edge maps are non-empty.
    for change_ in change.changes.iter().rev().flat_map(|r| r.rev_iter()) {
        match *change_ {
            Atom::EdgeMap(ref newedges) if newedges.edges[0].flag.contains(EdgeFlags::FOLDER) => {
                if newedges.edges[0].flag.contains(EdgeFlags::DELETED) {
                    working_copy::undo_file_deletion(txn, changes, channel, change_id, newedges)?
                } else {
                    working_copy::undo_file_reinsertion::<C, _>(txn, change_id, newedges)?
                }
            }
            Atom::NewVertex(ref new_vertex)
                if new_vertex.flag.contains(EdgeFlags::FOLDER)
                    && new_vertex.down_context.is_empty() =>
            {
                working_copy::undo_file_addition(txn, change_id, new_vertex)?;
            }
            _ => {}
        }
    }
    crate::apply::clean_obsolete_pseudo_edges(
        txn,
        T::graph_mut(channel),
        &mut ws.apply,
        change_id,
    )?;
    crate::apply::repair_cyclic_paths(txn, T::graph_mut(channel), &mut ws.apply)?;
    txn.touch_channel(channel, Some(0));
    Ok(())
}
/// Scratch state shared by the unapply passes.
#[derive(Default)]
struct Workspace {
    // Live vertices found above a removed vertex, with that vertex's inode.
    up: HashMap<Vertex<ChangeId>, Position<Option<Hash>>>,
    // Live vertices found below a removed vertex, with that vertex's inode.
    down: HashMap<Vertex<ChangeId>, Position<Option<Hash>>>,
    // Parent/visited set, reused by several passes (always drained or
    // cleared before reuse).
    parents: HashSet<Vertex<ChangeId>>,
    // Edges queued for deletion around the vertex currently being removed.
    del: Vec<Edge>,
    // Workspace for the apply/repair helpers.
    apply: crate::apply::Workspace,
    // DFS stack used by `collect_zombies`.
    stack: Vec<Vertex<ChangeId>>,
    // Zombie edges to delete, paired with their source vertex.
    del_edges: Vec<(Vertex<ChangeId>, Edge)>,
}
/// Delete from the graph all blocks introduced by `new_vertex`,
/// saving their live neighbours in `ws.up`/`ws.down` so that their
/// contexts can be repaired afterwards.
fn unapply_newvertex<T: GraphMutTxnT, C: ChangeStore>(
    txn: &mut T,
    channel: &mut T::Graph,
    change_id: ChangeId,
    ws: &mut Workspace,
    new_vertex: &NewVertex<Option<Hash>>,
) -> Result<(), UnrecordError<C::Error, T::GraphError>> {
    let mut pos = Position {
        change: change_id,
        pos: new_vertex.start,
    };
    debug!("unapply_newvertex = {:?}", new_vertex);
    // The vertex may have been split into several blocks; walk them all.
    while let Ok(vertex) = txn.find_block(channel, pos) {
        debug!("vertex = {:?}", vertex);
        for e in iter_adj_all(txn, channel, vertex)? {
            let e = e?;
            debug!("e = {:?}", e);
            if !e.flag.is_deleted() {
                if e.flag.is_parent() {
                    // Record live parents (except folder parents) for
                    // later context repair.
                    if !e.flag.is_folder() {
                        let up_v = txn.find_block_end(channel, e.dest)?;
                        ws.up.insert(up_v, new_vertex.inode);
                    }
                } else {
                    // Record live children for later context repair.
                    let down_v = txn.find_block(channel, e.dest)?;
                    ws.down.insert(down_v, new_vertex.inode);
                    if e.flag.is_folder() {
                        ws.apply.missing_context.files.insert(down_v);
                    }
                }
            }
            ws.del.push(e)
        }
        debug!("del = {:#?}", ws.del);
        // The vertex itself is going away: it is not a context to repair.
        ws.up.remove(&vertex);
        ws.down.remove(&vertex);
        ws.perform_del::<C, T>(txn, channel, vertex)?;
        if vertex.end < new_vertex.end {
            pos.pos = vertex.end
        }
    }
    Ok(())
}
impl Workspace {
    /// Delete all edges queued in `self.del` around `vertex`, together
    /// with their reverse counterparts. PARENT edges are normalised so
    /// that deletion always happens in the child -> parent orientation
    /// expected by `del_graph_with_rev`.
    fn perform_del<C: ChangeStore, T: GraphMutTxnT>(
        &mut self,
        txn: &mut T,
        channel: &mut T::Graph,
        vertex: Vertex<ChangeId>,
    ) -> Result<(), UnrecordError<C::Error, T::GraphError>> {
        for e in self.del.drain(..) {
            let (a, b) = if e.flag.is_parent() {
                (txn.find_block_end(channel, e.dest)?, vertex)
            } else {
                (vertex, txn.find_block(channel, e.dest)?)
            };
            del_graph_with_rev(
                txn,
                channel,
                e.flag - EdgeFlags::PARENT,
                a,
                b,
                e.introduced_by,
            )?;
        }
        Ok(())
    }
}
/// Repair the contexts of the vertices recorded in `ws.up` and
/// `ws.down` while unapplying new vertices: for each still-alive
/// neighbour, run the missing-context repairs so the graph stays
/// consistent without the removed vertex.
fn repair_newvertex_contexts<T: GraphMutTxnT, C: ChangeStore>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
) -> Result<(), UnrecordError<C::Error, T::GraphError>> {
    debug!("up = {:#?}", ws.up);
    for (up, inode) in ws.up.drain() {
        if !is_alive(txn, channel, up)? {
            continue;
        }
        crate::missing_context::repair_missing_down_context(
            txn,
            channel,
            &mut ws.apply.missing_context,
            inode,
            up,
            &[up],
        )?
    }
    debug!("down = {:#?}", ws.down);
    for (down, inode) in ws.down.drain() {
        if !is_alive(txn, channel, down)? {
            continue;
        }
        // Collect the dead parents of `down`: they are the vertices
        // whose up-context needs repairing.
        for parent in iter_adjacent(
            txn,
            channel,
            down,
            EdgeFlags::PARENT,
            EdgeFlags::all() - EdgeFlags::DELETED,
        )? {
            let parent = parent?;
            let parent = txn.find_block_end(channel, parent.dest)?;
            if !is_alive(txn, channel, parent)? {
                ws.parents.insert(parent);
            }
        }
        debug!("parents {:#?}", ws.parents);
        for up in ws.parents.drain() {
            crate::missing_context::repair_missing_up_context(
                txn,
                channel,
                &mut ws.apply.missing_context,
                change_id,
                inode,
                up,
                &[down],
            )?
        }
    }
    Ok(())
}
/// Undo an `EdgeMap` atom: re-insert each of its edges reversed
/// (restoring the pre-change state), with `must_reintroduce` deciding
/// whether an edge deleted by a parallel change should really come
/// back.
fn unapply_edges<T: GraphMutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut T::Graph,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
    ws: &mut Workspace,
) -> Result<(), UnrecordError<P::Error, T::GraphError>> {
    debug!("newedges = {:#?}", newedges);
    let ext = txn.get_external(change_id)?.unwrap();
    for edge in newedges.edges.iter() {
        let intro = internal(txn, &edge.introduced_by, change_id)?.unwrap();
        apply::put_newedge(
            txn,
            channel,
            &mut ws.apply,
            intro,
            newedges.inode,
            &edge.reverse(Some(ext)),
            |txn, channel, a, b| {
                must_reintroduce(txn, channel, changes, a, b, ext, intro, change_id)
            },
        )?;
    }
    Ok(())
}
/// While unrecording `current_id`, decide whether the edge `a -> b`
/// must be re-inserted, or whether some change parallel to it already
/// deletes it (in which case re-inserting would be wrong).
fn must_reintroduce<T: GraphTxnT, C: ChangeStore>(
    txn: &T,
    channel: &T::Graph,
    changes: &C,
    a: Vertex<ChangeId>,
    b: Vertex<ChangeId>,
    intro: Hash,
    intro_id: ChangeId,
    current_id: ChangeId,
) -> Result<bool, UnrecordError<C::Error, T::GraphError>> {
    debug!("a = {:?}, b = {:?}", a, b);
    // does a patch introduced by an edge parallel to
    // this one remove this edge from the graph?
    let b_ext = Position {
        change: txn.get_external(b.change)?,
        pos: b.start,
    };
    let mut stack = Vec::new();
    // Collect the changes that introduced edges parallel to a -> b
    // (excluding the change being unrecorded and root edges).
    for e in iter_adj_all(txn, channel, a)? {
        let e = e?;
        if e.flag.contains(EdgeFlags::PARENT)
            || e.dest != b.start_pos()
            || e.introduced_by.is_root()
            || e.introduced_by == current_id
        {
            continue;
        }
        // Optimisation to avoid opening change files in the vast
        // majority of cases: if there is an edge `e` parallel to a ->
        // b introduced by the change that introduced a or b, don't
        // reinsert a -> b: that edge was removed by `e`.
        if a.change == intro_id || b.change == intro_id {
            return Ok(false);
        }
        stack.push(e.introduced_by)
    }
    edge_is_in_channel(txn, changes, b_ext, intro, &mut stack)
}
/// Starting from the changes in `stack`, transitively visit the
/// changes that delete position `pos` (as reported by the
/// changestore). Return `Ok(false)` if `introduced_by` is among them
/// (i.e. the edge's introduction is superseded in this channel), and
/// `Ok(true)` otherwise.
fn edge_is_in_channel<T: GraphTxnT, C: ChangeStore>(
    txn: &T,
    changes: &C,
    pos: Position<Option<Hash>>,
    introduced_by: Hash,
    stack: &mut Vec<ChangeId>,
) -> Result<bool, UnrecordError<C::Error, T::GraphError>> {
    let mut visited = HashSet::new();
    while let Some(s) = stack.pop() {
        // Each change is examined at most once.
        if !visited.insert(s) {
            continue;
        }
        debug!("stack: {:?}", s);
        for next in changes
            .change_deletes_position(|c| txn.get_external(c).unwrap(), s, pos)
            .map_err(UnrecordError::Changestore)?
        {
            if next == introduced_by {
                return Ok(false);
            } else if let Some(i) = txn.get_internal(next)? {
                // Only follow changes that are known to this pristine.
                stack.push(i)
            }
        }
    }
    Ok(true)
}
/// Around each target of `newedges`, collect (via `collect_zombies`)
/// and delete the edges introduced by `change_id`, i.e. the edges
/// that kept vertices alive as zombies.
fn remove_zombies<T: GraphMutTxnT, C: ChangeStore>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), UnrecordError<C::Error, T::GraphError>> {
    debug!("remove_zombies, change_id = {:?}", change_id);
    for edge in newedges.edges.iter() {
        let to = internal_pos(txn, &edge.to.start_pos(), change_id)?;
        collect_zombies(txn, channel, change_id, to, ws)?;
        debug!("remove_zombies = {:#?}", ws.del_edges);
        for (v, mut e) in ws.del_edges.drain(..) {
            // Normalise to the forward orientation expected by
            // `del_graph_with_rev`.
            if e.flag.contains(EdgeFlags::PARENT) {
                let u = txn.find_block_end(channel, e.dest)?;
                e.flag -= EdgeFlags::PARENT;
                del_graph_with_rev(txn, channel, e.flag, u, v, e.introduced_by)?;
            } else {
                let w = txn.find_block(channel, e.dest)?;
                del_graph_with_rev(txn, channel, e.flag, v, w, e.introduced_by)?;
            }
        }
    }
    Ok(())
}
/// DFS from `to`, following edges that were either introduced by
/// `change_id` or are plain parent edges, pushing every edge
/// introduced by `change_id` onto `ws.del_edges`.
///
/// Uses `ws.stack` as the DFS stack and `ws.parents` as the visited
/// set; both are cleared before returning.
fn collect_zombies<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    change_id: ChangeId,
    to: Position<ChangeId>,
    ws: &mut Workspace,
) -> Result<(), BlockError<T::GraphError>> {
    ws.stack.push(txn.find_block(channel, to)?);
    while let Some(v) = ws.stack.pop() {
        debug!("remove_zombies, v = {:?}", v);
        if !ws.parents.insert(v) {
            continue;
        }
        for e in iter_adj_all(txn, channel, v)? {
            let e = e?;
            debug!("e = {:?}", e);
            // Follow edges from the unrecorded change, plus edges
            // whose BLOCK/PARENT bits are exactly PARENT
            // (`bp()` is presumably the BLOCK | PARENT mask — confirm).
            if !(e.introduced_by == change_id || e.flag & EdgeFlags::bp() == EdgeFlags::PARENT) {
                continue;
            }
            if e.flag.contains(EdgeFlags::PARENT) {
                ws.stack.push(txn.find_block_end(channel, e.dest)?)
            } else {
                ws.stack.push(txn.find_block(channel, e.dest)?)
            }
            if e.introduced_by == change_id {
                ws.del_edges.push((v, e))
            }
        }
    }
    ws.stack.clear();
    ws.parents.clear();
    Ok(())
}
/// Repair the missing contexts around the (reversed) edges of `n`:
/// edges whose previous state was DELETED go through
/// `repair_context_deleted`, the others through
/// `repair_context_nondeleted` (skipping targets that are no longer
/// alive).
fn repair_edges_context<T: GraphMutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut crate::missing_context::Workspace,
    change_id: ChangeId,
    n: &EdgeMap<Option<Hash>>,
) -> Result<(), UnrecordError<P::Error, T::GraphError>> {
    let change_hash = txn.get_external(change_id)?.unwrap();
    for e in n.edges.iter() {
        // Edges stored in a change are in their forward orientation.
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        let intro = internal(txn, &e.introduced_by, change_id)?.unwrap();
        if e.previous.contains(EdgeFlags::DELETED) {
            repair_context_deleted(
                txn,
                channel,
                ws,
                n.inode,
                intro,
                |h| changes.knows(&change_hash, &h).unwrap(),
                &e.reverse(Some(change_hash)),
            )?
        } else {
            let to = internal_pos(txn, &e.to.start_pos(), change_id)?;
            let to = txn.find_block(channel, to)?;
            if !is_alive(txn, channel, to)? {
                continue;
            }
            repair_context_nondeleted(
                txn,
                channel,
                ws,
                n.inode,
                intro,
                |h| changes.knows(&change_hash, &h).unwrap(),
                &e.reverse(Some(change_hash)),
            )?
        }
    }
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Add a file, write to it, then fork the branch and unrecord once on
/// one side.
#[test]
fn test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    // Record the initial state, then an edit.
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nx\nb\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Fork, then unrecord the edit on `main` only.
    let channel2 = txn.fork(&channel, "main2")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The working copy must be back to the initial contents.
    let mut buf = Vec::new();
    repo.read_file("dir/file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\nd\n"));
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    debug_to_file(&txn, &channel2.borrow(), "debug_un2")?;
    txn.commit()?;
    Ok(())
}
/// Like `test`, but the second change replaces two lines ("b", "c")
/// with two new ones ("x", "y") instead of inserting one.
#[test]
fn replace() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nx\ny\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    let channel2 = txn.fork(&channel, "main2")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The working copy must be back to the initial contents.
    let mut buf = Vec::new();
    repo.read_file("dir/file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\nd\n"));
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    debug_to_file(&txn, &channel2.borrow(), "debug_un2")?;
    txn.commit()?;
    Ok(())
}
/// Record a file move, unrecord it, and check that the tree tables
/// still show the moved layout until `output` restores the original
/// one in both the tables and the working copy.
#[test]
fn file_move() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.rename("file", "dir/file")?;
    txn.move_file("file", "dir/file")?;
    debug!("recording the move");
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    debug!("unrecording the move");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    debug_to_file(&txn, &channel.borrow(), "debug_un2")?;
    // Before output, the working copy and tree still have the move.
    assert_eq!(
        crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect::<Vec<_>>(),
        vec!["dir", "dir/file"]
    );
    assert_eq!(repo.list_files(), vec!["dir", "dir/file"]);
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    // After output, the move has been undone.
    assert_eq!(
        crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect::<Vec<_>>(),
        vec!["file"]
    );
    assert_eq!(repo.list_files(), vec!["file"]);
    txn.commit()?;
    Ok(())
}
/// `reconnect_` with a line deletion.
#[test]
fn reconnect_lines() -> Result<(), anyhow::Error> {
    reconnect_(false)
}
/// `reconnect_` with a whole-file deletion.
#[test]
fn reconnect_files() -> Result<(), anyhow::Error> {
    reconnect_(true)
}
/// Shared body of `reconnect_lines`/`reconnect_files`: one repository
/// deletes content while another inserts next to it; after applying
/// both sides and unrecording one insertion, the (debugged) graph
/// should have its reconnecting pseudo-edges restored.
fn reconnect_(delete_file: bool) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let mut repo2 = working_copy::memory::Memory::new();
    let mut repo3 = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let env3 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut txn2 = env2.mut_txn_begin();
    let mut txn3 = env3.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Propagate the initial change to the two other repositories.
    let mut channel2 = txn2.open_or_create_channel("main")?;
    let mut channel3 = txn3.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn2, &mut channel2, h0)?;
    output::output_repository_no_pending(&mut repo2, &changes, &mut txn2, &mut channel2, "", true)?;
    apply::apply_change(&changes, &mut txn3, &mut channel3, h0)?;
    output::output_repository_no_pending(&mut repo3, &changes, &mut txn3, &mut channel3, "", true)?;
    // This test removes a line (in h1), then replaces it with another
    // one (in h2), removes the pseudo-edges (output, below), and then
    // unrecords h2 to delete the connection. Test: do the
    // pseudo-edges reappear?
    ///////////
    if delete_file {
        repo.remove_path("file")?;
    } else {
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(b"a\nd\n")?;
            Ok(())
        })?;
    }
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    ///////////
    repo2.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nb\nx\nc\nd\n")?;
        Ok(())
    })?;
    let h2 = record_all(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    repo2.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nb\nx\nc\ny\nd\n")?;
        Ok(())
    })?;
    let h3 = record_all(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    ///////////
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    apply::apply_change(&changes, &mut txn, &mut channel, h3)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug_un2")?;
    Ok(())
}
/// Zombie scenario where the whole file is deleted (`None`).
#[test]
fn zombie_file_test() -> Result<(), anyhow::Error> {
    zombie_(None)
}
/// Zombie scenario where only lines are deleted (file rewritten to "d\n").
#[test]
fn zombie_lines_test() -> Result<(), anyhow::Error> {
    zombie_(Some(b"d\n"))
}
/// Shared body of the zombie tests.
///
/// `file` is `Some(contents)` to delete lines by rewriting the file, or
/// `None` to delete the whole file (`h1`). A second pristine edits
/// inside the deleted region (`h2`). Each side then applies the other's
/// change and unrecords it, checking the resulting conflicts (a
/// `ZombieFile` is expected only in the whole-file-deletion case) and
/// that the graph contains no dead-but-alive or unreachable vertices.
fn zombie_(file: Option<&[u8]>) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let mut repo2 = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut txn2 = env2.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Clone the initial state into the second pristine.
    let mut channel2 = txn2.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn2, &mut channel2, h0)?;
    output::output_repository_no_pending(&mut repo2, &changes, &mut txn2, &mut channel2, "", true)?;
    ///////////
    // h1: delete lines, or the whole file.
    if let Some(file) = file {
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(file)?;
            Ok(())
        })?;
    } else {
        repo.remove_path("file")?;
    }
    let h1 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug_a")?;
    ///////////
    // h2: insert "x" inside the region h1 deletes.
    repo2.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nb\nx\nc\nd\n")?;
        Ok(())
    })?;
    let h2 = record_all_output(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    debug_to_file(&txn2, &channel2.borrow(), "debug_b")?;
    ///////////
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    debug!("unrecording");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel.borrow(), "debug_un2")?;
    let mut buf = Vec::new();
    if let Some(f) = file {
        // Line deletion: unrecording h2 restores h1's contents, cleanly.
        if !conflicts.is_empty() {
            panic!("conflicts = {:#?}", conflicts)
        }
        repo.read_file("file", &mut buf)?;
        assert_eq!(&buf[..], f);
    } else {
        // File deletion: h2's edit resurrects the file as a zombie.
        if conflicts.len() != 1 {
            panic!("conflicts = {:#?}", conflicts)
        }
        match conflicts[0] {
            Conflict::ZombieFile { ref path } => assert_eq!(path, "file"),
            ref c => panic!("c = {:#?}", c),
        }
    }
    let (alive_, reachable_) = check_alive(&txn, &channel.borrow().graph);
    if !alive_.is_empty() {
        panic!("alive: {:?}", alive_);
    }
    if !reachable_.is_empty() {
        panic!("reachable: {:?}", reachable_);
    }
    txn.commit()?;
    // Applying the symmetric.
    apply::apply_change(&changes, &mut txn2, &mut channel2, h1)?;
    debug_to_file(&txn2, &channel2.borrow(), "debug_un3")?;
    debug!("unrecording h1 = {:?}", h1);
    crate::unrecord::unrecord(&mut txn2, &mut channel2, &changes, &h1)?;
    debug_to_file(&txn2, &channel2.borrow(), "debug_un4")?;
    // BUG FIX: this output runs on txn2, so it must use the matching
    // working copy and channel. The original passed `repo` and
    // `channel`, which belong to the *other* pristine (`env`/`txn`) —
    // a cross-pristine mismatch.
    let conflicts = output::output_repository_no_pending(
        &mut repo2,
        &changes,
        &mut txn2,
        &mut channel2,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    Ok(())
}
// Should fail: we're resurrecting a file in a directory that doesn't
// exist any more.
#[test]
fn zombie_dir() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c/d", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("a/b/c/d")?;
    let mut channel = txn.open_or_create_channel("main")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // h1 deletes the file; _h2 then deletes the parent directories.
    repo.remove_path("a/b/c/d")?;
    let h1 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.remove_path("a/b")?;
    let _h2 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    let files = repo.list_files();
    assert_eq!(files, &["a"]);
    debug!("files={:?}", files);
    debug_to_file(&txn, &channel.borrow(), "debug_un")?;
    // Unrecording the file deletion resurrects "a/b/c/d" inside
    // directories removed by _h2: they come back as zombies.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    debug_to_file(&txn, &channel.borrow(), "debug_un2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
        ref c => panic!("c = {:?}", c),
    }
    match conflicts[1] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b/c"),
        ref c => panic!("c = {:?}", c),
    }
    // The whole path must be back in the working copy.
    let files = repo.list_files();
    debug!("files={:?}", files);
    assert_eq!(files, &["a", "a/b", "a/b/c", "a/b/c/d"]);
    let (alive_, reachable_) = check_alive(&txn, &channel.borrow().graph);
    if !alive_.is_empty() {
        panic!("alive: {:?}", alive_);
    }
    if !reachable_.is_empty() {
        panic!("reachable: {:?}", reachable_);
    }
    txn.commit()?;
    Ok(())
}
/// Checks unrecord's dependency handling: a change that later changes
/// depend on cannot be unrecorded; a change can only be unrecorded from
/// a channel that contains it; and unrecording in reverse order
/// (h1 then h0) empties both the channel and the working copy.
#[test]
fn nodep() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    debug_inodes(&txn);
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nx\nb\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_inodes(&txn);
    // h1 depends on h0, so unrecording h0 first must fail.
    match crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0) {
        Err(crate::unrecord::UnrecordError::ChangeIsDependedUpon { .. }) => {}
        _ => panic!("Should not be able to unrecord"),
    }
    debug_inodes(&txn);
    // h0 was never applied to this fresh channel.
    let mut channel2 = txn.open_or_create_channel("main2")?;
    match crate::unrecord::unrecord(&mut txn, &mut channel2, &changes, &h0) {
        Err(crate::unrecord::UnrecordError::ChangeNotInChannel { .. }) => {}
        _ => panic!("Should not be able to unrecord"),
    }
    for p in txn.log(&channel.borrow(), 0).unwrap() {
        debug!("p = {:?}", p);
    }
    debug_inodes(&txn);
    debug_to_file(&txn, &channel.borrow(), "debug")?;
    // Reverse order (most recent first) works.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    for p in txn.log(&channel.borrow(), 0).unwrap() {
        debug!("p = {:?}", p);
    }
    debug_inodes(&txn);
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0)?;
    debug_to_file(&txn, &channel.borrow(), "debug3")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    // Everything unrecorded: no files, no working-copy entries.
    let files = repo.list_files();
    if !files.is_empty() {
        panic!("Files should be empty {:?}", files);
    }
    assert!(crate::fs::iter_working_copy(&txn, Inode::ROOT)
        .next()
        .is_none());
    txn.commit()?;
    Ok(())
}
/// Records a file addition then its deletion, and unrecords both in
/// turn: undoing the deletion restores the file, undoing the initial
/// change leaves an empty working copy. Neither step may conflict.
#[test]
fn file_del() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("file", b"blabla".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.remove_path("file")?;
    let h = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug")?;
    debug!("unrecord h");
    // Unrecording the deletion.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h)?;
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The file is back after undoing its deletion.
    assert_eq!(repo.list_files(), vec!["file"]);
    // Unrecording the initial change.
    debug!("unrecord h0");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0)?;
    debug_to_file(&txn, &channel.borrow(), "debug3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    let files = repo.list_files();
    if !files.is_empty() {
        panic!("Files should be empty {:?}", files);
    }
    txn.commit()?;
    Ok(())
}
/// Unrecording a change that edits the file around a conflict marker.
#[test]
fn self_context() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("file", b"a\nb\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Create an order conflict: "x" inserted on `channel`, "y" on the
    // fork `channel2`, both between "a" and "b", then merged.
    let mut channel2 = txn.fork(&channel, "main2")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| Ok(w.write_all(b"a\nx\nb\n")?))?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| Ok(w.write_all(b"a\ny\nb\n")?))?;
    let b = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    apply::apply_change(&changes, &mut txn, &mut channel, b)?;
    debug_to_file(&txn, &channel.borrow(), "debug")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::Order { .. } => {}
        ref c => panic!("c = {:?}", c),
    }
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    // Edit around the end-of-conflict marker (">>>"), wrapping it in
    // "bla"/"bli" lines, and record that edit.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter() {
            if l.starts_with(">>>") {
                writeln!(w, "bla\n{}\nbli", l)?
            } else {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    let c = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    // Unrecord the edit: its context is the conflict marker itself.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &c)?;
    debug_to_file(&txn, &channel.borrow(), "debug3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::Order { .. } => {}
        ref c => panic!("c = {:?}", c),
    }
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    // The original conflict must be back, unchanged (compare sorted
    // lines, since the two sides may be output in either order).
    let mut conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    conflict.sort();
    assert_eq!(
        conflict,
        vec![
            "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
            "================================",
            ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
            "a",
            "b",
            "x",
            "y"
        ]
    );
    txn.commit()?;
    Ok(())
}
/// Rollback scenario where only lines are deleted.
#[test]
fn rollback_lines() -> Result<(), anyhow::Error> {
    rollback_(false)
}
/// Rollback scenario where the whole file is deleted.
#[test]
fn rollback_file() -> Result<(), anyhow::Error> {
    rollback_(true)
}
/// Shared body of `rollback_lines`/`rollback_file`: records a deletion,
/// applies its inverse (a rollback change), then unrecords the rollback
/// and checks that the deletion is back in effect — the file is gone
/// (when `delete_file`) or back to its deleted-lines contents.
fn rollback_(delete_file: bool) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    // Write a-b-c
    repo.add_file("file", b"a\nb\nc\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Delete -b-
    if delete_file {
        repo.remove_path("file")?
    } else {
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(b"a\nd\n")?;
            Ok(())
        })?;
    }
    let h_del = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Rollback the deletion of -b-
    let p_del = changes.get_change(&h_del)?;
    debug!("p_del = {:#?}", p_del);
    let p_inv = p_del.inverse(
        &h_del,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let h_inv = changes.save_change(&p_inv)?;
    apply::apply_change(&changes, &mut txn, &mut channel, h_inv)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel.borrow(), "debug")?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    // The rollback restored the original contents.
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\n"));
    // Unrecord the rollback
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h_inv)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    // Back to the post-deletion state.
    let mut buf = Vec::new();
    let r = repo.read_file("file", &mut buf);
    if delete_file {
        assert!(r.is_err())
    } else {
        assert_eq!(std::str::from_utf8(&buf), Ok("a\nd\n"));
    }
    txn.commit()?;
    Ok(())
}
/// Delete a line twice on two different channels, merge and unrecord
/// only one of them. Does the deleted edge reappear? It shouldn't.
#[test]
fn double_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    let mut channel2 = txn.open_or_create_channel("main2")?;
    repo.add_file("file", b"blabla\nblibli\nblublu\n".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h0 = {:?}", h0);
    apply::apply_change(&changes, &mut txn, &mut channel2, h0)?;
    // First deletion
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        writeln!(w, "blabla\nblublu")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h1 = {:?}", h1);
    debug_to_file(&txn, &channel.borrow(), "debug0")?;
    // Second deletion: the same working-copy state, recorded on channel2.
    let h2 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    debug!("h2 = {:?}", h2);
    // Both deletions together.
    debug!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug1a")?;
    debug_to_file(&txn, &channel2.borrow(), "debug1b")?;
    debug!("unrecord h");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    txn.commit()?;
    Ok(())
}
/// Same as `double` above, but with a (slightly) more convoluted change
/// dependency graph made by rolling the change back a few times.
#[test]
fn double_convoluted() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    let mut channel2 = txn.open_or_create_channel("main2")?;
    repo.add_file("file", b"blabla\nblibli\nblublu\n".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h0 = {:?}", h0);
    apply::apply_change(&changes, &mut txn, &mut channel2, h0)?;
    // First deletion
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        write!(w, "blabla\nblibli\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h1 = {:?}", h1);
    debug_to_file(&txn, &channel.borrow(), "debug0")?;
    // Second deletion
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        writeln!(w, "blabla")?;
        Ok(())
    })?;
    let h2 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    debug!("h2 = {:?}", h2);
    // Both deletions together, then unrecord on ~channel~.
    debug!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug1a")?;
    debug_to_file(&txn, &channel2.borrow(), "debug1b")?;
    debug!("unrecord h");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    debug_to_file(&txn, &channel.borrow(), "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Same on ~channel2~, but with a few extra layers of rollbacks in between.
    debug!("rolling back");
    apply::apply_change(&changes, &mut txn, &mut channel2, h1)?;
    // Builds and saves the inverse of change `h`, returning its hash.
    // (Simplified from `let h_inv = ...; h_inv` — clippy::let_and_return.)
    let rollback = |h| {
        let p = changes.get_change(&h).unwrap();
        let p_inv = p.inverse(
            &h,
            crate::change::ChangeHeader {
                authors: vec![],
                message: "rollback".to_string(),
                description: None,
                timestamp: chrono::Utc::now(),
            },
            Vec::new(),
        );
        changes.save_change(&p_inv).unwrap()
    };
    // Apply six successive rollbacks-of-rollbacks of h2.
    let mut h = h2;
    for i in 0..6 {
        let r = rollback(h);
        apply::apply_change(&changes, &mut txn, &mut channel2, r).unwrap();
        debug_to_file(&txn, &channel2.borrow(), format!("debug_{}", i))?;
        h = r
    }
    crate::unrecord::unrecord(&mut txn, &mut channel2, &changes, &h1)?;
    debug_to_file(&txn, &channel2.borrow(), "debug_final")?;
    // NOTE(review): this outputs ~channel~ even though the rollbacks and
    // the unrecord above happened on ~channel2~ — confirm this is
    // intentional (kept as-is to preserve behavior).
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    txn.commit()?;
    Ok(())
}
/// Delete the same file on two different channels, merge, unrecord each patch on the same channel. What happens to tree/revtree?
#[test]
fn double_file() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    let mut channel2 = txn.open_or_create_channel("main2")?;
    repo.add_file("file", b"blabla\nblibli\nblublu\n".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h0 = {:?}", h0);
    apply::apply_change(&changes, &mut txn, &mut channel2, h0)?;
    // First deletion
    repo.remove_path("file")?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h1 = {:?}", h1);
    // Second deletion
    let h2 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    debug!("h2 = {:?}", h2);
    // Both deletions together.
    debug!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    // Unrecord both deletions on the same channel.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    // Exactly one inode (the file's) must remain in the inode tables.
    let mut inodes = txn.iter_inodes().unwrap();
    assert!(inodes.next().is_some());
    assert!(inodes.next().is_none());
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Records the resolution of an order conflict, then applies the
/// resolution's inverse — all on the channel (~channela~) where the
/// resolution was recorded.
#[test]
fn rollback_conflict_resolution_simple() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channela = txn.open_or_create_channel("main")?;
    // Create a simple conflict between axb and ayb
    repo.add_file("file", b"a\nb\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channela, "")?;
    let mut channelb = txn.fork(&channela, "other")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nx\nb\n")?;
        Ok(())
    })?;
    let ha = record_all(&mut repo, &changes, &mut txn, &mut channela, "")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\ny\nb\n")?;
        Ok(())
    })?;
    let hb = record_all(&mut repo, &changes, &mut txn, &mut channelb, "")?;
    // Cross-apply so both channels hold the conflict.
    apply::apply_change(&changes, &mut txn, &mut channelb, ha)?;
    apply::apply_change(&changes, &mut txn, &mut channela, hb)?;
    debug_to_file(&txn, &channela.borrow(), "debuga")?;
    debug_to_file(&txn, &channelb.borrow(), "debugb")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channela, "", true)?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    debug!("{}", std::str::from_utf8(&buf).unwrap());
    // Solve the conflict.
    // Keep only single-character lines (a, x, y, b), dropping markers.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter().filter(|l| l.len() == 1) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    buf.clear();
    repo.read_file("file", &mut buf)?;
    debug!("{}", std::str::from_utf8(&buf).unwrap());
    let resb = record_all(&mut repo, &changes, &mut txn, &mut channela, "")?;
    debug_to_file(&txn, &channela.borrow(), "debugres")?;
    // Roll the resolution back on the same channel.
    let p_inv = changes.get_change(&resb).unwrap().inverse(
        &resb,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let h_inv = changes.save_change(&p_inv)?;
    apply::apply_change(&changes, &mut txn, &mut channela, h_inv)?;
    debug_to_file(&txn, &channela.borrow(), "debug")?;
    Ok(())
}
/// Same as `rollback_conflict_resolution_simple`, but the resolution
/// and its rollback are recorded/applied on the *other* channel
/// (~channelb~) than the one the working copy was output from.
#[test]
fn rollback_conflict_resolution_swap() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channela = txn.open_or_create_channel("main")?;
    // Create a simple conflict between axb and ayb
    repo.add_file("file", b"a\nb\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channela, "")?;
    let mut channelb = txn.fork(&channela, "other")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nx\nb\n")?;
        Ok(())
    })?;
    let ha = record_all(&mut repo, &changes, &mut txn, &mut channela, "")?;
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\ny\nb\n")?;
        Ok(())
    })?;
    let hb = record_all(&mut repo, &changes, &mut txn, &mut channelb, "")?;
    // Cross-apply so both channels hold the conflict.
    apply::apply_change(&changes, &mut txn, &mut channelb, ha)?;
    apply::apply_change(&changes, &mut txn, &mut channela, hb)?;
    debug_to_file(&txn, &channela.borrow(), "debuga")?;
    debug_to_file(&txn, &channelb.borrow(), "debugb")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channela, "", true)?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    debug!("{}", std::str::from_utf8(&buf).unwrap());
    // Solve the conflict.
    // Keep only single-character lines (a, x, y, b), dropping markers.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter().filter(|l| l.len() == 1) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    buf.clear();
    repo.read_file("file", &mut buf)?;
    debug!("{}", std::str::from_utf8(&buf).unwrap());
    // Record the resolution on channelb this time.
    let resb = record_all(&mut repo, &changes, &mut txn, &mut channelb, "")?;
    debug_to_file(&txn, &channelb.borrow(), "debugres")?;
    let p_inv = changes.get_change(&resb).unwrap().inverse(
        &resb,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let h_inv = changes.save_change(&p_inv)?;
    apply::apply_change(&changes, &mut txn, &mut channelb, h_inv)?;
    debug_to_file(&txn, &channelb.borrow(), "debug")?;
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Alice records a file; Bob clones it, removes a parent directory and
/// records; Alice applies Bob's deletion and outputs her repository.
#[test]
fn remove_file() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/c/d", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file("a/b/c/d").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    // Bob removes a/b and records
    repo_bob.remove_path("a/b/c")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
// Avoiding quadratic reconnects when possible.
/// Repeatedly wraps the file's contents in `n` growing layers of lines
/// and records each step, then deletes the middle line. Finally counts
/// the edges in the graph and fails if more than the expected linear
/// bound (`n * 8 + 6`) were inserted — i.e. if reconnection was
/// quadratic.
#[test]
fn quadratic_pseudo_edges() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"TxnTX\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
    let n = 100;
    // Each iteration surrounds `contents` with one more layer:
    // 0..i above, (0..i).rev() below.
    for i in 0..=n {
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            for j in 0..i {
                writeln!(w, "{}", j)?;
            }
            w.write_all(&contents[..])?;
            for j in (0..i).rev() {
                writeln!(w, "{}", j)?;
            }
            Ok(())
        })
        .unwrap();
        record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
        debug_to_file(&txn, &channel.borrow(), &format!("debug{}", i)).unwrap();
    }
    // Finally, delete the middle (`contents`) line.
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    debug_to_file(&txn, &channel.borrow(), "debug_final").unwrap();
    // Test that not too many edges have been inserted.
    {
        let channel = channel.borrow();
        let mut m = 0;
        let mut it = txn.iter_graph(&channel.graph).unwrap();
        while let Some(Ok(_)) = txn.next_graph(&channel.graph, &mut it) {
            m += 1
        }
        let m0 = n * 8 + 6;
        if m > m0 {
            panic!("{} > {}", m, m0)
        }
    }
    txn.commit().unwrap();
    Ok(())
}
// Avoiding linear context repairs when possible.
use crate::MutTxnTExt;
/// Builds `n` nested layers around a two-line file, forks, then on one
/// fork inserts a line between the two middle lines (`p1`) while the
/// other deletes them (`p2`). Cross-applies both and checks that the
/// number of edges in each channel stays within a linear bound
/// (`8 * n + 18`) — i.e. context repair was not linear-per-change.
#[test]
fn linear_context_repair() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"TxnTX\nZZZZZ\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
    let n = 10;
    // Wrap `contents` in `n` successive layers, one record per layer.
    for i in 0..=n {
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            for j in 0..i {
                writeln!(w, "{}", j)?;
            }
            w.write_all(&contents[..])?;
            for j in (0..i).rev() {
                writeln!(w, "{}", j)?;
            }
            Ok(())
        })
        .unwrap();
        record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
        debug_to_file(&txn, &channel.borrow(), &format!("debug{}", i)).unwrap();
    }
    let mut channel2 = txn.fork(&channel, "fork")?;
    // p1 (on the fork): insert YYYYY between the two middle lines.
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        w.write_all(b"TxnTX\nYYYYY\nZZZZZ\n")?;
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    let p1 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "").unwrap();
    debug_to_file(&txn, &channel2.borrow(), "debug_bob0").unwrap();
    // p2 (on main): delete both middle lines.
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    let p2 = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    debug_to_file(&txn, &channel.borrow(), "debug_alice0").unwrap();
    debug!("Applying P1");
    txn.apply_change(&changes, &mut channel, p1)?;
    debug_to_file(&txn, &channel.borrow(), "debug_alice").unwrap();
    debug!("Applying P2");
    txn.apply_change(&changes, &mut channel2, p2)?;
    debug_to_file(&txn, &channel2.borrow(), "debug_bob").unwrap();
    // Test that not too many edges have been inserted.
    {
        let channel = channel.borrow();
        let mut m = 0;
        let mut it = txn.iter_graph(&channel.graph).unwrap();
        while let Some(Ok(_)) = txn.next_graph(&channel.graph, &mut it) {
            m += 1
        }
        debug!("m (channel, alice) = {:?}", m);
        let original_edges = 8 * n + 18;
        if m > original_edges {
            panic!("{} > {}", m, original_edges)
        }
    }
    {
        let channel = channel2.borrow();
        let mut m = 0;
        let mut it = txn.iter_graph(&channel.graph).unwrap();
        while let Some(Ok(_)) = txn.next_graph(&channel.graph, &mut it) {
            m += 1
        }
        debug!("m (channel2, bob) = {:?}", m);
        let original_edges = 8 * n + 18;
        if m > original_edges {
            panic!("{} > {}", m, original_edges)
        }
    }
    txn.commit().unwrap();
    Ok(())
}
use crate::fs::*;
use crate::patch::*;
use crate::pristine::*;
use crate::record::*;
use crate::*;
/// Serializes `patch` to a temp file, corrupts its hash, and checks
/// that deserialization reports `PatchHashMismatch`; then re-reads the
/// file section by section and compares each part with the in-memory
/// patch.
fn hash_mismatch(patch: &Patch3) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    use crate::patch::*;
    let mut buf = tempfile::NamedTempFile::new()?;
    let mut h = patch.serialize(&mut buf)?;
    // Flip one byte of the hash so it no longer matches the contents.
    match h {
        crate::pristine::Hash::Blake3(ref mut h) => h[0] = h[0].wrapping_add(1),
        _ => unreachable!(),
    }
    // Deserializing against the corrupted hash must fail with
    // PatchHashMismatch and nothing else.
    match Patch3::deserialize(buf.path().to_str().unwrap(), &h) {
        Err(e) => {
            let e = e.downcast();
            if let Ok(Error::PatchHashMismatch { .. }) = e {
            } else {
                unreachable!()
            }
        }
        _ => unreachable!(),
    }
    // The on-disk sections must still round-trip correctly.
    let mut f = PatchFile::open(buf.path().to_str().unwrap())?;
    assert_eq!(f.read_header()?, patch.header);
    assert_eq!(f.read_dependencies()?, patch.dependencies);
    assert_eq!(f.read_metadata()?, &patch.metadata[..]);
    assert_eq!(f.read_changes()?, patch.changes);
    Ok(())
}
/// Records a patch over two files, applies it, and runs the
/// `hash_mismatch` corruption check on it.
#[test]
fn hash_mism() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let patches = patchstore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    repo.add_file("file2", contents.to_vec());
    let mut env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let branch = txn.open_or_create_branch("main")?;
    let mut branch = branch.borrow_mut();
    add_file(&mut txn, "file")?;
    add_file(&mut txn, "file2")?;
    let mut state = Builder::new();
    state
        .record(
            &mut txn,
            Algorithm::Myers,
            &mut branch,
            &mut repo,
            &patches,
            "",
        )
        .unwrap();
    let rec = state.finish();
    let changes: Vec<_> = rec
        .actions
        .into_iter()
        .flat_map(|x| x.globalize(&txn).into_iter())
        .collect();
    info!("changes = {:?}", changes);
    let patch0 = crate::patch::Patch3::make_patch(
        &txn,
        &branch,
        changes,
        rec.contents,
        crate::patch::PatchHeader {
            name: "test".to_string(),
            authors: vec![],
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    apply::apply_local_patch(&patches, &mut txn, &mut branch, &patch0, &rec.updatables)?;
    hash_mismatch(&patch0)?;
    // BUG FIX: the original line was corrupted into nested, unbalanced
    // calls (`debug_to_file(&debug_to_file((debug_to_file((txn, ...`),
    // which cannot compile. Reconstructed from the surrounding calls'
    // shape: dump the branch graph once.
    debug_to_file(&txn, &branch, "debug")?;
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Records changes touching three separate directories, then checks
/// that `log_for_path` on "d" returns exactly the changes that touched
/// it: its own creation, the creation of the file later moved into it,
/// and the move itself.
#[test]
fn partial_clone() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c", contents.to_vec());
    repo.add_file("d/e/f", contents.to_vec());
    repo.add_file("g/h/i", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        txn.add_file("a/b/c")?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        txn.add_file("d/e/f")?;
        let hd = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        txn.add_file("g/h/i")?;
        let hg = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        // Move g/h/i into d, so its creation becomes relevant to "d".
        repo.rename("g/h/i", "d/e/ff")?;
        txn.move_file("g/h/i", "d/e/ff")?;
        let hmove = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
        let inode = crate::fs::find_inode(&txn, "d")?;
        let key = txn.get_inodes(inode, None).unwrap().unwrap();
        let changes: Vec<_> = txn
            .log_for_path(&channel.borrow(), key, 0)
            .unwrap()
            .map(|x| x.unwrap())
            .collect();
        // The log restricted to "d" must be exactly hd, hg, hmove.
        let check = vec![hd, hg, hmove];
        assert_eq!(changes, check)
    }
    txn.commit().unwrap();
    Ok(())
}
/// Clones into a fresh pristine, but outputs only the "e/f" prefix first and
/// checks that just that subtree is materialized; then applies a second
/// change and outputs everything, verifying both edited and untouched files.
#[test]
fn clone_prefixes() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c/d", contents.to_vec());
    repo.add_file("e/f/g/h", contents.to_vec());
    repo.add_file("i/j/k/l", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    // First change: add all three files.
    let h = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        txn.add_file("a/b/c/d")?;
        txn.add_file("e/f/g/h")?;
        txn.add_file("i/j/k/l")?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "")?
    };
    // Second change: edit two files, but record only under the "a/b/c/d"
    // prefix.
    let h2 = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        repo.write_file::<_, std::io::Error, _>("a/b/c/d", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        repo.write_file::<_, std::io::Error, _>("e/f/g/h", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "a/b/c/d")?
    };
    txn.commit().unwrap();
    // Cloning
    debug!("Cloning");
    let mut repo2 = working_copy::memory::Memory::new();
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn2 = env2.mut_txn_begin();
    {
        let mut channel = txn2.open_or_create_channel("main2").unwrap();
        apply::apply_change(&changes, &mut txn2, &mut channel, h).unwrap();
        // Output restricted to the "e/f" prefix: only that subtree appears.
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "e/f",
            true,
        )?;
        assert_eq!(
            repo2.list_files(),
            ["e", "e/f", "e/f/g", "e/f/g/h"]
                .iter()
                .map(|x| x.to_string())
                .collect::<Vec<_>>()
        );
        apply::apply_change(&changes, &mut txn2, &mut channel, h2).unwrap();
        // Full output: "a/b/c/d" carries the edit, "e/f/g/h" does not
        // (the edit to it was not recorded — h2 was restricted to "a/b/c/d").
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "",
            true,
        )?;
        let mut buf = Vec::new();
        repo2.read_file("a/b/c/d", &mut buf)?;
        assert_eq!(buf, b"edits\n");
        buf.clear();
        repo2.read_file("e/f/g/h", &mut buf)?;
        assert_eq!(buf, contents);
    }
    txn2.commit().unwrap();
    let mut txn2 = env2.mut_txn_begin();
    txn2.open_or_create_channel("main2").unwrap();
    Ok(())
}
use crate::changestore::ChangeStore;
use crate::pristine::*;
use crate::record::{Algorithm, Builder};
use crate::working_copy::WorkingCopy;
use crate::*;
use chrono::*;
mod add_file;
mod change;
mod clone;
mod conflict;
mod file_conflicts;
mod filesystem;
mod missing_context;
mod partial;
mod performance;
mod rm_file;
mod rollback;
mod unrecord;
/// Test helper: records every pending change under `prefix` on `channel`,
/// saves the resulting change into `store`, applies it locally, and returns
/// its hash. Panics (via `unwrap`) on globalization or save failures, which
/// are bugs in tests.
fn record_all<T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
    repo: &mut R,
    store: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
) -> Result<Hash, anyhow::Error>
where
    R::Error: Send + Sync + 'static,
{
    let mut state = Builder::new();
    state.record(
        txn,
        Algorithm::default(),
        &mut channel.borrow_mut(),
        repo,
        store,
        prefix,
    )?;
    let rec = state.finish();
    // Turn the recorded local actions into globalized (hashed) changes.
    let changes = rec
        .actions
        .into_iter()
        .map(|rec| rec.globalize(txn).unwrap())
        .collect();
    let change0 = crate::change::Change::make_change(
        txn,
        &channel,
        changes,
        rec.contents,
        crate::change::ChangeHeader {
            message: "test".to_string(),
            authors: vec![],
            description: None,
            // Beware of changing the following line: two changes
            // doing the same thing will be equal. Sometimes we don't
            // want that, as in tests::unrecord::unrecord_double.
            timestamp: Utc::now(),
        },
        Vec::new(),
    )
    .unwrap();
    let hash = store.save_change(&change0)?;
    // In debug logging mode, dump a human-readable form of the change.
    if log_enabled!(log::Level::Debug) {
        change0
            .write(
                store,
                Some(hash),
                |l, _p| format!("{}:{}", l.path, l.line),
                true,
                &mut std::io::stderr(),
            )
            .unwrap();
    }
    apply::apply_local_change(txn, channel, &change0, hash, &rec.updatables)?;
    Ok(hash)
}
/// Test helper: like [`record_all`], but additionally outputs the entire
/// repository afterwards, so the working copy on disk reflects the newly
/// recorded state. Returns the hash of the recorded change.
fn record_all_output<T: MutTxnT, R: WorkingCopy, P: ChangeStore + Clone + Send + 'static>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
) -> Result<Hash, anyhow::Error>
where
    R::Error: Send + Sync + 'static,
{
    // Record first; any recording failure is propagated to the caller.
    let recorded = record_all(repo, changes, txn, channel, prefix)?;
    // Then materialize the whole working copy (empty prefix, no pending).
    output::output_repository_no_pending(repo, changes, txn, channel, "", true).unwrap();
    Ok(recorded)
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Missing-context scenario where Alice rewrites the file down to two lines.
#[test]
fn missing_context_newnodes_lines() -> Result<(), anyhow::Error> {
    let alice_keeps = "a\nf\n";
    missing_context_newnodes(Some(alice_keeps))
}
/// Missing-context scenario where Alice deletes the file entirely.
#[test]
fn missing_context_newnodes_file() -> Result<(), anyhow::Error> {
    let alice_edit: Option<&str> = None;
    missing_context_newnodes(alice_edit)
}
/// Core "missing context for new nodes" scenario: Bob inserts lines (x, z
/// then y) in two changes while Alice either rewrites the file to `alice`
/// or (if `None`) deletes it. Applying Bob's insertions on Alice's side
/// produces zombie conflicts, which the test round-trips through
/// unrecord/re-apply and then has both sides resolve.
fn missing_context_newnodes(alice: Option<&str>) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let bob = b"a\nb\nc\nx\nz\nd\ne\nf\n";
    let bob2 = b"a\nb\nc\nx\ny\nz\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main").unwrap();
    txn_alice.add_file("file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )
    .unwrap();
    // Bob edits and records
    repo_bob
        .write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(bob).unwrap();
            Ok(())
        })
        .unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    // Second edit: insert "y" between "x" and "z".
    repo_bob
        .write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(bob2).unwrap();
            Ok(())
        })
        .unwrap();
    let bob_h2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "bob0")?;
    // Alice edits and records
    if let Some(alice) = alice {
        repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(alice.as_bytes()).unwrap();
            Ok(())
        })?
    } else {
        repo_alice.remove_path("file")?;
    }
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug0")?;
    // Alice applies Bob's change
    debug!("applying Bob's change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h2)?;
    debug!("done applying Bob's change");
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    // Unrecord Bob's changes and re-apply them: the resulting conflict
    // below must be identical either way.
    crate::unrecord::unrecord(&mut txn_alice, &mut channel_alice, &changes, &bob_h2)?;
    crate::unrecord::unrecord(&mut txn_alice, &mut channel_alice, &changes, &bob_h)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1_unrec")?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h2)?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    // Each inserted line (x, y, z) shows up as its own zombie-conflict block.
    if alice.is_some() {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(
                &"a\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\ny\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\nf\n"
                    [..]
            )
        );
    } else {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(&">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\ny\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"[..])
        );
    }
    // Alice solves the conflict by confirming the deads.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        // Keep only short lines, i.e. drop the conflict-marker lines.
        for l in conflict.iter().filter(|l| l.len() <= 3) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    info!("starting fix_deletion");
    let _fix_deletion = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
    info!("fix_deletion over");
    // Bob applies Alice's change
    info!("Bob applies Alice's change");
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h).unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    info!("Outputting Bob's working_copy");
    // Same unrecord/re-apply round-trip on Bob's side.
    crate::unrecord::unrecord(&mut txn_bob, &mut channel_bob, &changes, &alice_h)?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2_unrec")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2_unrec_app")?;
    let mut buf = Vec::new();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
    repo_bob.read_file("file", &mut buf)?;
    // On Bob's side, x and y/z group differently into conflict blocks.
    if alice.is_some() {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(
                &"a\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\ny\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\nf\n"
                    [..]
            )
        );
    } else {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(&">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\ny\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"[..])
        );
    }
    // Bob solves the conflict by deleting the offending line.
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter().filter(|&&l| l != "xyz") {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    info!("starting fix_insertion");
    let _fix_insertion = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    info!("fix_insertion over");
    Ok(())
}
/// "Missing context for new edges": Alice shrinks the file to "d\nf\n" while
/// Bob deletes the "d" line; after exchanging changes, Bob's change is
/// reverted via `inverse` and Alice applies the inverse on top of her state.
#[test]
fn missing_context_newedges() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let alice = b"d\nf\n";
    let bob = b"a\nb\nc\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main")?;
    txn_alice.add_file("file")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    debug!("Bob edits and records");
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    debug!("Alice edits and records");
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let _alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    debug!("Alice applies Bob's change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1").unwrap();
    // Bob reverts his change.
    debug!("Bob reverts");
    let bob_change = changes.get_change(&bob_h)?;
    let inv = bob_change.inverse(
        &bob_h,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let inv_h = changes.save_change(&inv)?;
    // Alice applies Bob's inverse change.
    info!("Applying inverse change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, inv_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2").unwrap();
    Ok(())
}
use super::*;
/// End-to-end smoke test against the on-disk (filesystem) working copy and
/// change store: record on one channel, apply onto another, output, commit,
/// then clean up the temp files.
#[test]
fn filesystem() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    repo.write_file::<_, std::io::Error, _>("dir/file", |f| {
        Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?)
    })?;
    // Separate temp dir for the pristine database.
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    // Apply the recorded change on a second channel and output it.
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    info!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    repo.rename("dir/file", "dir/file.old")?;
    repo.remove_path("dir/file.old")?;
    repo.remove_path("dir")?;
    Ok(())
}
/// Same as `filesystem`, but the working copy also contains a symlink
/// ("dir/link" -> "dir/file"), which must survive record/apply/output.
/// Unix-only (uses `std::os::unix::fs::symlink`).
#[test]
fn symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    repo.write_file::<_, std::io::Error, _>("dir/file", |f| {
        Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?)
    })?;
    std::os::unix::fs::symlink(&r.path().join("dir/file"), &r.path().join("dir/link")).unwrap();
    // Separate temp dir for the pristine database.
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    txn.add_file("dir/link").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    // Apply the recorded change on a second channel and output it.
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    repo.rename("dir/file", "dir/file.old")?;
    repo.remove_path("dir/file.old")?;
    repo.remove_path("dir")?;
    Ok(())
}
/// Records a dangling symlink ("dir/link" -> "../file", whose target does
/// not exist) and checks that apply/output still succeed. Unix-only.
#[test]
fn record_dead_symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    std::fs::create_dir_all(&r.path().join("dir")).unwrap();
    // The link target "../file" is never created: the symlink is dead.
    std::os::unix::fs::symlink("../file", &r.path().join("dir/link")).unwrap();
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/link").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    Ok(())
}
/// Records a regular file, replaces it on disk with a dangling symlink, then
/// applies the recorded change on another channel and outputs — the output
/// must overwrite the dead symlink without failing. Unix-only.
#[test]
fn overwrite_dead_symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    repo.write_file::<_, std::io::Error, _>("dir/file", |f| {
        Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?)
    })?;
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    // Substitute dir/file with a dead symlink
    std::fs::remove_file(&r.path().join("dir/file")).unwrap();
    std::os::unix::fs::symlink("a/b/c/d/file", &r.path().join("dir/file")).unwrap();
    // Fixed: this previously queried `std::fs::metadata("dir/file")`, i.e. a
    // path relative to the process CWD rather than the temp repository root,
    // so the log always showed an unrelated (usually NotFound) result.
    debug!("meta = {:?}", std::fs::metadata(&r.path().join("dir/file")));
    // And output.
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Rename conflict: Alice and Bob each rename the same file to a different
/// flat name.
#[test]
fn same_file_test() -> Result<(), anyhow::Error> {
    let (original, alice_name, bob_name) = ("file", "alice", "bob");
    same_file_(original, alice_name, bob_name)
}
/// Rename conflict: Alice and Bob each rename the same file into a different
/// directory.
#[test]
fn same_file_dirs_test() -> Result<(), anyhow::Error> {
    let (original, alice_name, bob_name) = ("file", "alice/file", "bob/file");
    same_file_(original, alice_name, bob_name)
}
/// Shared body for the rename-conflict tests: Alice renames `file` to
/// `alice`, Bob renames it to `bob`; each applies the other's change and
/// must see a `MultipleNames` conflict, which Bob records a resolution for
/// and Alice then applies.
fn same_file_(file: &str, alice: &str, bob: &str) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file(file, contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file(file).unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    // Alice renames "file" to "alice"
    repo_alice.rename(file, alice)?;
    txn_alice.move_file(file, alice)?;
    debug!("repo_bob = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    // Bob renames "file" to "bob"
    repo_bob.rename(file, bob)?;
    txn_bob.move_file(file, bob)?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    // Alice must see the rename conflict.
    match conflicts[0] {
        Conflict::MultipleNames { .. } => {}
        ref c => panic!("{:#?}", c),
    }
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    // Outputting restricted to either single name must not report conflicts…
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        alice,
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        bob,
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // …but a full output must.
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    match conflicts[0] {
        Conflict::MultipleNames { .. } => {}
        ref c => panic!("{:#?}", c),
    }
    // Bob solves.
    let bob_solution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Alice applies Bob's solution.
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_solution)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    debug!("repo_alice = {:?}", repo_alice.list_files());
    debug!("repo_bob = {:?}", repo_bob.list_files());
    debug_tree(&txn_bob, "debug_tree")?;
    Ok(())
}
/// Alice and Bob move two different files to the same name.
/// Outputting then produces one "file" plus one auto-renamed "file.*" entry;
/// Alice resolves by moving the extra file to "a1", and Bob checks that her
/// solution clears his conflict too.
#[test]
fn same_name_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file1", contents.to_vec());
    repo_alice.add_file("file2", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("file1")?;
    txn_alice.add_file("file2")?;
    info!("recording file additions");
    debug!("working_copy = {:?}", repo_alice);
    debug_tree(&txn_alice, "debug_tree")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    // Alice renames "file1" to "file"
    repo_alice.rename("file1", "file")?;
    txn_alice.move_file("file1", "file")?;
    debug!("repo_bob = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    // Bob renames "file2" to "file"
    repo_bob.rename("file2", "file")?;
    txn_bob.move_file("file2", "file")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    assert!(!conflicts.is_empty());
    // Both sides now hold "file" plus an auto-renamed "file.*" sibling.
    let mut files_alice = repo_alice.list_files();
    debug!("repo_alice = {:?}", files_alice);
    assert_eq!(files_alice.len(), 2);
    files_alice.sort();
    assert_eq!(files_alice[0], "file");
    assert!(files_alice[1].starts_with("file."));
    // Alice solves it.
    txn_alice.move_file(&files_alice[1], "a1")?;
    repo_alice.rename(&files_alice[0], "file")?;
    repo_alice.rename(&files_alice[1], "a1")?;
    let solution_alice = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    let mut files_bob = repo_bob.list_files();
    debug!("repo_bob = {:?}", files_bob);
    assert_eq!(files_bob.len(), 2);
    files_bob.sort();
    assert_eq!(files_bob[0], "file");
    assert!(files_bob[1].starts_with("file."));
    // Bob applies Alice's solution and checks that it does solve his problem.
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, solution_alice)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
    let mut files_bob = repo_bob.list_files();
    files_bob.sort();
    assert_eq!(files_bob, vec!["a1", "file"]);
    Ok(())
}
/// Combined rename conflicts with three users: Alice renames "file1" to
/// "file", Bob renames "file2" to "file" (same-name conflict), and Charlie
/// renames "file1" to "file3" (giving that file two names). Each user then
/// resolves their own view and the others apply the solutions.
#[test]
fn file_conflicts_same_name_and_two_names() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut repo_charlie = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file1", contents.to_vec());
    repo_alice.add_file("file2", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("file1")?;
    txn_alice.add_file("file2")?;
    info!("recording file additions");
    debug!("working_copy = {:?}", repo_alice);
    debug_tree(&txn_alice, "debug_tree")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
    // Bob clones and renames "file2" to "file"
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    repo_bob.rename("file2", "file")?;
    txn_bob.move_file("file2", "file")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    // Alice renames "file1" to "file"
    repo_alice.rename("file1", "file")?;
    txn_alice.move_file("file1", "file")?;
    debug!("repo_bob = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    // Charlie clones, moves "file1" to "file3" and applies both
    // Alice's and Bob's change.
    let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_charlie = env_charlie.mut_txn_begin();
    let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    repo_charlie.rename("file1", "file3")?;
    txn_charlie.move_file("file1", "file3")?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
    )
    .unwrap();
    debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie1")?;
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h)?;
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h)?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie2")?;
    let mut files_charlie = repo_charlie.list_files();
    files_charlie.sort();
    // Two files with the same name (file), one of which also has another name (file3). This means that we don't know which one of the two names crate::output will pick, between "file3" and the conflicting name.
    // This depends on which file gets output first.
    assert_eq!(files_charlie[0], "file");
    assert!(files_charlie[1] == "file3" || files_charlie[1].starts_with("file."));
    debug!("files_charlie {:?}", files_charlie);
    // Charlie resolves by pinning the second name to "file3".
    repo_charlie.rename(&files_charlie[1], "file3")?;
    txn_charlie.move_file(&files_charlie[1], "file3")?;
    let _charlie_solution = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
    )
    .unwrap();
    debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie3")?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    let mut files_charlie = repo_charlie.list_files();
    files_charlie.sort();
    assert_eq!(files_charlie, &["file", "file3"]);
    // Alice applies Bob's change and Charlie's change.
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut files_alice = repo_alice.list_files();
    files_alice.sort();
    debug!("files_alice {:?}", files_alice);
    // Alice resolves by removing the second (conflicting) path.
    repo_alice.remove_path(&files_alice[1]).unwrap();
    let _alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
    // Bob applies Alice's change and Charlie's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h)?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    let files_bob = repo_bob.list_files();
    debug!("files_bob {:?}", files_bob);
    // Bob resolves the same way as Alice.
    repo_bob.remove_path(&files_bob[1]).unwrap();
    let _bob_solution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3")?;
    Ok(())
}
/// Zombie-file regression test: Alice deletes the directory `a/b` while Bob
/// concurrently edits `a/b/c/file`. After the changes are exchanged, the
/// edited file must survive as a "zombie" (be re-output with its full path),
/// and Alice's recorded resolution must bring Bob to the same state.
#[test]
fn zombie_file_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
// contents2 = contents with an extra line "x" inserted in the middle.
let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a/b/c/file", contents.to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
txn_alice.add_file("a/b/c/file").unwrap();
// Alice records the initial state.
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
// Bob clones
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
// Alice deletes "file"
repo_alice.remove_path("a/b")?;
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
// Bob edits "file"
repo_bob.write_file::<_, std::io::Error, _>("a/b/c/file", |w| {
w.write_all(contents2)?;
Ok(())
})?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
// NOTE(review): "debug_alice1" is reused here, overwriting the dump made
// after Alice's deletion above — presumably unintentional; harmless to the test.
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
debug!("alice2");
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
let files_alice = repo_alice.list_files();
// The deleted directory and the edited file must have been resurrected.
assert_eq!(files_alice, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
// Dump the whole tree table (from the root inode) for debugging.
for x in txn_alice
.iter_tree(
OwnedPathId {
parent_inode: Inode::ROOT,
basename: crate::small_string::SmallString::new(),
},
None,
)
.unwrap()
{
debug!("x = {:?}", x);
}
debug!("recording a solution");
// Alice records the zombie resolution (keeping the resurrected file).
let alice_solution = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("repo_alice = {:?}", repo_alice.list_files());
debug!("repo_bob = {:?}", repo_bob.list_files());
// Applying Alice's resolution must converge Bob on the same file tree.
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
let files_bob = repo_bob.list_files();
assert_eq!(files_bob, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
Ok(())
}
/// Rename vs. delete on a file: Alice deletes `a/b`, Bob renames (and edits)
/// `a/b/c/file` to `a/b/c/file2`. The renamed file becomes a zombie on
/// Alice's side; her recorded resolution must leave both sides with
/// `a/b/c/file2` present.
#[test]
fn rename_zombie_file() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
// contents2 = contents with an extra line "x" inserted in the middle.
let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a/b/c/file", contents.to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
txn_alice.add_file("a/b/c/file").unwrap();
// Alice records the initial state.
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
// Bob clones
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
// Alice deletes "file"
repo_alice.remove_path("a/b")?;
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
// Bob renames "file"
repo_bob.rename("a/b/c/file", "a/b/c/file2")?;
// ...and also edits it, so the rename carries content changes too.
repo_bob.write_file::<_, std::io::Error, _>("a/b/c/file2", |w| {
w.write_all(contents2)?;
Ok(())
})?;
txn_bob.move_file("a/b/c/file", "a/b/c/file2")?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
debug!("alice2");
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
let files_alice = repo_alice.list_files();
debug!("Alice records {:?}", files_alice);
// Best-effort: the zombie may have been output under either name, so the
// rename/move may legitimately fail; errors are deliberately ignored.
repo_alice.rename("a/b/c/file", "a/b/c/file2").unwrap_or(());
// repo_alice.remove_path("a/b/c/file").unwrap_or(());
// repo_alice.remove_path("a/b/c/file2").unwrap_or(());
txn_alice
.move_file("a/b/c/file", "a/b/c/file2")
.unwrap_or(());
// Alice records her resolution of the zombie conflict.
let alice_solution = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice4")?;
debug!("Alice recorded {:?}", alice_solution);
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("repo_alice = {:?}", repo_alice.list_files());
debug!("repo_bob = {:?}", repo_bob.list_files());
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
let files_bob = repo_bob.list_files();
// Subset check (not equality): extra conflict artifacts may also be present.
assert!(["a", "a/b", "a/b/c", "a/b/c/file2"]
.iter()
.all(|n| files_bob.iter().any(|m| m == n)));
Ok(())
}
/// Rename vs. delete on a directory: Alice deletes `a/b`, Bob renames
/// `a/b/c` to `a/b/d` (and edits the file inside). Alice's side may output
/// the zombie under either the old or the new directory name; her resolution
/// must converge Bob on `a/b/d/file`.
#[test]
fn rename_zombie_dir() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
// contents2 = contents with an extra line "x" inserted in the middle.
let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a/b/c/file", contents.to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
txn_alice.add_file("a/b/c/file").unwrap();
// Alice records the initial state.
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
// Bob clones
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
// Alice deletes "file"
repo_alice.remove_path("a/b")?;
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
// Bob renames "file"
repo_bob.rename("a/b/c", "a/b/d")?;
// ...and edits the file under its new directory.
repo_bob.write_file::<_, std::io::Error, _>("a/b/d/file", |w| {
w.write_all(contents2)?;
Ok(())
})?;
txn_bob.move_file("a/b/c", "a/b/d")?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
debug!("alice2");
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
let files_alice = repo_alice.list_files();
// The zombie directory may come back as either "a/b/d" or "a/b/c";
// normalize Alice's state towards "a/b/d" in both cases before recording.
if files_alice.iter().any(|x| x == "a/b/d/file") {
txn_alice.add_file("a/b/d/file").unwrap_or(());
} else {
assert!(files_alice.iter().any(|x| x == "a/b/c/file"));
txn_alice.move_file("a/b/c", "a/b/d").unwrap();
repo_alice.rename("a/b/c", "a/b/d").unwrap();
}
debug!("Alice records");
// Alice records her resolution of the zombie conflict.
let alice_solution = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice4")?;
debug!("Alice recorded {:?}", alice_solution);
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("repo_alice = {:?}", repo_alice.list_files());
debug!("repo_bob = {:?}", repo_bob.list_files());
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
let files_bob = repo_bob.list_files();
debug!("files_bob = {:?}", files_bob);
// Subset check (not equality): extra conflict artifacts may also be present.
assert!(["a", "a/b", "a/b/d", "a/b/d/file"]
.iter()
.all(|n| files_bob.iter().any(|m| m == n)));
Ok(())
}
/// Three-way zombie: Alice deletes `a/b` while Bob AND Charlie each edit
/// `a/b/c/file` with different contents. Everyone ends up with both a zombie
/// path conflict and an order conflict in the file; Alice's recorded
/// resolution (conflict markers stripped) must clear all conflicts for Bob
/// and Charlie.
#[test]
fn double_zombie_file() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
// Bob's edit: extra "x" line in the middle.
let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
// Charlie's edit: "b" becomes "by" plus an empty line.
let contents3 = b"a\nby\n\nc\nd\ne\nf\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let mut repo_charlie = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_charlie = env_charlie.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
repo_alice.add_file("a/b/c/file", contents.to_vec());
txn_alice.add_file("a/b/c/file").unwrap();
// Alice records the initial state.
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
// Bob and Charlie clone
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
true,
)?;
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie0")?;
// A plain clone must be conflict-free.
if !conflicts.is_empty() {
panic!("charlie has conflicts: {:?}", conflicts);
}
// Alice deletes "file"
repo_alice.remove_path("a/b")?;
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
// Bob edits "file"
repo_bob.write_file::<_, std::io::Error, _>("a/b/c/file", |w| {
w.write_all(contents2)?;
Ok(())
})?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
// Charlie edits "file"
repo_charlie.write_file::<_, std::io::Error, _>("a/b/c/file", |w| {
w.write_all(contents3)?;
Ok(())
})?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
)
.unwrap();
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie1")?;
// Alice applies Bob's and Charlie's changes
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h)?;
// NOTE(review): "debug_alice1" is reused here, overwriting the earlier dump
// — presumably unintentional; harmless to the test.
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
debug!("alice2");
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
let files_alice = repo_alice.list_files();
// The deleted tree is resurrected by the concurrent edits.
assert_eq!(files_alice, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
// Pin the conflict count; 5 is presumably the zombie paths plus the
// in-file order conflicts — verify against the output module if it changes.
assert_eq!(conflicts.len(), 5);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
ref c => panic!("unexpected conflict {:#?}", c),
}
let mut buf = Vec::new();
repo_alice.read_file("a/b/c/file", &mut buf)?;
// Alice removes conflict markers.
repo_alice.write_file::<_, std::io::Error, _>("a/b/c/file", |w| {
// Keep only short lines: conflict marker lines are long (>= 10 chars).
for l in std::str::from_utf8(&buf).unwrap().lines() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
// Alice records her resolution.
let alice_solution = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
// Bob applies
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
// NOTE(review): "debug_bob2" is reused here, overwriting the dump just above
// — presumably unintentional; harmless to the test.
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
// Bob must see the same conflicts as Alice did.
assert_eq!(conflicts.len(), 5);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
ref c => panic!("unexpected conflict {:#?}", c),
}
// Alice's resolution must clear all of Bob's conflicts.
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3")?;
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
if !conflicts.is_empty() {
panic!("bob has conflicts: {:?}", conflicts);
}
// Charlie applies
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h)?;
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie2")?;
debug!("charlie applies Alice's change");
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h)?;
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie3")?;
let conflicts = output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
true,
)?;
// Charlie must also see the same conflicts.
assert_eq!(conflicts.len(), 5);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
ref c => panic!("unexpected conflict {:#?}", c),
}
debug!("charlie applies Alice's solution");
apply::apply_change(
&changes,
&mut txn_charlie,
&mut channel_charlie,
alice_solution,
)?;
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie4")?;
let conflicts = output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
true,
)?;
// ...and Alice's resolution must clear them for Charlie too.
if !conflicts.is_empty() {
panic!("charlie has conflicts: {:?}", conflicts);
}
Ok(())
}
/// Zombie conflicts recreated AFTER a resolution: Alice renames
/// `a/b/c/file` to `a/b/c/alice`; Bob deletes the original file and then
/// resolves the resulting zombie. Charlie independently performs the same
/// delete. Applying Charlie's (redundant) deletion on top of the resolution
/// must re-introduce exactly one zombie conflict on every side.
#[test]
fn zombie_file_post_resolve() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let mut repo_charlie = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_charlie = env_charlie.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
repo_alice.add_file("a/b/c/file", contents.to_vec());
txn_alice.add_file("a/b/c/file").unwrap();
// Alice records the initial state.
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice0")?;
// Alice renames the file and records the move.
repo_alice.rename("a/b/c/file", "a/b/c/alice")?;
txn_alice.move_file("a/b/c/file", "a/b/c/alice")?;
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
// Bob clones
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
// Bob deletes "file"
repo_bob.remove_path("a/b/c/file")?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
// Applying Alice's rename after the deletion makes the file a zombie.
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h).unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("conflicts = {:#?}", conflicts);
// Exactly one zombie, under Alice's new name.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b/c/alice"),
ref c => panic!("unexpected conflict {:#?}", c),
}
debug!("Bob resolves");
// Recording the current (output) state resolves the zombie.
let bob_resolution =
record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
if !conflicts.is_empty() {
panic!("Bob has conflicts: {:?}", conflicts);
}
// Alice applies Bob's patch and solution.
debug!("Alice applies Bob's resolution");
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h).unwrap();
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_resolution).unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
if !conflicts.is_empty() {
panic!("Alice has conflicts: {:?}", conflicts);
}
// Charlie applies Alice's move and deletes (i.e. does the same as Bob).
let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h).unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
true,
)?;
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie0")?;
if !conflicts.is_empty() {
panic!("charlie has conflicts: {:?}", conflicts);
}
debug!("Charlie applies Alice's move and deletes");
// Charlie records his own deletion of the renamed file.
repo_charlie.remove_path("a/b/c/alice")?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
)
.unwrap();
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie1")?;
//
debug!("Charlie applies Bob's deletion");
apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h).unwrap();
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie2")?;
debug!("Charlie applies Bob's resolution");
apply::apply_change(
&changes,
&mut txn_charlie,
&mut channel_charlie,
bob_resolution,
)
.unwrap();
debug_to_file(&txn_charlie, &channel_charlie.borrow(), "debug_charlie3")?;
let conflicts = output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn_charlie,
&mut channel_charlie,
"",
true,
)?;
// Bob's resolution resurrects what Charlie deleted: one zombie again.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b/c/alice"),
ref c => panic!("unexpected conflict {:#?}", c),
}
//
debug!("Alice applies Charlie's deletion");
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h).unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3")?;
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
assert_eq!(conflicts.len(), 1);
// The zombie may be reported under either the old or the new name here.
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert!(path == "a/b/c/file" || path == "a/b/c/alice"),
ref c => panic!("unexpected conflict {:#?}", c),
}
//
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h).unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3")?;
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert!(path == "a/b/c/file" || path == "a/b/c/alice"),
ref c => panic!("unexpected conflict {:#?}", c),
}
Ok(())
}
/// Move vs. delete: Alice moves `file` to `alice/file`; Bob deletes `file`.
/// Bob sees the moved file as a zombie, resolves by deleting it, and his
/// resolution must clear the conflict on Alice's side as well.
#[test]
fn move_vs_delete_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("main")?;
repo_alice.add_file("file", b"a\n".to_vec());
txn_alice.add_file("file")?;
// Alice records the initial state.
let init = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
// Bob clones.
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_bob = txn_bob.open_or_create_channel("main")?;
txn_bob
.apply_change(&changes, &mut channel_bob, init)
.unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
// Alice moves "file"
repo_alice.rename("file", "alice/file").unwrap_or(());
txn_alice.move_file("file", "alice/file").unwrap_or(());
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1").unwrap();
// Bob deletes "file"
repo_bob.remove_path("file").unwrap_or(());
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0").unwrap();
// Bob applies Alice's change
debug!("Bob applies Alice's change");
txn_bob
.apply_change(&changes, &mut channel_bob, alice_h)
.unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("conflicts = {:#?}", conflicts);
// Exactly one zombie, under Alice's new name.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "alice/file"),
ref c => panic!("unexpected conflict {:#?}", c),
}
let files = repo_bob.list_files();
// Bob resolves by deleting whichever copy was output.
// NOTE(review): no "bob" path ever exists in this test, so the first branch
// presumably relies on remove_path being an Ok no-op for a missing path —
// looks copied from a move-vs-move test; confirm against working_copy::memory.
if files.iter().any(|f| f == "alice/file") {
repo_bob.remove_path("bob").unwrap()
} else {
repo_bob.remove_path("alice").unwrap()
}
let resolution = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
if !conflicts.is_empty() {
panic!("Bob has conflicts: {:?}", conflicts);
}
// Alice applies Bob's change
debug!("Alice applies Bob's change");
txn_alice
.apply_change(&changes, &mut channel_alice, bob_h)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
// Alice sees the symmetric zombie conflict.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "alice/file"),
ref c => panic!("unexpected conflict {:#?}", c),
}
debug!("Alice applies Bob's resolution");
txn_alice
.apply_change(&changes, &mut channel_alice, resolution)
.unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3").unwrap();
if !conflicts.is_empty() {
panic!("Alice has conflicts: {:?}", conflicts);
}
Ok(())
}
// Delete the context of an edit inside a file, then delete the file,
// and see if the edit has its context fixed.
/// Alice inserts a line whose surrounding context Bob first deletes and then
/// deletes the whole file. After applying both of Bob's changes, the channel
/// graph must stay consistent (no stray alive/unreachable vertices), and
/// unrecording the file deletion must still produce a usable working copy.
#[test]
fn delete_zombie_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("main")?;
repo_alice.add_file("file", b"a\nb\nc\nd\n".to_vec());
txn_alice.add_file("file")?;
// Alice records the initial state.
let init = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
// Bob clones.
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_bob = txn_bob.open_or_create_channel("main")?;
txn_bob
.apply_change(&changes, &mut channel_bob, init)
.unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
// Alice adds a zombie line.
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(b"a\nb\nx\nc\nd\n")?;
Ok(())
})?;
// Alice's change hash is not needed later, only its presence in the channel.
record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1").unwrap();
// Bob deletes the context of Alice's new line, and then deletes
// "file".
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(b"a\nd\n")?;
Ok(())
})?;
let bob_h1 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0").unwrap();
repo_bob.remove_path("file").unwrap_or(());
let bob_h2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1").unwrap();
// Alice applies Bob's changes.
debug!("Alice applies Bob's change");
txn_alice
.apply_change(&changes, &mut channel_alice, bob_h1)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2").unwrap();
debug!("Applying bob_h2");
txn_alice
.apply_change(&changes, &mut channel_alice, bob_h2)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3").unwrap();
// Graph sanity check: applying the deletions must not leave alive
// vertices outside the graph, nor unreachable ones inside it.
let (alive, reachable) = check_alive(&txn_alice, &channel_alice.borrow().graph);
if !alive.is_empty() {
panic!("alive (bob0): {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable (bob0): {:?}", reachable);
}
// Undo the file deletion and check the working copy can be re-output.
crate::unrecord::unrecord(&mut txn_alice, &mut channel_alice, &changes, &bob_h2).unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice4").unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
debug!("file = {:?}", std::str::from_utf8(&buf));
Ok(())
}
/// Move into a deleted directory: Alice moves `file` into `dir` while Bob
/// deletes `dir`. The directory becomes a zombie on Bob's side; his recorded
/// resolution must clear the conflict for Alice too.
#[test]
fn move_into_deleted_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("main")?;
repo_alice.add_file("file", b"a\n".to_vec());
repo_alice.add_dir("dir");
txn_alice.add_file("file")?;
txn_alice.add_dir("dir")?;
// Alice records the initial state (a file plus an empty directory).
let init = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
// Bob clones.
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_bob = txn_bob.open_or_create_channel("main")?;
txn_bob
.apply_change(&changes, &mut channel_bob, init)
.unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
// Alice moves "file"
repo_alice.rename("file", "dir/file").unwrap_or(());
txn_alice.move_file("file", "dir/file").unwrap_or(());
let alice_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1").unwrap();
// Bob deletes "dir"
repo_bob.remove_path("dir").unwrap_or(());
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0").unwrap();
// Bob applies Alice's change
debug!("Bob applies Alice's change");
txn_bob
.apply_change(&changes, &mut channel_bob, alice_h)
.unwrap();
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("conflicts = {:#?}", conflicts);
// The deleted directory is a zombie, resurrected by Alice's move into it.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "dir"),
ref c => panic!("unexpected conflict {:#?}", c),
}
// Recording the output state resolves the zombie.
let resolution = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
if !conflicts.is_empty() {
panic!("Bob has conflicts: {:?}", conflicts);
}
// Alice applies Bob's change
debug!("Alice applies Bob's change");
txn_alice
.apply_change(&changes, &mut channel_alice, bob_h)
.unwrap();
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
// Alice sees the symmetric zombie conflict.
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "dir"),
ref c => panic!("unexpected conflict {:#?}", c),
}
debug!("Alice applies Bob's resolution");
txn_alice
.apply_change(&changes, &mut channel_alice, resolution)
.unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice3").unwrap();
if !conflicts.is_empty() {
panic!("Alice has conflicts: {:?}", conflicts);
}
Ok(())
}
use super::*;
// Test: Alice and Bob insert different three-line runs at the same position,
// producing an order conflict. Alice resolves it (keeping parts of both sides
// and adding a new line), Bob pulls the resolution and converges; finally the
// resolution is unrecorded and the original conflict must reappear.
#[test]
fn solve_order_conflict() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
// Base file and the two divergent edits recorded on top of it.
let contents = b"a\nb\n";
let alice = b"a\nx\ny\nz\nb\n";
let bob = b"a\nu\nv\nw\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// Checks the conflict layout: the two inserted runs may be serialized in
// either order, so the inner lines (indices 2..9) are sorted before the
// comparison.
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
debug!("{:?}", conflict);
{
let mut conflict = conflict.clone();
(&mut conflict[2..9]).sort_unstable();
assert_eq!(
conflict,
vec![
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"================================",
"u",
"v",
"w",
"x",
"y",
"z",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b"
]
);
}
Ok(())
};
// check_conflict(&buf)?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
// Alice solves the conflict.
// Keeps a fixed subset of the conflict lines by index and appends "bla!"
// after index 4, i.e. the resolution also edits inside the conflict.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
for (n, l) in conflict.iter().enumerate() {
if n == 0 || n == 2 || n == 3 || n == 7 || n == 8 || n == 10 {
writeln!(w, "{}", l)?
} else if n == 4 {
writeln!(w, "{}\nbla!", l)?
} else if n == 6 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
check_conflict(&buf)?;
// Bob applies Alice's resolution
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// After the resolution no conflict-marker lines (length >= 10) remain.
assert!(std::str::from_utf8(&buf)?.lines().all(|l| l.len() < 10));
// Unrecording the resolution must bring the conflict back verbatim.
crate::unrecord::unrecord(&mut txn, &mut channel_bob, &changes, &resolution).unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob3").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
check_conflict(&buf)?;
Ok(())
}
// Test: Alice, Bob and Charlie each insert one distinct line between "a" and
// "b", creating a three-way order conflict. Alice and Bob resolve it in turn;
// Charlie applies both resolutions and must converge to Bob's resolved file.
#[test]
fn order_conflict_simple() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let alice = b"a\nx\nb\n";
let bob = b"a\ny\nb\n";
let charlie = b"a\nz\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Charlie clones
let mut repo_charlie = working_copy::memory::Memory::new();
let mut channel_charlie = txn.open_or_create_channel("charlie")?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
// Bob edits and records
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Charlie edits and records
repo_charlie.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(charlie).unwrap();
Ok(())
})?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;
debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie0").unwrap();
// Alice edits and records
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// Checks the three-way conflict. "x"/"y"/"z" (and the inner separators)
// can come out in any order, so indices 2..7 are sorted first.
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
debug!("{:?}", conflict);
{
let mut conflict = conflict.clone();
(&mut conflict[2..7]).sort_unstable();
assert_eq!(
conflict,
vec![
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"================================",
"================================",
"x",
"y",
"z",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b"
]
);
}
Ok(())
};
// check_conflict(&buf)?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
// Alice solves the conflict.
// Keeps only the one-character content lines, dropping all markers.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter().filter(|l| l.len() == 1) {
writeln!(w, "{}", l)?
}
Ok(())
})?;
// NOTE(review): `alice_resolution` is filled here but never read again —
// looks like leftover from a removed assertion; confirm before deleting.
let mut alice_resolution = Vec::new();
repo_alice.read_file("file", &mut alice_resolution)?;
info!("resolving");
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
check_conflict(&buf)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// After Alice's resolution, Bob's file still holds a two-way conflict
// between the remaining insertions (indices 2..6 sorted, as above).
{
let mut conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
(&mut conflict[2..6]).sort_unstable();
assert_eq!(
conflict,
vec![
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"================================",
"x",
"y",
"z",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b"
]
)
}
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter().filter(|l| l.len() == 1) {
writeln!(w, "{}", l)?
}
Ok(())
})?;
let mut bob_resolution = Vec::new();
repo_bob.read_file("file", &mut bob_resolution)?;
info!("resolving");
let resolution2 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
// Charlie applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h).unwrap();
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie1").unwrap();
buf.clear();
repo_charlie.read_file("file", &mut buf)?;
check_conflict(&buf)?;
// Charlie applies both resolutions and must converge to Bob's file.
apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution2).unwrap();
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
buf.clear();
repo_charlie.read_file("file", &mut buf)?;
assert_eq!(
std::str::from_utf8(&bob_resolution),
std::str::from_utf8(&buf)
);
Ok(())
}
/// Alice inserts "x\ny" and Bob inserts "u\nv" between "a" and "b", creating
/// an order conflict. Alice resolves it by keeping only the first content
/// line of each conflict side; after pulling the resolution, Bob's file must
/// equal Alice's.
///
/// Fix: the second Bob graph dump reused the name "debug_bob1", clobbering
/// the pre-resolution snapshot; it is now "debug_bob2", matching the
/// sequential naming used by the sibling tests.
#[test]
fn order_conflict_edit() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\ny\nb\n";
    let bob = b"a\nu\nv\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    // Bob edits and records
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    // Alice solves the conflict: `is_conflict` counts content lines seen
    // since the last marker, so only the first content line of each side is
    // kept; marker lines themselves are dropped.
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    let mut is_conflict = 0;
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter() {
            if l.len() == 1 {
                if is_conflict < 2 {
                    writeln!(w, "{}", l)?
                }
                is_conflict += 1
            } else if l.as_bytes()[0] == b'<' {
                is_conflict = 0
            } else {
                // === or >>>
                is_conflict = 1
            }
        }
        Ok(())
    })?;
    let mut alice_resolution = Vec::new();
    repo_alice.read_file("file", &mut alice_resolution)?;
    info!("resolving {:?}", std::str::from_utf8(&alice_resolution));
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
    let mut buf = Vec::new();
    repo_bob.read_file("file", &mut buf)?;
    // Bob applies Alice's resolution.
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Bob converges to Alice's resolved file.
    assert_eq!(alice_resolution, buf);
    Ok(())
}
/// Alice inserts "x" and Bob inserts "y" at the same spot; Alice then edits
/// the *sides* of the resulting conflict: every marker line is wrapped in
/// "preN"/"postN" lines, and the context lines after the closing marker are
/// replaced by "c". Recording then re-outputting the resolution must be
/// stable, and Bob must end up with the same set of lines.
///
/// Fix: removed a leftover `if true { … } else { … }` debug toggle around the
/// marker rewrite — the `else` branch was unreachable dead code; the live arm
/// is kept verbatim.
#[test]
fn edit_conflict_sides() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\n";
    let alice = b"a\nx\nb\nc\n";
    let bob = b"a\ny\nb\nc\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    // Bob edits and records
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
    let bob_h = record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    // Alice edits sides of the conflict.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        let mut ended = false;
        let mut n = 0;
        for l in conflict.iter() {
            debug!("line: {:?}", l);
            if l.len() > 5 {
                // Marker line (>>>, === or <<<): wrap it in preN/postN.
                if l.as_bytes()[0] == b'<' {
                    ended = true
                }
                writeln!(w, "pre{}\n{}\npost{}", n, l, n)?;
                n += 1
            } else if !ended {
                writeln!(w, "{}", l)?
            } else {
                // Context lines after the closing marker all become "c".
                debug!("writing c: {:?}", l);
                writeln!(w, "c")?
            }
        }
        Ok(())
    })?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    info!("resolving");
    let resolution = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf2 = Vec::new();
    repo.read_file("file", &mut buf2)?;
    info!("{:?}", std::str::from_utf8(&buf2).unwrap());
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Recording then outputting must reproduce Alice's file unchanged.
    assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
    // Bob applies
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    let mut buf3 = Vec::new();
    repo.read_file("file", &mut buf3)?;
    // Same line set on both sides; order may differ, hence the sorts.
    let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
    lines2.sort_unstable();
    let mut lines3: Vec<_> = std::str::from_utf8(&buf3).unwrap().lines().collect();
    lines3.sort_unstable();
    assert_eq!(lines2, lines3);
    Ok(())
}
// Test: Alice and Bob record the same insertion on separate channels, then
// Alice edits inside and after the resulting conflict.
// NOTE(review): `alice` and `bob` are byte-identical here — presumably the
// point is that two distinct changes with the same content still conflict;
// confirm this is intentional and not a copy-paste slip.
#[test]
fn edit_after_conflict() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice = b"a\nx\ny\nb\nc\n";
let bob = b"a\nx\ny\nb\nc\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
// Bob edits and records
repo.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
let bob_h = record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Alice edits and records
repo.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
// Alice edits sides of the conflict.
// Marker lines other than the closing one are wrapped in "pre"/"post";
// the lines "b" and "x" are dropped; everything else is kept verbatim.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter() {
debug!("line: {:?}", l);
if l.len() > 5 && l.as_bytes()[0] != b'<' {
writeln!(w, "pre\n{}\npost", l)?;
} else if *l != "b" && *l != "x" {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
info!("resolving");
let resolution = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo.read_file("file", &mut buf2)?;
info!("{:?}", std::str::from_utf8(&buf2).unwrap());
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Recording then re-outputting must not change Alice's file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
let mut buf3 = Vec::new();
repo.read_file("file", &mut buf3)?;
// Same set of lines on Bob's side, possibly in a different order.
let mut lines2: Vec<_> = std::str::from_utf8(&buf2)?.lines().collect();
lines2.sort_unstable();
let mut lines3: Vec<_> = std::str::from_utf8(&buf3)?.lines().collect();
lines3.sort_unstable();
assert_eq!(lines2, lines3);
Ok(())
}
// Test: Bob and Alice each record two successive edits (the second of each
// pair adds a line — "w" / "z" — right before where the conflict markers end
// up). Alice's resolution deletes those lines and inserts "end" after the
// closing marker; Bob pulls everything and must reach the same line set.
#[test]
fn delete_before_marker() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice0 = b"a\nx\ny\nb\nc\n";
let alice1 = b"a\nx\ny\nz\nb\nc\n";
let bob0 = b"a\nu\nv\nb\nc\n";
let bob1 = b"a\nu\nv\nw\nb\nc\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
// Bob edits and records
// Each write is recorded separately, yielding one change hash per edit.
let bob_edits: &[&[u8]] = &[bob0, bob1];
let bob_changes: Vec<_> = bob_edits
.iter()
.map(|bob| {
repo.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})
.unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "").unwrap()
})
.collect();
// Alice edits and records
let alice_edits: &[&[u8]] = &[alice0, alice1];
let alice_changes: Vec<_> = alice_edits
.iter()
.map(|alice| {
repo.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})
.unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "").unwrap()
})
.collect();
// Alice applies Bob's changes
for bob_h in bob_changes.iter() {
apply::apply_change(&changes, &mut txn, &mut channel_alice, *bob_h)?;
}
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
// Alice edits sides of the conflict.
// Drops "z"/"w", keeps the closing marker, and inserts "end" before the
// first line that follows the closing marker.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo.write_file::<_, std::io::Error, _>("file", |w| {
let mut ended = false;
for l in conflict.iter() {
debug!("line: {:?}", l);
if *l == "z" || *l == "w" {
} else if l.starts_with("<<<") {
writeln!(w, "{}", l)?;
ended = true
} else if ended {
writeln!(w, "end\n{}", l)?;
ended = false
} else {
writeln!(w, "{}", l)?;
}
}
Ok(())
})?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
info!("resolving");
let conflict_edits = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo.read_file("file", &mut buf2)?;
info!("{:?}", std::str::from_utf8(&buf2).unwrap());
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Recording then re-outputting must not change the file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
// Bob pulls
for alice_h in alice_changes.iter() {
apply::apply_change(&changes, &mut txn, &mut channel_bob, *alice_h)?;
}
apply::apply_change(&changes, &mut txn, &mut channel_bob, conflict_edits)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
buf2.clear();
repo.read_file("file", &mut buf2)?;
// Same line set on both sides; order may differ, hence the sorts.
let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
lines.sort_unstable();
let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
lines2.sort_unstable();
assert_eq!(lines, lines2);
Ok(())
}
// Test: Alice and Bob both replace the final line of the file, and neither
// version ends with a trailing newline ("a\nx" vs "a\ny"), so the conflict
// sits on the very last line. Alice resolves; Bob must converge.
#[test]
fn conflict_last_line() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\n";
let alice = b"a\nx";
let bob = b"a\ny";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// Checks the conflict on the last line; "x"/"y" may appear in either
// order, so indices 2..5 are sorted before comparison.
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
debug!("{:?}", conflict);
{
let mut conflict = conflict.clone();
(&mut conflict[2..5]).sort_unstable();
assert_eq!(
conflict,
vec![
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"================================",
"x",
"y",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
]
);
}
Ok(())
};
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
// Alice solves the conflict.
// Keeps only the short content lines ("a", "x", "y"), dropping markers;
// note writeln! re-adds a trailing newline to the last line.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter().filter(|l| l.len() <= 2) {
writeln!(w, "{}", l)?
}
Ok(())
})?;
info!("resolving");
let mut buf_alice = Vec::new();
repo_alice.read_file("file", &mut buf_alice)?;
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
check_conflict(&buf)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob converges to Alice's resolved file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf_alice));
Ok(())
}
// Test: Bob empties the file while Alice edits its last line (no trailing
// newline). Applying both leaves Alice's edit as a zombie wrapped in conflict
// markers; Alice resolves by keeping "x", and Bob must converge.
#[test]
fn zombie_last_line() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb";
let alice = b"a\nx";
let bob = b"";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// The whole file is reduced to the zombie line "x" between markers.
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
assert_eq!(
conflict,
vec![
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"x",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
]
);
Ok(())
};
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
// Outputting the conflict must leave nothing left to record: a fresh
// record pass right after the output produces no actions.
{
let mut state = Builder::new();
state
.record(
&mut txn,
Algorithm::default(),
&mut channel_alice.borrow_mut(),
&mut repo_alice,
&changes,
"",
)
.unwrap();
let rec = state.finish();
assert!(rec.actions.is_empty())
}
// Alice solves the conflict.
// Writes just "x" with no trailing newline.
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
write!(w, "x")?;
Ok(())
})?;
info!("resolving");
let mut buf_alice = Vec::new();
repo_alice.read_file("file", &mut buf_alice)?;
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
check_conflict(&buf)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob converges to Alice's resolved file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf_alice));
Ok(())
}
#[test]
fn edit_post_conflict() -> Result<(), anyhow::Error> {
    // The two sides of the conflict may be output in either order; both
    // arrangements are accepted by the check closure.
    const SIDE_A: [&str; 11] = [
        "a",
        ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
        "0",
        "1",
        "2",
        "================================",
        "3",
        "4",
        "5",
        "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
        "b",
    ];
    const SIDE_B: [&str; 11] = [
        "a",
        ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
        "3",
        "4",
        "5",
        "================================",
        "0",
        "1",
        "2",
        "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
        "b",
    ];
    edit_post_conflict_(
        |contents| {
            // The conflicted file must match one of the two possible orders.
            let lines: Vec<_> = std::str::from_utf8(&contents).unwrap().lines().collect();
            assert!(lines == SIDE_A || lines == SIDE_B)
        },
        |contents, out| {
            // Resolve by editing the line just before the conflict and
            // dropping the first line of each conflict side.
            let lines: Vec<_> = std::str::from_utf8(&contents).unwrap().lines().collect();
            for line in lines.iter() {
                match *line {
                    // Edit the context line preceding the conflict.
                    "a" => writeln!(out, "a\na'")?,
                    // Drop the first line of each side.
                    "0" | "3" => {}
                    // Keep the remaining one-character lines; the conflict
                    // markers are longer and are dropped by the guard.
                    l if l.len() == 1 => writeln!(out, "{}", l)?,
                    _ => {}
                }
            }
            Ok(())
        },
    )
}
#[test]
fn edit_around_conflict() -> Result<(), anyhow::Error> {
    // The two sides of the conflict may be output in either order; both
    // arrangements are accepted by the check closure.
    const SIDE_A: [&str; 11] = [
        "a",
        ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
        "0",
        "1",
        "2",
        "================================",
        "3",
        "4",
        "5",
        "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
        "b",
    ];
    const SIDE_B: [&str; 11] = [
        "a",
        ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
        "3",
        "4",
        "5",
        "================================",
        "0",
        "1",
        "2",
        "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
        "b",
    ];
    edit_post_conflict_(
        |contents| {
            // The conflicted file must match one of the two possible orders.
            let lines: Vec<_> = std::str::from_utf8(&contents).unwrap().lines().collect();
            assert!(lines == SIDE_A || lines == SIDE_B)
        },
        |contents, out| {
            // Edit only the context lines *around* the conflict, leaving the
            // conflict itself (markers included) untouched.
            let lines: Vec<_> = std::str::from_utf8(&contents).unwrap().lines().collect();
            for line in lines.iter() {
                match *line {
                    // Edit the line before the conflict.
                    "a" => writeln!(out, "a\na'")?,
                    // Replace the line after the conflict.
                    "b" => writeln!(out, "c")?,
                    // Everything else, conflict markers included, is kept.
                    other => writeln!(out, "{}", other)?,
                }
            }
            Ok(())
        },
    )
}
/// Shared driver for `edit_post_conflict` and `edit_around_conflict`.
///
/// Alice and Bob start from the same two-line file ("a\nb\n"); each inserts
/// a different three-line block between "a" and "b", and Alice merges Bob's
/// change, producing a conflict. `check` is run on the conflicted file as
/// seen first by Alice and then by Bob; `resolve` writes Alice's resolution
/// given the conflicted bytes. After Bob applies Alice's edits and her
/// resolution, both working copies must contain the same set of lines
/// (compared order-insensitively).
fn edit_post_conflict_<
    Check: FnMut(&[u8]),
    Resolve: FnOnce(&[u8], &mut dyn std::io::Write) -> Result<(), std::io::Error>,
>(
    mut check: Check,
    resolve: Resolve,
) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\n0\n1\n2\nb\n";
    let bob = b"a\n3\n4\n5\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    // Initial change: the common ancestor both channels share.
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    // The merged file should now be conflicted; let the caller verify it.
    check(&buf);
    // Alice solves the conflict.
    repo_alice.write_file::<_, std::io::Error, _>("file", |mut w| {
        resolve(&buf, &mut w)?;
        Ok(())
    })?;
    info!("resolving");
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Bob must see the same conflict Alice saw before her resolution.
    check(&buf);
    // Bob applies Alice's solution.
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
    let mut buf2 = Vec::new();
    repo_bob.read_file("file", &mut buf2)?;
    buf.clear();
    repo_alice.read_file("file", &mut buf)?;
    // Compare the two working copies line-by-line, ignoring order: the
    // resolution may interleave lines differently on each side.
    let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
    lines.sort_unstable();
    let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
    lines2.sort_unstable();
    assert_eq!(lines, lines2);
    Ok(())
}
/// A conflict inside a conflict: Alice and Bob insert different single
/// lines at the same spot ("x" vs "y"), creating a conflict. Then each of
/// them "resolves" by inserting yet another line before "x" while the
/// conflict markers are still in the file, so the two resolutions conflict
/// with each other as well. After exchanging both resolutions, the two
/// working copies must contain the same set of lines.
#[test]
fn nested_conflict() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\nb\n";
    let bob = b"a\ny\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    debug!("Alice records");
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    debug!("Alice applies");
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    // Alice solves the conflict.
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    // Alice inserts "u" before "x" with the conflict markers still present,
    // recording a resolution that itself edits inside the conflict.
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        let buf = std::str::from_utf8(&buf).unwrap();
        w.write_all(buf.replace("x\n", "u\nx\n").as_bytes())?;
        Ok(())
    })?;
    info!("resolving");
    let resolution_alice = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    // Bob resolves.
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Bob does the same thing independently, inserting "i" before "x".
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        let buf = std::str::from_utf8(&buf).unwrap();
        w.write_all(buf.replace("x\n", "i\nx\n").as_bytes())?;
        Ok(())
    })?;
    info!("resolving");
    let resolution_bob = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Alice applies Bob's resolution.
    apply::apply_change(&changes, &mut txn, &mut channel_alice, resolution_bob).unwrap();
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice3").unwrap();
    buf.clear();
    repo_alice.read_file("file", &mut buf)?;
    debug!("{}", std::str::from_utf8(&buf).unwrap());
    // Bob applies Alice's resolution
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution_alice).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    let mut buf2 = Vec::new();
    repo_bob.read_file("file", &mut buf2)?;
    // Both sides must converge to the same set of lines (order-insensitive).
    let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
    lines.sort_unstable();
    let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
    lines2.sort_unstable();
    assert_eq!(lines, lines2);
    Ok(())
}
/// Zombie lines created by deleting the context of someone else's edit.
///
/// Alice records a sequence of five changes that first grow the file and
/// then progressively delete everything. Bob clones only the first change,
/// inserts "x" before "c" (keeping "c" reachable from the root), then
/// applies Alice's remaining deletions — which remove the context of his
/// insertion, leaving "x" as a zombie rendered inside conflict markers.
/// Bob resolves by writing "x\nc\n"; Alice then applies Bob's change and
/// his resolution and must end up with the same file as Bob.
#[test]
fn zombie_context_resolution() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    // Unlike most tests here, Alice and Bob each get their own pristine.
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    // Alice records
    txn_alice.add_file("file").unwrap();
    repo_alice.add_file("file", b"".to_vec());
    // Successive states of Alice's file: grow to "a\nb\nc\n", then delete
    // everything again, one line at a time.
    let x: &[&[u8]] = &[b"c\n", b"a\nc\n", b"a\nb\nc\n", b"a\n", b""];
    let p_alice: Vec<_> = x
        .iter()
        .map(|c| {
            repo_alice
                .write_file::<_, std::io::Error, _>("file", |w| {
                    w.write_all(c)?;
                    Ok(())
                })
                .unwrap();
            record_all(
                &mut repo_alice,
                &changes,
                &mut txn_alice,
                &mut channel_alice,
                "",
            )
            .unwrap()
        })
        .collect();
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    // Bob only takes Alice's first change (file = "c\n").
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, p_alice[0]).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob")?;
    // Bob creates an order conflict just to keep line "c" connected
    // to the root.
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"x\nc\n")?;
        Ok(())
    })?;
    debug!("bob records conflict");
    let p_bob = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    // Bob applies all of Alice's other changes
    for (n, p) in (&p_alice[1..]).iter().enumerate() {
        info!("{}. Applying {:?}", n, p);
        apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, *p).unwrap();
        debug_to_file(&txn_bob, &channel_bob.borrow(), &format!("debug_bob_{}", n))?;
        // if n == 2 {
        //     panic!("n")
        // }
    }
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_bob.read_file("file", &mut buf)?;
    debug!("file = {:?}", std::str::from_utf8(&buf));
    // Alice deleted everything, so only Bob's zombie "x" survives, wrapped
    // in conflict markers.
    assert_eq!(
        std::str::from_utf8(&buf),
        Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
    );
    // Bob resolves the zombie conflict by keeping "x" and restoring "c".
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"x\nc\n")?;
        Ok(())
    })?;
    let resolution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    assert_eq!(buf, b"x\nc\n");
    // Alice applies Bob's change and resolution.
    debug!("Alice applies Bob's change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, p_bob).unwrap();
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice1")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf2 = Vec::new();
    repo_alice.read_file("file", &mut buf2)?;
    // Before the resolution, Alice sees the same zombie conflict Bob saw.
    assert_eq!(
        std::str::from_utf8(&buf2),
        Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
    );
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_alice2")?;
    let mut buf2 = Vec::new();
    repo_alice.read_file("file", &mut buf2)?;
    // After the resolution both sides are identical.
    assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
    Ok(())
}
/// Zombies where the resolution keeps only part of the zombie block.
///
/// Alice records "a\nb\nc\nd\n" then deletes everything. Bob clones only
/// the first change, inserts "x\ny\nz" in the middle, then applies Alice's
/// deletion: his three inserted lines lose their context and come back as
/// a zombie conflict. Bob resolves keeping only "z" (plus restored "a" and
/// "d"); Alice applies Bob's change and resolution and must converge to
/// the same file.
#[test]
fn zombie_half_survivor() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    // Separate pristines for Alice and Bob.
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    // Alice records
    txn_alice.add_file("file").unwrap();
    repo_alice.add_file("file", b"".to_vec());
    // Two states: full file, then empty (delete everything).
    let x: &[&[u8]] = &[b"a\nb\nc\nd\n", b""];
    let p_alice: Vec<_> = x
        .iter()
        .map(|c| {
            repo_alice
                .write_file::<_, std::io::Error, _>("file", |w| {
                    w.write_all(c)?;
                    Ok(())
                })
                .unwrap();
            record_all(
                &mut repo_alice,
                &changes,
                &mut txn_alice,
                &mut channel_alice,
                "",
            )
            .unwrap()
        })
        .collect();
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, p_alice[0]).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // Bob inserts "x\ny\nz" between "b" and "c"; Alice's deletion will
    // remove the surrounding context, turning these lines into zombies.
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nb\nx\ny\nz\nc\nd\n")?;
        Ok(())
    })?;
    let p_bob = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0")?;
    // Bob applies all of Alice's other changes
    for p in &p_alice[1..] {
        apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, *p).unwrap();
    }
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1_")?;
    let mut buf = Vec::new();
    repo_bob.read_file("file", &mut buf)?;
    // Only Bob's zombie block survives, wrapped in conflict markers.
    assert_eq!(
        std::str::from_utf8(&buf),
        Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\ny\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
    );
    // Resolution keeps only half of the zombie block ("z"), restoring
    // "a" and "d" around it.
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(b"a\nz\nd\n")?;
        Ok(())
    })?;
    let resolution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2")?;
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    assert_eq!(buf, b"a\nz\nd\n");
    // Alice applies Bob's change and resolution.
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, p_bob).unwrap();
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf2 = Vec::new();
    repo_alice.read_file("file", &mut buf2)?;
    // Both sides must be byte-identical after the resolution.
    assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
    Ok(())
}
/// Three-way scenario around a zombie line.
///
/// Starting from "u\na\nb\nc\nd\nv\n": Alice inserts "x" between "b" and
/// "c"; Bob deletes "b\nc" (the context of Alice's insertion); Charlie
/// deletes even more ("a".."d"). Alice merges Bob's deletion, resolves the
/// resulting zombie as "u\na\nx\nd\nv\n", and the test then applies the
/// changes and the resolution to Bob's and Charlie's channels in different
/// orders. There is no final assertion: the test passes if every apply
/// succeeds (the debug dumps are for manual graph inspection).
#[test]
fn three_way_zombie() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"u\na\nb\nc\nd\nv\n";
    let alice = b"u\na\nb\nx\nc\nd\nv\n";
    let bob = b"u\na\nd\nv\n";
    let alice_bob = b"u\na\nx\nd\nv\n";
    let charlie = b"u\nv\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Charlie clones
    let mut repo_charlie = working_copy::memory::Memory::new();
    let mut channel_charlie = txn.open_or_create_channel("charlie")?;
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
        true,
    )?;
    // Alice adds a line.
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Bob deletes the context.
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Charlie also deletes the context.
    repo_charlie.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(charlie).unwrap();
        Ok(())
    })?;
    // NOTE(review): Charlie's change hash is deliberately discarded — it is
    // never applied to the other channels in this test.
    record_all(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
    )?;
    debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    debug!("alice = {:?}", std::str::from_utf8(&buf));
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    // Alice solves the conflict.
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| Ok(w.write_all(alice_bob)?))?;
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Bob applies Alice's edits and resolution.
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
    // Charlie applies all changes
    /*output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
    )?;*/
    // Charlie receives the changes in a different order than Bob did,
    // with a debug dump after each apply.
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h)?;
    debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie1").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h)?;
    debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie2").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution)?;
    debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie3").unwrap();
    Ok(())
}
/// Cyclic conflicts: Alice and Bob independently record the *same*
/// resolution of the same conflict, then Charlie's independent edit is
/// mixed in, re-creating a conflict. The conflict is detected by line
/// length: ordinary content lines are short, conflict markers are 32
/// characters, so `l.len() >= 10` means "marker present". The test also
/// checks that unrecording the last resolution brings the conflict back.
#[test]
fn cyclic_conflict_resolution() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\ny\nz\nb\n";
    let bob = b"a\nu\nv\nw\nb\n";
    let charlie = b"a\nU\nV\nW\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Charlie clones and makes something independent.
    let mut repo_charlie = working_copy::memory::Memory::new();
    let mut channel_charlie = txn.open_or_create_channel("charlie")?;
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
        true,
    )?;
    repo_charlie.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(charlie).unwrap();
        Ok(())
    })?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
    )?;
    info!("Done outputting Charlie's working_copy");
    {
        let mut buf = Vec::new();
        repo_charlie.read_file("file", &mut buf).unwrap();
        info!("Charlie = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice").unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice edits and records
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    debug!("alice: {:?}", std::str::from_utf8(&buf));
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
    // Alice solves the conflict.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    // Resolution strategy: keep every non-marker line (markers are 32
    // characters long, content lines are shorter than 10).
    repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter() {
            if l.len() < 10 {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    info!("resolving");
    let alices_resolution =
        record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    debug!("bob: {:?}", std::str::from_utf8(&buf));
    // Bob independently records the exact same resolution as Alice.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter() {
            if l.len() < 10 {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    info!("resolving");
    let _bobs_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    // Bob applies Alice's resolution
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alices_resolution).unwrap();
    // Bob applies Charlie's side
    apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
    debug!("outputting bob2");
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob3").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Check that there is a conflict.
    assert!(std::str::from_utf8(&buf)?.lines().any(|l| l.len() >= 10));
    debug!("{:?}", std::str::from_utf8(&buf));
    // Solve it again, in the same way and output the result.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
        for l in conflict.iter() {
            if l.len() < 10 {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    debug!("resolving again");
    let second_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob4").unwrap();
    // Check that the conflict is gone.
    assert!(std::str::from_utf8(&buf)?.lines().all(|l| l.len() < 10));
    // Unrecord the second resolution: the conflict must reappear.
    crate::unrecord::unrecord(&mut txn, &mut channel_bob, &changes, &second_resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    debug_to_file(&txn, &channel_bob.borrow(), "debug_bob5").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // Check that the conflict is back.
    assert!(std::str::from_utf8(&buf)?.lines().any(|l| l.len() >= 10));
    Ok(())
}
#[test]
fn cyclic_zombies() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice = b"a\nx\ny\nz\nb\nc\n";
let alice2 = b"a\nx\nX\ny\nz\nb\nc\n";
let alice3 = b"a\nx\nX\nY\ny\nz\nb\nc\n";
let bob = b"a\nu\nv\nw\nb\nc\n";
let bob2 = b"a\nu\nU\nv\nw\nb\nc\n";
let bob3 = b"a\nu\nU\nV\nv\nw\nb\nc\n";
let charlie = b"a\nc\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
let bob_h1 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob2).unwrap();
Ok(())
})?;
let bob_h2 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(bob3).unwrap();
Ok(())
})?;
let bob_h3 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h1 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice2).unwrap();
Ok(())
})?;
let alice_h2 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(alice3).unwrap();
Ok(())
})?;
let alice_h3 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h3)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
debug!("alice: {:?}", std::str::from_utf8(&buf));
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice1").unwrap();
// Alice solves the conflict.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
let alices_resolution =
record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
debug_to_file(&txn, &channel_alice.borrow(), "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h1).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h2).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h3).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
debug!("bob: {:?}", std::str::from_utf8(&buf));
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_bob.write_file::<_, std::io::Error, _>("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
// Bob solves the conflict
let bobs_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
// Charlie clones and deletes
let mut repo_charlie = working_copy::memory::Memory::new();
let mut channel_charlie = txn.open_or_create_channel("charlie")?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h3)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h3)?;
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
repo_charlie.write_file::<_, std::io::Error, _>("file", |w| {
w.write_all(charlie).unwrap();
Ok(())
})?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;
// Bob applies Alice's resolution
apply::apply_change(&changes, &mut txn, &mut channel_bob, alices_resolution).unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob2").unwrap();
debug!("outputting bob2");
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob3").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob applies Charlie's side
debug!("applying charlie's patch");
apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob4").unwrap();
let (alive_, reachable_) = check_alive(&txn, &channel_bob.borrow().graph);
if !alive_.is_empty() {
error!("alive (bob0): {:?}", alive_);
}
if !reachable_.is_empty() {
error!("reachable (bob0): {:?}", reachable_);
}
debug!("outputting bob's repo");
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
debug_to_file(&txn, &channel_bob.borrow(), "debug_bob5").unwrap();
let (alive, reachable) = check_alive(&txn, &channel_bob.borrow().graph);
if !alive.is_empty() {
panic!("alive (bob1): {:?}", alive);
} else if !alive_.is_empty() {
panic!("alive_ (bob1): {:?}", alive_);
}
if !reachable.is_empty() {
panic!("reachable (bob1): {:?}", reachable);
} else if !reachable_.is_empty() {
panic!("reachable_ (bob1): {:?}", reachable_);
}
// Symmetric: Charlie applies the other sides.
debug!("Charlie applies");
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alices_resolution).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bobs_resolution).unwrap();
debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie").unwrap();
let (alive, reachable) = check_alive(&txn, &channel_charlie.borrow().graph);
if !alive.is_empty() {
panic!("alive (charlie0): {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable (charlie0): {:?}", reachable);
}
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
debug_to_file(&txn, &channel_charlie.borrow(), "debug_charlie1").unwrap();
let (alive, reachable) = check_alive(&txn, &channel_charlie.borrow().graph);
if !alive.is_empty() {
panic!("alive (charlie1): {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable (charlie1): {:?}", reachable);
}
Ok(())
}
/// Bob moves `a` into `b` while Alice moves `b` into `a`; applying
/// both sides creates a directory cycle in the graph. Check that
/// output still terminates, that no vertex is dead-but-alive, and
/// that the resolution can be recorded.
#[test]
fn cyclic_files() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/file", contents.to_vec());
    repo_alice.add_file("b/file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("a/file")?;
    txn_alice.add_file("b/file")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones and moves a -> b/a
    let mut channel_bob = txn_bob.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    txn_bob.move_file("a", "b/a").unwrap();
    repo_bob.rename("a", "b/a").unwrap();
    let ab = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    // Alice moves b -> a/b (the two moves together form a cycle).
    txn_alice.move_file("b", "a/b").unwrap();
    repo_alice.rename("b", "a/b").unwrap();
    let _ba = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, ab)?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug").unwrap();
    debug!("outputting cycle");
    // Outputting with a cycle in the file graph: this must terminate.
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let v: Vec<_> = txn_alice.iter_working_copy().collect();
    println!("{:?}", v);
    // Graph sanity check after applying both sides of the cycle.
    let (alive, reachable) = check_alive(&txn_alice, &channel_alice.borrow().graph);
    if !alive.is_empty() {
        panic!("alive: {:?}", alive);
    }
    if !reachable.is_empty() {
        panic!("reachable: {:?}", reachable);
    }
    debug!("recording the resolution");
    let _resolution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug2").unwrap();
    Ok(())
}
/// Bob moves `a/b/file` to `c/d/file` and deletes `a`; after Alice
/// applies his change, her inode/tree tables must still be consistent
/// with the graph (checked by `check_tree_inodes`).
#[test]
fn tree_inodes_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("a/b/file")?;
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("bob")?;
    txn_bob.add_file("a/b/file")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    debug_to_file(&txn_alice, &channel_alice.borrow(), "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    // Bob moves and deletes a/b
    repo_bob.rename("a/b/file", "c/d/file")?;
    txn_bob.move_file("a/b/file", "c/d/file")?;
    repo_bob.remove_path("a")?;
    txn_bob.remove_file("a")?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob0").unwrap();
    // Alice applies
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    // Sanity check on Alice's inode/tree tables.
    check_tree_inodes(&txn_alice, &channel_alice.borrow());
    Ok(())
}
/// Sanity check: every inode known to `txn` must (a) be connected to
/// the root through the `revtree` table, and (b) refer to a vertex
/// that is alive in the channel's graph. Panics on any violation.
fn check_tree_inodes<T: TxnT>(txn: &T, channel: &T::Channel) {
    // Sanity check
    for x in txn.iter_inodes().unwrap() {
        let (inode, vertex) = x.unwrap();
        debug!("inode = {:?}, vertex = {:?}", inode, vertex);
        // Walk the reverse-tree chain up to the root; a missing
        // parent entry means the tree tables are corrupt.
        let mut inode_ = inode;
        while !inode_.is_root() {
            if let Some(next) = txn.get_revtree(inode_, None).unwrap() {
                debug!("next = {:?}", next);
                inode_ = next.parent_inode;
            } else {
                panic!("inode = {:?}, inode_ = {:?}", inode, inode_);
            }
        }
        if !is_alive(txn, T::graph(&channel), vertex.inode_vertex()).unwrap() {
            // Dump the dead vertex's adjacent edges before panicking,
            // to make the failure diagnosable from the logs.
            for e in iter_adjacent(
                txn,
                T::graph(&channel),
                vertex.inode_vertex(),
                EdgeFlags::empty(),
                EdgeFlags::all(),
            )
            .unwrap()
            {
                error!("{:?} {:?} {:?}", inode, vertex, e)
            }
            panic!(
                "inode {:?}, vertex {:?}, is not alive, {:?}",
                inode,
                vertex,
                tree_path(txn, vertex)
            )
        }
    }
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Record two changes on one pristine, read them back from the channel
/// log, apply them in order to a fresh pristine, and check the clone
/// reproduces the same file contents.
#[test]
fn clone_simple() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let contents2 = b"a\nb\n\nc\nd\nx\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut recorded_changes = Vec::new();
    let mut txn = env.mut_txn_begin();
    {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        txn.add_file("file")?;
        recorded_changes.push(record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap());
        debug_to_file(&txn, &channel.borrow(), "debug0").unwrap();
        repo.write_file::<_, std::io::Error, _>("file", |w| {
            w.write_all(contents2).unwrap();
            Ok(())
        })
        .unwrap();
        recorded_changes.push(record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap());
        debug_to_file(&txn, &channel.borrow(), "debug1").unwrap();
    }
    txn.commit().unwrap();
    // Re-read the channel log from a fresh read-only transaction: it
    // must list exactly the changes recorded above, in order.
    let mut channel_changes = Vec::new();
    {
        let txn = env.txn_begin()?;
        for channel in txn.iter_channels("").unwrap() {
            let channel = channel.unwrap();
            for x in txn.log(&channel.borrow(), 0).unwrap() {
                let (_, (i, _)) = x.unwrap();
                channel_changes.push(i)
            }
        }
    }
    info!("{:?}", channel_changes);
    assert_eq!(channel_changes, recorded_changes);
    // "Clone": apply the logged changes to a second, empty pristine.
    let mut repo2 = working_copy::memory::Memory::new();
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn2 = env2.mut_txn_begin();
    {
        let mut channel = txn2.open_or_create_channel("main2").unwrap();
        for h in channel_changes.iter() {
            info!("applying {:?}", h);
            apply::apply_change(&changes, &mut txn2, &mut channel, *h).unwrap();
            debug_to_file(&txn2, &channel.borrow(), "debug2").unwrap();
            output::output_repository_no_pending(
                &mut repo2,
                &changes,
                &mut txn2,
                &mut channel,
                "",
                true,
            )
            .unwrap();
        }
        // The clone must contain exactly the final version of "file".
        assert_eq!(repo2.list_files(), vec!["file".to_string()]);
        let mut file = Vec::new();
        repo2.read_file("file", &mut file).unwrap();
        assert_eq!(file, contents2);
    }
    txn2.commit().unwrap();
    Ok(())
}
/// Clone with a prefix: outputting only `e/f` must materialise just
/// that subtree on disk, and a change recorded under prefix
/// `a/b/c/d` must not pick up concurrent edits outside that prefix.
#[test]
fn clone_prefixes() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c/d", contents.to_vec());
    repo.add_file("e/f/g/h", contents.to_vec());
    repo.add_file("i/j/k/l", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let h = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        txn.add_file("a/b/c/d")?;
        txn.add_file("e/f/g/h")?;
        txn.add_file("i/j/k/l")?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "")?
    };
    // Edit two files but record only under "a/b/c/d": the edit to
    // e/f/g/h must not be part of h2 (asserted further down).
    let h2 = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        repo.write_file::<_, std::io::Error, _>("a/b/c/d", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        repo.write_file::<_, std::io::Error, _>("e/f/g/h", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "a/b/c/d")?
    };
    txn.commit().unwrap();
    // Cloning
    debug!("Cloning");
    let mut repo2 = working_copy::memory::Memory::new();
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn2 = env2.mut_txn_begin();
    {
        let mut channel = txn2.open_or_create_channel("main2").unwrap();
        apply::apply_change(&changes, &mut txn2, &mut channel, h).unwrap();
        // Output only the "e/f" prefix: nothing else appears on disk.
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "e/f",
            true,
        )?;
        assert_eq!(
            repo2.list_files(),
            ["e", "e/f", "e/f/g", "e/f/g/h"]
                .iter()
                .map(|x| x.to_string())
                .collect::<Vec<_>>()
        );
        apply::apply_change(&changes, &mut txn2, &mut channel, h2).unwrap();
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "",
            true,
        )?;
        let mut buf = Vec::new();
        repo2.read_file("a/b/c/d", &mut buf)?;
        assert_eq!(buf, b"edits\n");
        buf.clear();
        // e/f/g/h keeps its original contents: its edit was outside
        // the prefix recorded in h2.
        repo2.read_file("e/f/g/h", &mut buf)?;
        assert_eq!(buf, contents);
    }
    txn2.commit().unwrap();
    let mut txn2 = env2.mut_txn_begin();
    txn2.open_or_create_channel("main2").unwrap();
    Ok(())
}
use crate::change::*;
use crate::changestore::*;
use crate::pristine::*;
use crate::record::*;
use crate::working_copy::*;
use crate::*;
/// Serialize `change` to a temporary file, corrupt its hash, and
/// check that deserialization with the corrupted hash is rejected,
/// while `ChangeFile::open` still exposes the original hashed part.
fn hash_mismatch(change: &Change) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    use crate::change::*;
    let mut tmp = tempfile::NamedTempFile::new()?;
    // Serialize, then flip the low byte of the hash.
    let mut corrupted = change.serialize(&mut tmp)?;
    if let crate::pristine::Hash::Blake3(ref mut bytes) = corrupted {
        bytes[0] = bytes[0].wrapping_add(1)
    } else {
        unreachable!()
    }
    let path = tmp.path().to_str().unwrap();
    // Deserializing with the wrong expected hash must be detected.
    match Change::deserialize(path, Some(&corrupted)) {
        Err(ChangeError::ChangeHashMismatch { .. }) => {}
        _ => unreachable!(),
    }
    // Opening the file directly still yields the original hashed part.
    let file = ChangeFile::open(corrupted, path)?;
    assert_eq!(file.hashed(), &change.hashed);
    Ok(())
}
/// Build a change by hand (record → globalize → `make_change`), save
/// and apply it, then check corrupted-hash detection via
/// `hash_mismatch`.
#[test]
fn hash_mism() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let store = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    repo.add_file("file2", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    txn.add_file("file")?;
    txn.add_file("file2")?;
    // Record manually instead of going through record_all, so the
    // intermediate recording state is visible to the test.
    let mut state = Builder::new();
    state
        .record(
            &mut txn,
            Algorithm::Myers,
            &mut channel.borrow_mut(),
            &mut repo,
            &store,
            "",
        )
        .unwrap();
    let rec = state.finish();
    let changes: Vec<_> = rec
        .actions
        .into_iter()
        .map(|rec| rec.globalize(&txn).unwrap())
        .collect();
    info!("changes = {:?}", changes);
    let change0 = crate::change::Change::make_change(
        &txn,
        &channel,
        changes,
        rec.contents,
        crate::change::ChangeHeader {
            message: "test".to_string(),
            authors: vec![],
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    )
    .unwrap();
    let hash0 = store.save_change(&change0)?;
    apply::apply_local_change(&mut txn, &mut channel, &change0, hash0, &rec.updatables)?;
    // The actual check: a corrupted hash must be rejected.
    hash_mismatch(&change0)?;
    debug_to_file(&txn, &channel.borrow(), "debug")?;
    Ok(())
}
/// Record every pending change under `prefix` on `channel`, build a
/// `Change` with a fixed test header, save it in `store` and apply it
/// locally. Returns the change's hash together with the change itself.
fn record_all<T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
    repo: &mut R,
    store: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
) -> Result<(Hash, Change), anyhow::Error>
where
    R::Error: Send + Sync + 'static,
{
    let mut state = Builder::new();
    state.record(
        txn,
        Algorithm::default(),
        &mut channel.borrow_mut(),
        repo,
        store,
        prefix,
    )?;
    let rec = state.finish();
    // Globalize the recorded actions before building the change.
    let changes = rec
        .actions
        .into_iter()
        .map(|rec| rec.globalize(txn).unwrap())
        .collect();
    let change0 = crate::change::Change::make_change(
        txn,
        &channel,
        changes,
        rec.contents,
        crate::change::ChangeHeader {
            message: "test".to_string(),
            authors: vec![],
            description: None,
            // Beware of changing the following line: two changes
            // doing the same thing will be equal. Sometimes we don't
            // want that, as in tests::unrecord::unrecord_double.
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    )
    .unwrap();
    let hash = store.save_change(&change0)?;
    apply::apply_local_change(txn, channel, &change0, hash, &rec.updatables)?;
    Ok((hash, change0))
}
/// Round-trip several kinds of changes (file addition, edit,
/// deletion, move, and a name-conflict resolution) through the text
/// format, using `text_test` for each one.
#[cfg(feature = "text-changes")]
#[test]
fn text() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let store = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    repo.add_file("file2", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    txn.add_file("file")?;
    txn.add_file("file2")?;
    // Addition of two files.
    let (h0, change0) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
    text_test(&store, &change0, h0);
    // An edit.
    repo.write_file::<_, std::io::Error, _>("file", |w| {
        write!(w, "a\nx\nc\ne\ny\nf\n")?;
        Ok(())
    })?;
    let (h1, change1) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
    text_test(&store, &change1, h1);
    // A deletion.
    repo.remove_path("file2")?;
    let (h2, change2) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
    text_test(&store, &change2, h2);
    // A move.
    repo.rename("file", "file3")?;
    txn.move_file("file", "file3")?;
    let (h3, change3) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
    text_test(&store, &change3, h3);
    // name conflicts
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn2 = env2.mut_txn_begin();
    let mut channel2 = txn2.open_or_create_channel("main")?;
    let mut repo2 = working_copy::memory::Memory::new();
    apply::apply_change(&store, &mut txn2, &mut channel2, h0)?;
    apply::apply_change(&store, &mut txn2, &mut channel2, h1)?;
    apply::apply_change(&store, &mut txn2, &mut channel2, h2)?;
    output::output_repository_no_pending(&mut repo2, &store, &mut txn2, &mut channel2, "", true)?;
    // Rename to "file4" in parallel with the move to "file3", then
    // apply the other side to create a name conflict.
    repo2.rename("file", "file4")?;
    txn2.move_file("file", "file4")?;
    let (_, _) = record_all(&mut repo2, &store, &mut txn2, &mut channel2, "")?;
    apply::apply_change(&store, &mut txn2, &mut channel2, h3)?;
    output::output_repository_no_pending(&mut repo2, &store, &mut txn2, &mut channel2, "", true)?;
    // The recorded resolution must also round-trip through text.
    let (h, solution) = record_all(&mut repo2, &store, &mut txn2, &mut channel2, "")?;
    text_test(&store, &solution, h);
    Ok(())
}
/// Render `change0` as text, parse the text back, and assert the
/// parsed change equals the original; the first differing section is
/// logged with `error!` to ease debugging before the final assertion.
fn text_test<C: ChangeStore>(c: &C, change0: &Change, h: Hash) {
    // Render the change to its text representation.
    let mut rendered = Vec::new();
    change0
        .write(
            c,
            Some(h),
            |l, _p| format!("{}:{}", l.path, l.line),
            true,
            &mut rendered,
        )
        .unwrap();
    for line in std::str::from_utf8(&rendered).unwrap().lines() {
        debug!("{}", line);
    }
    // Parse the text back into a change.
    let change1 = Change::read(std::io::Cursor::new(&rendered[..]), &mut HashMap::new()).unwrap();
    // Log whichever section differs before asserting equality.
    if change0.header != change1.header {
        error!("header: {:#?} != {:#?}", change0.header, change1.header);
    }
    if change0.dependencies != change1.dependencies {
        error!(
            "deps: {:#?} != {:#?}",
            change0.dependencies, change1.dependencies
        );
    }
    if change0.extra_known != change1.extra_known {
        error!(
            "extra: {:#?} != {:#?}",
            change0.extra_known, change1.extra_known
        );
    }
    if change0.metadata != change1.metadata {
        error!("meta: {:#?} != {:#?}", change0.metadata, change1.metadata);
    }
    if change0.changes != change1.changes {
        if change0.changes.len() == change1.changes.len() {
            // Same length: log the hunks pairwise.
            for (a, b) in change0.changes.iter().zip(change1.changes.iter()) {
                error!("change0: {:#?}", a);
                error!("change1: {:#?}", b);
            }
        } else {
            error!("change0.changes = {:#?}", change0.changes);
            error!("change1.changes = {:#?}", change1.changes);
        }
    }
    if change0.contents != change1.contents {
        error!("change0.contents = {:?}", change0.contents);
        error!("change1.contents = {:?}", change1.contents);
    }
    assert_eq!(change0, &change1);
}
use super::*;
/// Add a simple file and clone. Checks the working-copy iterator, the
/// graph children/basename iterators, double-add rejection, and that
/// the tracked state survives a commit.
#[test]
fn add_file_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    {
        let mut txn = env.mut_txn_begin();
        txn.add_file("dir/file").unwrap();
        let mut channel = txn.open_or_create_channel("main").unwrap();
        record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
        // Adding the inode another time.
        assert!(txn.add_file("dir/file").is_err());
        debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
        txn.commit().unwrap();
    }
    {
        // Re-open read-only after commit and inspect the result.
        let txn = env.txn_begin()?;
        let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect();
        assert_eq!(files, vec!["dir", "dir/file"]);
        let channel_ = txn.load_channel("main").unwrap().unwrap();
        let channel = channel_.borrow();
        // The root's only graph child is the directory "dir".
        let mut it =
            crate::fs::iter_graph_children(&txn, &changes, &channel.graph, Position::ROOT).unwrap();
        let (key, meta, file) = it.next().unwrap().unwrap();
        assert!(meta.is_dir());
        assert_eq!(file, "dir");
        assert!(it.next().is_none());
        let mut it = crate::fs::iter_graph_children(&txn, &changes, &channel.graph, key).unwrap();
        let (file_key, _, _) = it.next().unwrap().unwrap();
        crate::fs::iter_paths(&txn, &channel_.borrow().graph, file_key, |path| {
            debug!("begin path");
            for path in path {
                debug!("path = {:?}", path);
            }
            debug!("end path");
            true
        })
        .unwrap();
        debug_to_file(&txn, &channel_.borrow(), "debug2").unwrap();
        // "dir" has exactly one basename, hanging off the root.
        let mut it = crate::fs::iter_basenames(&txn, &changes, &channel.graph, key).unwrap();
        let (key, _, name) = it.next().unwrap().unwrap();
        assert_eq!(key, Position::ROOT);
        assert_eq!(name, "dir");
        assert!(it.next().is_none());
        assert!(txn.is_tracked("dir/file").unwrap());
    }
    Ok(())
}
/// Adding a path with `add_dir` must track every component of the
/// path in the working-copy tree, with the expected parent/child
/// relationships.
#[test]
fn add_dir_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_dir("dir/file")?;
    // Both the directory and the file inside it are now tracked.
    assert!(txn.is_tracked("dir").unwrap());
    assert!(txn.is_tracked("dir/file").unwrap());
    // The root has a single child: the directory "dir".
    let (root_child, dir_inode) = crate::fs::working_copy_children(&txn, Inode::ROOT)
        .unwrap()
        .next()
        .unwrap()
        .unwrap();
    assert_eq!(root_child.as_str(), "dir");
    assert!(txn.is_directory(dir_inode).unwrap());
    debug!("name = {:?}", dir_inode);
    debug_tree(&txn, "debug_tree")?;
    // "dir" itself contains exactly one entry: "file".
    let mut children = crate::fs::working_copy_children(&txn, dir_inode).unwrap();
    let (child_name, _) = children.next().unwrap().unwrap();
    assert_eq!(child_name.as_str(), "file");
    assert!(children.next().is_none());
    Ok(())
}
/// Test that we can delete a file: after removing `dir` (recorded or
/// not), the working-copy iterator must be empty, including after a
/// commit and re-open.
#[test]
fn del_file_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    {
        let mut txn = env.mut_txn_begin();
        txn.add_file("dir/file")?;
        let mut channel = txn.open_or_create_channel("main")?;
        repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
        record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
        debug_to_file(&txn, &channel.borrow(), "debug0").unwrap();
        let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|f| f.unwrap().1)
            .collect();
        assert_eq!(files, vec!["dir", "dir/file"]);
        // Remove the file on disk and the whole directory in the tree.
        repo.remove_path("dir/file")?;
        txn.remove_file("dir")?;
        let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect();
        debug!("files = {:?}", files);
        assert!(files.is_empty());
        record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
        debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
        let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect();
        debug!("files = {:?}", files);
        assert!(files.is_empty());
        // Test deletions without recording.
        txn.add_file("dir2/file")?;
        txn.remove_file("dir2")?;
        assert!(crate::fs::iter_working_copy(&txn, Inode::ROOT).all(|f| f.unwrap().1 != "dir2"));
        // Removing an already-removed path is an error.
        assert!(txn.remove_file("dir2").is_err());
        txn.commit()?;
    }
    {
        // The deletion must persist across the commit.
        let txn = env.txn_begin()?;
        let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|n| n.unwrap().1)
            .collect();
        debug!("files = {:?}", files);
        assert!(files.is_empty());
    }
    Ok(())
}
/// Test that `record` notices the deletion of a file removed from the
/// working copy only (not via `remove_file` on the transaction).
#[test]
fn del_obsolete_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("a/b/c/d/e")?;
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("a/b/c/d/e", b"a\nb\nc\nd\ne\nf\n".to_vec());
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug0").unwrap();
    let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
        .map(|f| f.unwrap().1)
        .collect();
    assert_eq!(files, vec!["a", "a/b", "a/b/c", "a/b/c/d", "a/b/c/d/e"]);
    // Delete a/b/c on disk only; recording must pick it up.
    repo.remove_path("a/b/c")?;
    debug!("Recording the deletion");
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
    // Outputting to a fresh working copy reflects the deletion.
    let mut repo2 = working_copy::memory::Memory::new();
    output::output_repository_no_pending(&mut repo2, &changes, &mut txn, &mut channel, "", true)?;
    assert_eq!(repo2.list_files(), vec!["a", "a/b"]);
    let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
        .map(|n| n.unwrap().1)
        .collect();
    debug!("files = {:?}", files);
    assert_eq!(files, vec!["a", "a/b"]);
    Ok(())
}
/// Deleting the end of a file: record it, truncate the trailing
/// lines, record again, and check the working copy holds the
/// truncated contents.
#[test]
fn del_eof_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let mut repo = working_copy::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
    // Truncate the file: drop the trailing "d\ne\nf\n".
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nb\nc\n")?;
        Ok(())
    })?;
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    let mut current = Vec::new();
    repo.read_file("dir/file", &mut current).unwrap();
    assert_eq!(std::str::from_utf8(&current), Ok("a\nb\nc\n"));
    debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
    txn.commit()?;
    Ok(())
}
/// Just delete a few lines of a file, in several successive records,
/// and check the final working-copy contents.
#[test]
fn del_nonzombie_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Delete "d".
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nb\nc\ne\nf\n")?;
        Ok(())
    })?;
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Delete "e".
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nb\nc\nf\n")?;
        Ok(())
    })?;
    debug_to_file(&txn, &channel.borrow(), "debug0").unwrap();
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug_to_file(&txn, &channel.borrow(), "debug1").unwrap();
    // Delete "f".
    repo.write_file::<_, std::io::Error, _>("dir/file", |w| {
        w.write_all(b"a\nb\nc\n")?;
        Ok(())
    })?;
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    let mut file = Vec::new();
    repo.read_file("dir/file", &mut file).unwrap();
    assert_eq!(std::str::from_utf8(&file), Ok("a\nb\nc\n"));
    debug_to_file(&txn, &channel.borrow(), "debug2").unwrap();
    txn.commit()?;
    Ok(())
}
/// Are permissions properly recorded? Alice records 0o644, then
/// chmods to 0o755 and records again; Bob must see 0o644 after the
/// first change and 0o755 after the second.
#[test]
fn permissions_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    txn_alice.add_file("file")?;
    let mut channel = txn_alice.open_or_create_channel("main")?;
    let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug0").unwrap();
    // Alice makes the file executable and records that.
    repo_alice.set_permissions("file", 0o755)?;
    let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug1").unwrap();
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel = txn_bob.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel, alice0)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel.borrow(), "debug_bob1").unwrap();
    // After the first change, Bob's copy is still 0o644.
    let bob_perm = repo_bob.file_metadata("file")?;
    assert_eq!(bob_perm.0, 0o644);
    apply::apply_change(&changes, &mut txn_bob, &mut channel, alice1)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel,
        "",
        true,
    )?;
    debug_to_file(&txn_bob, &channel.borrow(), "debug_bob2").unwrap();
    // After the permission change, both sides agree on 0o755.
    let bob_perm = repo_bob.file_metadata("file")?;
    assert_eq!(bob_perm.0, 0o755);
    let alice_perm = repo_alice.file_metadata("file")?;
    assert_eq!(alice_perm.0, 0o755);
    Ok(())
}
/// Move a file to a directory, then delete the file and clone the
/// whole thing, checking Bob's file list after each of the three
/// changes.
#[test]
fn move_file_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    txn_alice.add_file("file")?;
    let mut channel = txn_alice.open_or_create_channel("main")?;
    let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug!("alice0 = {:?}", alice0);
    debug_to_file(&txn_alice, &channel.borrow(), "debug0").unwrap();
    // Alice moves file -> dir/file2, in the pristine and on disk.
    txn_alice.add_dir("dir")?;
    txn_alice.move_file("file", "dir/file2")?;
    repo_alice.add_dir("dir");
    repo_alice.rename("file", "dir/file2")?;
    debug_tree(&txn_alice, "debug_tree")?;
    let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug!("alice1 = {:?}", alice1);
    debug_to_file(&txn_alice, &channel.borrow(), "debug1").unwrap();
    debug_tree(&txn_alice, "debug_tree")?;
    debug_inodes(&txn_alice);
    debug!("{:?}", repo_alice);
    // Alice deletes the moved file.
    repo_alice.remove_path("dir/file2")?;
    debug!("{:?}", repo_alice);
    let alice2 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug2").unwrap();
    txn_alice.commit()?;
    // Bob clones, applying the three changes one by one and checking
    // his working copy after each.
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel = txn_bob.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel, alice0)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel,
        "",
        true,
    )?;
    assert_eq!(repo_bob.list_files(), &["file"]);
    apply::apply_change(&changes, &mut txn_bob, &mut channel, alice1)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel,
        "",
        true,
    )?;
    let mut files = repo_bob.list_files();
    files.sort();
    assert_eq!(files, &["dir", "dir/file2"]);
    apply::apply_change(&changes, &mut txn_bob, &mut channel, alice2)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel,
        "",
        true,
    )?;
    assert_eq!(repo_bob.list_files(), &["dir"]);
    Ok(())
}
/// Overwrite a file with a move: renaming `file` onto the existing
/// `file2` leaves only `file2` in the working copy.
#[test]
fn move_file_existing_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let changes = changestore::memory::Memory::new();
    let mut repo_alice = working_copy::memory::Memory::new();
    let initial = b"a\nb\nc\nd\ne\nf\n".to_vec();
    repo_alice.add_file("file", initial.clone());
    repo_alice.add_file("file2", initial);
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    txn_alice.add_file("file")?;
    txn_alice.add_file("file2")?;
    let mut channel = txn_alice.open_or_create_channel("main")?;
    record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    // Move "file" over "file2", both in the pristine and on disk.
    txn_alice.move_file("file", "file2")?;
    repo_alice.rename("file", "file2")?;
    record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug1").unwrap();
    let mut files = repo_alice.list_files();
    files.sort();
    assert_eq!(files, &["file2"]);
    Ok(())
}
/// Variant of `move_back_test_` where Bob resolves the conflict by
/// deleting the file.
#[test]
fn move_back_delete_test() -> Result<(), anyhow::Error> {
move_back_test_(true)
}
/// Variant of `move_back_test_` where Bob resolves the conflict by
/// keeping the file alive.
#[test]
fn move_back_test() -> Result<(), anyhow::Error> {
move_back_test_(false)
}
/// Shared body for `move_back_test` / `move_back_delete_test`.
///
/// Alice records `a`, moves it to `b`, then moves it back to `a`
/// (three changes). In parallel, Bob deletes `a` after the first
/// change only. Bob then applies Alice's move and move-back, producing
/// a zombie conflict, and records a resolution: either the deletion
/// (`resolve_by_deleting == true`) or keeping the file. Finally Alice
/// applies Bob's changes and both sides are checked for convergence.
fn move_back_test_(resolve_by_deleting: bool) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a", b"a\nb\nc\nd\ne\nf\n".to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    txn_alice.add_file("a")?;
    let mut channel = txn_alice.open_or_create_channel("main")?;
    let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    // Alice moves a -> b
    txn_alice.move_file("a", "b")?;
    repo_alice.rename("a", "b")?;
    let alice2 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug1").unwrap();
    // Alice moves b back -> a
    txn_alice.move_file("b", "a")?;
    repo_alice.rename("b", "a")?;
    let alice3 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
    debug_to_file(&txn_alice, &channel.borrow(), "debug2").unwrap();
    // Bob deletes in parallel to the move + moveback
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main")?;
    txn_bob
        .apply_change(&changes, &mut channel_bob, alice1)
        .unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    repo_bob.remove_path("a")?;
    let bob1 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob1").unwrap();
    txn_bob
        .apply_change(&changes, &mut channel_bob, alice2)
        .unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob2").unwrap();
    // BUGFIX: this output (and the one after alice3) previously passed
    // Alice's `channel` together with `txn_bob`; Bob's working copy
    // must be output from Bob's own channel.
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("APPLYING {:?}", alice3);
    txn_bob
        .apply_change(&changes, &mut channel_bob, alice3)
        .unwrap();
    debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob3").unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if resolve_by_deleting {
        debug!("Bob records a solution");
        // Bob's working copy no longer has `a`, so this records the
        // deletion as the resolution.
        let bob2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
        debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob4").unwrap();
        // Alice applies Bob's patch.
        txn_alice
            .apply_change(&changes, &mut channel, bob1)
            .unwrap();
        debug_to_file(&txn_alice, &channel.borrow(), "debug_alice2").unwrap();
        let conflicts = output::output_repository_no_pending(
            &mut repo_alice,
            &changes,
            &mut txn_alice,
            &mut channel,
            "",
            true,
        )?;
        debug!("conflicts = {:?}", conflicts);
        // Before the resolution, Alice must see the zombie conflict.
        assert!(!conflicts.is_empty());
        // Alice applies Bob's resolution
        txn_alice
            .apply_change(&changes, &mut channel, bob2)
            .unwrap();
        debug_to_file(&txn_alice, &channel.borrow(), "debug_alice3").unwrap();
        let conflicts = output::output_repository_no_pending(
            &mut repo_alice,
            &changes,
            &mut txn_alice,
            &mut channel,
            "",
            true,
        )?;
        debug!("conflicts = {:?}", conflicts);
        assert!(conflicts.is_empty());
        // Testing Bob's tree by outputting
        let conflicts = output::output_repository_no_pending(
            &mut repo_bob,
            &changes,
            &mut txn_bob,
            &mut channel_bob,
            "",
            true,
        )?;
        // BUGFIX: this dump previously reused the name "debug_bob4",
        // overwriting the dump written above.
        debug_to_file(&txn_bob, &channel_bob.borrow(), "debug_bob5").unwrap();
        debug!("conflicts = {:?}", conflicts);
        assert!(conflicts.is_empty());
    } else {
        output::output_repository_no_pending(
            &mut repo_bob,
            &changes,
            &mut txn_bob,
            &mut channel_bob,
            "",
            true,
        )?;
        debug!("Bob records a solution");
        // Bob's working copy still has `a` (the zombie was output), so
        // this records keeping the file alive.
        let bob2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
        // Alice applies Bob's patch.
        txn_alice
            .apply_change(&changes, &mut channel, bob1)
            .unwrap();
        debug_to_file(&txn_alice, &channel.borrow(), "debug_alice2").unwrap();
        let conflicts = output::output_repository_no_pending(
            &mut repo_alice,
            &changes,
            &mut txn_alice,
            &mut channel,
            "",
            true,
        )?;
        debug!("conflicts = {:?}", conflicts);
        // Exactly one zombie-file conflict on `a` is expected here.
        assert_eq!(conflicts.len(), 1);
        match conflicts[0] {
            Conflict::ZombieFile { ref path } => assert_eq!(path, "a"),
            ref c => panic!("unexpected conflict {:#?}", c),
        }
        // Alice applies Bob's resolution
        txn_alice
            .apply_change(&changes, &mut channel, bob2)
            .unwrap();
        let conflicts = output::output_repository_no_pending(
            &mut repo_alice,
            &changes,
            &mut txn_alice,
            &mut channel,
            "",
            true,
        )?;
        debug!("conflicts = {:?}", conflicts);
        assert!(conflicts.is_empty());
        // Testing Bob's tree by outputting
        let conflicts = output::output_repository_no_pending(
            &mut repo_bob,
            &changes,
            &mut txn_bob,
            &mut channel_bob,
            "",
            true,
        )?;
        debug!("conflicts = {:?}", conflicts);
        assert!(conflicts.is_empty());
    }
    Ok(())
}
// Move a file into a directory, and delete the former parent in the same change.
//
// Phase 1: record dir/file and dir/file2. Phase 2: move both into a
// new dir2 and delete dir. Phase 3: move them back into dir, delete
// dir2, recording the two prefixes separately into a single change,
// then check that the graph has no dangling (alive/unreachable)
// vertices and that the working copy converged.
#[test]
fn move_delete_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
repo_alice.add_file("dir/file2", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("dir/file")?;
txn_alice.add_file("dir/file2")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice0 = {:?}", alice0);
debug_to_file(&txn_alice, &channel.borrow(), "debug0").unwrap();
// Move dir/{file,file2} -> dir2/{file,file2} and delete dir.
repo_alice.add_dir("dir2");
repo_alice.rename("dir/file", "dir2/file")?;
repo_alice.rename("dir/file2", "dir2/file2")?;
repo_alice.remove_path("dir")?;
txn_alice.move_file("dir/file", "dir2/file")?;
txn_alice.move_file("dir/file2", "dir2/file2")?;
let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice1 = {:?}", alice1);
debug_to_file(&txn_alice, &channel.borrow(), "debug1").unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
debug_to_file(&txn_alice, &channel.borrow(), "debug2").unwrap();
// Move everything back into dir; `unwrap_or(())` ignores failures for
// paths that may not exist after the previous output.
repo_alice.rename("dir2/file", "dir/file").unwrap_or(());
repo_alice.rename("dir2/file2", "dir/file2").unwrap_or(());
txn_alice.move_file("dir2/file", "dir/file").unwrap_or(());
txn_alice.move_file("dir2/file2", "dir/file2").unwrap_or(());
repo_alice.remove_path("dir2")?;
// Record both prefixes into one builder, so a single change contains
// the moves under "dir" and the deletion under "dir2".
let mut state = Builder::new();
debug!("recording in dir");
state.record(
&mut txn_alice,
Algorithm::default(),
&mut channel.borrow_mut(),
&mut repo_alice,
&changes,
"dir",
)?;
debug!("recording in dir2");
state.record(
&mut txn_alice,
Algorithm::default(),
&mut channel.borrow_mut(),
&mut repo_alice,
&changes,
"dir2",
)?;
let rec = state.finish();
let changes_ = rec
.actions
.into_iter()
.map(|rec| rec.globalize(&txn_alice).unwrap())
.collect();
let alice2 = crate::change::Change::make_change(
&txn_alice,
&channel,
changes_,
rec.contents,
crate::change::ChangeHeader {
message: "test".to_string(),
authors: vec![],
description: None,
timestamp: Utc::now(),
},
Vec::new(),
)
.unwrap();
let h_alice2 = changes.save_change(&alice2)?;
apply::apply_local_change(
&mut txn_alice,
&mut channel,
&alice2,
h_alice2,
&rec.updatables,
)?;
debug!("done {:?}", h_alice2);
debug_to_file(&txn_alice, &channel.borrow(), "debug3").unwrap();
// Sanity check: no vertex may be left alive-but-unreachable (or
// reachable-but-dead) in the channel graph after the change.
let (alive, reachable) = check_alive(&txn_alice, &channel.borrow().graph);
if !alive.is_empty() {
panic!("alive: {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable: {:?}", reachable);
}
let mut files = repo_alice.list_files();
files.sort();
assert_eq!(files, &["dir", "dir/file", "dir/file2"]);
Ok(())
}
/// A path that was a file is deleted and replaced by a directory of
/// the same name (`filedir` becomes `filedir/file`); recording both
/// states must not fail.
#[test]
fn file_becomes_dir_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("filedir", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
txn.add_file("filedir").unwrap();
let mut channel = txn.open_or_create_channel("main").unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
// Replace the file with a directory containing a file of the same name.
repo.remove_path("filedir").unwrap();
repo.add_file("filedir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
txn.add_file("filedir/file").unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
Ok(())
}
/// Recording a tracked path (`dir/file`) that does not exist in the
/// working copy: the record must drop it, leaving the working-copy
/// iteration empty.
#[test]
fn record_deleted_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
// Track a path that was never created in `repo`.
txn.add_file("dir/file")?;
let mut channel = txn.open_or_create_channel("main")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|n| n.unwrap().1)
.collect();
// The missing path must not survive the record.
assert!(files.is_empty());
}
Ok(())
}
/// Recording an empty repository with an empty prefix must succeed and
/// leave the working-copy iteration empty.
#[test]
fn record_prefix() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
debug_to_file(&txn, &channel.borrow(), "debug").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|n| n.unwrap().1)
.collect();
assert!(files.is_empty());
}
Ok(())
}
/// Recording with a prefix ("dir") that is not tracked in the
/// repository must return an error (`RecordError::PathNotInRepo`).
#[test]
fn record_not_in_repo() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
assert!(record_all_output(&mut repo, &changes, &mut txn, &mut channel, "dir").is_err());
Ok(())
}
#[test]
fn record_not_modified() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
repo.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
txn.add_file("file")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
std::thread::sleep(std::time::Duration::from_secs(1));
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
Ok(())
}
/// Maximum number of bytes a `SmallString` can hold (the length is
/// stored in a single `u8`).
pub const MAX_LENGTH: usize = 255;
/// A string of length at most 255, with a more compact on-disk
/// encoding.
///
/// Layout invariant: because of `#[repr(packed)]`, the `len` byte is
/// immediately followed by the `str` bytes, so a pointer to the struct
/// is also a pointer to a length-prefixed byte string. `SmallStr` and
/// the sanakirja on-page encoding both rely on this exact layout.
#[repr(packed)]
pub struct SmallString {
pub len: u8,
pub str: [u8; MAX_LENGTH],
}
/// A borrowed version of `SmallStr`.
///
/// `p` points at a length byte followed by that many string bytes
/// (the `#[repr(packed)]` layout of `SmallString`); `marker` ties the
/// pointer's validity to the lifetime `'a`.
#[derive(Clone, Copy)]
pub struct SmallStr<'a> {
pub p: *const u8,
pub marker: std::marker::PhantomData<&'a ()>,
}
impl Clone for SmallString {
fn clone(&self) -> Self {
Self::from_str(self.as_str())
}
}
impl std::fmt::Debug for SmallString {
    /// Debug-format like the borrowed view (i.e. like `&str`'s Debug).
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{:?}", self.as_small_str())
    }
}
impl<'a> PartialEq for SmallStr<'a> {
    /// Two `SmallStr`s are equal iff their string contents are equal.
    fn eq(&self, x: &SmallStr) -> bool {
        self.as_str() == x.as_str()
    }
}
/// Equality is content-based, both on owned `SmallString`s and on
/// their borrowed `SmallStr` views (including reflexivity).
#[test]
fn eq() {
let s0 = SmallString::from_str("blabla");
let s1 = SmallString::from_str("blabla");
assert_eq!(s0, s1);
assert_eq!(s0, s1);
assert_eq!(s0.as_small_str(), s1.as_small_str());
assert_eq!(s0.as_small_str(), s0.as_small_str());
assert_eq!(s1.as_small_str(), s1.as_small_str());
}
/// Debug output must quote the contents like `&str`'s Debug does, for
/// both the owned and the borrowed form.
#[test]
fn debug() {
let s = SmallString::from_str("blabla");
assert_eq!(format!("{:?}", s), "\"blabla\"");
assert_eq!(format!("{:?}", s.as_small_str()), "\"blabla\"");
}
impl<'a> Eq for SmallStr<'a> {}
impl PartialEq for SmallString {
    /// Content-based equality, same contract as `SmallStr`'s.
    fn eq(&self, x: &SmallString) -> bool {
        self.as_str() == x.as_str()
    }
}
impl Eq for SmallString {}
/*
impl<'a> std::hash::Hash for SmallStr<'a> {
fn hash<H: std::hash::Hasher>(&self, x: &mut H) {
self.as_str().hash(x)
}
}
*/
impl std::hash::Hash for SmallString {
    /// Hash the string contents, consistently with `PartialEq`
    /// (equal strings hash equally).
    fn hash<H: std::hash::Hasher>(&self, x: &mut H) {
        self.as_small_str().as_str().hash(x)
    }
}
impl<'a> PartialOrd for SmallStr<'a> {
    /// Canonical delegation to [`Ord`]: since `SmallStr` has a total
    /// order, `partial_cmp` must agree with `cmp` (clippy's
    /// `non_canonical_partial_ord_impl`); behavior is unchanged
    /// because `cmp` also compares via `as_str`.
    fn partial_cmp(&self, x: &SmallStr) -> Option<std::cmp::Ordering> {
        Some(self.cmp(x))
    }
}
impl<'a> Ord for SmallStr<'a> {
    /// Lexicographic byte order of the string contents.
    fn cmp(&self, x: &SmallStr) -> std::cmp::Ordering {
        Ord::cmp(self.as_str(), x.as_str())
    }
}
impl PartialOrd for SmallString {
    /// Canonical delegation to [`Ord`]: `SmallString` is totally
    /// ordered, so `partial_cmp` must agree with `cmp` (clippy's
    /// `non_canonical_partial_ord_impl`); behavior is unchanged
    /// because `cmp` also compares via `as_str`.
    fn partial_cmp(&self, x: &SmallString) -> Option<std::cmp::Ordering> {
        Some(self.cmp(x))
    }
}
impl Ord for SmallString {
    /// Lexicographic order; delegates to the borrowed view so the
    /// owned and borrowed orders always agree.
    fn cmp(&self, x: &SmallString) -> std::cmp::Ordering {
        self.as_small_str().cmp(&x.as_small_str())
    }
}
/// Ordering is lexicographic and agrees between the owned and the
/// borrowed forms.
#[test]
fn ord() {
let s0 = SmallString::from_str("1234");
let s1 = SmallString::from_str("5678");
assert!(s0.as_small_str() < s1.as_small_str());
assert!(s0 < s1);
assert_eq!(s0.cmp(&s1), std::cmp::Ordering::Less);
}
impl<'a> std::fmt::Debug for SmallStr<'a> {
    /// Debug-format the contents exactly like `&str`'s Debug (quoted).
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{:?}", self.as_str())
    }
}
impl Default for SmallString {
fn default() -> Self {
Self {
len: 0,
str: [0; MAX_LENGTH],
}
}
}
impl SmallString {
/// Creates an empty `SmallString`.
pub fn new() -> Self {
Self::default()
}
/// Length in bytes of the stored string.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah!");
/// assert_eq!(s.len(), s.as_str().len());
/// ```
pub fn len(&self) -> usize {
self.len as usize
}
/// `true` iff the string is empty.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah");
/// s.clear();
/// assert_eq!(s.as_str(), "");
/// assert!(s.is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Builds a `SmallString` from `s`.
///
/// NOTE(review): `s.len() as u8` silently truncates for inputs longer
/// than 255 bytes, and `clone_from_str` then panics on the slice
/// index — callers are expected to pass at most `MAX_LENGTH` bytes.
pub fn from_str(s: &str) -> Self {
let mut b = SmallString {
len: s.len() as u8,
str: [0; MAX_LENGTH],
};
b.clone_from_str(s);
b
}
/// Overwrites `self` with the contents of `s` (same length caveat as
/// `from_str`). Bytes past the new length are left as they were.
pub fn clone_from_str(&mut self, s: &str) {
self.len = s.len() as u8;
(&mut self.str[..s.len()]).copy_from_slice(s.as_bytes());
}
/// Truncates to the empty string; the buffer is not zeroed, only the
/// length byte is reset.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah");
/// s.clear();
/// assert!(s.is_empty());
/// ```
pub fn clear(&mut self) {
self.len = 0;
}
/// Appends `s`; panics (assert) if the result would exceed 255 bytes.
pub fn push_str(&mut self, s: &str) {
let l = self.len as usize;
assert!(l + s.len() <= 0xff);
(&mut self.str[l..l + s.len()]).copy_from_slice(s.as_bytes());
self.len += s.len() as u8;
}
/// Borrowed view of `self`.
///
/// Relies on the `#[repr(packed)]` layout: a pointer to `self` points
/// at the `len` byte, immediately followed by the string bytes, which
/// is exactly the format `SmallStr` reads.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah");
/// let s_ = s.as_small_str();
/// let s2_ = s_;
/// let s3_ = s_.clone();
/// assert_eq!(s_, s2_);
/// assert_eq!(s_, s3_);
/// ```
pub fn as_small_str(&self) -> SmallStr {
SmallStr {
p: self as *const SmallString as *const u8,
marker: std::marker::PhantomData,
}
}
/// The contents as `&str` (assumed valid UTF-8, see `SmallStr::as_str`).
pub fn as_str(&self) -> &str {
self.as_small_str().as_str()
}
/// The contents as raw bytes (without the length byte).
pub fn as_bytes(&self) -> &[u8] {
self.as_small_str().as_bytes()
}
}
impl SmallStr<'static> {
/// The empty `SmallStr`: points at a single zero byte, i.e. a length
/// byte of 0 followed by no content (the `[0]` literal is promoted to
/// a `'static` allocation in this const context).
pub const EMPTY: SmallStr<'static> = SmallStr {
p: [0].as_ptr(),
marker: std::marker::PhantomData,
};
}
impl<'a> SmallStr<'a> {
/// `true` iff the length byte is 0.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("");
/// assert!(s.as_small_str().is_empty());
/// s.push_str("blah");
/// assert!(!s.as_small_str().is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Length in bytes, read from the length byte at `p`.
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah");
/// assert_eq!(s.as_small_str().len(), "blah".len())
/// ```
pub fn len(&self) -> usize {
// SAFETY (by construction): `p` points at a valid length-prefixed
// buffer for the lifetime 'a.
unsafe { (*self.p) as usize }
}
/// The contents as `&str`. No UTF-8 check is performed here; the
/// stored bytes are assumed to have come from a valid `&str`.
pub fn as_str(&self) -> &'a str {
unsafe { std::str::from_utf8_unchecked(self.as_bytes()) }
}
/// The content bytes: `len` bytes starting right after the length byte.
pub fn as_bytes(&self) -> &'a [u8] {
unsafe { std::slice::from_raw_parts(self.p.offset(1), *self.p as usize) }
}
/// Copies into an owned `SmallString`.
pub fn to_owned(&self) -> SmallString {
SmallString::from_str(self.as_str())
}
}
/// Faster than running doc tests.
///
/// Mirrors each `ignore`d doc example above as a plain unit test, one
/// scope per example.
#[test]
fn all_doc_tests() {
{
let s = SmallString::from_str("blah!");
assert_eq!(s.len(), s.as_str().len());
}
{
let mut s = SmallString::from_str("blah");
s.clear();
assert_eq!(s.as_str(), "");
assert!(s.is_empty());
}
{
let mut s = SmallString::from_str("blah");
s.clear();
assert!(s.is_empty());
}
{
let s = SmallString::from_str("blah");
let s_ = s.as_small_str();
let s2_ = s_;
let s3_ = s_.clone();
assert_eq!(s_, s2_);
assert_eq!(s_, s3_);
}
{
let mut s = SmallString::from_str("");
assert!(s.as_small_str().is_empty());
s.push_str("blah");
assert!(!s.as_small_str().is_empty());
}
{
let s = SmallString::from_str("blah");
assert_eq!(s.as_small_str().len(), "blah".len())
}
}
/// An internal "unsafe" version of a [`small_string::SmallStr`], used
/// to circumvent the absence of associated type constructors in Rust
/// (else this would be a borrow on a table).
///
/// Carries no lifetime: the raw pointer's validity must be guaranteed
/// by the caller (see `to_small_str`).
#[derive(Clone, Copy)]
#[doc(hidden)]
pub struct UnsafeSmallStr(*const u8);
impl std::fmt::Debug for UnsafeSmallStr {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
// SAFETY assumption: the pointer is valid whenever a Debug dump is
// requested (same contract as every other use of `to_small_str`).
unsafe { self.to_small_str().fmt(fmt) }
}
}
impl UnsafeSmallStr {
/// Erases the lifetime of `u`, keeping only the raw pointer.
pub fn from_small_str(u: SmallStr) -> UnsafeSmallStr {
UnsafeSmallStr(u.p)
}
/// Reattaches an arbitrary lifetime to the raw pointer.
///
/// # Safety
/// The pointer must still point at a valid length-prefixed buffer for
/// the whole of `'a`; the caller chooses `'a` and is responsible for it.
pub unsafe fn to_small_str<'a>(&self) -> SmallStr<'a> {
SmallStr {
p: self.0,
marker: std::marker::PhantomData,
}
}
}
// On-page encoding for sanakirja: one length byte followed by `len`
// content bytes, i.e. exactly the in-memory `#[repr(packed)]` layout
// of `SmallString`, so values can be copied byte-for-byte.
impl sanakirja::Representable for UnsafeSmallStr {
fn alignment() -> sanakirja::Alignment {
sanakirja::Alignment::B1
}
// Total on-page size: the length byte plus the content bytes.
fn onpage_size(&self) -> u16 {
unsafe {
let len = (*self.0) as u16;
1 + len
}
}
// Copy the full length-prefixed buffer onto the page.
unsafe fn write_value(&self, p: *mut u8) {
std::ptr::copy(self.0, p, self.onpage_size() as usize)
}
// Reading is zero-copy: the value borrows the page memory directly.
unsafe fn read_value(p: *const u8) -> Self {
UnsafeSmallStr(p)
}
// Order must match `SmallStr`'s `Ord` (string comparison), so keys
// sort identically in memory and on disk.
unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
let a = UnsafeSmallStr(self.0).to_small_str();
let b = x.to_small_str();
a.as_str().cmp(b.as_str())
}
type PageOffsets = std::iter::Empty<u64>;
// Strings own no child pages.
fn page_offsets(&self) -> Self::PageOffsets {
std::iter::empty()
}
}
//! Hunk a change from a pristine and a working copy.
use crate::alive::retrieve;
use crate::change::*;
use crate::changestore::ChangeStore;
use crate::diff;
pub use crate::diff::Algorithm;
use crate::path::{components, Components};
use crate::pristine::*;
use crate::small_string::SmallString;
use crate::working_copy::WorkingCopy;
use std::collections::{HashMap, HashSet};
/// Errors that can occur while recording a change.
///
/// Generic over the three fallible backends involved: the change
/// store (`C`), the working copy (`W`) and the transaction (`T`).
#[derive(Debug, Error)]
pub enum RecordError<
C: std::error::Error + 'static,
W: std::error::Error,
T: std::error::Error + 'static,
> {
#[error("Changestore error: {0}")]
Changestore(C),
#[error("Working copy error: {0}")]
WorkingCopy(W),
#[error("System time error: {0}")]
SystemTimeError(#[from] std::time::SystemTimeError),
#[error(transparent)]
Txn(T),
#[error(transparent)]
Diff(#[from] diff::DiffError<C, T>),
#[error("Path not in repository: {0}")]
PathNotInRepo(String),
#[error(transparent)]
Io(#[from] std::io::Error),
}
impl<
C: std::error::Error + 'static,
W: std::error::Error + 'static,
T: std::error::Error + 'static,
> std::convert::From<TxnErr<T>> for RecordError<C, W, T>
{
fn from(e: TxnErr<T>) -> Self {
RecordError::Txn(e.0)
}
}
/// A change in the process of being recorded. This is typically
/// created using `Builder::new`.
pub struct Builder {
// The change being accumulated (actions, contents, metadata).
pub(crate) rec: Recorded,
// Edges found redundant during traversal, for later cleanup.
pub(crate) redundant: Vec<(Vertex<ChangeId>, Edge)>,
// Inodes already visited in this record, with their graph position,
// so each inode is processed at most once.
recorded_inodes: HashMap<Inode, Position<Option<ChangeId>>>,
// Positions already recorded as deleted in this change.
deleted_vertices: HashSet<Position<ChangeId>>,
// Scratch buffer: former parent(s) of the file currently processed.
former_parents: Vec<Parent>,
// When set, diff files even if their mtime suggests no modification.
force_rediff: bool,
}
/// A former parent of a recorded file: its name, metadata and the
/// position of the parent directory in the graph (used to detect
/// moves and metadata changes).
#[derive(Debug)]
struct Parent {
basename: String,
metadata: InodeMetadata,
parent: Position<Option<ChangeId>>,
}
/// The result of recording a change:
pub struct Recorded {
/// The "byte contents" of the change.
pub contents: Vec<u8>,
/// The current records, to be later converted into change operations.
pub actions: Vec<Hunk<Option<ChangeId>, Local>>,
/// The updates that need to be made to the ~tree~ and ~revtree~
/// tables when this change is applied to the local repository.
pub updatables: HashMap<usize, InodeUpdate>,
/// The size of the largest file that was recorded in this change.
pub largest_file: u64,
/// Whether we have recorded binary files.
pub has_binary_files: bool,
/// Timestamp of the oldest changed file. If nothing changed,
/// returns now().
pub oldest_change: std::time::SystemTime,
}
impl Default for Builder {
/// An empty builder: no actions, no contents, and `oldest_change`
/// initialised to now (it only ever moves backwards via `min`).
fn default() -> Self {
Self {
rec: Recorded {
contents: Vec::new(),
actions: Vec::new(),
updatables: HashMap::new(),
largest_file: 0,
has_binary_files: false,
oldest_change: std::time::SystemTime::now(),
},
redundant: Vec::new(),
recorded_inodes: HashMap::new(),
deleted_vertices: HashSet::new(),
former_parents: Vec::new(),
force_rediff: false,
}
}
}
impl Builder {
/// Initialise a `Builder`.
pub fn new() -> Self {
Self::default()
}
/// Finish the recording, consuming the builder and returning the
/// accumulated `Recorded` change.
pub fn finish(self) -> Recorded {
self.rec
}
}
/// An account of the files that have been added, moved or deleted, as
/// returned by record, and used by apply (when applying a change
/// created locally) to update the trees and inodes databases.
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum InodeUpdate {
/// A file addition: binds a working-copy `Inode` to the position of
/// its new vertex inside the change.
Add {
/// Inode vertex in the graph.
pos: ChangePosition,
/// `Inode` added by this file addition.
inode: Inode,
},
/// A file deletion.
Deleted {
/// `Inode` of the deleted file.
inode: Inode,
},
}
/// One entry of the depth-first traversal stack used by
/// `Builder::record`: a path in the working copy, its inode, its
/// parent, and the remaining prefix components to match.
#[derive(Debug, Clone)]
struct RecordItem<'a> {
// Graph position of the parent directory.
v_papa: Position<Option<ChangeId>>,
// Inode of the parent directory.
papa: Inode,
inode: Inode,
basename: String,
full_path: String,
metadata: InodeMetadata,
// Remaining components of the record prefix to descend into.
components: Components<'a>,
}
impl<'a> RecordItem<'a> {
    /// Build the root item for a traversal restricted to `prefix`:
    /// the repository root, with the prefix split into components.
    fn root(prefix: &'a str) -> Self {
        Self {
            v_papa: Position::OPTION_ROOT,
            papa: Inode::ROOT,
            inode: Inode::ROOT,
            basename: String::new(),
            full_path: String::new(),
            metadata: InodeMetadata::new(0, true),
            components: components(prefix),
        }
    }
}
const CHECK_UTF8: usize = 1000;
/// Ignore inodes that are in another channel
///
/// Returns the graph position of `inode` only if the change that
/// introduced it is present in `channel`'s changeset; otherwise
/// (unknown inode, or inode from another channel) returns `None`.
fn get_inodes<T: ChannelTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>>(
txn: &T,
channel: &T::Channel,
inode: Inode,
) -> Result<Option<Position<ChangeId>>, TxnErr<T::GraphError>> {
if let Some(vertex) = txn.get_inodes(inode, None)? {
if txn
.get_changeset(T::changes(channel), vertex.change)?
.is_some()
{
Ok(Some(vertex))
} else {
Ok(None)
}
} else {
Ok(None)
}
}
impl Builder {
/// Record the difference between `channel` and `working_copy` under
/// `prefix`, accumulating the results into this builder.
///
/// Iterative depth-first traversal of the tracked tree: each popped
/// item is classified as already-seen, root, known-in-channel
/// (possibly moved/modified/deleted), or new (file addition), and its
/// children are then pushed.
///
/// Errors with `RecordError::PathNotInRepo` if `prefix` matches no
/// tracked path (see `push_children`).
pub fn record<T, W: WorkingCopy, C: ChangeStore>(
&mut self,
txn: &mut T,
diff_algorithm: diff::Algorithm,
channel: &mut T::Channel,
working_copy: &mut W,
changes: &C,
prefix: &str,
) -> Result<(), RecordError<C::Error, W::Error, T::GraphError>>
where
T: ChannelMutTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
<W as WorkingCopy>::Error: 'static,
{
let now = std::time::Instant::now();
let mut stack = vec![RecordItem::root(prefix)];
while let Some(mut item) = stack.pop() {
debug!("stack.pop() = Some({:?})", item);
// Check for moves and file conflicts.
let vertex = if let Some(vertex) = self.recorded_inodes.get(&item.inode) {
// Already processed in this record: reuse its position.
*vertex
} else if item.inode == Inode::ROOT {
self.recorded_inodes
.insert(Inode::ROOT, Position::OPTION_ROOT);
// At the root, only scan for children deleted in the working copy.
self.delete_obsolete_children(
txn,
T::graph(channel),
working_copy,
changes,
&item.full_path,
Position::ROOT,
)?;
Position::OPTION_ROOT
} else if let Some(vertex) = get_inodes(txn, &channel, item.inode)? {
// The inode is known to this channel: record deletions among
// its children, then moves/edits/deletion of the file itself.
self.delete_obsolete_children(
txn,
T::graph(channel),
working_copy,
changes,
&item.full_path,
vertex,
)?;
self.record_existing_file(
txn,
diff_algorithm,
&channel,
working_copy,
changes,
&item,
vertex,
)?;
self.recorded_inodes.insert(item.inode, vertex.to_option());
vertex.to_option()
} else {
// Unknown to this channel: this is a path addition.
match self.add_file(working_copy, item.clone()) {
Ok(Some(vertex)) => {
// Path addition (maybe just a single directory).
self.recorded_inodes.insert(item.inode, vertex);
vertex
}
// Non-directory additions (or failures to stat) have no
// children to descend into.
_ => continue,
}
};
// Move on to the next step.
self.push_children::<_, _, C>(
txn,
&channel,
working_copy,
&mut item,
vertex,
&mut stack,
prefix,
)?;
}
crate::TIMERS.lock().unwrap().record += now.elapsed();
Ok(())
}
/// Record the addition of a single path (`item`) from the working
/// copy: pushes a `Hunk::FileAdd` (name vertex + inode vertex +
/// optional contents vertex) and the matching `InodeUpdate::Add`.
///
/// Returns the position of the new inode vertex if `item` is a
/// directory (so its children can attach to it), `None` for files.
fn add_file<W: WorkingCopy>(
&mut self,
working_copy: &mut W,
item: RecordItem,
) -> Result<Option<Position<Option<ChangeId>>>, W::Error> {
debug!("record_file_addition {:?}", item);
let meta = working_copy.file_metadata(&item.full_path)?;
// Name vertex: 2 bytes of metadata followed by the basename bytes.
let name_start = ChangePosition(self.rec.contents.len() as u64);
meta.write(&mut self.rec.contents).unwrap();
self.rec.contents.extend(item.basename.as_bytes());
let name_end = ChangePosition(self.rec.contents.len() as u64);
self.rec.contents.push(0);
// Empty inode vertex, separated from the name by a 0 byte.
let inode_pos = ChangePosition(self.rec.contents.len() as u64);
self.rec.contents.push(0);
let contents = if meta.is_file() {
// Copy the file's bytes into the change contents.
let start = ChangePosition(self.rec.contents.len() as u64);
working_copy.read_file(&item.full_path, &mut self.rec.contents)?;
let end = ChangePosition(self.rec.contents.len() as u64);
self.rec.largest_file = self.rec.largest_file.max(end.0 - start.0);
// Binary detection: the file counts as binary if its first
// CHECK_UTF8 bytes are not valid UTF-8 (the extra 4 bytes allow
// a code point truncated by the window boundary).
self.rec.has_binary_files |= {
let s = start.0 as usize;
let e = (end.0 as usize).min(s + CHECK_UTF8 + 4);
let utf8 = std::str::from_utf8(&self.rec.contents[s..e]);
debug!("utf8 = {:?}", utf8);
match utf8 {
Err(e) => e.valid_up_to() < CHECK_UTF8,
Ok(_) => false,
}
};
self.rec.contents.push(0);
// Only non-empty files get a contents vertex.
if self.rec.contents.len() as u64 > inode_pos.0 + 1 {
Some(Atom::NewVertex(NewVertex {
up_context: vec![Position {
change: None,
pos: inode_pos,
}],
down_context: vec![],
start,
end,
flag: EdgeFlags::BLOCK,
inode: Position {
change: None,
pos: inode_pos,
},
}))
} else {
None
}
} else {
None
};
// The FileAdd hunk: parent -> name vertex -> inode vertex (FOLDER
// edges), plus the optional contents.
self.rec.actions.push(Hunk::FileAdd {
add_name: Atom::NewVertex(NewVertex {
up_context: vec![item.v_papa],
down_context: vec![],
start: name_start,
end: name_end,
flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
inode: item.v_papa,
}),
add_inode: Atom::NewVertex(NewVertex {
up_context: vec![Position {
change: None,
pos: name_end,
}],
down_context: vec![],
start: inode_pos,
end: inode_pos,
flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
inode: item.v_papa,
}),
contents,
path: item.full_path.clone(),
});
debug!("{:?}", self.rec.actions.last().unwrap());
// Keyed by the action's 1-based position (len() after the push).
self.rec.updatables.insert(
self.rec.actions.len(),
InodeUpdate::Add {
inode: item.inode,
pos: inode_pos,
},
);
if meta.is_dir() {
Ok(Some(Position {
change: None,
pos: inode_pos,
}))
} else {
Ok(None)
}
}
/// Record what happened to a file already known to `channel`:
/// a move / metadata change / resurrection (via `record_moved_file`),
/// content edits (via `diff`), or deletion from the working copy
/// (via `record_deleted_file`).
fn record_existing_file<
T: ChannelMutTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
W: WorkingCopy,
C: ChangeStore,
>(
&mut self,
txn: &mut T,
diff_algorithm: diff::Algorithm,
channel: &T::Channel,
working_copy: &mut W,
changes: &C,
item: &RecordItem,
vertex: Position<ChangeId>,
) -> Result<(), RecordError<C::Error, W::Error, T::GraphError>>
where
<W as crate::working_copy::WorkingCopy>::Error: 'static,
{
debug!(
"record_existing_file {:?}: {:?} {:?}",
item.full_path, item.inode, vertex
);
// Former parent(s) of vertex
self.former_parents.clear();
let f0 = EdgeFlags::FOLDER | EdgeFlags::PARENT;
let f1 = EdgeFlags::all();
// NOTE(review): `is_deleted` starts at `true` and is never assigned
// `false` in this function, which makes the `|| is_deleted` test
// below always pass — verify against upstream whether the non-DELETED
// branch of the loop was meant to reset it to `false`.
let mut is_deleted = true;
// Walk the PARENT folder edges of the inode vertex to collect the
// file's former name(s), metadata and parent position(s).
for name_ in iter_adjacent(txn, T::graph(channel), vertex.inode_vertex(), f0, f1)? {
let name_ = name_?;
if !name_.flag.contains(EdgeFlags::PARENT) {
continue;
}
debug!("name_ = {:?}", name_);
if name_.flag.contains(EdgeFlags::DELETED) {
debug!("is_deleted {:?}: {:?}", item.full_path, name_);
is_deleted = true;
break;
}
// Resolve the name vertex and decode it: 2 metadata bytes, then
// the basename.
let name_dest = txn.find_block_end(T::graph(channel), name_.dest).unwrap();
let mut name = Vec::new();
changes
.get_contents(|p| txn.get_external(p).unwrap(), name_dest, &mut name)
.map_err(RecordError::Changestore)?;
let (metadata, basename) = name.split_at(2);
let metadata = InodeMetadata::from_basename(metadata);
let basename = std::str::from_utf8(basename).unwrap().to_string();
debug!("former basename of {:?}: {:?}", vertex, basename);
if let Some(v_papa) = iter_adjacent(txn, T::graph(channel), name_dest, f0, f1)?.next() {
let v_papa = v_papa?;
self.former_parents.push(Parent {
basename,
metadata,
parent: v_papa.dest.to_option(),
})
}
}
debug!(
"record_existing_file: {:?} {:?} {:?}",
item, self.former_parents, is_deleted,
);
assert!(!self.former_parents.is_empty());
if let Ok(new_meta) = working_copy.file_metadata(&item.full_path) {
debug!("new_meta = {:?}", new_meta);
// Any difference between the former name/metadata/parent and the
// current one (or a deleted former name) is recorded as a move.
if self.former_parents.len() > 1
|| self.former_parents[0].basename != item.basename
|| self.former_parents[0].metadata != item.metadata
|| self.former_parents[0].parent != item.v_papa
|| is_deleted
{
// This parent has changed.
let new_papa = *self.recorded_inodes.get(&item.papa).unwrap();
debug!("new_papa = {:?}", new_papa);
self.record_moved_file::<_, _, W>(
changes,
txn,
T::graph(channel),
&item,
vertex,
new_papa,
self.former_parents[0].metadata,
)?
}
// Diff the contents only if forced or if the mtime suggests the
// file changed since the channel's last modification.
if new_meta.is_file()
&& (self.force_rediff
|| self.modified_since_last_commit::<T, _>(
&channel,
working_copy,
&item.full_path,
)?)
{
let mut ret = retrieve(txn, T::graph(channel), vertex)?;
let mut b = Vec::new();
working_copy
.read_file(&item.full_path, &mut b)
.map_err(RecordError::WorkingCopy)?;
debug!("diffing…");
let len = self.rec.actions.len();
self.diff(
changes,
txn,
&channel,
diff_algorithm,
item.full_path.clone(),
vertex.to_option(),
&mut ret,
&b,
)?;
// If the diff produced actions, this file contributes to the
// change's oldest-modification timestamp.
if self.rec.actions.len() > len {
if let Ok(last_modified) = working_copy.modified_time(&item.full_path) {
self.rec.oldest_change = self.rec.oldest_change.min(last_modified);
}
}
debug!(
"new actions: {:?}, total {:?}",
&self.rec.actions.len() - len,
self.rec.actions.len()
);
}
} else {
// Not in the working copy any more: record the deletion.
debug!("calling record_deleted_file on {:?}", item.full_path);
self.record_deleted_file(
txn,
T::graph(channel),
working_copy,
&item.full_path,
vertex,
)?
}
Ok(())
}
/// For the directory at graph position `v`, record the deletion of
/// every child that no longer exists in the working copy (either its
/// inode mapping is gone, or the file itself is unreadable).
///
/// Deletion is recursive through `record_deleted_file`.
fn delete_obsolete_children<
T: GraphTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
W: WorkingCopy,
C: ChangeStore,
>(
&mut self,
txn: &T,
channel: &T::Graph,
working_copy: &W,
changes: &C,
full_path: &str,
v: Position<ChangeId>,
) -> Result<(), RecordError<C::Error, W::Error, T::GraphError>>
where
<W as WorkingCopy>::Error: 'static,
{
let f0 = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
let f1 = f0 | EdgeFlags::PSEUDO;
debug!("delete_obsolete_children, v = {:?}", v);
// `child` is a name vertex; `grandchild` is the inode vertex below it.
for child in iter_adjacent(txn, channel, v.inode_vertex(), f0, f1)? {
let child = child?;
let child = txn.find_block(channel, child.dest).unwrap();
for grandchild in iter_adjacent(txn, channel, child, f0, f1)? {
let grandchild = grandchild?;
debug!("grandchild {:?}", grandchild);
// Deletion is needed when the inode mapping is gone, or when
// the mapped path cannot be stat'ed in the working copy.
let needs_deletion =
if let Some(inode) = txn.get_revinodes(grandchild.dest, None)? {
debug!("inode = {:?} {:?}", inode, txn.get_revtree(inode, None));
if let Some(path) = crate::fs::inode_filename(txn, inode)? {
working_copy.file_metadata(&path).is_err()
} else {
true
}
} else {
true
};
if needs_deletion {
// Reconstruct the child's path from the name vertex
// (2 metadata bytes, then the basename).
let mut name = Vec::new();
changes
.get_contents(|p| txn.get_external(p).unwrap(), child, &mut name)
.map_err(RecordError::Changestore)?;
let mut full_path = full_path.to_string();
if name.len() > 2 {
if let Ok(name) = std::str::from_utf8(&name[2..]) {
if !full_path.is_empty() {
full_path.push('/');
}
full_path.push_str(name);
}
}
// delete recursively.
self.record_deleted_file(
txn,
&channel,
working_copy,
&full_path,
grandchild.dest,
)?
}
}
}
Ok(())
}
/// Push onto `stack` the children of `item` found in the tree table,
/// restricted to the next component of the record prefix (if any).
///
/// Children present in the tree but unreadable in the working copy
/// are recorded as deleted instead of pushed. If a prefix component
/// was expected but matched no child, the prefix does not exist:
/// return `RecordError::PathNotInRepo`.
fn push_children<
'a,
T: ChannelTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
W: WorkingCopy,
C: ChangeStore,
>(
&mut self,
txn: &T,
channel: &T::Channel,
working_copy: &W,
item: &mut RecordItem<'a>,
vertex: Position<Option<ChangeId>>,
stack: &mut Vec<RecordItem<'a>>,
prefix: &str,
) -> Result<(), RecordError<C::Error, W::Error, T::GraphError>>
where
<W as crate::working_copy::WorkingCopy>::Error: 'static,
{
debug!("push_children, item = {:?}", item);
// Next prefix component to match, if the traversal is restricted.
let comp = item.components.next();
let full_path = item.full_path.clone();
// Iterate the tree table from (item.inode, "") to list children.
let fileid = OwnedPathId {
parent_inode: item.inode,
basename: SmallString::new(),
};
let mut has_matching_children = false;
for x in txn.iter_tree(fileid.clone(), None)? {
let (fileid_, child_inode) = x?;
debug!("push_children {:?} {:?}", fileid_, child_inode);
// Skip entries before this inode (and the inode's own empty-name
// entry); stop at the first entry of a later inode.
if fileid_.parent_inode < fileid.parent_inode || fileid_.basename.is_empty() {
continue;
} else if fileid_.parent_inode > fileid.parent_inode {
break;
}
if let Some(comp) = comp {
if comp != fileid_.basename.as_str() {
continue;
}
}
has_matching_children = true;
let basename = fileid_.basename.as_str().to_string();
let full_path = if full_path.is_empty() {
basename.clone()
} else {
full_path.clone() + "/" + &basename
};
debug!("fileid_ {:?} child_inode {:?}", fileid_, child_inode);
if let Ok(meta) = working_copy.file_metadata(&full_path) {
stack.push(RecordItem {
papa: item.inode,
inode: child_inode,
v_papa: vertex,
basename,
full_path,
metadata: meta,
components: item.components.clone(),
})
} else if let Some(vertex) = get_inodes(txn, &channel, child_inode)? {
// Tracked and known to the channel, but missing on disk.
self.record_deleted_file(txn, T::graph(channel), working_copy, &full_path, vertex)?
}
}
if comp.is_some() && !has_matching_children {
debug!("comp = {:?}", comp);
return Err(RecordError::PathNotInRepo(prefix.to_string()));
}
Ok(())
}
/// Returns whether `prefix` may have been modified since the channel
/// was last changed, by comparing the file's mtime (with a two-second
/// slack) against the channel's timestamp. When the working copy
/// cannot report a modification time, conservatively returns `true`.
fn modified_since_last_commit<T: ChannelTxnT, W: WorkingCopy>(
    &mut self,
    channel: &T::Channel,
    working_copy: &W,
    prefix: &str,
) -> Result<bool, std::time::SystemTimeError> {
    let last_modified = match working_copy.modified_time(prefix) {
        Ok(t) => t,
        // No mtime available: assume the path was modified.
        Err(_) => return Ok(true),
    };
    let secs = last_modified
        .duration_since(std::time::UNIX_EPOCH)?
        .as_secs();
    debug!(
        "last_modified = {:?}, channel.last = {:?}",
        secs,
        T::last_modified(channel)
    );
    // Two-second slack absorbs coarse filesystem timestamp resolution.
    Ok(secs + 2 >= T::last_modified(channel))
}
}
impl Builder {
    /// Records the hunks needed when the file tracked at `vertex` has
    /// been moved and/or had its metadata changed in the working copy.
    ///
    /// Serializes the new (metadata, basename) pair into
    /// `self.rec.contents`, classifies the existing name edges via
    /// [`collect_moved_edges`], and pushes the resulting `FileUndel`,
    /// `FileMove` or `SolveNameConflict` hunks.
    fn record_moved_file<T: GraphTxnT, C: ChangeStore, W: WorkingCopy>(
        &mut self,
        changes: &C,
        txn: &T,
        channel: &T::Graph,
        item: &RecordItem,
        vertex: Position<ChangeId>,
        new_papa: Position<Option<ChangeId>>,
        old_meta: InodeMetadata,
    ) -> Result<(), RecordError<C::Error, W::Error, T::GraphError>>
    where
        <W as crate::working_copy::WorkingCopy>::Error: 'static,
    {
        debug!("record_moved_file {:?} {:?}", item, old_meta);
        // Write the new name entry (metadata followed by basename) to
        // the contents buffer; `name` excludes the trailing 0 byte.
        let name_start = ChangePosition(self.rec.contents.len() as u64);
        item.metadata.write(&mut self.rec.contents).unwrap();
        self.rec.contents.extend(item.basename.as_bytes());
        let name_end = ChangePosition(self.rec.contents.len() as u64);
        self.rec.contents.push(0);
        let name = &self.rec.contents[name_start.0 as usize..name_end.0 as usize];
        let mut moved = collect_moved_edges::<_, _, W>(
            txn,
            changes,
            channel,
            new_papa,
            vertex,
            item.metadata,
            old_meta,
            name,
        )?;
        debug!("moved = {:#?}", moved);
        if !moved.resurrect.is_empty() {
            // The file had been deleted: undelete it, together with the
            // alive edges, and — when no new name vertex is needed —
            // the edges that would otherwise be deleted.
            moved.resurrect.extend(moved.alive.into_iter());
            if !moved.need_new_name {
                moved.resurrect.extend(moved.edges.drain(..));
            }
            self.rec.actions.push(Hunk::FileUndel {
                undel: Atom::EdgeMap(EdgeMap {
                    edges: moved.resurrect,
                    inode: item.v_papa,
                }),
                contents: None,
                path: item.full_path.clone(),
            });
        }
        if !moved.edges.is_empty() {
            if moved.need_new_name {
                // Genuine move/rename: delete the old name edges and
                // introduce a new name vertex between the new parent
                // and the file.
                self.rec.actions.push(Hunk::FileMove {
                    del: Atom::EdgeMap(EdgeMap {
                        edges: moved.edges,
                        inode: item.v_papa,
                    }),
                    add: Atom::NewVertex(NewVertex {
                        up_context: vec![item.v_papa],
                        down_context: vec![vertex.to_option()],
                        start: name_start,
                        end: name_end,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        inode: item.v_papa,
                    }),
                    path: item.full_path.clone(),
                });
            } else {
                // An alive edge with the right name already exists:
                // only delete the extra names (name conflict).
                self.rec.actions.push(Hunk::SolveNameConflict {
                    name: Atom::EdgeMap(EdgeMap {
                        edges: moved.edges,
                        inode: item.v_papa,
                    }),
                    path: item.full_path.clone(),
                });
                // The serialized name turned out not to be needed.
                self.rec.contents.truncate(name_start.0 as usize)
            }
        } else {
            // Nothing to delete: drop the serialized name again.
            self.rec.contents.truncate(name_start.0 as usize)
        }
        Ok(())
    }
}
/// Edge sets produced by [`collect_moved_edges`] when recording a file
/// move or metadata change.
#[derive(Debug)]
struct MovedEdges {
    // Name edges to delete (old names/positions no longer matching).
    edges: Vec<NewEdge<Option<ChangeId>>>,
    // Alive edges re-added alongside the deletions; used for missing
    // context detection (see comments in `collect_moved_edges`).
    alive: Vec<NewEdge<Option<ChangeId>>>,
    // Deleted edges to resurrect (the file comes back to life).
    resurrect: Vec<NewEdge<Option<ChangeId>>>,
    // Whether a new name vertex must be introduced (no alive name with
    // the right basename/parent was found).
    need_new_name: bool,
}
/// Walks the name edges around `current_pos` (an inode vertex) and
/// sorts them into [`MovedEdges`] buckets, comparing each existing
/// name and parent with the new serialized name `name` (metadata +
/// basename) and the new parent `parent_pos`.
fn collect_moved_edges<T: GraphTxnT, C: ChangeStore, W: WorkingCopy>(
    txn: &T,
    changes: &C,
    channel: &T::Graph,
    parent_pos: Position<Option<ChangeId>>,
    current_pos: Position<ChangeId>,
    new_meta: InodeMetadata,
    old_meta: InodeMetadata,
    name: &[u8],
) -> Result<MovedEdges, RecordError<C::Error, W::Error, T::GraphError>>
where
    <W as crate::working_copy::WorkingCopy>::Error: 'static,
{
    debug!("collect_moved_edges {:?}", current_pos);
    let mut moved = MovedEdges {
        edges: Vec::new(),
        alive: Vec::new(),
        resurrect: Vec::new(),
        need_new_name: true,
    };
    // Deleted (grandparent -> name) edges, grouped by endpoints to
    // detect when several changes introduced the same deletion.
    let mut del_del = HashMap::new();
    // Alive edges, grouped likewise to detect duplicates.
    let mut alive = HashMap::new();
    let mut previous_name = Vec::new();
    // `parent` edges go up from the inode vertex to its name vertex.
    for parent in iter_adjacent(
        txn,
        channel,
        current_pos.inode_vertex(),
        EdgeFlags::FOLDER | EdgeFlags::PARENT,
        EdgeFlags::all(),
    )? {
        let parent = parent?;
        if !parent.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT) {
            continue;
        }
        debug!("parent = {:?}", parent);
        let mut parent_was_resurrected = false;
        // Pseudo-edges are not part of any change, hence not touched.
        if !parent.flag.contains(EdgeFlags::PSEUDO) {
            if parent.flag.contains(EdgeFlags::DELETED) {
                // The (name vertex -> inode) edge was deleted:
                // resurrect it.
                moved.resurrect.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                    from: parent.dest.to_option(),
                    to: current_pos.inode_vertex().to_option(),
                    introduced_by: Some(parent.introduced_by),
                });
                parent_was_resurrected = true;
                let v = alive
                    .entry((parent.dest, current_pos.inode_vertex()))
                    .or_insert_with(Vec::new);
                v.push(None)
            } else {
                let v = alive
                    .entry((parent.dest, current_pos.inode_vertex()))
                    .or_insert_with(Vec::new);
                v.push(Some(parent.introduced_by))
            }
        }
        // Load the name (metadata + basename) carried by the name vertex.
        previous_name.clear();
        let parent_dest = txn.find_block_end(channel, parent.dest).unwrap();
        changes
            .get_contents(
                |p| txn.get_external(p).unwrap(),
                parent_dest,
                &mut previous_name,
            )
            .map_err(RecordError::Changestore)?;
        debug!(
            "parent_dest {:?} {:?}",
            parent_dest,
            std::str::from_utf8(&previous_name[2..])
        );
        debug!("new_meta = {:?}, old_meta = {:?}", new_meta, old_meta);
        // The first two bytes are the serialized metadata; metadata
        // changes are deliberately ignored on Windows.
        let name_changed =
            (previous_name[2..] != name[2..]) || (new_meta != old_meta && cfg!(not(windows)));
        // `grandparent` edges go up from the name vertex to the parent
        // directory's inode vertex.
        for grandparent in iter_adjacent(
            txn,
            channel,
            parent_dest,
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::all(),
        )? {
            let grandparent = grandparent?;
            if !grandparent
                .flag
                .contains(EdgeFlags::FOLDER | EdgeFlags::PARENT)
                || grandparent.flag.contains(EdgeFlags::PSEUDO)
            {
                continue;
            }
            debug!("grandparent: {:?}", grandparent);
            let grandparent_dest = txn.find_block_end(channel, grandparent.dest).unwrap();
            // Directory inodes are empty (zero-length) vertices.
            assert_eq!(grandparent_dest.start, grandparent_dest.end);
            debug!(
                "grandparent_dest {:?} {:?}",
                grandparent_dest,
                std::str::from_utf8(&previous_name[2..])
            );
            let grandparent_changed = parent_pos != grandparent.dest.to_option();
            debug!("change = {:?}", grandparent_changed || name_changed);
            if grandparent.flag.contains(EdgeFlags::DELETED) {
                if !grandparent_changed && !name_changed {
                    // We resurrect the name
                    moved.resurrect.push(NewEdge {
                        previous: grandparent.flag - EdgeFlags::PARENT,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: grandparent.dest.to_option(),
                        to: parent_dest.to_option(),
                        introduced_by: Some(grandparent.introduced_by),
                    });
                    if !parent_was_resurrected && !parent.flag.contains(EdgeFlags::PSEUDO) {
                        moved.alive.push(NewEdge {
                            previous: parent.flag - EdgeFlags::PARENT,
                            flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                            from: parent.dest.to_option(),
                            to: current_pos.inode_vertex().to_option(),
                            introduced_by: Some(parent.introduced_by),
                        })
                    }
                    moved.need_new_name = false
                } else {
                    // Clean up the extra deleted edges.
                    debug!("cleanup");
                    let v = del_del
                        .entry((grandparent.dest, parent_dest))
                        .or_insert_with(Vec::new);
                    v.push(Some(grandparent.introduced_by))
                }
            } else if grandparent_changed || name_changed {
                // Alive name that no longer matches: delete it.
                moved.edges.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: grandparent.dest.to_option(),
                    to: parent_dest.to_option(),
                    introduced_by: Some(grandparent.introduced_by),
                });
                // The following is really important in missing context detection:
                if !parent_was_resurrected && !parent.flag.contains(EdgeFlags::PSEUDO) {
                    moved.alive.push(NewEdge {
                        previous: parent.flag - EdgeFlags::PARENT,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: parent.dest.to_option(),
                        to: current_pos.inode_vertex().to_option(),
                        introduced_by: Some(parent.introduced_by),
                    })
                }
            } else {
                // Alive name that still matches: keep it; no new name
                // vertex will be needed.
                let v = alive
                    .entry((grandparent.dest, parent_dest))
                    .or_insert_with(Vec::new);
                v.push(Some(grandparent.introduced_by));
                moved.need_new_name = false
            }
        }
    }
    // Deleted edges introduced by more than one change: delete the
    // duplicates so a single deletion remains.
    for ((from, to), intro) in del_del {
        if intro.len() > 1 {
            for introduced_by in intro {
                if introduced_by.is_some() {
                    moved.edges.push(NewEdge {
                        previous: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                        from: from.to_option(),
                        to: to.to_option(),
                        introduced_by,
                    })
                }
            }
        }
    }
    // Alive duplicates — or alive edges next to resurrections — are
    // re-asserted as alive.
    for ((from, to), intro) in alive {
        if intro.len() > 1 || !moved.resurrect.is_empty() {
            for introduced_by in intro {
                if introduced_by.is_some() {
                    moved.alive.push(NewEdge {
                        previous: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: from.to_option(),
                        to: to.to_option(),
                        introduced_by,
                    })
                }
            }
        }
    }
    Ok(moved)
}
impl Builder {
    /// Records the deletion of the file at `current_vertex` (shown as
    /// `full_path` in messages), recursively covering its descendants
    /// (directory entries and file contents) — except descendants that
    /// still exist in the working copy, which are skipped.
    fn record_deleted_file<
        T: GraphTxnT + TreeTxnT<TreeError = <T as GraphTxnT>::GraphError>,
        W: WorkingCopy,
    >(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        working_copy: &W,
        full_path: &str,
        current_vertex: Position<ChangeId>,
    ) -> Result<(), TxnErr<T::GraphError>> {
        debug!("record_deleted_file {:?} {:?}", current_vertex, full_path);
        // DFS below `current_vertex`. The second stack element is
        // `Some(inode)` while walking the contents of the file rooted
        // at `inode`, `None` while still on folder structure.
        let mut stack = vec![(current_vertex.inode_vertex(), None)];
        let mut visited = HashSet::new();
        while let Some((vertex, inode)) = stack.pop() {
            debug!("vertex {:?}, inode {:?}", vertex, inode);
            // A vertex that still maps to a path present on disk must
            // not be deleted.
            if let Some(path) = tree_path(txn, vertex.start_pos())? {
                if working_copy.file_metadata(&path).is_ok() {
                    debug!("not deleting {:?}", path);
                    continue;
                }
            }
            // Kill this vertex
            if let Some(inode) = inode {
                // Content vertex: extend the current FileDel hunk.
                self.delete_file_edge(txn, channel, vertex, inode)?
            } else if vertex.start == vertex.end {
                debug!("delete_recursively {:?}", vertex);
                // Killing an inode.
                if !self.deleted_vertices.insert(vertex.start_pos()) {
                    // Already deleted during this record.
                    continue;
                }
                // Keep the inode maps consistent with the recorded
                // deletion.
                if let Some(inode) = txn.get_revinodes(vertex.start_pos(), None)? {
                    debug!(
                        "delete_recursively, vertex = {:?}, inode = {:?}",
                        vertex, inode
                    );
                    self.recorded_inodes
                        .insert(inode, vertex.start_pos().to_option());
                    self.rec
                        .updatables
                        .insert(self.rec.actions.len(), InodeUpdate::Deleted { inode });
                }
                self.delete_inode_vertex(txn, channel, vertex, vertex.start_pos(), full_path)?
            }
            // Move on to the descendants.
            for edge in iter_adjacent(
                txn,
                channel,
                vertex,
                EdgeFlags::empty(),
                EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
            )? {
                let edge = edge?;
                debug!("delete_recursively, edge: {:?}", edge);
                let dest = txn
                    .find_block(channel, edge.dest)
                    .expect("delete_recursively, descendants");
                // Once inside file contents, stay attached to the same
                // inode; following a non-FOLDER edge out of an inode
                // vertex means entering that file's contents.
                let inode = if inode.is_some() {
                    assert!(!edge.flag.contains(EdgeFlags::FOLDER));
                    inode
                } else if edge.flag.contains(EdgeFlags::FOLDER) {
                    None
                } else {
                    assert_eq!(vertex.start, vertex.end);
                    Some(vertex.start_pos())
                };
                if visited.insert(edge.dest) {
                    stack.push((dest, inode))
                }
            }
        }
        Ok(())
    }
    /// Pushes a `FileDel` hunk that deletes the two name edges of the
    /// inode at `vertex`: (grandparent dir -> name vertex) and
    /// (name vertex -> inode). Pseudo-edges are skipped — they are not
    /// part of any change.
    fn delete_inode_vertex<T: GraphTxnT>(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        vertex: Vertex<ChangeId>,
        inode: Position<ChangeId>,
        path: &str,
    ) -> Result<(), TxnErr<T::GraphError>> {
        debug!("delete_inode_vertex {:?}", path);
        let mut edges = Vec::new();
        for parent in iter_adjacent(
            txn,
            channel,
            vertex,
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::all(),
        )? {
            let parent = parent?;
            if !parent.flag.contains(EdgeFlags::PARENT) {
                continue;
            }
            assert!(parent.flag.contains(EdgeFlags::FOLDER));
            // `parent.dest` points into the name vertex; resolve the
            // whole name block.
            let parent_dest = txn.find_block_end(channel, parent.dest).unwrap();
            for grandparent in iter_adjacent(
                txn,
                channel,
                parent_dest,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::all(),
            )? {
                let grandparent = grandparent?;
                if !grandparent.flag.contains(EdgeFlags::PARENT)
                    || grandparent.flag.contains(EdgeFlags::PSEUDO)
                {
                    continue;
                }
                assert!(grandparent.flag.contains(EdgeFlags::PARENT));
                assert!(grandparent.flag.contains(EdgeFlags::FOLDER));
                // Delete (grandparent dir -> name vertex).
                edges.push(NewEdge {
                    previous: grandparent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: grandparent.dest.to_option(),
                    to: parent_dest.to_option(),
                    introduced_by: Some(grandparent.introduced_by),
                });
            }
            if !parent.flag.contains(EdgeFlags::PSEUDO) {
                // Delete (name vertex -> inode).
                edges.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: parent.dest.to_option(),
                    to: vertex.to_option(),
                    introduced_by: Some(parent.introduced_by),
                });
            }
        }
        debug!("deleting {:?}", edges);
        if !edges.is_empty() {
            self.rec.actions.push(Hunk::FileDel {
                del: Atom::EdgeMap(EdgeMap {
                    edges,
                    inode: inode.to_option(),
                }),
                contents: None,
                path: path.to_string(),
            })
        }
        Ok(())
    }
    /// Adds the deletion of the alive content edges pointing to `to`
    /// into the `contents` part of the most recent `FileDel` hunk. If
    /// the last recorded action is not a `FileDel`, does nothing.
    fn delete_file_edge<T: GraphTxnT>(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        to: Vertex<ChangeId>,
        inode: Position<ChangeId>,
    ) -> Result<(), TxnErr<T::GraphError>> {
        if let Some(Hunk::FileDel {
            ref mut contents, ..
        }) = self.rec.actions.last_mut()
        {
            // Lazily create the contents edge map.
            if contents.is_none() {
                *contents = Some(Atom::EdgeMap(EdgeMap {
                    edges: Vec::new(),
                    inode: inode.to_option(),
                }))
            }
            if let Some(Atom::EdgeMap(mut e)) = contents.take() {
                // Only alive (non-DELETED) parent edges need deleting.
                for parent in iter_adjacent(
                    txn,
                    channel,
                    to,
                    EdgeFlags::PARENT,
                    EdgeFlags::all() - EdgeFlags::DELETED,
                )? {
                    let parent = parent?;
                    if parent.flag.contains(EdgeFlags::PSEUDO) {
                        continue;
                    }
                    assert!(parent.flag.contains(EdgeFlags::PARENT));
                    assert!(!parent.flag.contains(EdgeFlags::FOLDER));
                    e.edges.push(NewEdge {
                        previous: parent.flag - EdgeFlags::PARENT,
                        flag: (parent.flag - EdgeFlags::PARENT) | EdgeFlags::DELETED,
                        from: parent.dest.to_option(),
                        to: to.to_option(),
                        introduced_by: Some(parent.introduced_by),
                    })
                }
                // Drop the map again if nothing was added.
                if !e.edges.is_empty() {
                    *contents = Some(Atom::EdgeMap(e))
                }
            }
        }
        Ok(())
    }
}
use super::change_id::*;
/// A node in the repository graph, made of a change internal
/// identifier, and a line identifier in that change.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct Vertex<H> {
    /// The change that introduced this node.
    pub change: H,
    /// The line identifier of the node in that change. Here,
    /// "line" does not imply anything on the contents of the
    /// chunk.
    pub start: ChangePosition,
    /// End of the byte interval of this node within the change; equal
    /// to `start` for empty "inode" vertices.
    pub end: ChangePosition,
}
impl Vertex<ChangeId> {
    /// The node at the root of the repository graph.
    pub const ROOT: Vertex<ChangeId> = Vertex {
        change: ChangeId::ROOT,
        start: ChangePosition::ROOT,
        end: ChangePosition::ROOT,
    };
    /// The virtual "bottom" node of the repository graph (same change
    /// as `ROOT`, but at `ChangePosition::BOTTOM`).
    pub(crate) const BOTTOM: Vertex<ChangeId> = Vertex {
        change: ChangeId::ROOT,
        start: ChangePosition::BOTTOM,
        end: ChangePosition::BOTTOM,
    };
    /// Is this the root key? (the root key is all 0s).
    pub fn is_root(&self) -> bool {
        self == &Vertex::ROOT
    }
    /// Widens the change identifier to `Option<ChangeId>`, keeping the
    /// same positions.
    pub(crate) fn to_option(&self) -> Vertex<Option<ChangeId>> {
        Vertex {
            change: Some(self.change),
            start: self.start,
            end: self.end,
        }
    }
}
impl<H: Clone> Vertex<H> {
    /// The start of this vertex, as a [`Position`](struct.Position.html).
    pub fn start_pos(&self) -> Position<H> {
        let change = self.change.clone();
        Position {
            change,
            pos: self.start,
        }
    }
    /// The end of this vertex, as a [`Position`](struct.Position.html).
    pub fn end_pos(&self) -> Position<H> {
        let change = self.change.clone();
        Position {
            change,
            pos: self.end,
        }
    }
    /// Is this vertex of zero length?
    pub fn is_empty(&self) -> bool {
        self.start == self.end
    }
    /// Length of this key, in bytes.
    pub fn len(&self) -> usize {
        self.end - self.start
    }
}
/// The position of a byte within a change.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct ChangePosition(pub u64);
impl ChangePosition {
    // Position 0: the root position of a change.
    pub(crate) const ROOT: ChangePosition = ChangePosition(0);
    // Position 1: the virtual "bottom" position (see `Vertex::BOTTOM`).
    pub(crate) const BOTTOM: ChangePosition = ChangePosition(1);
}
impl std::ops::Add<usize> for ChangePosition {
type Output = ChangePosition;
fn add(self, x: usize) -> Self::Output {
ChangePosition(self.0 + x as u64)
}
}
impl std::ops::Sub<ChangePosition> for ChangePosition {
    type Output = usize;
    /// Distance in bytes from `x` up to `self`.
    fn sub(self, x: ChangePosition) -> Self::Output {
        let diff = self.0 - x.0;
        diff as usize
    }
}
/// A byte identifier, i.e. a change together with a position.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[doc(hidden)]
pub struct Position<P> {
    /// The change this byte belongs to.
    pub change: P,
    /// Offset of the byte within that change.
    pub pos: ChangePosition,
}
use super::Base32;
use byteorder::{ByteOrder, LittleEndian};
impl<H: super::Base32> Base32 for Position<H> {
    /// Encodes as `<change>.<pos>`, where `<pos>` is the position as a
    /// little-endian `u64` truncated to its significant bytes (at
    /// least three bytes are always kept), base32-encoded without
    /// padding.
    fn to_base32(&self) -> String {
        let mut v = self.change.to_base32();
        let mut bytes = [0; 8];
        LittleEndian::write_u64(&mut bytes, self.pos.0);
        // Drop trailing zero bytes, keeping at least three.
        let mut i = 7;
        while i > 2 && bytes[i] == 0 {
            i -= 1
        }
        i += 1;
        let len = data_encoding::BASE32_NOPAD.encode_len(i);
        // Reserve room after a '.' separator: 14 dots are enough since
        // 8 bytes encode to at most 13 base32 characters.
        let len0 = v.len() + 1;
        v.push_str("..............");
        v.truncate(len0 + len);
        // SAFETY: BASE32_NOPAD writes ASCII only, so the String
        // remains valid UTF-8.
        data_encoding::BASE32_NOPAD.encode_mut(&bytes[..i], unsafe {
            v.split_at_mut(len0).1.as_bytes_mut()
        });
        v
    }
    /// Parses the `<change>.<pos>` form produced by `to_base32`.
    /// Returns `None` on any malformed input.
    fn from_base32(s: &[u8]) -> Option<Self> {
        // Split at the first '.', separating change from position.
        let n = s.iter().position(|c| *c == b'.')?;
        let (s, pos) = s.split_at(n);
        let pos = &pos[1..];
        let change = H::from_base32(s)?;
        let mut dec = [0; 8];
        let len = data_encoding::BASE32_NOPAD.decode_len(pos.len()).ok()?;
        // Bug fix: a position segment longer than 13 characters used
        // to decode to more than 8 bytes and panic when slicing `dec`;
        // treat oversized input as malformed instead.
        if len > dec.len() {
            return None;
        }
        let pos = data_encoding::BASE32_NOPAD
            .decode_mut(pos, &mut dec[..len])
            .map(|_| LittleEndian::read_u64(&dec))
            .ok()?;
        Some(Position {
            change,
            pos: ChangePosition(pos),
        })
    }
}
impl<H> std::ops::Add<usize> for Position<H> {
    type Output = Position<H>;
    /// Offsets the in-change position by `x` bytes; the change itself
    /// is unchanged.
    fn add(self, x: usize) -> Self::Output {
        let Position { change, pos } = self;
        Position {
            change,
            pos: pos + x,
        }
    }
}
impl Position<ChangeId> {
    /// The empty (zero-length) vertex at this position, used to
    /// represent an inode in the graph.
    pub fn inode_vertex(&self) -> Vertex<ChangeId> {
        Vertex {
            change: self.change,
            start: self.pos,
            end: self.pos,
        }
    }
    /// Is this a position inside the root change?
    pub fn is_root(&self) -> bool {
        self.change.is_root()
    }
    /// Widens the change identifier to `Option<ChangeId>`.
    pub(crate) fn to_option(&self) -> Position<Option<ChangeId>> {
        Position {
            change: Some(self.change),
            pos: self.pos,
        }
    }
    /// The root position: root change, position 0.
    pub const ROOT: Position<ChangeId> = Position {
        change: ChangeId::ROOT,
        pos: ChangePosition(0),
    };
    /// Same as [`Position::ROOT`], with the change wrapped in `Some`.
    pub(crate) const OPTION_ROOT: Position<Option<ChangeId>> = Position {
        change: Some(ChangeId::ROOT),
        pos: ChangePosition(0),
    };
    /// The virtual "bottom" position (root change, `BOTTOM` offset).
    pub(crate) const BOTTOM: Position<ChangeId> = Position {
        change: ChangeId::ROOT,
        pos: ChangePosition::BOTTOM,
    };
}
use super::*;
use crate::small_string;
use ::sanakirja::{Alignment, Commit, Db, Representable, Transaction, UnsafeDb};
use byteorder::{ByteOrder, LittleEndian};
use rand::rngs::ThreadRng;
use rand::thread_rng;
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
/// A Sanakirja pristine.
///
/// Wraps an exclusive Sanakirja environment (the on-disk database),
/// behind an `Arc` so transactions can keep it alive.
pub struct Pristine {
    pub env: Arc<::sanakirja::Env<::sanakirja::Exclusive>>,
}
/// Errors that can arise from the Sanakirja pristine backend.
#[derive(Debug, Error)]
pub enum SanakirjaError {
    /// Error from the underlying Sanakirja key-value store.
    #[error(transparent)]
    Sanakirja(#[from] ::sanakirja::Error),
    /// The pristine file is locked (e.g. by another process).
    #[error("Pristine locked")]
    PristineLocked,
    /// The pristine failed an integrity (CRC) check or is missing a
    /// root database.
    #[error("Pristine corrupt")]
    PristineCorrupt,
    /// A runtime (`RefCell`) borrow failed.
    #[error(transparent)]
    Borrow(#[from] std::cell::BorrowError),
}
// Conversions from low-level Sanakirja errors into this module's
// error types. CRC failures are reported as a corrupt pristine.
impl std::convert::From<::sanakirja::CRCError> for SanakirjaError {
    fn from(_: ::sanakirja::CRCError) -> Self {
        SanakirjaError::PristineCorrupt
    }
}
impl std::convert::From<::sanakirja::CRCError> for TxnErr<SanakirjaError> {
    fn from(_: ::sanakirja::CRCError) -> Self {
        TxnErr(SanakirjaError::PristineCorrupt)
    }
}
impl std::convert::From<::sanakirja::Error> for TxnErr<SanakirjaError> {
    fn from(e: ::sanakirja::Error) -> Self {
        TxnErr(e.into())
    }
}
impl std::convert::From<TxnErr<::sanakirja::Error>> for TxnErr<SanakirjaError> {
    fn from(e: TxnErr<::sanakirja::Error>) -> Self {
        TxnErr(e.0.into())
    }
}
impl Pristine {
    /// Opens (or creates) the pristine at `name`, with a default
    /// initial size of 1 MiB.
    pub fn new<P: AsRef<Path>>(name: P) -> Result<Self, SanakirjaError> {
        Self::new_with_size(name, 1 << 20)
    }
    /// Like [`Pristine::new`], without taking the file lock.
    ///
    /// # Safety
    /// Bypasses Sanakirja's file locking; the caller must guarantee
    /// that no other process or thread accesses the same pristine.
    pub unsafe fn new_nolock<P: AsRef<Path>>(name: P) -> Result<Self, SanakirjaError> {
        Self::new_with_size_nolock(name, 1 << 20)
    }
    /// Opens (or creates) the pristine at `name` with the given
    /// initial size, mapping a "would block" I/O error from the file
    /// lock to [`SanakirjaError::PristineLocked`].
    pub fn new_with_size<P: AsRef<Path>>(name: P, size: u64) -> Result<Self, SanakirjaError> {
        let env = ::sanakirja::Env::try_new(name, size);
        match env {
            Ok(env) => Ok(Pristine { env: Arc::new(env) }),
            Err(::sanakirja::Error::IO(e)) => {
                if let std::io::ErrorKind::WouldBlock = e.kind() {
                    Err(SanakirjaError::PristineLocked)
                } else {
                    Err(SanakirjaError::Sanakirja(::sanakirja::Error::IO(e)))
                }
            }
            Err(e) => Err(SanakirjaError::Sanakirja(e)),
        }
    }
    /// Like [`Pristine::new_with_size`], without taking the file lock.
    ///
    /// # Safety
    /// See [`Pristine::new_nolock`].
    pub unsafe fn new_with_size_nolock<P: AsRef<Path>>(
        name: P,
        size: u64,
    ) -> Result<Self, SanakirjaError> {
        Ok(Pristine {
            env: Arc::new(::sanakirja::Env::new_nolock(name, size)?),
        })
    }
    /// Creates an anonymous (in-memory) pristine, e.g. for tests.
    pub fn new_anon() -> Result<Self, SanakirjaError> {
        Self::new_anon_with_size(1 << 20)
    }
    /// Creates an anonymous (in-memory) pristine of the given size.
    pub fn new_anon_with_size(size: u64) -> Result<Self, SanakirjaError> {
        Ok(Pristine {
            env: Arc::new(::sanakirja::Env::new_anon(size)?),
        })
    }
}
/// Slot numbers of the root databases in the Sanakirja environment
/// (used as `txn.root(Root::X as usize)`).
///
/// The discriminants are part of the on-disk format: never reorder or
/// remove variants, or existing pristines would be read incorrectly.
#[derive(Debug, PartialEq, Clone, Copy)]
#[repr(usize)]
pub enum Root {
    Tree,
    RevTree,
    Inodes,
    RevInodes,
    Internal,
    External,
    RevDep,
    Channels,
    TouchedFiles,
    Dep,
    RevTouchedFiles,
    Partials,
    Remotes,
}
impl Pristine {
    /// Starts an immutable transaction, loading every root database.
    ///
    /// Returns [`SanakirjaError::PristineCorrupt`] when any root is
    /// missing: a well-formed pristine has all of them (they are
    /// created by [`Pristine::mut_txn_begin`]).
    pub fn txn_begin(&self) -> Result<Txn, SanakirjaError> {
        let txn = ::sanakirja::Env::txn_begin(self.env.clone())?;
        // Inner helper so that `?` on the `Option`s returned by
        // `txn.root` can short-circuit to `None`.
        fn begin(
            txn: ::sanakirja::Txn<
                ::sanakirja::Exclusive,
                Arc<::sanakirja::Env<::sanakirja::Exclusive>>,
            >,
        ) -> Option<Txn> {
            Some(Txn {
                channels: txn.root(Root::Channels as usize)?,
                external: txn.root(Root::External as usize)?,
                internal: txn.root(Root::Internal as usize)?,
                inodes: txn.root(Root::Inodes as usize)?,
                revinodes: txn.root(Root::RevInodes as usize)?,
                tree: txn.root(Root::Tree as usize)?,
                revtree: txn.root(Root::RevTree as usize)?,
                revdep: txn.root(Root::RevDep as usize)?,
                touched_files: txn.root(Root::TouchedFiles as usize)?,
                rev_touched_files: txn.root(Root::RevTouchedFiles as usize)?,
                partials: txn.root(Root::Partials as usize)?,
                dep: txn.root(Root::Dep as usize)?,
                remotes: txn.root(Root::Remotes as usize)?,
                rng: thread_rng(),
                open_channels: RefCell::new(HashMap::new()),
                open_remotes: RefCell::new(HashMap::new()),
                txn,
            })
        }
        if let Some(txn) = begin(txn) {
            Ok(txn)
        } else {
            Err(SanakirjaError::PristineCorrupt)
        }
    }
    /// Starts a mutable transaction, creating any missing root
    /// database on the fly (this is how a fresh pristine gets
    /// initialized).
    pub fn mut_txn_begin(&self) -> MutTxn<()> {
        let mut txn = ::sanakirja::Env::mut_txn_begin(self.env.clone()).unwrap();
        MutTxn {
            channels: txn
                .root(Root::Channels as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            external: txn
                .root(Root::External as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            internal: txn
                .root(Root::Internal as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            inodes: txn
                .root(Root::Inodes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revinodes: txn
                .root(Root::RevInodes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            tree: txn
                .root(Root::Tree as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revtree: txn
                .root(Root::RevTree as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revdep: txn
                .root(Root::RevDep as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            dep: txn
                .root(Root::Dep as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            touched_files: txn
                .root(Root::TouchedFiles as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            rev_touched_files: txn
                .root(Root::RevTouchedFiles as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            partials: txn
                .root(Root::Partials as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            remotes: txn
                .root(Root::Remotes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            rng: thread_rng(),
            open_channels: RefCell::new(HashMap::new()),
            open_remotes: RefCell::new(HashMap::new()),
            txn,
        }
    }
}
/// The graph of a channel: maps each vertex to its adjacent edges.
type Graph = ::sanakirja::Db<Vertex<ChangeId>, Edge>;
/// The changes applied to a channel, with their application timestamps.
type ChangeSet = ::sanakirja::Db<ChangeId, ApplyTimestamp>;
/// Reverse of `ChangeSet`: timestamp -> (change, state after it).
type RevChangeSet = ::sanakirja::Db<ApplyTimestamp, (ChangeId, Merkle)>;
// States of a channel keyed by Merkle hash (NOTE(review): the `u64` is
// presumably the timestamp of that state — confirm against accessors).
type ChannelStates = ::sanakirja::Db<Merkle, u64>;
/// An immutable transaction on the pristine.
pub type Txn = GenericTxn<
    ::sanakirja::Txn<::sanakirja::Exclusive, Arc<::sanakirja::Env<::sanakirja::Exclusive>>>,
>;
/// A mutable transaction on the pristine.
pub type MutTxn<T> =
    GenericTxn<::sanakirja::MutTxn<Arc<::sanakirja::Env<::sanakirja::Exclusive>>, T>>;
/// A transaction, used both for mutable and immutable transactions,
/// depending on type parameter `T`.
///
/// In Sanakirja, both `sanakirja::Txn` and `sanakirja::MutTxn`
/// implement `sanakirja::Transaction`, explaining our implementation
/// of `TxnT` for `Txn<T>` for all `T: sanakirja::Transaction`. This
/// covers both mutable and immutable transactions in a single
/// implementation.
pub struct GenericTxn<T: ::sanakirja::Transaction> {
    #[doc(hidden)]
    pub txn: T,
    // External hash -> internal change id.
    #[doc(hidden)]
    pub internal: Db<Hash, ChangeId>,
    // Internal change id -> external hash.
    #[doc(hidden)]
    pub external: Db<ChangeId, Hash>,
    // Working-copy inode -> graph position, and its inverse.
    inodes: Db<Inode, Position<ChangeId>>,
    revinodes: Db<Position<ChangeId>, Inode>,
    // File tree: (parent inode, basename) -> child inode, and inverse.
    tree: Db<UnsafePathId, Inode>,
    revtree: Db<Inode, UnsafePathId>,
    // Dependencies between changes, in both directions.
    revdep: Db<ChangeId, ChangeId>,
    dep: Db<ChangeId, ChangeId>,
    // Which changes touch which file positions, in both directions.
    touched_files: Db<Position<ChangeId>, ChangeId>,
    rev_touched_files: Db<ChangeId, Position<ChangeId>>,
    // Partial-checkout prefixes (NOTE(review): presumably keyed by
    // channel name — confirm against the partials accessors).
    partials: Db<UnsafeSmallStr, Position<ChangeId>>,
    // Channel name -> (graph, changes, rev changes, states, u64, u64)
    // (the meaning of the two `u64`s is not visible from here).
    channels: Db<UnsafeSmallStr, (Graph, ChangeSet, RevChangeSet, ChannelStates, u64, u64)>,
    // Remote name -> (timestamp -> (hash, state), hash -> timestamp,
    // state -> timestamp).
    remotes: Db<UnsafeSmallStr, (Db<u64, (Hash, Merkle)>, Db<Hash, u64>, Db<Merkle, u64>)>,
    rng: ThreadRng,
    // Caches of channels/remotes already opened by this transaction.
    open_channels: RefCell<HashMap<SmallString, ChannelRef<Self>>>,
    open_remotes: RefCell<HashMap<SmallString, RemoteRef<Self>>>,
}
/// SAFETY: this is actually safe because the only non-`Send` fields
/// are `open_channels` and `open_remotes`, but we can't do anything
/// with a `ChannelRef` whose transaction has been moved to another
/// thread.
unsafe impl<T: ::sanakirja::Transaction> Send for GenericTxn<T> {}
/// Result of [`Txn::check_database`].
#[derive(Debug)]
pub struct DatabaseReport {
    /// Number of distinct pages referenced from the root databases.
    pub refs: usize,
    /// Raw allocator statistics reported by Sanakirja.
    pub stats: ::sanakirja::Statistics,
}
impl Txn {
    /// Consistency check of the whole pristine: collects the set of
    /// pages referenced from every root database — including each
    /// channel's and each remote's databases — dumps a debug graph,
    /// and cross-checks that set against the allocator's statistics.
    ///
    /// # Panics
    /// Panics if a page is neither referenced, free, nor bookkeeping
    /// (a leak); if a referenced page is free or bookkeeping; if the
    /// free and bookkeeping sets intersect; or if the page counts are
    /// inconsistent.
    pub fn check_database(&self) -> DatabaseReport {
        // Accumulate, per page, the references found in every root table.
        let mut refs = HashMap::new();
        self.txn.references(&mut refs, self.internal);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.external);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.inodes);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revinodes);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.tree);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revtree);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revdep);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.dep);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.touched_files);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.rev_touched_files);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.partials);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.channels);
        info!("refs = {:?}", refs);
        // Each channel owns four nested databases; count their pages too.
        for x in self.txn.iter(&self.channels, None).unwrap() {
            let (a, (g, c, r, s, _, _)) = x.unwrap();
            info!("channel = {:?}", a);
            self.txn.references(&mut refs, g);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, c);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, r);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, s);
            info!("refs = {:?}", refs);
        }
        self.txn.references(&mut refs, self.remotes);
        info!("refs = {:?}", refs);
        // Same for the three nested databases of each remote.
        for x in self.txn.iter(&self.remotes, None).unwrap() {
            let (a, (u, v, w)) = x.unwrap();
            info!("remote = {:?}", a);
            self.txn.references(&mut refs, u);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, v);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, w);
            info!("refs = {:?}", refs);
        }
        let stats = self.txn.statistics();
        let report = DatabaseReport {
            refs: refs.len(),
            stats: stats.clone(),
        };
        // Dump a debug view of the channel databases
        // (NOTE(review): presumably to a file named "debug_sanakirja").
        let mut channel_roots: Vec<UnsafeDb> = Vec::new();
        for x in self.txn.iter(&self.channels, None).unwrap() {
            let (a, (g, c, r, s, _, _)) = x.unwrap();
            info!("channel: {:?}", a);
            unsafe {
                channel_roots.push(std::mem::transmute(g));
                channel_roots.push(std::mem::transmute(c));
                channel_roots.push(std::mem::transmute(r));
                channel_roots.push(std::mem::transmute(s));
            }
        }
        ::sanakirja::debug_(&self.txn, &channel_roots[..], "debug_sanakirja", true);
        self.txn.check_references(&mut refs);
        info!("stats = {:?}", stats);
        let occupied_pages =
            stats.total_pages - stats.free_pages.len() - stats.bookkeeping_pages.len();
        // Every page (4 KiB each) must be referenced, free, or
        // bookkeeping — anything else is a leak.
        for i in 1..(stats.total_pages as u64) {
            let p = i * 4096;
            if !refs.contains_key(&p)
                && !stats.free_pages.contains(&p)
                && !stats.bookkeeping_pages.contains(&p)
            {
                panic!("does not contain {:?} ({:?})", i, p);
            }
        }
        // check that there is no intersection.
        for (r, _) in refs.iter() {
            if stats.free_pages.contains(r) {
                panic!("referenced page is free: {:?}", r);
            }
            if stats.bookkeeping_pages.contains(r) {
                panic!("referenced page is a bookkeeping page: {:?}", r);
            }
        }
        for p in stats.free_pages.iter() {
            if stats.bookkeeping_pages.contains(p) {
                panic!("bookkeeping inter free: {:?}", p);
            }
        }
        // The +1 accounts for the page not listed in `refs`
        // (presumably page 0, holding the roots — confirm).
        assert_eq!(1 + refs.len(), occupied_pages);
        report
    }
}
impl<T: ::sanakirja::Transaction> GraphTxnT for GenericTxn<T> {
    type Graph = Db<Vertex<ChangeId>, Edge>;
    type GraphError = SanakirjaError;
    // Generates the `get_graph` accessor (see `sanakirja_get!`).
    sanakirja_get!(graph, Vertex<ChangeId>, Edge, GraphError);
    /// Looks up the external hash of `p`. The root change is handled
    /// specially (it is not stored in the `external` table).
    fn get_external(&self, p: ChangeId) -> Result<Option<Hash>, TxnErr<Self::GraphError>> {
        if p.is_root() {
            Ok(Some(Hash::None))
        } else if let Ok(x) = self.txn.get(&self.external, p, None) {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    /// Looks up the internal id of hash `p`; `Hash::None` maps to the
    /// root change id.
    fn get_internal(&self, p: Hash) -> Result<Option<ChangeId>, TxnErr<Self::GraphError>> {
        if let Hash::None = p {
            Ok(Some(ChangeId::ROOT))
        } else if let Ok(x) = self.txn.get(&self.internal, p, None) {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    type Adj = Adj;
    /// Positions a cursor at the first binding >= (`key`, edge with
    /// flag `min_flag`); iteration state is returned as an `Adj`.
    fn init_adj(
        &self,
        g: &Self::Graph,
        key: Vertex<ChangeId>,
        dest: Position<ChangeId>,
        min_flag: EdgeFlags,
        max_flag: EdgeFlags,
    ) -> Result<Self::Adj, TxnErr<Self::GraphError>> {
        let edge = Edge {
            flag: min_flag,
            dest,
            introduced_by: ChangeId::ROOT,
        };
        if let Ok((cursor, _)) = self.txn.set_cursors(g, Some((key, Some(edge)))) {
            Ok(Adj {
                cursor,
                key,
                min_flag,
                max_flag,
            })
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    fn next_adj(
        &self,
        _: &Self::Graph,
        a: &mut Self::Adj,
    ) -> Option<Result<Edge, TxnErr<Self::GraphError>>> {
        next_adj(&self.txn, a)
    }
    fn find_block(
        &self,
        graph: &Self::Graph,
        p: Position<ChangeId>,
    ) -> Result<Vertex<ChangeId>, BlockError<Self::GraphError>> {
        find_block(&self.txn, graph, p)
    }
    fn find_block_end(
        &self,
        graph: &Self::Graph,
        p: Position<ChangeId>,
    ) -> Result<Vertex<ChangeId>, BlockError<Self::GraphError>> {
        find_block_end(&self.txn, graph, p)
    }
}
/// Advances `cursor` and returns the next (vertex, edge) binding,
/// mapping CRC failures to a corrupt-pristine error.
///
/// # Safety
/// Same contract as `sanakirja::next`: `cursor` must be a valid
/// cursor over a `Db<Vertex<ChangeId>, Edge>` of `txn`.
unsafe fn next_graph<T: ::sanakirja::Transaction>(
    txn: &T,
    cursor: &mut ::sanakirja::Cursor,
) -> Result<Option<(Vertex<ChangeId>, Edge)>, BlockError<SanakirjaError>> {
    ::sanakirja::next::<_, Vertex<ChangeId>, Edge>(txn, cursor)
        .map_err(|::sanakirja::CRCError {}| BlockError::Txn(SanakirjaError::PristineCorrupt))
}
/// Moves `cursor` back and returns the previous (vertex, edge)
/// binding, mapping CRC failures to a corrupt-pristine error.
///
/// # Safety
/// Same contract as `sanakirja::prev`: `cursor` must be a valid
/// cursor over a `Db<Vertex<ChangeId>, Edge>` of `txn`.
unsafe fn prev_graph<T: ::sanakirja::Transaction>(
    txn: &T,
    cursor: &mut ::sanakirja::Cursor,
) -> Result<Option<(Vertex<ChangeId>, Edge)>, BlockError<SanakirjaError>> {
    ::sanakirja::prev::<_, Vertex<ChangeId>, Edge>(txn, cursor)
        .map_err(|::sanakirja::CRCError {}| BlockError::Txn(SanakirjaError::PristineCorrupt))
}
/// Advances the adjacency iterator `a`: returns the next edge of
/// `a.key` whose flag lies in `[a.min_flag, a.max_flag]`, or `None`
/// once the cursor leaves the key or the flag range.
#[doc(hidden)]
pub fn next_adj<T: ::sanakirja::Transaction>(
    txn: &T,
    a: &mut Adj,
) -> Option<Result<Edge, TxnErr<SanakirjaError>>> {
    loop {
        let x: Result<Option<(Vertex<ChangeId>, Edge)>, _> =
            unsafe { ::sanakirja::next(txn, &mut a.cursor) };
        match x {
            Ok(Some((v, e))) => {
                debug!("adjacent iterator: {:?} {:?}", v, e);
                if v == a.key {
                    if e.flag >= a.min_flag {
                        if e.flag <= a.max_flag {
                            return Some(Ok(e));
                        } else {
                            // Edges of a key are iterated in order, so
                            // past `max_flag` nothing else can match.
                            return None;
                        }
                    }
                    // e.flag < min_flag: keep scanning forward.
                } else if v > a.key {
                    // Past the key entirely: done.
                    return None;
                }
                // v < a.key can happen right after the cursor was
                // positioned; skip until we reach the key.
            }
            Err(e) => return Some(Err(TxnErr(e.into()))),
            Ok(None) => {
                debug!("adjacent iterator: over");
                return None;
            }
        }
    }
}
/// Finds the vertex of `graph` containing position `p`: the vertex
/// `k` with `k.start <= p.pos < k.end`, or the empty "inode" vertex
/// exactly at `p.pos`. Returns `BlockError::Block` when no vertex
/// covers `p`.
#[doc(hidden)]
pub fn find_block<T: ::sanakirja::Transaction>(
    txn: &T,
    graph: &::sanakirja::Db<Vertex<ChangeId>, Edge>,
    p: Position<ChangeId>,
) -> Result<Vertex<ChangeId>, BlockError<SanakirjaError>> {
    if p.change.is_root() {
        return Ok(Vertex::ROOT);
    }
    // Search key: the empty vertex at `p`.
    let key = Vertex {
        change: p.change,
        start: p.pos,
        end: p.pos,
    };
    debug!(target: "libpijul::find_block", "find_block {:?}", key);
    let mut cursor = if let Ok((cursor, _)) = txn.set_cursors(&graph, Some((key, None))) {
        cursor
    } else {
        return Err(BlockError::Txn(SanakirjaError::PristineCorrupt));
    };
    let mut k = if let Some((k, _)) = unsafe { next_graph(txn, &mut cursor)? } {
        k
    } else {
        return Err(BlockError::Block { block: p });
    };
    debug!("k = {:?}", k);
    // The only guarantee here is that k is either the first key
    // >= `key`, or the key just before that. We might need to
    // rewind by one step if key is strictly larger than the
    // result (i.e. if `p` is in the middle of the key).
    while k.change > p.change || (k.change == p.change && k.start > p.pos) {
        debug!(target: "libpijul::find_block", "find_block while {:?}", k);
        if let Some((k_, _)) = unsafe { prev_graph(txn, &mut cursor)? } {
            k = k_
        } else {
            break;
        }
    }
    // Scan forward until a vertex covers `p` (empty vertices only
    // match exactly), or we have provably gone past it.
    loop {
        debug!(target: "libpijul::find_block", "find_block loop {:?}", k);
        if k.change == p.change && k.start <= p.pos {
            if k.end > p.pos || (k.start == k.end && k.end == p.pos) {
                return Ok(k);
            }
        } else if k.change > p.change {
            return Err(BlockError::Block { block: p });
        }
        if let Some((k_, _)) = unsafe { next_graph(txn, &mut cursor)? } {
            k = k_
        } else {
            break;
        }
    }
    debug!(target: "libpijul::find_block", "find_block None, {:?}", k);
    Err(BlockError::Block { block: p })
}
/// Counterpart of `find_block` for positions used as the *end* of a
/// range: finds the vertex of change `p.change` with
/// `start < p.pos <= end`, or the empty vertex exactly at `p.pos`.
#[doc(hidden)]
pub fn find_block_end<T: ::sanakirja::Transaction>(
    txn: &T,
    graph: &::sanakirja::Db<Vertex<ChangeId>, Edge>,
    p: Position<ChangeId>,
) -> Result<Vertex<ChangeId>, BlockError<SanakirjaError>> {
    if p.change.is_root() {
        return Ok(Vertex::ROOT);
    }
    // Probe the table with the empty vertex at `p.pos`.
    let key = Vertex {
        change: p.change,
        start: p.pos,
        end: p.pos,
    };
    debug!(target: "libpijul::find_block_end", "find_block_end {:?}, p.change.0 = {:?}", key, p.change.0);
    let mut cursor = if let Ok((cursor, _)) = txn.set_cursors(&graph, Some((key, None))) {
        cursor
    } else {
        return Err(BlockError::Txn(SanakirjaError::PristineCorrupt));
    };
    let mut k = if let Some((k, _)) = unsafe { next_graph(txn, &mut cursor)? } {
        k
    } else {
        return Err(BlockError::Block { block: p });
    };
    // The only guarantee here is that k is either the first key
    // before `key`, or the key just before that.
    // First, rewind until k starts strictly before `p.pos` (or is the
    // empty vertex exactly at `p.pos`, which is an immediate match).
    loop {
        debug!(target: "libpijul::find_block_end", "find_block_end loop {:?} k.change.0 = {:?}", k, k.change.0);
        if k.change < p.change {
            break;
        } else if k.change == p.change {
            // Here we want to create an edge pointing between `p`
            // and its successor. If k.start == p.pos, the only
            // case where that's what we want is if k.start ==
            // k.end.
            if k.start == p.pos && k.end == p.pos {
                return Ok(k);
            } else if k.start < p.pos {
                break;
            }
        }
        if let Some((k_, _)) = unsafe { prev_graph(txn, &mut cursor)? } {
            k = k_
        } else {
            break;
        }
    }
    // We also want k.end >= p.pos, so we just call next() until
    // we have that.
    debug!(target: "libpijul::find_block_end", "find_block_end k(0) = {:?} k.change.0 = {:?}", k, k.change.0);
    while k.change < p.change || (k.change == p.change && p.pos > k.end) {
        if let Some((k_, _)) = unsafe { next_graph(txn, &mut cursor)? } {
            k = k_
        } else {
            break;
        }
    }
    debug!(target: "libpijul::find_block_end", "find_block_end k(1) = {:?}, k.change.0 = {:?}", k, k.change.0);
    // Accept k only if it actually covers `p.pos` as an end boundary.
    if k.change == p.change
        && ((k.start < p.pos && p.pos <= k.end) || (k.start == k.end && k.start == p.pos))
    {
        debug!("ok");
        Ok(k)
    } else {
        Err(BlockError::Block { block: p })
    }
}
/// State for enumerating the edges adjacent to a single vertex,
/// filtered by an inclusive range of edge flags (driven by `next_adj`).
pub struct Adj {
    /// Raw cursor into the graph table.
    pub cursor: ::sanakirja::Cursor,
    /// Vertex whose adjacent edges are being enumerated.
    pub key: Vertex<ChangeId>,
    /// Inclusive lower bound on the edge flags to yield.
    pub min_flag: EdgeFlags,
    /// Inclusive upper bound on the edge flags to yield.
    pub max_flag: EdgeFlags,
}
impl<T: ::sanakirja::Transaction> GraphIter for GenericTxn<T> {
    type GraphCursor = ::sanakirja::Cursor;

    /// Opens a cursor at the beginning of the graph table `g`.
    fn iter_graph(&self, g: &Self::Graph) -> Result<Self::GraphCursor, TxnErr<Self::GraphError>> {
        match self.txn.set_cursors(&g, None) {
            Ok((cursor, _)) => Ok(cursor),
            // Any failure to position the cursor is reported as a
            // corrupt pristine.
            Err(_) => Err(TxnErr(SanakirjaError::PristineCorrupt)),
        }
    }

    /// Steps the cursor and yields the next (vertex, edge) binding, if any.
    fn next_graph(
        &self,
        _: &Self::Graph,
        a: &mut Self::GraphCursor,
    ) -> Option<Result<(Vertex<ChangeId>, Edge), TxnErr<Self::GraphError>>> {
        match unsafe { ::sanakirja::next(&self.txn, a) } {
            Err(::sanakirja::CRCError {}) => Some(Err(TxnErr(SanakirjaError::PristineCorrupt))),
            // `Some(binding)` becomes `Some(Ok(binding))`, `None` stays `None`.
            Ok(entry) => entry.map(Ok),
        }
    }
}
/// All the tables making up one channel, plus its bookkeeping metadata.
pub struct Channel {
    /// The channel's graph: vertices and their edges.
    pub graph: Db<Vertex<ChangeId>, Edge>,
    /// Change id -> timestamp at which it was applied to this channel.
    pub changes: Db<ChangeId, u64>,
    /// Timestamp -> (change id, Merkle state after applying it).
    pub revchanges: Db<u64, (ChangeId, Merkle)>,
    /// Merkle state -> timestamp at which that state was reached.
    pub states: Db<Merkle, u64>,
    /// Counter incremented each time a change is applied (see `put_changes`).
    pub apply_counter: ApplyTimestamp,
    /// The channel's name.
    pub name: SmallString,
    /// Last modification time, in seconds since the Unix epoch
    /// (see `touch_channel`).
    pub last_modified: u64,
}
impl<T: ::sanakirja::Transaction> ChannelTxnT for GenericTxn<T> {
    type Channel = Channel;
    // Plain field accessors over the `Channel` struct.
    fn graph(c: &Channel) -> &Db<Vertex<ChangeId>, Edge> {
        &c.graph
    }
    fn name(c: &Channel) -> &str {
        c.name.as_str()
    }
    fn apply_counter(channel: &Self::Channel) -> u64 {
        channel.apply_counter
    }
    fn last_modified(channel: &Self::Channel) -> u64 {
        channel.last_modified
    }
    fn changes(channel: &Self::Channel) -> &Self::Changeset {
        &channel.changes
    }
    fn rev_changes(channel: &Self::Channel) -> &Self::RevChangeset {
        &channel.revchanges
    }
    type Changeset = ::sanakirja::Db<ChangeId, u64>;
    type RevChangeset = ::sanakirja::Db<u64, (ChangeId, Merkle)>;
    /// Point lookup: change id -> its application timestamp, if present.
    fn get_changeset(
        &self,
        channel: &Self::Changeset,
        c: ChangeId,
    ) -> Result<Option<u64>, TxnErr<Self::GraphError>> {
        if let Ok(x) = self.txn.get(channel, c, None) {
            Ok(x)
        } else {
            // Any storage error is reported as a corrupt pristine.
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    /// Point lookup: timestamp -> (change id, Merkle state), if present.
    fn get_revchangeset(
        &self,
        revchanges: &Self::RevChangeset,
        c: u64,
    ) -> Result<Option<(ChangeId, Merkle)>, TxnErr<Self::GraphError>> {
        if let Ok(x) = self.txn.get(revchanges, c, None) {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    type ChangesetCursor = ::sanakirja::Cursor;
    /// Opens a forward cursor over the changeset table, optionally
    /// positioned at `pos`.
    fn cursor_changeset<'a>(
        &'a self,
        channel: &Self::Changeset,
        pos: Option<ChangeId>,
    ) -> Result<Cursor<Self, &'a Self, Self::ChangesetCursor, ChangeId, u64>, TxnErr<SanakirjaError>>
    {
        if let Ok((cursor, _)) = self.txn.set_cursors(&channel, pos.map(|x| (x, None))) {
            Ok(Cursor {
                cursor,
                txn: self,
                marker: std::marker::PhantomData,
            })
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    type RevchangesetCursor = ::sanakirja::Cursor;
    /// Like a changeset cursor, but generic over how the transaction is
    /// held (`RT` can be `&Self` or any `Deref<Target = Self>`).
    fn cursor_revchangeset_ref<'a, RT: std::ops::Deref<Target = Self>>(
        txn: RT,
        channel: &Self::RevChangeset,
        pos: Option<u64>,
    ) -> Result<
        Cursor<Self, RT, Self::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        TxnErr<SanakirjaError>,
    > {
        if let Ok((cursor, _)) = txn.txn.set_cursors(channel, pos.map(|x| (x, None))) {
            Ok(Cursor {
                cursor,
                txn,
                marker: std::marker::PhantomData,
            })
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    /// Reverse cursor over `revchanges`: starts at `pos`, or at the last
    /// entry of the table when `pos` is `None`.
    fn rev_cursor_revchangeset<'a>(
        &'a self,
        channel: &Self::RevChangeset,
        pos: Option<u64>,
    ) -> Result<
        RevCursor<Self, &'a Self, Self::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        TxnErr<SanakirjaError>,
    > {
        let cursor = if let Some(pos) = pos {
            if let Ok((x, _)) = self.txn.set_cursors(channel, Some((pos, None))) {
                x
            } else {
                return Err(TxnErr(SanakirjaError::PristineCorrupt));
            }
        } else if let Ok(x) = self.txn.set_cursors_last(channel) {
            x
        } else {
            return Err(TxnErr(SanakirjaError::PristineCorrupt));
        };
        Ok(RevCursor {
            cursor,
            txn: self,
            marker: std::marker::PhantomData,
        })
    }
    // Raw cursor steps; any failure (CRC) becomes `PristineCorrupt`.
    fn cursor_revchangeset_next(
        &self,
        cursor: &mut Self::RevchangesetCursor,
    ) -> Result<Option<(u64, (ChangeId, Merkle))>, TxnErr<SanakirjaError>> {
        if let Ok(x) = unsafe { ::sanakirja::next(&self.txn, cursor) } {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    fn cursor_revchangeset_prev(
        &self,
        cursor: &mut Self::RevchangesetCursor,
    ) -> Result<Option<(u64, (ChangeId, Merkle))>, TxnErr<SanakirjaError>> {
        if let Ok(x) = unsafe { ::sanakirja::prev(&self.txn, cursor) } {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    fn cursor_changeset_next(
        &self,
        cursor: &mut Self::ChangesetCursor,
    ) -> Result<Option<(ChangeId, u64)>, TxnErr<SanakirjaError>> {
        if let Ok(x) = unsafe { ::sanakirja::next(&self.txn, cursor) } {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    fn cursor_changeset_prev(
        &self,
        cursor: &mut Self::ChangesetCursor,
    ) -> Result<Option<(ChangeId, u64)>, TxnErr<SanakirjaError>> {
        if let Ok(x) = unsafe { ::sanakirja::prev(&self.txn, cursor) } {
            Ok(x)
        } else {
            Err(TxnErr(SanakirjaError::PristineCorrupt))
        }
    }
    /// Returns the timestamp at which the channel reached state `m`,
    /// or `None` if it never did.
    fn channel_has_state(
        &self,
        channel: &Self::Channel,
        m: Merkle,
    ) -> Result<Option<u64>, TxnErr<Self::GraphError>> {
        Ok(self.txn.get(&channel.states, m, None)?)
    }
}
impl<T: ::sanakirja::Transaction> DepsTxnT for GenericTxn<T> {
    type DepsError = SanakirjaError;
    type Dep = Db<ChangeId, ChangeId>;
    type Revdep = Db<ChangeId, ChangeId>;
    // Macro-generated getters/cursors for the dependency tables.
    sanakirja_table_get!(dep, ChangeId, ChangeId, DepsError);
    sanakirja_table_get!(revdep, ChangeId, ChangeId, DepsError);
    sanakirja_cursor_ref!(dep, ChangeId, ChangeId);
    /// Iterates over the dependencies of change `p`.
    fn iter_dep_ref<RT: std::ops::Deref<Target = Self> + Clone>(
        txn: RT,
        p: ChangeId,
    ) -> Result<super::Cursor<Self, RT, Self::DepCursor, ChangeId, ChangeId>, TxnErr<Self::DepsError>>
    {
        Self::cursor_dep_ref(txn.clone(), &txn.dep, Some((p, None)))
    }
    sanakirja_table_get!(touched_files, Position<ChangeId>, ChangeId, DepsError);
    sanakirja_table_get!(rev_touched_files, ChangeId, Position<ChangeId>, DepsError);
    type Touched_files = Db<Position<ChangeId>, ChangeId>;
    type Rev_touched_files = Db<ChangeId, Position<ChangeId>>;
    sanakirja_iter!(touched_files, Position<ChangeId>, ChangeId);
    sanakirja_iter!(rev_touched_files, ChangeId, Position<ChangeId>);
    /// Iterates over the reverse dependencies of `k`. Reuses the `dep`
    /// cursor type over the `revdep` table (same key/value types).
    fn iter_revdep(
        &self,
        k: ChangeId,
    ) -> Result<
        super::Cursor<Self, &Self, Self::DepCursor, ChangeId, ChangeId>,
        TxnErr<Self::DepsError>,
    > {
        self.cursor_dep(&self.revdep, Some((k, None)))
    }
    /// Iterates over the dependencies of `k`, starting at `k`.
    fn iter_dep(
        &self,
        k: ChangeId,
    ) -> Result<
        super::Cursor<Self, &Self, Self::DepCursor, ChangeId, ChangeId>,
        TxnErr<Self::DepsError>,
    > {
        self.cursor_dep(&self.dep, Some((k, None)))
    }
    /// Iterates over `touched_files` starting at position `k`.
    fn iter_touched(
        &self,
        k: Position<ChangeId>,
    ) -> Result<
        super::Cursor<Self, &Self, Self::Touched_filesCursor, Position<ChangeId>, ChangeId>,
        TxnErr<Self::DepsError>,
    > {
        self.cursor_touched_files(&self.touched_files, Some((k, None)))
    }
    /// Iterates over `rev_touched_files` starting at change `k`.
    fn iter_rev_touched(
        &self,
        k: ChangeId,
    ) -> Result<
        super::Cursor<Self, &Self, Self::Rev_touched_filesCursor, ChangeId, Position<ChangeId>>,
        TxnErr<Self::DepsError>,
    > {
        self.cursor_rev_touched_files(&self.rev_touched_files, Some((k, None)))
    }
}
impl<T: ::sanakirja::Transaction> TreeTxnT for GenericTxn<T> {
    type TreeError = SanakirjaError;
    type Inodes = Db<Inode, Position<ChangeId>>;
    type Revinodes = Db<Position<ChangeId>, Inode>;
    // Macro-generated getters/cursors for the inode tables.
    sanakirja_table_get!(inodes, Inode, Position<ChangeId>, TreeError);
    sanakirja_table_get!(revinodes, Position<ChangeId>, Inode, TreeError);
    sanakirja_cursor!(inodes, Inode, Position<ChangeId>);
    #[cfg(debug_assertions)]
    sanakirja_cursor!(revinodes, Position<ChangeId>, Inode);
    type Tree = Db<UnsafePathId, Inode>;
    // `tree` is keyed by `UnsafePathId` on disk; the extra macro
    // arguments convert between the raw key and the safe
    // `PathId`/`OwnedPathId` views.
    sanakirja_table_get!(
        tree,
        PathId,
        Inode,
        TreeError,
        (UnsafePathId::from_fileid(key), value),
    );
    sanakirja_iter!(
        tree,
        OwnedPathId,
        Inode,
        if let Some((ref k, ref v)) = pos {
            info!("tree iter {:?} {:?}", k, v);
            Some((UnsafePathId::from_fileid(k.as_file_id()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafePathId, Inode)| (unsafe { k.to_fileid().to_owned() }, v))
    );
    sanakirja_iter!(
        revtree,
        Inode,
        OwnedPathId,
        if let Some((ref k, ref v)) = pos {
            let v = if let Some(ref v) = *v {
                Some(UnsafePathId::from_fileid(v.as_file_id()))
            } else {
                None
            };
            Some((*k, v))
        } else {
            None
        },
        map(|(k, v): (Inode, UnsafePathId)| (k, unsafe { v.to_fileid().to_owned() }))
    );
    type Revtree = Db<Inode, UnsafePathId>;
    sanakirja_table_get!(
        revtree,
        Inode,
        PathId,
        TreeError,
        (
            key,
            if let Some(value) = value {
                Some(UnsafePathId::from_fileid(value))
            } else {
                None
            }
        ),
        map(|value| unsafe { value.to_fileid() })
    );
    type Partials = Db<UnsafeSmallStr, Position<ChangeId>>;
    sanakirja_cursor!(
        partials,
        SmallString,
        Position<ChangeId>,
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, Position<ChangeId>)| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    /// Iterates over the whole `inodes` table.
    fn iter_inodes(
        &self,
    ) -> Result<
        super::Cursor<Self, &Self, Self::InodesCursor, Inode, Position<ChangeId>>,
        TxnErr<Self::TreeError>,
    > {
        self.cursor_inodes(&self.inodes, None)
    }
    // Debug-only: the `revinodes` cursor is only generated (above) when
    // `debug_assertions` are enabled.
    #[cfg(debug_assertions)]
    fn iter_revinodes(
        &self,
    ) -> Result<
        super::Cursor<Self, &Self, Self::RevinodesCursor, Position<ChangeId>, Inode>,
        TxnErr<SanakirjaError>,
    > {
        self.cursor_revinodes(&self.revinodes, None)
    }
    /// Iterates over `partials` entries whose key is >= `k`.
    fn iter_partials<'txn>(
        &'txn self,
        k: &str,
    ) -> Result<
        super::Cursor<Self, &'txn Self, Self::PartialsCursor, SmallString, Position<ChangeId>>,
        TxnErr<SanakirjaError>,
    > {
        let k0 = SmallString::from_str(k);
        self.cursor_partials(&self.partials, Some((k0, None)))
    }
}
impl<T: ::sanakirja::Transaction> GenericTxn<T> {
    /// Loads the channel named `name` from the `channels` table, or
    /// returns `Ok(None)` if no channel with that name exists.
    ///
    /// # Safety
    ///
    /// Marked unsafe upstream — presumably because the values read from
    /// the table are reinterpreted as the channel's database roots;
    /// confirm against the table's `Representable` impls.
    #[doc(hidden)]
    pub unsafe fn unsafe_load_channel(
        &self,
        name: SmallString,
    ) -> Result<Option<Channel>, TxnErr<SanakirjaError>> {
        if let Some((channel, changes, revchanges, states, counter, last_modified)) = self.txn.get(
            &self.channels,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )? {
            debug!("unsafe_load_channel: found {:?} {:?}", changes, revchanges);
            Ok(Some(Channel {
                graph: channel,
                changes,
                revchanges,
                apply_counter: counter,
                states,
                // `name` is owned and not used after this point: move it
                // instead of cloning (consistent with
                // `Txn::load_const_channel`).
                name,
                last_modified,
            }))
        } else {
            debug!("unsafe_load_channel: not found");
            Ok(None)
        }
    }
}
impl<T: ::sanakirja::Transaction> TxnT for GenericTxn<T> {
    /// Resolves a base32 hash prefix `s` to a unique
    /// (external hash, internal id) pair using the `internal` table.
    /// Errors if the prefix does not parse, matches nothing, or matches
    /// more than one hash.
    fn hash_from_prefix(
        &self,
        s: &str,
    ) -> Result<(Hash, ChangeId), super::HashPrefixError<Self::GraphError>> {
        let h = if let Some(h) = Hash::from_prefix(s) {
            h
        } else {
            return Err(super::HashPrefixError::Parse(s.to_string()));
        };
        let mut result = None;
        debug!("h = {:?}", h);
        // Scan from the prefix's lower bound; stop at the first hash
        // whose base32 form no longer starts with `s`.
        for x in self
            .txn
            .iter(&self.internal, Some((h, None)))
            .map_err(|e| super::HashPrefixError::Txn(e.into()))?
        {
            let (e, i) = x.map_err(|e| super::HashPrefixError::Txn(e.into()))?;
            debug!("{:?} {:?}", e, i);
            if e < h {
                continue;
            } else {
                let b32 = e.to_base32();
                debug!("{:?}", b32);
                let (b32, _) = b32.split_at(s.len().min(b32.len()));
                if b32 != s {
                    break;
                } else if result.is_none() {
                    result = Some((e, i))
                } else {
                    // A second distinct hash shares the prefix.
                    return Err(super::HashPrefixError::Ambiguous(s.to_string()));
                }
            }
        }
        if let Some(result) = result {
            Ok(result)
        } else {
            Err(super::HashPrefixError::NotFound(s.to_string()))
        }
    }
    /// Same as `hash_from_prefix`, but resolves against the `rev` table
    /// of `remote` instead of the local `internal` table.
    fn hash_from_prefix_remote<'txn>(
        &'txn self,
        remote: &RemoteRef<Self>,
        s: &str,
    ) -> Result<Hash, super::HashPrefixError<Self::GraphError>> {
        let remote = remote.borrow();
        let h = if let Some(h) = Hash::from_prefix(s) {
            h
        } else {
            return Err(super::HashPrefixError::Parse(s.to_string()));
        };
        let mut result = None;
        debug!("h = {:?}", h);
        for x in self
            .txn
            .iter(&remote.rev, Some((h, None)))
            .map_err(|e| super::HashPrefixError::Txn(e.into()))?
        {
            let (e, _) = x.map_err(|e| super::HashPrefixError::Txn(e.into()))?;
            debug!("{:?}", e);
            if e < h {
                continue;
            } else {
                let b32 = e.to_base32();
                debug!("{:?}", b32);
                let (b32, _) = b32.split_at(s.len().min(b32.len()));
                if b32 != s {
                    break;
                } else if result.is_none() {
                    result = Some(e)
                } else {
                    return Err(super::HashPrefixError::Ambiguous(s.to_string()));
                }
            }
        }
        if let Some(result) = result {
            Ok(result)
        } else {
            Err(super::HashPrefixError::NotFound(s.to_string()))
        }
    }
    /// Loads a channel, caching the handle in `open_channels` so that
    /// repeated loads share the same `Rc`.
    fn load_channel(
        &self,
        name: &str,
    ) -> Result<Option<ChannelRef<Self>>, TxnErr<Self::GraphError>> {
        let name = SmallString::from_str(name);
        match self.open_channels.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some(c) = unsafe { self.unsafe_load_channel(name)? } {
                    Ok(Some(
                        v.insert(ChannelRef {
                            r: Rc::new(RefCell::new(c)),
                        })
                        .clone(),
                    ))
                } else {
                    Ok(None)
                }
            }
            Entry::Occupied(occ) => Ok(Some(occ.get().clone())),
        }
    }
    /// Loads a remote, caching the handle in `open_remotes`.
    fn load_remote(&self, name: &str) -> Result<Option<RemoteRef<Self>>, TxnErr<Self::GraphError>> {
        let name = SmallString::from_str(name);
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                )? {
                    let r = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    };
                    Ok(Some(v.insert(r).clone()))
                } else {
                    return Ok(None);
                }
            }
            Entry::Occupied(occ) => Ok(Some(occ.get().clone())),
        }
    }
    /// Channel name -> the channel's six root words (see `Channel`).
    type Channels = Db<UnsafeSmallStr, (u64, u64, u64, u64, u64, u64)>;
    // Cursor over `channels`, converting the raw key to an owned
    // `SmallString` on the way out.
    sanakirja_cursor!(
        channels,
        SmallString,
        (u64, u64, u64, u64, u64, u64),
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, (u64, u64, u64, u64, u64, u64))| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    /// Iterates over channels whose name is >= `start`.
    fn iter_channels<'txn>(
        &'txn self,
        start: &str,
    ) -> Result<ChannelIterator<'txn, Self>, TxnErr<Self::GraphError>> {
        let name = SmallString::from_str(start);
        let name = UnsafeSmallStr::from_small_str(name.as_small_str());
        Ok(ChannelIterator {
            cursor: self.txn.set_cursors(&self.channels, Some((name, None)))?.0,
            txn: self,
        })
    }
    type Remotes = Db<UnsafeSmallStr, (u64, u64, u64)>;
    sanakirja_cursor!(
        remotes,
        SmallString,
        (u64, u64, u64),
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, (u64, u64, u64))| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    /// Iterates over remotes whose name is >= `start`.
    fn iter_remotes<'txn>(
        &'txn self,
        start: &str,
    ) -> Result<RemotesIterator<'txn, Self>, TxnErr<Self::GraphError>> {
        let name = SmallString::from_str(start);
        let name = UnsafeSmallStr::from_small_str(name.as_small_str());
        Ok(RemotesIterator {
            cursor: self.txn.set_cursors(&self.remotes, Some((name, None)))?.0,
            txn: self,
        })
    }
    type Remote = Db<u64, (Hash, Merkle)>;
    type Revremote = Db<Hash, u64>;
    type Remotestates = Db<Merkle, u64>;
    sanakirja_cursor!(remote, u64, (Hash, Merkle));
    sanakirja_rev_cursor!(remote, u64, (Hash, Merkle));
    /// Iterates over a remote's entries, starting at number `k`.
    fn iter_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: u64,
    ) -> Result<
        super::Cursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>,
        TxnErr<Self::GraphError>,
    > {
        self.cursor_remote(remote, Some((k, None)))
    }
    /// Iterates backwards over a remote's entries, starting at `k` (or
    /// at the end when `k` is `None`).
    fn iter_rev_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: Option<u64>,
    ) -> Result<
        super::RevCursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>,
        TxnErr<Self::GraphError>,
    > {
        self.rev_cursor_remote(remote, k.map(|k| (k, None)))
    }
    /// Same caching logic as `load_remote`, with a `&mut self` receiver.
    fn get_remote(
        &mut self,
        name: &str,
    ) -> Result<Option<RemoteRef<Self>>, TxnErr<Self::GraphError>> {
        let name = SmallString::from_str(name);
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                )? {
                    let r = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    };
                    v.insert(r);
                } else {
                    return Ok(None);
                }
            }
            Entry::Occupied(_) => {}
        }
        Ok(self.open_remotes.borrow().get(&name).cloned())
    }
    /// Returns the last (highest-numbered) entry of `remote`, if any.
    fn last_remote(
        &self,
        remote: &Self::Remote,
    ) -> Result<Option<(u64, (Hash, Merkle))>, TxnErr<Self::GraphError>> {
        Ok(self.txn.rev_iter(remote, None)?.next().transpose()?)
    }
    /// Returns the first entry of `remote` with number >= `n`.
    fn get_remote_state(
        &self,
        remote: &Self::Remote,
        n: u64,
    ) -> Result<Option<(u64, (Hash, Merkle))>, TxnErr<Self::GraphError>> {
        for x in self.txn.iter(remote, Some((n, None)))? {
            let (k, m) = x?;
            if k >= n {
                return Ok(Some((k, m)));
            }
        }
        Ok(None)
    }
    /// Whether the remote's `rev` table contains `hash`.
    fn remote_has_change(
        &self,
        remote: &RemoteRef<Self>,
        hash: Hash,
    ) -> Result<bool, TxnErr<Self::GraphError>> {
        Ok(self.txn.get(&remote.db.borrow().rev, hash, None)?.is_some())
    }
    /// Whether the remote's `states` table contains `m`.
    fn remote_has_state(
        &self,
        remote: &RemoteRef<Self>,
        m: Merkle,
    ) -> Result<bool, TxnErr<Self::GraphError>> {
        Ok(self.txn.get(&remote.db.borrow().states, m, None)?.is_some())
    }
}
impl GraphMutTxnT for MutTxn<()> {
    /// Inserts the binding `k -> e` into the graph; forwards the boolean
    /// returned by the underlying `put` (presumably `true` when a new
    /// binding was inserted — cf. the `assert!`s in `put_changes`).
    fn put_graph(
        &mut self,
        graph: &mut Self::Graph,
        k: Vertex<ChangeId>,
        e: Edge,
    ) -> Result<bool, TxnErr<Self::GraphError>> {
        Ok(self.txn.put(&mut self.rng, graph, k, e)?)
    }
    /// Deletes a binding of `k` from the graph; with `e = None`, the
    /// specific edge to delete is left to the underlying `del`.
    fn del_graph(
        &mut self,
        graph: &mut Self::Graph,
        k: Vertex<ChangeId>,
        e: Option<Edge>,
    ) -> Result<bool, TxnErr<Self::GraphError>> {
        debug!("del_graph {:?} {:?}", k, e);
        Ok(self.txn.del(&mut self.rng, graph, k, e)?)
    }
    sanakirja_put_del!(internal, Hash, ChangeId, GraphError);
    sanakirja_put_del!(external, ChangeId, Hash, GraphError);
    /// Splits vertex `key` at internal position `pos`: every edge of
    /// `key` is re-attached to the appropriate half (parent edges to the
    /// first half, others to the second), and parent BLOCK edges get a
    /// linking edge between the two halves. `buf` is scratch space,
    /// drained before returning.
    fn split_block(
        &mut self,
        graph: &mut Self::Graph,
        key: Vertex<ChangeId>,
        pos: ChangePosition,
        buf: &mut Vec<Edge>,
    ) -> Result<(), TxnErr<Self::GraphError>> {
        // Collect all edges bound to exactly `key`.
        let (mut cursor, _) = self.txn.set_cursors(graph, Some((key, None)))?;
        loop {
            match unsafe { ::sanakirja::next::<_, Vertex<ChangeId>, Edge>(&self.txn, &mut cursor) }
            {
                Ok(Some((k, v))) => {
                    if k > key {
                        break;
                    } else if k < key {
                        continue;
                    }
                    buf.push(v)
                }
                Ok(None) => break,
                Err(::sanakirja::CRCError {}) => {
                    return Err(TxnErr(SanakirjaError::PristineCorrupt))
                }
            }
        }
        for chi in buf.drain(..) {
            assert!(chi.introduced_by != ChangeId::ROOT || chi.flag.contains(EdgeFlags::PSEUDO));
            // For parent BLOCK edges, link the two halves with the
            // non-parent counterpart of the edge.
            if chi.flag.contains(EdgeFlags::PARENT | EdgeFlags::BLOCK) {
                put_graph_with_rev(
                    self,
                    graph,
                    chi.flag - EdgeFlags::PARENT,
                    Vertex {
                        change: key.change,
                        start: key.start,
                        end: pos,
                    },
                    Vertex {
                        change: key.change,
                        start: pos,
                        end: key.end,
                    },
                    chi.introduced_by,
                )?;
            }
            // Move the edge from the original vertex to the half it
            // belongs to.
            self.del_graph(graph, key, Some(chi))?;
            self.put_graph(
                graph,
                if chi.flag.contains(EdgeFlags::PARENT) {
                    Vertex {
                        change: key.change,
                        start: key.start,
                        end: pos,
                    }
                } else {
                    Vertex {
                        change: key.change,
                        start: pos,
                        end: key.end,
                    }
                },
                chi,
            )?;
        }
        Ok(())
    }
}
impl ChannelMutTxnT for MutTxn<()> {
    fn graph_mut(c: &mut Self::Channel) -> &mut Self::Graph {
        &mut c.graph
    }
    /// Sets the channel's last-modified time to `t`, or to the current
    /// system time (seconds since the Unix epoch) when `t` is `None`.
    /// A pre-epoch clock leaves the field untouched.
    fn touch_channel(&mut self, channel: &mut Self::Channel, t: Option<u64>) {
        use std::time::SystemTime;
        if let Some(t) = t {
            channel.last_modified = t
        } else if let Ok(duration) = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
            channel.last_modified = duration.as_secs()
        }
    }
    /// Records change `p` on the channel at timestamp `t`, extending the
    /// Merkle chain with `h`. Returns the new Merkle state, or `None` if
    /// `p` was already on the channel.
    fn put_changes(
        &mut self,
        channel: &mut Self::Channel,
        p: ChangeId,
        t: ApplyTimestamp,
        h: &Hash,
    ) -> Result<Option<Merkle>, TxnErr<Self::GraphError>> {
        if self.get_changeset(&channel.changes, p)?.is_none() {
            channel.apply_counter += 1;
            debug!("put_changes {:?} {:?}", t, p);
            // Debug aid: dump the whole changes table. Errors are now
            // propagated with `?` instead of `.unwrap()`, consistent
            // with the iteration in `del_changes`.
            // NOTE(review): this scan is O(n) per call even when debug
            // logging is disabled — consider gating it.
            for i in self.txn.iter(&channel.changes, None)? {
                debug!("changes {:?}", i);
            }
            // Extend the Merkle chain from the last recorded state
            // (or from zero on an empty channel).
            let m = if let Some(x) = self.txn.rev_iter(&channel.revchanges, None)?.next() {
                (x?.1).1
            } else {
                Merkle::zero()
            };
            let m = m.next(h);
            assert!(self.get_revchangeset(&channel.revchanges, t)?.is_none());
            assert!(self.txn.put(&mut self.rng, &mut channel.changes, p, t)?);
            assert!(self
                .txn
                .put(&mut self.rng, &mut channel.revchanges, t, (p, m))?);
            Ok(Some(m))
        } else {
            Ok(None)
        }
    }
    /// Removes change `p` (applied at timestamp `t`) from the channel
    /// and recomputes the Merkle state of every later entry.
    fn del_changes(
        &mut self,
        channel: &mut Self::Channel,
        p: ChangeId,
        t: ApplyTimestamp,
    ) -> Result<bool, TxnErr<Self::GraphError>> {
        // Collect every entry at or after `t`: they all need rewriting.
        let mut repl = Vec::new();
        for x in self.txn.iter(&channel.revchanges, Some((t, None)))? {
            let (t_, (p, _)) = x?;
            if t_ >= t {
                repl.push((t_, p))
            }
        }
        // Find the Merkle state just before `t`, to restart the chain.
        let mut m = Merkle::zero();
        for x in self.txn.rev_iter(&channel.revchanges, Some((t, None)))? {
            let (t_, (_, m_)) = x?;
            if t_ < t {
                m = m_;
                break;
            }
        }
        // Delete the collected entries, re-inserting (with an updated
        // state) those strictly after `t`; the entry at `t` itself is
        // dropped for good.
        for (t_, p) in repl.iter() {
            debug!("del_changes {:?} {:?}", t_, p);
            self.txn
                .del(&mut self.rng, &mut channel.revchanges, *t_, None)?;
            if *t_ > t {
                m = m.next(&self.get_external(*p)?.unwrap());
                self.txn
                    .put(&mut self.rng, &mut channel.revchanges, *t_, (*p, m))?;
            }
        }
        Ok(self
            .txn
            .del(&mut self.rng, &mut channel.changes, p, Some(t))?)
    }
}
impl DepsMutTxnT for MutTxn<()> {
    // Macro-generated put/del for the four dependency tables.
    sanakirja_put_del!(dep, ChangeId, ChangeId, DepsError);
    sanakirja_put_del!(revdep, ChangeId, ChangeId, DepsError);
    sanakirja_put_del!(touched_files, Position<ChangeId>, ChangeId, DepsError);
    sanakirja_put_del!(rev_touched_files, ChangeId, Position<ChangeId>, DepsError);
}
impl TreeMutTxnT for MutTxn<()> {
    sanakirja_put_del!(inodes, Inode, Position<ChangeId>, TreeError);
    sanakirja_put_del!(revinodes, Position<ChangeId>, Inode, TreeError);
    // `tree`/`revtree` store `UnsafePathId` on disk; the extra macro
    // arguments convert the safe `PathId` on the way in.
    sanakirja_put_del!(
        tree,
        PathId,
        Inode,
        TreeError,
        UnsafePathId::from_fileid(k),
        v
    );
    sanakirja_put_del!(
        revtree,
        Inode,
        PathId,
        TreeError,
        k,
        UnsafePathId::from_fileid(v)
    );
    /// Inserts the binding `k -> e` into the `partials` table.
    fn put_partials(
        &mut self,
        k: &str,
        e: Position<ChangeId>,
    ) -> Result<bool, TxnErr<Self::TreeError>> {
        let k = SmallString::from_str(k);
        Ok(self.txn.put(
            &mut self.rng,
            &mut self.partials,
            UnsafeSmallStr::from_small_str(k.as_small_str()),
            e,
        )?)
    }
    /// Deletes a binding of `k` from `partials`; the exact semantics of
    /// `e = None` follow the underlying sanakirja `del`.
    fn del_partials(
        &mut self,
        k: &str,
        e: Option<Position<ChangeId>>,
    ) -> Result<bool, TxnErr<Self::TreeError>> {
        let k = SmallString::from_str(k);
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.partials,
            UnsafeSmallStr::from_small_str(k.as_small_str()),
            e,
        )?)
    }
}
impl MutTxnT for MutTxn<()> {
    /// Records `k -> v` in the remote, plus the two inverse mappings
    /// (Merkle state -> number, hash -> number).
    fn put_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
        v: (Hash, Merkle),
    ) -> Result<bool, Self::GraphError> {
        let mut remote = remote.borrow_mut();
        self.txn.put(&mut self.rng, &mut remote.remote, k, v)?;
        self.txn.put(&mut self.rng, &mut remote.states, v.1, k)?;
        Ok(self.txn.put(&mut self.rng, &mut remote.rev, v.0, k)?)
    }
    /// Removes entry `k` from the remote and from both inverse tables;
    /// returns `false` when `k` was not present.
    fn del_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
    ) -> Result<bool, Self::GraphError> {
        let mut remote = remote.borrow_mut();
        if let Some((h, m)) = self.txn.get(&remote.remote, k, None)? {
            self.txn.del(&mut self.rng, &mut remote.rev, h, None)?;
            self.txn.del(&mut self.rng, &mut remote.states, m, None)?;
            Ok(self.txn.del(&mut self.rng, &mut remote.remote, k, None)?)
        } else {
            Ok(false)
        }
    }
    /// Returns the channel named `name`, creating it (with fresh empty
    /// tables) if it does not exist; new channels are persisted
    /// immediately via `put_channel`.
    fn open_or_create_channel(&mut self, name: &str) -> Result<ChannelRef<Self>, Self::GraphError> {
        let name = small_string::SmallString::from_str(name);
        let mut commit = None;
        let result = match self.open_channels.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                let r = if let Some((
                    graph,
                    changes,
                    revchanges,
                    states,
                    apply_counter,
                    last_modified,
                )) = self.txn.get(
                    &self.channels,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                )? {
                    ChannelRef {
                        r: Rc::new(RefCell::new(Channel {
                            graph,
                            changes,
                            revchanges,
                            states,
                            apply_counter,
                            name: name.clone(),
                            last_modified,
                        })),
                    }
                } else {
                    // Channel does not exist yet: create fresh tables
                    // and remember to persist it below.
                    let br = ChannelRef {
                        r: Rc::new(RefCell::new(Channel {
                            graph: self.txn.create_db()?,
                            changes: self.txn.create_db()?,
                            revchanges: self.txn.create_db()?,
                            states: self.txn.create_db()?,
                            apply_counter: 0,
                            name: name.clone(),
                            last_modified: 0,
                        })),
                    };
                    commit = Some(br.clone());
                    br
                };
                v.insert(r).clone()
            }
            Entry::Occupied(occ) => occ.get().clone(),
        };
        if let Some(commit) = commit {
            self.put_channel(commit)?;
        }
        Ok(result)
    }
    /// Forks `channel` under `new_name` by forking all four of its
    /// tables. Fails if a channel named `new_name` already exists.
    fn fork(
        &mut self,
        channel: &ChannelRef<Self>,
        new_name: &str,
    ) -> Result<ChannelRef<Self>, ForkError<Self::GraphError>> {
        let channel = channel.r.borrow();
        let name = SmallString::from_str(new_name);
        if self
            .txn
            .get(
                &self.channels,
                UnsafeSmallStr::from_small_str(name.as_small_str()),
                None,
            )
            .map_err(|e| ForkError::Txn(e.into()))?
            .is_none()
        {
            let br = ChannelRef {
                r: Rc::new(RefCell::new(Channel {
                    graph: self
                        .txn
                        .fork(&mut self.rng, &channel.graph)
                        .map_err(|e| ForkError::Txn(e.into()))?,
                    changes: self
                        .txn
                        .fork(&mut self.rng, &channel.changes)
                        .map_err(|e| ForkError::Txn(e.into()))?,
                    revchanges: self
                        .txn
                        .fork(&mut self.rng, &channel.revchanges)
                        .map_err(|e| ForkError::Txn(e.into()))?,
                    states: self
                        .txn
                        .fork(&mut self.rng, &channel.states)
                        .map_err(|e| ForkError::Txn(e.into()))?,
                    name: name.clone(),
                    apply_counter: channel.apply_counter,
                    last_modified: channel.last_modified,
                })),
            };
            self.open_channels.borrow_mut().insert(name, br.clone());
            Ok(br)
        } else {
            Err(super::ForkError::ChannelNameExists(new_name.to_string()))
        }
    }
    /// Renames a channel in both the `channels` table and the
    /// `open_channels` cache. Fails if `new_name` is taken; panics
    /// (`Rc::get_mut(...).unwrap()`) if other live references to the
    /// channel exist.
    fn rename_channel(
        &mut self,
        channel: &mut ChannelRef<Self>,
        new_name: &str,
    ) -> Result<(), ForkError<Self::GraphError>> {
        let name = SmallString::from_str(new_name);
        if self
            .txn
            .get(
                &self.channels,
                UnsafeSmallStr::from_small_str(name.as_small_str()),
                None,
            )
            .map_err(|e| ForkError::Txn(e.into()))?
            .is_none()
        {
            // Same transmute trick as `put_channel`: delete the old
            // entry without decrementing the reference counts of the
            // channel's tables.
            let mut dbs_channels: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64, u64, u64, u64)> =
                unsafe { std::mem::transmute(self.channels) };
            self.txn
                .del(
                    &mut self.rng,
                    &mut dbs_channels,
                    UnsafeSmallStr::from_small_str(channel.borrow().name.as_small_str()),
                    None,
                )
                .map_err(|e| ForkError::Txn(e.into()))?;
            self.channels = unsafe { std::mem::transmute(dbs_channels) };
            std::mem::drop(
                self.open_channels
                    .borrow_mut()
                    .remove(&channel.borrow().name)
                    .unwrap(),
            );
            std::cell::RefCell::borrow_mut(&std::rc::Rc::get_mut(&mut channel.r).unwrap()).name =
                name.clone();
            self.open_channels
                .borrow_mut()
                .insert(name, channel.clone());
            Ok(())
        } else {
            Err(ForkError::ChannelNameExists(new_name.to_string()))
        }
    }
    /// Removes the channel from the cache and from the `channels` table;
    /// returns whether a table entry was actually deleted.
    fn drop_channel(&mut self, name: &str) -> Result<bool, Self::GraphError> {
        let name = SmallString::from_str(name);
        self.open_channels.borrow_mut().remove(&name);
        debug!("drop_channel {:?}", name);
        let result = self.txn.del(
            &mut self.rng,
            &mut self.channels,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?;
        debug!("/drop_channel {:?}: {:?}", name, result);
        Ok(result)
    }
    /// Returns the remote named `name`, creating it (with fresh empty
    /// tables) if needed; new remotes are persisted via `put_remotes`.
    fn open_or_create_remote(&mut self, name: &str) -> Result<RemoteRef<Self>, Self::GraphError> {
        let name = small_string::SmallString::from_str(name);
        let mut commit = None;
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                let r = if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                )? {
                    RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    }
                } else {
                    let br = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: self.txn.create_db()?,
                            rev: self.txn.create_db()?,
                            states: self.txn.create_db()?,
                        })),
                        name: name.clone(),
                    };
                    commit = Some(br.clone());
                    br
                };
                v.insert(r);
            }
            Entry::Occupied(_) => {}
        }
        if let Some(commit) = commit {
            self.put_remotes(commit)?;
        }
        Ok(self.open_remotes.borrow().get(&name).unwrap().clone())
    }
    /// Drops a remote by handle; asserts the caller held the last live
    /// reference before deleting the table entry.
    fn drop_remote(&mut self, remote: RemoteRef<Self>) -> Result<bool, Self::GraphError> {
        let name = remote.name.clone();
        let r = self.open_remotes.borrow_mut().remove(&name).unwrap();
        std::mem::drop(remote);
        assert_eq!(Rc::strong_count(&r.db), 1);
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?)
    }
    /// Drops a remote by name; tolerates the remote not being open.
    fn drop_named_remote(&mut self, name: &str) -> Result<bool, Self::GraphError> {
        let name = SmallString::from_str(name);
        if let Some(r) = self.open_remotes.borrow_mut().remove(&name) {
            assert_eq!(Rc::strong_count(&r.db), 1);
        }
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?)
    }
    /// Flushes all open channels and remotes back to their tables,
    /// re-registers every root database, and commits the underlying
    /// sanakirja transaction.
    fn commit(mut self) -> Result<(), Self::GraphError> {
        use std::ops::DerefMut;
        {
            let open_channels =
                std::mem::replace(self.open_channels.borrow_mut().deref_mut(), HashMap::new());
            for (name, channel) in open_channels {
                debug!("commit_channel {:?}", name);
                self.commit_channel(channel)?
            }
        }
        {
            let open_remotes =
                std::mem::replace(self.open_remotes.borrow_mut().deref_mut(), HashMap::new());
            for (_, remote) in open_remotes {
                self.commit_remote(remote)?
            }
        }
        // Re-register every root table, then commit.
        self.txn.set_root(Root::Tree as usize, self.tree);
        self.txn.set_root(Root::RevTree as usize, self.revtree);
        self.txn.set_root(Root::Inodes as usize, self.inodes);
        self.txn.set_root(Root::RevInodes as usize, self.revinodes);
        self.txn.set_root(Root::Internal as usize, self.internal);
        self.txn.set_root(Root::External as usize, self.external);
        self.txn.set_root(Root::RevDep as usize, self.revdep);
        self.txn.set_root(Root::Channels as usize, self.channels);
        self.txn.set_root(Root::Remotes as usize, self.remotes);
        self.txn
            .set_root(Root::TouchedFiles as usize, self.touched_files);
        self.txn.set_root(Root::Dep as usize, self.dep);
        self.txn
            .set_root(Root::RevTouchedFiles as usize, self.rev_touched_files);
        self.txn.set_root(Root::Partials as usize, self.partials);
        self.txn.commit()?;
        Ok(())
    }
}
impl Txn {
    /// Looks up the channel named `name` and returns it by value,
    /// without registering it in any cache of open channels.
    pub fn load_const_channel(&self, name: &str) -> Result<Option<Channel>, SanakirjaError> {
        let name = SmallString::from_str(name);
        let entry = self.txn.get(
            &self.channels,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?;
        match entry {
            Some((channel, changes, revchanges, states, counter, last_modified)) => {
                Ok(Some(Channel {
                    graph: channel,
                    changes,
                    revchanges,
                    states,
                    apply_counter: counter,
                    name,
                    last_modified,
                }))
            }
            None => Ok(None),
        }
    }
}
impl<T> MutTxn<T> {
    /// Writes the current in-memory state of `channel` into the
    /// `channels` table, replacing any previous entry of the same name.
    fn put_channel(&mut self, channel: ChannelRef<Self>) -> Result<(), SanakirjaError> {
        debug!("Commit_channel.");
        let channel = channel.r.try_borrow()?;
        // Since we are replacing the value, we don't want to
        // decrement its reference counter (which del would do), hence
        // the transmute.
        //
        // This would normally be wrong. The only reason it works is
        // because we know that dbs_channels has never been forked
        // from another database, hence all the reference counts to
        // its elements are 1 (and therefore represented as "not
        // referenced" in Sanakirja).
        let mut dbs_channels: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64, u64, u64, u64)> =
            unsafe { std::mem::transmute(self.channels) };
        debug!("Commit_channel, dbs_channels = {:?}", dbs_channels);
        self.txn.del(
            &mut self.rng,
            &mut dbs_channels,
            UnsafeSmallStr::from_small_str(channel.name.as_small_str()),
            None,
        )?;
        debug!("Commit_channel, dbs_channels = {:?}", dbs_channels);
        self.channels = unsafe { std::mem::transmute(dbs_channels) };
        // Re-insert the (possibly updated) table roots and metadata.
        self.txn.put(
            &mut self.rng,
            &mut self.channels,
            UnsafeSmallStr::from_small_str(channel.name.as_small_str()),
            (
                channel.graph,
                channel.changes,
                channel.revchanges,
                channel.states,
                channel.apply_counter,
                channel.last_modified,
            ),
        )?;
        debug!("Commit_channel, self.dbs.channels = {:?}", self.channels);
        Ok(())
    }
    /// Removes `channel` from the open-channel cache, then persists it.
    fn commit_channel(&mut self, channel: ChannelRef<Self>) -> Result<(), SanakirjaError> {
        debug!("Commit_channel. This is not too safe.");
        std::mem::drop(
            self.open_channels
                .borrow_mut()
                .remove(&channel.r.borrow().name),
        );
        self.put_channel(channel)
    }
    /// Same replace-without-decrement dance as `put_channel`, for the
    /// `remotes` table.
    fn put_remotes(&mut self, remote: RemoteRef<Self>) -> Result<(), SanakirjaError> {
        let mut dbs_remotes: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64)> =
            unsafe { std::mem::transmute(self.remotes) };
        debug!("Commit_remote, dbs_remotes = {:?}", dbs_remotes);
        self.txn.del(
            &mut self.rng,
            &mut dbs_remotes,
            UnsafeSmallStr::from_small_str(remote.name.as_small_str()),
            None,
        )?;
        debug!("Commit_remote, dbs_remotes = {:?}", dbs_remotes);
        self.remotes = unsafe { std::mem::transmute(dbs_remotes) };
        let r = remote.db.borrow();
        self.txn.put(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(remote.name.as_small_str()),
            (r.remote, r.rev, r.states),
        )?;
        debug!("Commit_remote, self.dbs.remotes = {:?}", self.remotes);
        Ok(())
    }
    /// Removes `remote` from the open-remote cache, then persists it.
    fn commit_remote(&mut self, remote: RemoteRef<Self>) -> Result<(), SanakirjaError> {
        std::mem::drop(self.open_remotes.borrow_mut().remove(&remote.name));
        // assert_eq!(Rc::strong_count(&remote.db), 1);
        self.put_remotes(remote)
    }
}
/// On-page size of a serialized `ChangeId`: a single little-endian u64.
const CHANGE_ID_SIZE: usize = 8;
impl Representable for ChangeId {
    fn alignment() -> Alignment {
        Alignment::B8
    }
    fn onpage_size(&self) -> u16 {
        CHANGE_ID_SIZE as u16
    }
    // SAFETY (all raw methods below): the caller (Sanakirja) must pass a
    // pointer to at least `onpage_size()` valid bytes on a page.
    unsafe fn write_value(&self, p: *mut u8) {
        LittleEndian::write_u64(std::slice::from_raw_parts_mut(p, 8), self.0)
    }
    unsafe fn read_value(p: *const u8) -> Self {
        ChangeId(LittleEndian::read_u64(std::slice::from_raw_parts(p, 8)))
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.0.cmp(&x.0)
    }
    // A ChangeId contains no references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
/// On-page layout of a `Vertex<ChangeId>`:
/// [change: 8 LE][start: 8 LE][end: 8 LE] = 24 bytes.
const VERTEX_SIZE: usize = CHANGE_ID_SIZE + 16;
impl Representable for Vertex<ChangeId> {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        VERTEX_SIZE as u16
    }
    // SAFETY: `p` must point to at least VERTEX_SIZE writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let p = std::slice::from_raw_parts_mut(p, VERTEX_SIZE);
        LittleEndian::write_u64(p, self.change.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE..], self.start.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE + 8..], self.end.0);
    }
    // SAFETY: `p` must point to at least VERTEX_SIZE readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        let p = std::slice::from_raw_parts(p, VERTEX_SIZE);
        let change = LittleEndian::read_u64(p);
        let start = LittleEndian::read_u64(&p[CHANGE_ID_SIZE..]);
        let end = LittleEndian::read_u64(&p[CHANGE_ID_SIZE + 8..]);
        Vertex {
            change: ChangeId(change),
            start: ChangePosition(start),
            end: ChangePosition(end),
        }
    }
    // Ordering delegates to Vertex's derived Ord.
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.cmp(&x)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page layout of a `Position<ChangeId>`:
// [change: 8 LE][pos: 8 LE] = 16 bytes.
impl Representable for Position<ChangeId> {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        (CHANGE_ID_SIZE + 8) as u16
    }
    // SAFETY: `p` must point to at least CHANGE_ID_SIZE + 8 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let p = std::slice::from_raw_parts_mut(p, CHANGE_ID_SIZE + 8);
        LittleEndian::write_u64(p, self.change.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE..], self.pos.0);
    }
    // SAFETY: `p` must point to at least CHANGE_ID_SIZE + 8 readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        let p = std::slice::from_raw_parts(p, CHANGE_ID_SIZE + 8);
        let change = LittleEndian::read_u64(p);
        let pos = LittleEndian::read_u64(&p[CHANGE_ID_SIZE..]);
        Position {
            change: ChangeId(change),
            pos: ChangePosition(pos),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.cmp(&x)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page layout of an `Edge` (25 bytes):
// [flag: 1][dest.change: 8 LE][dest.pos: 8 LE][introduced_by: 8 LE].
impl Representable for Edge {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        25
    }
    // SAFETY: `p` must point to at least 25 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let s = std::slice::from_raw_parts_mut(p, 25);
        s[0] = (*self).flag.bits();
        LittleEndian::write_u64(&mut s[1..], (*self).dest.change.0);
        LittleEndian::write_u64(&mut s[9..], (*self).dest.pos.0);
        LittleEndian::write_u64(&mut s[17..], (*self).introduced_by.0);
    }
    // SAFETY: `p` must point to at least 25 readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        let s = std::slice::from_raw_parts(p, 25);
        Edge {
            // A flag byte with unknown bits set means on-disk corruption
            // (or a version mismatch); panic with the raw bytes.
            flag: if let Some(b) = EdgeFlags::from_bits(s[0]) {
                b
            } else {
                panic!("read_value, edge = {:?}", s);
            },
            dest: Position {
                change: ChangeId(LittleEndian::read_u64(&s[1..])),
                pos: ChangePosition(LittleEndian::read_u64(&s[9..])),
            },
            introduced_by: ChangeId(LittleEndian::read_u64(&s[17..])),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        let a: &Edge = self;
        let b: &Edge = &x;
        a.cmp(b)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
/// An internal "unsafe" version of a [`pristine::PathId`], used to
/// circumvent the absence of associated type constructors in Rust
/// (else this would be borrow on a table).
#[derive(Clone, Copy, Debug)]
pub struct UnsafePathId {
    // Inode of the containing directory.
    parent_inode: Inode,
    // Raw, lifetime-erased view of the file name. Presumably only valid
    // while the memory it points into is alive — see `to_fileid`.
    basename: UnsafeSmallStr,
}
impl UnsafePathId {
    /// Erase the lifetime of a borrowed [`PathId`].
    pub fn from_fileid(f: PathId) -> UnsafePathId {
        UnsafePathId {
            parent_inode: f.parent_inode,
            basename: UnsafeSmallStr::from_small_str(f.basename),
        }
    }
    /// Reborrow this raw id as a safe [`PathId`].
    ///
    /// # Safety
    /// The caller picks the lifetime `'a`; it must not outlive the
    /// memory that `basename` points into.
    pub unsafe fn to_fileid<'a>(&self) -> PathId<'a> {
        PathId {
            parent_inode: self.parent_inode,
            basename: self.basename.to_small_str(),
        }
    }
}
// On-page layout: [parent_inode: INODE_SIZE][basename: variable].
impl Representable for UnsafePathId {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    // Variable size: fixed inode prefix + length of the small string.
    fn onpage_size(&self) -> u16 {
        INODE_SIZE + self.basename.onpage_size()
    }
    // SAFETY: `p` must point to at least `onpage_size()` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        self.parent_inode.write_value(p);
        self.basename.write_value(p.offset(INODE_SIZE as isize));
    }
    // SAFETY: `p` must point to a valid serialized UnsafePathId; the
    // returned value borrows (raw) from that memory via `basename`.
    unsafe fn read_value(p: *const u8) -> Self {
        UnsafePathId {
            parent_inode: Inode::read_value(p),
            basename: UnsafeSmallStr::read_value(p.offset(INODE_SIZE as isize)),
        }
    }
    // Compare through the safe PathId view (parent inode, then name).
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        let a: PathId = self.to_fileid();
        let b: PathId = x.to_fileid();
        a.cmp(&b)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
/// On-page size of a serialized `Inode`: a single little-endian u64.
const INODE_SIZE: u16 = 8;
impl Representable for Inode {
    fn alignment() -> Alignment {
        Alignment::B8
    }
    fn onpage_size(&self) -> u16 {
        INODE_SIZE
    }
    // SAFETY: `p` must point to at least 8 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        LittleEndian::write_u64(std::slice::from_raw_parts_mut(p, 8), self.0)
    }
    // SAFETY: `p` must point to at least 8 readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        Inode(LittleEndian::read_u64(std::slice::from_raw_parts(p, 8)))
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.0.cmp(&x.0)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page layout of a `Hash`: one algorithm tag byte, followed by the
// digest (32 bytes for Blake3, nothing for None).
impl Representable for Hash {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        1 + (match *self {
            Hash::Blake3(_) => 32,
            Hash::None => 0,
        })
    }
    // SAFETY: `p` must point to at least `onpage_size()` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        match *self {
            Hash::Blake3(q) => {
                *p = HashAlgorithm::Blake3 as u8;
                std::ptr::copy(q.as_ptr(), p.offset(1), 32)
            }
            Hash::None => *p = HashAlgorithm::None as u8,
        }
    }
    // SAFETY: `p` must point to a valid serialized Hash. The tag byte is
    // asserted to be a valid HashAlgorithm discriminant before the
    // transmute below.
    unsafe fn read_value(p: *const u8) -> Self {
        assert!(*p <= HashAlgorithm::Blake3 as u8);
        match std::mem::transmute(*p) {
            HashAlgorithm::Blake3 => {
                let mut h = [0; BLAKE3_BYTES];
                std::ptr::copy(p.offset(1), h.as_mut_ptr(), BLAKE3_BYTES);
                Hash::Blake3(h)
            }
            HashAlgorithm::None => Hash::None,
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.cmp(&x)
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page layout of a `Merkle` (33 bytes): one algorithm tag byte
// (currently always Ed25519), then the 32-byte compressed Edwards point.
impl Representable for Merkle {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        33
    }
    // SAFETY: `p` must point to at least 33 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        match *self {
            Merkle::Ed25519(q) => {
                *p = MerkleAlgorithm::Ed25519 as u8;
                // The tag is expected to be 1 on disk; guard against
                // accidental renumbering of MerkleAlgorithm.
                assert_eq!(*p, 1);
                let q = q.compress();
                let q = q.as_bytes();
                std::ptr::copy(q.as_ptr(), p.offset(1), 32);
            }
        }
    }
    // SAFETY: `p` must point to a valid serialized Merkle. Panics
    // (assert/unwrap) if the tag or the point encoding is invalid.
    unsafe fn read_value(p: *const u8) -> Self {
        assert_eq!(*p, MerkleAlgorithm::Ed25519 as u8);
        let slice = std::slice::from_raw_parts(p.offset(1), 32);
        Merkle::Ed25519(
            curve25519_dalek::edwards::CompressedEdwardsY::from_slice(slice)
                .decompress()
                .unwrap(),
        )
    }
    // Compare by serialized byte representation.
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.to_bytes().cmp(&x.to_bytes())
    }
    // No references to other pages.
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
use super::inode::*;
use crate::small_string::*;
/// A key in the file tree, i.e. a directory (`parent_inode`) and the
/// name of the child (file or directory).
#[doc(hidden)]
#[derive(Debug, Hash, Eq, PartialEq, Clone, PartialOrd, Ord)]
pub struct OwnedPathId {
    /// The parent of this path.
    pub parent_inode: Inode,
    /// Name of the file.
    pub basename: SmallString,
}
impl OwnedPathId {
    /// Borrow this owned key as a [`PathId`] (no allocation).
    pub fn as_file_id(&self) -> PathId {
        PathId {
            parent_inode: self.parent_inode,
            basename: self.basename.as_small_str(),
        }
    }
}
/// A borrow on a [`OwnedPathId`](struct.OwnedPathId.html).
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Copy)]
#[doc(hidden)]
pub struct PathId<'a> {
    /// The parent directory's inode.
    pub parent_inode: Inode,
    /// Borrowed name of the file.
    pub basename: SmallStr<'a>,
}
impl<'a> PathId<'a> {
    /// Make an owned version of this `PathId`.
    pub fn to_owned(&self) -> OwnedPathId {
        OwnedPathId {
            parent_inode: self.parent_inode.clone(),
            basename: self.basename.to_owned(),
        }
    }
}
use byteorder::{ByteOrder, LittleEndian};
/// Internal, 8-byte identifier of a change, local to this repository
/// (the external identifier is the full [`Hash`]).
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct ChangeId(pub u64);
impl std::fmt::Debug for ChangeId {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "ChangeId({})", self.to_base32())
    }
}
impl ChangeId {
    // Reserved id of the root pseudo-change.
    pub(crate) const ROOT: ChangeId = ChangeId(0);
    /// Is this the root pseudo-change?
    pub fn is_root(&self) -> bool {
        *self == ChangeId::ROOT
    }
    /// Crockford base32 rendering of the little-endian bytes of the id.
    pub fn to_base32(&self) -> String {
        let mut b = [0; 8];
        LittleEndian::write_u64(&mut b, self.0);
        base32::encode(base32::Alphabet::Crockford, &b)
    }
}
use crate::change::*;
use crate::small_string::*;
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::rc::Rc;
mod change_id;
pub use change_id::*;
mod vertex;
pub use vertex::*;
mod edge;
pub use edge::*;
mod hash;
pub use hash::*;
mod inode;
pub use inode::*;
mod inode_metadata;
pub use inode_metadata::*;
mod path_id;
pub use path_id::*;
mod merkle;
pub use merkle::*;
#[cfg(feature = "dump")]
pub mod channel_dump;
/// Base32 (de)serialization, used for user-facing renderings of hashes
/// and identifiers.
pub trait Base32: Sized {
    /// Encode `self` as a base32 string.
    fn to_base32(&self) -> String;
    /// Decode from base32 bytes; `None` if `b` is not a valid encoding.
    fn from_base32(b: &[u8]) -> Option<Self>;
}
pub mod sanakirja;
/// Timestamp (a counter, not wall-clock time) at which a change was
/// applied to a channel.
pub type ApplyTimestamp = u64;
/// A shared, interior-mutable handle on an open channel.
pub struct ChannelRef<T: ChannelTxnT> {
    // Reference-counted so several callers can hold the same channel open.
    pub(crate) r: Rc<RefCell<T::Channel>>,
}
// Manual impl: `Clone` is derived neither here nor via bounds, because
// `T` itself need not be `Clone` — only the `Rc` is cloned.
impl<T: ChannelTxnT> Clone for ChannelRef<T> {
    fn clone(&self) -> Self {
        ChannelRef { r: self.r.clone() }
    }
}
impl<T: TxnT> RemoteRef<T> {
    /// Name of the remote, as stored in the remotes table.
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
}
impl<T: ChannelTxnT> ChannelRef<T> {
    /// Immutable borrow of the underlying channel (panics if mutably borrowed).
    pub fn borrow(&self) -> std::cell::Ref<T::Channel> {
        self.r.borrow()
    }
    /// Mutable borrow of the underlying channel (panics if already borrowed).
    pub fn borrow_mut(&mut self) -> std::cell::RefMut<T::Channel> {
        self.r.borrow_mut()
    }
}
/// The three tables describing what we know of a remote repository.
pub struct Remote<T: TxnT> {
    /// Map from sequence number to (hash, state).
    pub remote: T::Remote,
    /// Reverse map of `remote`.
    pub rev: T::Revremote,
    /// Map from state to sequence number.
    pub states: T::Remotestates,
}
/// A shared, interior-mutable handle on an open remote, keyed by name.
pub struct RemoteRef<T: TxnT> {
    db: Rc<RefCell<Remote<T>>>,
    name: SmallString,
}
// Manual impl, as for ChannelRef: only the Rc and the name are cloned.
impl<T: TxnT> Clone for RemoteRef<T> {
    fn clone(&self) -> Self {
        RemoteRef {
            db: self.db.clone(),
            name: self.name.clone(),
        }
    }
}
impl<T: TxnT> RemoteRef<T> {
    /// Immutable borrow of the remote tables (panics if mutably borrowed).
    pub fn borrow(&self) -> std::cell::Ref<Remote<T>> {
        self.db.borrow()
    }
    /// Mutable borrow of the remote tables (panics if already borrowed).
    pub fn borrow_mut(&mut self) -> std::cell::RefMut<Remote<T>> {
        self.db.borrow_mut()
    }
}
/// Errors of resolving a (possibly partial) change-hash prefix,
/// parameterized by the transaction's own error type.
#[derive(Debug, Error)]
pub enum HashPrefixError<T: std::error::Error + 'static> {
    #[error("Failed to parse hash prefix: {0}")]
    Parse(String),
    #[error("Ambiguous hash prefix: {0}")]
    Ambiguous(String),
    #[error("Change not found: {0}")]
    NotFound(String),
    #[error(transparent)]
    Txn(T),
}
/// Errors of forking a channel.
#[derive(Debug, Error)]
pub enum ForkError<T: std::error::Error + 'static> {
    #[error("Channel name already exists: {0}")]
    ChannelNameExists(String),
    #[error(transparent)]
    Txn(T),
}
/// Transparent newtype wrapper around a backend transaction error.
#[derive(Debug, Error)]
#[error(transparent)]
pub struct TxnErr<E: std::error::Error + 'static>(pub E);
/// Read-only access to the vertex/edge graph of a channel, and to the
/// internal/external change-id maps.
pub trait GraphTxnT: Sized {
    type GraphError: std::error::Error + Send + Sync + 'static;
    // `table!`/`get!` declare the associated `Graph` type and its getter.
    table!(graph);
    get!(graph, Vertex<ChangeId>, Edge, GraphError);
    /// Returns the external hash of an internal change identifier, if
    /// the change is known.
    fn get_external(&self, p: ChangeId) -> Result<Option<Hash>, TxnErr<Self::GraphError>>;
    /// Returns the internal change identifier of change with external
    /// hash `hash`, if the change is known.
    fn get_internal(&self, p: Hash) -> Result<Option<ChangeId>, TxnErr<Self::GraphError>>;
    /// Cursor state for iterating the edges adjacent to a vertex.
    type Adj;
    /// Start iterating edges of `v` whose flags lie between `min` and
    /// `max`, beginning at destination `dest`.
    fn init_adj(
        &self,
        g: &Self::Graph,
        v: Vertex<ChangeId>,
        dest: Position<ChangeId>,
        min: EdgeFlags,
        max: EdgeFlags,
    ) -> Result<Self::Adj, TxnErr<Self::GraphError>>;
    /// Advance an adjacency cursor; `None` when exhausted.
    fn next_adj(
        &self,
        g: &Self::Graph,
        a: &mut Self::Adj,
    ) -> Option<Result<Edge, TxnErr<Self::GraphError>>>;
    /// Resolve position `p` to a graph vertex (used when following
    /// forward edges; see `check_alive`, `debug_root`).
    fn find_block(
        &self,
        graph: &Self::Graph,
        p: Position<ChangeId>,
    ) -> Result<Vertex<ChangeId>, BlockError<Self::GraphError>>;
    /// Resolve position `p` to a graph vertex (used when following
    /// PARENT edges; see `find_file`, `debug_root`).
    fn find_block_end(
        &self,
        graph: &Self::Graph,
        p: Position<ChangeId>,
    ) -> Result<Vertex<ChangeId>, BlockError<Self::GraphError>>;
}
/// Read-only access to a channel: its metadata, its graph, and the
/// change-id <-> timestamp maps.
pub trait ChannelTxnT: GraphTxnT {
    /// Backend-specific handle on an open channel.
    type Channel;
    fn name(channel: &Self::Channel) -> &str;
    fn graph(channel: &Self::Channel) -> &Self::Graph;
    /// Application counter of the channel.
    fn apply_counter(channel: &Self::Channel) -> u64;
    fn last_modified(channel: &Self::Channel) -> u64;
    fn changes(channel: &Self::Channel) -> &Self::Changeset;
    fn rev_changes(channel: &Self::Channel) -> &Self::RevChangeset;
    /// Map from change id to application timestamp.
    type Changeset;
    /// Reverse map: application timestamp to (change id, state).
    type RevChangeset;
    /// Timestamp at which `c` was applied to the channel, if it was.
    fn get_changeset(
        &self,
        channel: &Self::Changeset,
        c: ChangeId,
    ) -> Result<Option<u64>, TxnErr<Self::GraphError>>;
    /// Change applied at timestamp `c`, with the resulting state.
    fn get_revchangeset(
        &self,
        channel: &Self::RevChangeset,
        c: u64,
    ) -> Result<Option<(ChangeId, Merkle)>, TxnErr<Self::GraphError>>;
    type ChangesetCursor;
    /// Cursor over the changeset, starting at `pos` (or the beginning).
    fn cursor_changeset<'txn>(
        &'txn self,
        channel: &Self::Changeset,
        pos: Option<ChangeId>,
    ) -> Result<
        crate::pristine::Cursor<Self, &'txn Self, Self::ChangesetCursor, ChangeId, u64>,
        TxnErr<Self::GraphError>,
    >;
    fn cursor_changeset_next(
        &self,
        cursor: &mut Self::ChangesetCursor,
    ) -> Result<Option<(ChangeId, u64)>, TxnErr<Self::GraphError>>;
    fn cursor_changeset_prev(
        &self,
        cursor: &mut Self::ChangesetCursor,
    ) -> Result<Option<(ChangeId, u64)>, TxnErr<Self::GraphError>>;
    type RevchangesetCursor;
    /// Forward cursor over the reverse changeset; `RT` lets the caller
    /// keep ownership semantics of its choice over the transaction.
    fn cursor_revchangeset_ref<RT: std::ops::Deref<Target = Self>>(
        txn: RT,
        channel: &Self::RevChangeset,
        pos: Option<u64>,
    ) -> Result<
        Cursor<Self, RT, Self::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        TxnErr<Self::GraphError>,
    >;
    /// Backward cursor over the reverse changeset (latest first).
    fn rev_cursor_revchangeset<'txn>(
        &'txn self,
        channel: &Self::RevChangeset,
        pos: Option<u64>,
    ) -> Result<
        RevCursor<Self, &'txn Self, Self::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        TxnErr<Self::GraphError>,
    >;
    fn cursor_revchangeset_next(
        &self,
        cursor: &mut Self::RevchangesetCursor,
    ) -> Result<Option<(u64, (ChangeId, Merkle))>, TxnErr<Self::GraphError>>;
    fn cursor_revchangeset_prev(
        &self,
        cursor: &mut Self::RevchangesetCursor,
    ) -> Result<Option<(u64, (ChangeId, Merkle))>, TxnErr<Self::GraphError>>;
    /// If the channel went through state `hash`, return the timestamp
    /// at which it did.
    fn channel_has_state(
        &self,
        channel: &Self::Channel,
        hash: Merkle,
    ) -> Result<Option<u64>, TxnErr<Self::GraphError>>;
}
/// Full iteration over every (vertex, edge) binding of a graph
/// (used by the debug helpers and consistency checks below).
pub trait GraphIter: GraphTxnT {
    type GraphCursor;
    /// Start a cursor at the beginning of `g`.
    fn iter_graph(&self, g: &Self::Graph) -> Result<Self::GraphCursor, TxnErr<Self::GraphError>>;
    /// Advance the cursor; `None` when the whole graph has been visited.
    fn next_graph(
        &self,
        g: &Self::Graph,
        a: &mut Self::GraphCursor,
    ) -> Option<Result<(Vertex<ChangeId>, Edge), TxnErr<Self::GraphError>>>;
}
/// Errors of `find_block`/`find_block_end`: either a backend error, or
/// the requested position does not belong to any block of the graph.
#[derive(Debug, Error)]
pub enum BlockError<T: std::error::Error + 'static> {
    #[error(transparent)]
    Txn(T),
    #[error("Block error: {:?}", block)]
    Block { block: Position<ChangeId> },
}
// Allows `?` on a TxnErr inside functions returning BlockError.
impl<T: std::error::Error + 'static> std::convert::From<TxnErr<T>> for BlockError<T> {
    fn from(e: TxnErr<T>) -> Self {
        BlockError::Txn(e.0)
    }
}
/// Read-only access to the dependency tables: which change depends on
/// which, and which changes touched which files.
pub trait DepsTxnT: Sized {
    type DepsError: std::error::Error + Send + Sync + 'static;
    // dep / revdep: (change -> dependency) and its reverse.
    table!(revdep);
    table!(dep);
    table_get!(dep, ChangeId, ChangeId, DepsError);
    cursor_ref!(dep, ChangeId, ChangeId, DepsError);
    table_get!(revdep, ChangeId, ChangeId, DepsError);
    /// Iterate the changes that depend on `p`.
    fn iter_revdep(
        &self,
        p: ChangeId,
    ) -> Result<Cursor<Self, &Self, Self::DepCursor, ChangeId, ChangeId>, TxnErr<Self::DepsError>>;
    /// Iterate the dependencies of `p`.
    fn iter_dep(
        &self,
        p: ChangeId,
    ) -> Result<Cursor<Self, &Self, Self::DepCursor, ChangeId, ChangeId>, TxnErr<Self::DepsError>>;
    fn iter_dep_ref<RT: std::ops::Deref<Target = Self> + Clone>(
        txn: RT,
        p: ChangeId,
    ) -> Result<Cursor<Self, RT, Self::DepCursor, ChangeId, ChangeId>, TxnErr<Self::DepsError>>;
    /// Iterate the changes that touched file position `p`.
    fn iter_touched(
        &self,
        p: Position<ChangeId>,
    ) -> Result<
        Cursor<Self, &Self, Self::Touched_filesCursor, Position<ChangeId>, ChangeId>,
        TxnErr<Self::DepsError>,
    >;
    /// Iterate the file positions touched by change `p`.
    fn iter_rev_touched(
        &self,
        p: ChangeId,
    ) -> Result<
        Cursor<Self, &Self, Self::Rev_touched_filesCursor, ChangeId, Position<ChangeId>>,
        TxnErr<Self::DepsError>,
    >;
    // touched_files / rev_touched_files: (file -> change) and its reverse.
    table!(touched_files);
    table!(rev_touched_files);
    table_get!(touched_files, Position<ChangeId>, ChangeId, DepsError);
    table_get!(rev_touched_files, ChangeId, Position<ChangeId>, DepsError);
    iter!(touched_files, Position<ChangeId>, ChangeId, DepsError);
    iter!(rev_touched_files, ChangeId, Position<ChangeId>, DepsError);
}
/// Read-only access to the working-copy tables: the file tree
/// (tree/revtree) and the inode <-> graph-position maps
/// (inodes/revinodes), plus partial-checkout prefixes.
pub trait TreeTxnT: Sized {
    type TreeError: std::error::Error + Send + Sync + 'static;
    // tree / revtree: (path component -> inode) and its reverse.
    table!(tree);
    table_get!(tree, PathId, Inode, TreeError);
    iter!(tree, OwnedPathId, Inode, TreeError);
    table!(revtree);
    table_get!(revtree, Inode, PathId, TreeError);
    iter!(revtree, Inode, OwnedPathId, TreeError);
    // inodes / revinodes: (inode -> graph position) and its reverse.
    table!(inodes);
    table!(revinodes);
    table_get!(inodes, Inode, Position<ChangeId>, TreeError);
    table_get!(revinodes, Position<ChangeId>, Inode, TreeError);
    // partials: per-channel partial-checkout roots.
    table!(partials);
    cursor!(partials, SmallString, Position<ChangeId>, TreeError);
    cursor!(inodes, Inode, Position<ChangeId>, TreeError);
    fn iter_inodes(
        &self,
    ) -> Result<
        Cursor<Self, &Self, Self::InodesCursor, Inode, Position<ChangeId>>,
        TxnErr<Self::TreeError>,
    >;
    // Debug-only: revinodes iteration is only needed by the debug helpers.
    #[cfg(debug_assertions)]
    cursor!(revinodes, Position<ChangeId>, Inode, TreeError);
    #[cfg(debug_assertions)]
    fn iter_revinodes(
        &self,
    ) -> Result<
        Cursor<Self, &Self, Self::RevinodesCursor, Position<ChangeId>, Inode>,
        TxnErr<Self::TreeError>,
    >;
    /// Iterate the partial-checkout roots registered for `channel`.
    fn iter_partials<'txn>(
        &'txn self,
        channel: &str,
    ) -> Result<
        Cursor<Self, &'txn Self, Self::PartialsCursor, SmallString, Position<ChangeId>>,
        TxnErr<Self::TreeError>,
    >;
}
/// The trait of immutable transactions.
pub trait TxnT:
    GraphTxnT
    + ChannelTxnT
    + DepsTxnT<DepsError = <Self as GraphTxnT>::GraphError>
    + TreeTxnT<TreeError = <Self as GraphTxnT>::GraphError>
{
    // channels: name -> 6-tuple of table roots/counters (see Channel).
    table!(channels);
    cursor!(channels, SmallString, (u64, u64, u64, u64, u64, u64));
    /// Resolve a base32 hash prefix to the unique change it denotes.
    fn hash_from_prefix(
        &self,
        prefix: &str,
    ) -> Result<(Hash, ChangeId), HashPrefixError<Self::GraphError>>;
    /// Same as `hash_from_prefix`, looking the prefix up in a remote.
    fn hash_from_prefix_remote(
        &self,
        remote: &RemoteRef<Self>,
        prefix: &str,
    ) -> Result<Hash, HashPrefixError<Self::GraphError>>;
    /// Open the channel called `name`, if it exists.
    fn load_channel(
        &self,
        name: &str,
    ) -> Result<Option<ChannelRef<Self>>, TxnErr<Self::GraphError>>;
    /// Open the remote called `name`, if it exists.
    fn load_remote(&self, name: &str) -> Result<Option<RemoteRef<Self>>, TxnErr<Self::GraphError>>;
    /// Iterate over all channels whose name is at least `start`.
    fn iter_channels<'txn>(
        &'txn self,
        start: &str,
    ) -> Result<ChannelIterator<'txn, Self>, TxnErr<Self::GraphError>>;
    /// Iterate over all remotes whose name is at least `start`.
    fn iter_remotes<'txn>(
        &'txn self,
        start: &str,
    ) -> Result<RemotesIterator<'txn, Self>, TxnErr<Self::GraphError>>;
    // remotes: name -> (remote, rev, states) table roots.
    table!(remotes);
    cursor!(remotes, SmallString, (u64, u64, u64));
    table!(remote);
    table!(revremote);
    table!(remotestates);
    cursor!(remote, u64, (Hash, Merkle));
    rev_cursor!(remote, u64, (Hash, Merkle));
    /// Iterate a remote's log starting at sequence number `k`.
    fn iter_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: u64,
    ) -> Result<
        Cursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>,
        TxnErr<Self::GraphError>,
    >;
    /// Iterate a remote's log backwards, starting at `k` (or the end).
    fn iter_rev_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: Option<u64>,
    ) -> Result<
        RevCursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>,
        TxnErr<Self::GraphError>,
    >;
    fn get_remote(
        &mut self,
        name: &str,
    ) -> Result<Option<RemoteRef<Self>>, TxnErr<Self::GraphError>>;
    /// Last entry of a remote's log, if any.
    fn last_remote(
        &self,
        remote: &Self::Remote,
    ) -> Result<Option<(u64, (Hash, Merkle))>, TxnErr<Self::GraphError>>;
    fn get_remote_state(
        &self,
        remote: &Self::Remote,
        n: u64,
    ) -> Result<Option<(u64, (Hash, Merkle))>, TxnErr<Self::GraphError>>;
    fn remote_has_change(
        &self,
        remote: &RemoteRef<Self>,
        hash: Hash,
    ) -> Result<bool, TxnErr<Self::GraphError>>;
    fn remote_has_state(
        &self,
        remote: &RemoteRef<Self>,
        hash: Merkle,
    ) -> Result<bool, TxnErr<Self::GraphError>>;
}
/// Iterate the graph between `(key, min_flag)` and `(key,
/// max_flag)`, where both bounds are included.
pub(crate) fn iter_adjacent<'txn, T: GraphTxnT>(
    txn: &'txn T,
    graph: &'txn T::Graph,
    key: Vertex<ChangeId>,
    min_flag: EdgeFlags,
    max_flag: EdgeFlags,
) -> Result<AdjacentIterator<'txn, T>, TxnErr<T::GraphError>> {
    // Initialize the backend cursor first, then wrap it.
    let it = txn.init_adj(graph, key, Position::ROOT, min_flag, max_flag)?;
    Ok(AdjacentIterator { it, txn, graph })
}
/// Iterate the alive (non-deleted, non-parent) children of `key`.
pub(crate) fn iter_alive_children<'txn, T: GraphTxnT>(
    txn: &'txn T,
    graph: &'txn T::Graph,
    key: Vertex<ChangeId>,
) -> Result<AdjacentIterator<'txn, T>, TxnErr<T::GraphError>> {
    let min = EdgeFlags::empty();
    let max = EdgeFlags::alive_children();
    iter_adjacent(txn, graph, key, min, max)
}
/// Iterate the edges of `key` flagged both DELETED and PARENT.
pub(crate) fn iter_deleted_parents<'txn, T: GraphTxnT>(
    txn: &'txn T,
    graph: &'txn T::Graph,
    key: Vertex<ChangeId>,
) -> Result<AdjacentIterator<'txn, T>, TxnErr<T::GraphError>> {
    let min = EdgeFlags::DELETED | EdgeFlags::PARENT;
    iter_adjacent(txn, graph, key, min, EdgeFlags::all())
}
/// Iterate every edge adjacent to `key`, with no flag restriction.
pub(crate) fn iter_adj_all<'txn, T: GraphTxnT>(
    txn: &'txn T,
    graph: &'txn T::Graph,
    key: Vertex<ChangeId>,
) -> Result<AdjacentIterator<'txn, T>, TxnErr<T::GraphError>> {
    let (min, max) = (EdgeFlags::empty(), EdgeFlags::all());
    iter_adjacent(txn, graph, key, min, max)
}
/// Reconstruct the working-copy path of graph position `v`, by walking
/// `revinodes` (position -> inode) and then `revtree` (inode -> parent)
/// up to the root.
///
/// Returns `Ok(None)` if `v` has no inode, or if the inode is dangling
/// (not in the tree).
pub(crate) fn tree_path<T: TreeTxnT>(
    txn: &T,
    v: Position<ChangeId>,
) -> Result<Option<String>, TxnErr<T::TreeError>> {
    if let Some(mut inode) = txn.get_revinodes(v, None)? {
        // Collected leaf-first; joined in reverse below.
        let mut components = Vec::new();
        while !inode.is_root() {
            if let Some(next) = txn.get_revtree(inode, None)? {
                components.push(next.basename.as_str().to_string());
                inode = next.parent_inode;
            } else {
                // A missing revtree entry is only tolerated for the first
                // step (dangling inode); mid-walk it is a broken tree.
                assert!(components.is_empty());
                return Ok(None);
            }
        }
        if let Some(mut result) = components.pop() {
            // Append in place instead of `result + "/" + c` (clippy
            // `string_add`): same output, no intermediate strings.
            while let Some(c) = components.pop() {
                result.push('/');
                result.push_str(&c);
            }
            return Ok(Some(result));
        }
    }
    Ok(None)
}
/// Resolve an optional external hash to an internal change id:
/// `None` means "this change" (`p`), `Hash::None` is the root, and any
/// other hash is looked up in the internal table.
pub(crate) fn internal<T: GraphTxnT>(
    txn: &T,
    h: &Option<Hash>,
    p: ChangeId,
) -> Result<Option<ChangeId>, TxnErr<T::GraphError>> {
    match h {
        None => Ok(Some(p)),
        Some(Hash::None) => Ok(Some(ChangeId::ROOT)),
        Some(&hash) => txn.get_internal(hash),
    }
}
/// Errors of translating a change's external references to internal
/// ids: either a dependency was never registered, or the backend failed.
#[derive(Error, Debug)]
pub enum InconsistentChange<T: std::error::Error + 'static> {
    #[error("Undeclared dependency")]
    UndeclaredDep,
    #[error(transparent)]
    Txn(T),
}
// Allows `?` on a TxnErr inside functions returning InconsistentChange.
impl<T: std::error::Error + 'static> std::convert::From<TxnErr<T>> for InconsistentChange<T> {
    fn from(e: TxnErr<T>) -> Self {
        InconsistentChange::Txn(e.0)
    }
}
/// Translate a position whose change is an optional external hash into
/// a fully internal position. `None` means "the change being applied"
/// (`change_id`); an unknown hash is an undeclared dependency.
pub(crate) fn internal_pos<T: GraphTxnT>(
    txn: &T,
    pos: &Position<Option<Hash>>,
    change_id: ChangeId,
) -> Result<Position<ChangeId>, InconsistentChange<T::GraphError>> {
    let change = match pos.change {
        None => change_id,
        Some(hash) => txn
            .get_internal(hash)?
            .ok_or(InconsistentChange::UndeclaredDep)?,
    };
    Ok(Position {
        change,
        pos: pos.pos,
    })
}
/// Forward cursor over a channel's log, starting at timestamp `from`:
/// yields `(timestamp, (change, state))` entries.
pub fn changeid_log<'db, 'txn: 'db, T: ChannelTxnT>(
    txn: &'txn T,
    channel: &'db T::Channel,
    from: u64,
) -> Result<Cursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>, TxnErr<T::GraphError>>
{
    let revchanges = T::rev_changes(channel);
    T::cursor_revchangeset_ref(txn, revchanges, Some(from))
}
/// Current state (Merkle) of a channel: the state recorded with the
/// most recently applied change, or zero for an empty channel.
pub(crate) fn current_state<'db, 'txn: 'db, T: ChannelTxnT>(
    txn: &'txn T,
    channel: &'db T::Channel,
) -> Result<Merkle, TxnErr<T::GraphError>> {
    // Walk the reverse-changeset backwards: the first entry is the latest.
    let mut latest = txn.rev_cursor_revchangeset(T::rev_changes(channel), None)?;
    match latest.next() {
        Some(entry) => {
            let (_, (_, state)) = entry?;
            Ok(state)
        }
        None => Ok(Merkle::zero()),
    }
}
/// Same as [`changeid_log`], generic over how the transaction is held
/// (any `Deref<Target = T>` works, e.g. `&T` or `Rc<T>`).
pub(crate) fn changeid_log_ref<T: ChannelTxnT, RT: std::ops::Deref<Target = T>>(
    txn: RT,
    channel: &T::Channel,
    from: u64,
) -> Result<Cursor<T, RT, T::RevchangesetCursor, u64, (ChangeId, Merkle)>, TxnErr<T::GraphError>> {
    // The inner call already returns exactly this Result type, so the
    // former `Ok(...?)` wrapping (clippy `needless_question_mark`) and
    // the `&channel` double-borrow were redundant.
    T::cursor_revchangeset_ref(txn, T::rev_changes(channel), Some(from))
}
/// Backward cursor over a channel's log (latest change first), starting
/// at timestamp `from` (or the end when `None`).
pub(crate) fn changeid_rev_log<'db, 'txn: 'db, T: ChannelTxnT>(
    txn: &'txn T,
    channel: &'db T::Channel,
    from: Option<u64>,
) -> Result<
    RevCursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
    TxnErr<T::GraphError>,
> {
    // Direct return: the former `Ok(...?)` wrapping was redundant
    // (clippy `needless_question_mark`), as was the `&channel` borrow.
    txn.rev_cursor_revchangeset(T::rev_changes(channel), from)
}
/// Changes of a channel, from `from_timestamp` onwards, restricted to
/// those relevant to file position `key` (the filtering is done by
/// `PathChangeset`'s iterator implementation).
pub(crate) fn log_for_path<
    'txn,
    'channel,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
>(
    txn: &'txn T,
    channel: &'channel T::Channel,
    key: Position<ChangeId>,
    from_timestamp: u64,
) -> Result<PathChangeset<'channel, 'txn, T>, TxnErr<T::GraphError>> {
    let iter = T::cursor_revchangeset_ref(txn, T::rev_changes(channel), Some(from_timestamp))?;
    Ok(PathChangeset {
        txn,
        channel,
        key,
        iter,
    })
}
/// Same as [`log_for_path`], but iterating the channel's log backwards
/// (most recent change first) from `from_timestamp`.
pub(crate) fn rev_log_for_path<
    'txn,
    'channel,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
>(
    txn: &'txn T,
    channel: &'channel T::Channel,
    key: Position<ChangeId>,
    from_timestamp: u64,
) -> Result<RevPathChangeset<'channel, 'txn, T>, TxnErr<T::GraphError>> {
    let iter = txn.rev_cursor_revchangeset(T::rev_changes(channel), Some(from_timestamp))?;
    Ok(RevPathChangeset {
        txn,
        channel,
        key,
        iter,
    })
}
/// Is there an alive/pseudo edge from `a` to `b`.
pub(crate) fn test_edge<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    a: Position<ChangeId>,
    b: Position<ChangeId>,
    min: EdgeFlags,
    max: EdgeFlags,
) -> Result<bool, TxnErr<T::GraphError>> {
    debug!("is_connected {:?} {:?}", a, b);
    // Start the adjacency cursor at destination `b`; only the first
    // candidate edge needs checking (the cursor is positioned at it).
    let mut adj = txn.init_adj(channel, a.inode_vertex(), b, min, max)?;
    match txn.next_adj(channel, &mut adj) {
        Some(Ok(dest)) => Ok(dest.dest == b),
        Some(Err(e)) => Err(e.into()),
        None => Ok(false),
    }
}
/// Is there an alive/pseudo edge to `a`.
pub(crate) fn is_alive<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    a: Vertex<ChangeId>,
) -> Result<bool, TxnErr<T::GraphError>> {
    // The root vertex is alive by definition.
    if a.is_root() {
        return Ok(true);
    }
    // Scan the non-deleted PARENT edges of `a`.
    for e in iter_adjacent(
        txn,
        channel,
        a,
        EdgeFlags::PARENT,
        EdgeFlags::all() - EdgeFlags::DELETED,
    )? {
        let e = e?;
        // A real (non-pseudo) parent keeps `a` alive when it is a BLOCK
        // edge, or when `a` is an empty vertex.
        if !e.flag.contains(EdgeFlags::PSEUDO)
            && (e.flag.contains(EdgeFlags::BLOCK) || a.is_empty())
        {
            return Ok(true);
        }
    }
    Ok(false)
}
/// Return the internal id of change `h`, allocating a fresh one if the
/// change is not yet known.
pub(crate) fn make_changeid<T: GraphTxnT>(
    txn: &T,
    h: &Hash,
) -> Result<ChangeId, TxnErr<T::GraphError>> {
    // Already registered: reuse the existing id.
    if let Some(h) = txn.get_internal(*h)? {
        return Ok(h);
    }
    use byteorder::{ByteOrder, LittleEndian};
    use rand::Rng;
    // First candidate: the first 8 bytes of the hash (deterministic).
    let mut p = match h {
        Hash::None => return Ok(ChangeId::ROOT),
        Hash::Blake3(ref s) => ChangeId(LittleEndian::read_u64(&s[..])),
    };
    // On collision with an id already in use, retry with random ids.
    while txn.get_external(p)?.is_some() {
        p = ChangeId(rand::thread_rng().gen());
    }
    Ok(p)
}
/// Debug helper: dump the whole `tree` table (path component -> inode)
/// to `file`, one entry per line.
#[cfg(debug_assertions)]
pub fn debug_tree<P: AsRef<std::path::Path>, T: TreeTxnT>(
    txn: &T,
    file: P,
) -> Result<(), std::io::Error> {
    // Start iterating at the smallest possible key (root, empty name).
    let root = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::from_str(""),
    };
    let mut f = std::fs::File::create(file)?;
    // unwrap: debug-only helper, a broken table should fail loudly.
    for t in txn.iter_tree(root, None).unwrap() {
        writeln!(f, "{:?}", t.unwrap())?
    }
    Ok(())
}
/// Debug helper: log the whole `tree` table via `debug!`.
#[cfg(debug_assertions)]
pub fn debug_tree_print<T: TreeTxnT>(txn: &T) {
    let root = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::from_str(""),
    };
    for t in txn.iter_tree(root, None).unwrap() {
        debug!("{:?}", t.unwrap())
    }
}
/// Write the graph of a channel to file `f` in graphviz
/// format. **Warning:** this can be really large on old channels.
#[cfg(debug_assertions)]
pub fn debug_to_file<P: AsRef<std::path::Path>, T: GraphIter + ChannelTxnT>(
    txn: &T,
    channel: &T::Channel,
    f: P,
) -> Result<bool, std::io::Error> {
    info!("debug {:?}", f.as_ref());
    let mut f = std::fs::File::create(f)?;
    // `done` is true iff the graph contained at least one binding.
    let done = debug(txn, T::graph(channel), &mut f)?;
    f.flush()?;
    info!("done debugging {:?}", done);
    Ok(done)
}
/// Debug helper: dump the whole `revtree` table (inode -> path
/// component) to `file`, one entry per line.
#[cfg(debug_assertions)]
pub fn debug_revtree<P: AsRef<std::path::Path>, T: TreeTxnT>(
    txn: &T,
    file: P,
) -> Result<(), std::io::Error> {
    let mut f = std::fs::File::create(file)?;
    // unwrap: debug-only helper, a broken table should fail loudly.
    for t in txn.iter_revtree(Inode::ROOT, None).unwrap() {
        writeln!(f, "{:?}", t.unwrap())?
    }
    Ok(())
}
/// Debug helper: log the whole `revtree` table via `debug!`.
#[cfg(debug_assertions)]
pub fn debug_revtree_print<T: TreeTxnT>(txn: &T) {
    for t in txn.iter_revtree(Inode::ROOT, None).unwrap() {
        debug!("{:?}", t.unwrap())
    }
}
/// Debug helper: log the whole `inodes` table via `debug!`.
#[cfg(debug_assertions)]
pub fn debug_inodes<T: TreeTxnT>(txn: &T) {
    debug!("debug_inodes");
    for t in txn.iter_inodes().unwrap() {
        debug!("debug_inodes = {:?}", t.unwrap())
    }
    debug!("/debug_inodes");
}
/// Debug helper: log the whole `revinodes` table via `debug!`.
#[cfg(debug_assertions)]
pub fn debug_revinodes<T: TreeTxnT>(txn: &T) {
    debug!("debug_revinodes");
    for t in txn.iter_revinodes().unwrap() {
        debug!("debug_revinodes = {:?}", t.unwrap())
    }
    debug!("/debug_revinodes");
}
/// Write the graph of a channel to write `W` in graphviz
/// format. **Warning:** this can be really large on old channels.
#[cfg(debug_assertions)]
pub fn debug<W: Write, T: GraphIter>(
    txn: &T,
    channel: &T::Graph,
    mut f: W,
) -> Result<bool, std::io::Error> {
    let mut cursor = txn.iter_graph(&channel).unwrap();
    writeln!(f, "digraph {{")?;
    // Emit each vertex node only once, even though the iteration yields
    // one (vertex, edge) pair per edge.
    let mut keys = std::collections::HashSet::new();
    // Returns whether the graph had at least one binding.
    let mut at_least_one = false;
    while let Some(x) = txn.next_graph(&channel, &mut cursor) {
        let (k, v) = x.unwrap();
        at_least_one = true;
        debug!("debug {:?} {:?}", k, v);
        if keys.insert(k) {
            debug_vertex(&mut f, k)?
        }
        debug_edge(txn, channel, &mut f, k, v)?
    }
    writeln!(f, "}}")?;
    Ok(at_least_one)
}
/// Consistency check of a channel's graph. Returns:
/// - a map of alive vertices that are unreachable from the root
///   (each with the file vertex containing it, if one was found), and
/// - the vertices that are only reachable through pseudo-edges.
pub fn check_alive<T: ChannelTxnT + GraphIter>(
    txn: &T,
    channel: &T::Graph,
) -> (
    HashMap<Vertex<ChangeId>, Option<Vertex<ChangeId>>>,
    Vec<(Vertex<ChangeId>, Option<Vertex<ChangeId>>)>,
) {
    // Find the reachable with a DFS.
    let mut reachable = HashSet::new();
    let mut stack = vec![Vertex::ROOT];
    while let Some(v) = stack.pop() {
        if !reachable.insert(v) {
            continue;
        }
        // Follow only forward (non-PARENT), non-deleted edges.
        for e in iter_adjacent(
            txn,
            &channel,
            v,
            EdgeFlags::empty(),
            EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
        )
        .unwrap()
        {
            let e = e.unwrap();
            stack.push(txn.find_block(&channel, e.dest).unwrap());
        }
    }
    debug!("reachable = {:#?}", reachable);
    // Find the alive
    let mut alive_unreachable = HashMap::new();
    let mut cursor = txn.iter_graph(&channel).unwrap();
    let mut visited = HashSet::new();
    // The iteration groups edges by vertex; `k0` tracks the current
    // group, and the two flags summarize the parent edges seen so far.
    let mut k0 = Vertex::ROOT;
    let mut k0_has_pseudo_parents = false;
    let mut k0_has_regular_parents = false;
    let mut reachable_pseudo = Vec::new();
    while let Some(x) = txn.next_graph(&channel, &mut cursor) {
        let (k, v) = x.unwrap();
        debug!("check_alive, k = {:?}, v = {:?}", k, v);
        if k0 != k {
            // Close the previous group: pseudo-only parents means the
            // vertex is only held alive by pseudo-edges.
            if k0_has_pseudo_parents && !k0_has_regular_parents {
                reachable_pseudo.push((
                    k0,
                    find_file(txn, &channel, k0, &mut stack, &mut visited).unwrap(),
                ))
            }
            k0 = k;
            k0_has_pseudo_parents = false;
            k0_has_regular_parents = false;
        }
        // Classify alive (non-deleted, non-folder) parent edges.
        if v.flag.contains(EdgeFlags::PARENT)
            && !v.flag.contains(EdgeFlags::FOLDER)
            && !v.flag.contains(EdgeFlags::DELETED)
        {
            if v.flag.contains(EdgeFlags::PSEUDO) {
                k0_has_pseudo_parents = true
            } else {
                k0_has_regular_parents = true
            }
        }
        // An alive vertex (same criterion as `is_alive`) that the DFS
        // did not reach is recorded as alive-unreachable.
        if v.flag.contains(EdgeFlags::PARENT)
            && (v.flag.contains(EdgeFlags::BLOCK) || k.is_empty())
            && !v.flag.contains(EdgeFlags::DELETED)
            && !reachable.contains(&k)
        {
            let file = find_file(txn, &channel, k, &mut stack, &mut visited).unwrap();
            alive_unreachable.insert(k, file);
        }
    }
    // Close the last group.
    if !k0.is_root() && k0_has_pseudo_parents && !k0_has_regular_parents {
        reachable_pseudo.push((
            k0,
            find_file(txn, &channel, k0, &mut stack, &mut visited).unwrap(),
        ));
    }
    (alive_unreachable, reachable_pseudo)
}
/// Walk PARENT edges up from `k` until a FOLDER edge is found, i.e.
/// the vertex of the file containing `k` (or `None` if there is none).
/// `stack` and `visited` are caller-provided scratch space, cleared on
/// entry, so repeated calls reuse their allocations.
fn find_file<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    k: Vertex<ChangeId>,
    stack: &mut Vec<Vertex<ChangeId>>,
    visited: &mut HashSet<Vertex<ChangeId>>,
) -> Result<Option<Vertex<ChangeId>>, TxnErr<T::GraphError>> {
    let mut file = None;
    stack.clear();
    stack.push(k);
    visited.clear();
    'outer: while let Some(kk) = stack.pop() {
        if !visited.insert(kk) {
            continue;
        }
        for e in iter_adjacent(txn, &channel, kk, EdgeFlags::PARENT, EdgeFlags::all())? {
            let e = e?;
            if e.flag.contains(EdgeFlags::PARENT) {
                // A FOLDER parent edge means `kk` sits directly inside
                // the file structure: stop the whole search here.
                if e.flag.contains(EdgeFlags::FOLDER) {
                    file = Some(kk);
                    break 'outer;
                }
                stack.push(txn.find_block_end(&channel, e.dest).unwrap());
            }
        }
    }
    Ok(file)
}
/// Write, in graphviz (dot) syntax, the part of the graph reachable from
/// `root`, following parent edges when `down` is `false` and non-parent
/// edges when `down` is `true`.
pub fn debug_root<W: Write, T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    root: Vertex<ChangeId>,
    mut f: W,
    down: bool,
) -> Result<(), std::io::Error> {
    writeln!(f, "digraph {{")?;
    let mut seen = HashSet::new();
    let mut pending = vec![root];
    while let Some(vertex) = pending.pop() {
        if !seen.insert(vertex) {
            continue;
        }
        debug_vertex(&mut f, vertex)?;
        for edge in iter_adj_all(txn, &channel, vertex).unwrap() {
            let edge = edge.unwrap();
            // XOR: keep parent edges when going up, child edges when going down.
            if edge.flag.contains(EdgeFlags::PARENT) ^ down {
                debug_edge(txn, &channel, &mut f, vertex, edge)?;
                let next = if edge.flag.contains(EdgeFlags::PARENT) {
                    txn.find_block_end(&channel, edge.dest).unwrap()
                } else {
                    txn.find_block(&channel, edge.dest).unwrap()
                };
                pending.push(next);
            }
        }
    }
    writeln!(f, "}}")?;
    Ok(())
}
/// Emit a single graphviz node statement for vertex `k`, named
/// `node_<change>_<start>_<end>` and labelled with the same coordinates.
fn debug_vertex<W: std::io::Write>(mut f: W, k: Vertex<ChangeId>) -> Result<(), std::io::Error> {
    let change = k.change.to_base32();
    writeln!(
        f,
        "node_{change}_{start}_{end}[label=\"{change} [{start};{end}[\"];",
        change = change,
        start = k.start.0,
        end = k.end.0,
    )
}
/// Emit one graphviz edge statement for the edge `v` leaving vertex `k`.
///
/// Deleted edges are rendered dashed and pseudo-edges dotted; the colour
/// encodes the `PARENT`/`FOLDER` flag combination. Parent edges resolve
/// their target with `find_block_end` (skipping the edge entirely when
/// unresolvable), other edges with `find_block` (falling back to the raw
/// destination position when unresolvable). The label is the edge's
/// `introduced_by` change, bracketed when the edge carries `BLOCK`.
fn debug_edge<T: GraphTxnT, W: std::io::Write>(
    txn: &T,
    channel: &T::Graph,
    mut f: W,
    k: Vertex<ChangeId>,
    v: Edge,
) -> Result<(), std::io::Error> {
    let style = if v.flag.contains(EdgeFlags::DELETED) {
        ", style=dashed"
    } else if v.flag.contains(EdgeFlags::PSEUDO) {
        ", style=dotted"
    } else {
        ""
    };
    let color = if v.flag.contains(EdgeFlags::PARENT) {
        if v.flag.contains(EdgeFlags::FOLDER) {
            "orange"
        } else {
            "red"
        }
    } else if v.flag.contains(EdgeFlags::FOLDER) {
        "royalblue"
    } else {
        "forestgreen"
    };
    // BLOCK edges get their label bracketed: `[<introduced_by>]`. Computed
    // once here instead of repeating the conditional in every branch below.
    let (open, close) = if v.flag.contains(EdgeFlags::BLOCK) {
        ("[", "]")
    } else {
        ("", "")
    };
    if v.flag.contains(EdgeFlags::PARENT) {
        let dest = if v.dest.change.is_root() {
            Vertex::ROOT
        } else if let Ok(dest) = txn.find_block_end(channel, v.dest) {
            dest
        } else {
            // Unresolvable parent edge: nothing sensible to draw.
            return Ok(());
        };
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{}_{} [label=\"{}{}{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            dest.change.to_base32(),
            dest.start.0,
            dest.end.0,
            open,
            v.introduced_by.to_base32(),
            close,
            color,
            style
        )?;
    } else if let Ok(dest) = txn.find_block(&channel, v.dest) {
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{}_{} [label=\"{}{}{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            dest.change.to_base32(),
            dest.start.0,
            dest.end.0,
            open,
            v.introduced_by.to_base32(),
            close,
            color,
            style
        )?;
    } else {
        // Target block not found: draw the raw destination position instead.
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{} [label=\"{}{}{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            v.dest.change.to_base32(),
            v.dest.pos.0,
            open,
            v.introduced_by.to_base32(),
            close,
            color,
            style
        )?;
    }
    Ok(())
}
/// A cursor over a table, initialised at a certain value.
pub struct Cursor<T: Sized, RT: std::ops::Deref<Target = T>, Cursor, K, V> {
    // Backend-specific cursor state.
    pub cursor: Cursor,
    // The transaction this cursor reads from.
    pub txn: RT,
    // Ties the cursor to its transaction/key/value types without storing them.
    pub marker: std::marker::PhantomData<(T, K, V)>,
}
/// Same shape as `Cursor`, but instantiated by `initialized_rev_cursor!`
/// for reverse-direction iteration.
pub struct RevCursor<T: Sized, RT: std::ops::Deref<Target = T>, Cursor, K, V> {
    // Backend-specific cursor state.
    pub cursor: Cursor,
    // The transaction this cursor reads from.
    pub txn: RT,
    // Ties the cursor to its transaction/key/value types without storing them.
    pub marker: std::marker::PhantomData<(T, K, V)>,
}
// Concrete iterator implementations over the various tables, generated by
// the `initialized_cursor!` / `initialized_rev_cursor!` macros
// (table name, key type, value type, and — where applicable — the
// transaction trait and its error associated type).
initialized_cursor!(changeset, ChangeId, u64, ChannelTxnT, GraphError);
initialized_cursor!(
    revchangeset,
    u64,
    (ChangeId, Merkle),
    ChannelTxnT,
    GraphError
);
initialized_rev_cursor!(
    revchangeset,
    u64,
    (ChangeId, Merkle),
    ChannelTxnT,
    GraphError
);
initialized_cursor!(tree, OwnedPathId, Inode, TreeTxnT, TreeError);
initialized_cursor!(revtree, Inode, OwnedPathId, TreeTxnT, TreeError);
initialized_cursor!(dep, ChangeId, ChangeId, DepsTxnT, DepsError);
initialized_cursor!(
    partials,
    SmallString,
    Position<ChangeId>,
    TreeTxnT,
    TreeError
);
initialized_cursor!(
    rev_touched_files,
    ChangeId,
    Position<ChangeId>,
    DepsTxnT,
    DepsError
);
initialized_cursor!(
    touched_files,
    Position<ChangeId>,
    ChangeId,
    DepsTxnT,
    DepsError
);
// The remote cursors take no trait/error arguments.
initialized_cursor!(remote, u64, (Hash, Merkle));
initialized_rev_cursor!(remote, u64, (Hash, Merkle));
initialized_cursor!(inodes, Inode, Position<ChangeId>, TreeTxnT, TreeError);
// The reverse inode cursor is only compiled in debug builds.
#[cfg(debug_assertions)]
initialized_cursor!(revinodes, Position<ChangeId>, Inode, TreeTxnT, TreeError);
/// An iterator for nodes adjacent to `key` through an edge with flags smaller than `max_flag`.
pub struct AdjacentIterator<'txn, T: GraphTxnT> {
    // Backend adjacency cursor, advanced by `next_adj`.
    it: T::Adj,
    // The graph being iterated.
    graph: &'txn T::Graph,
    // The transaction the graph belongs to.
    txn: &'txn T,
}
impl<'txn, T: GraphTxnT> Iterator for AdjacentIterator<'txn, T> {
    type Item = Result<Edge, TxnErr<T::GraphError>>;
    /// Delegates to the backend's `next_adj`, yielding each adjacent edge
    /// (or a transaction error) until the backend returns `None`.
    fn next(&mut self) -> Option<Self::Item> {
        self.txn.next_adj(self.graph, &mut self.it)
    }
}
/// Iterator over the changes of a channel that touched the file at `key`
/// (or a path under it), in change-log order.
pub struct PathChangeset<'channel, 'txn: 'channel, T: ChannelTxnT + DepsTxnT> {
    txn: &'txn T,
    channel: &'channel T::Channel,
    // Cursor over the channel's change log (`revchangeset` table).
    iter: Cursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
    // Position of the file whose history is being iterated.
    key: Position<ChangeId>,
}
/// Like [`PathChangeset`], but iterating the change log in reverse order
/// (uses a `RevCursor`).
pub struct RevPathChangeset<'channel, 'txn: 'channel, T: ChannelTxnT + DepsTxnT> {
    txn: &'txn T,
    channel: &'channel T::Channel,
    // Reverse cursor over the channel's change log.
    iter: RevCursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
    // Position of the file whose history is being iterated.
    key: Position<ChangeId>,
}
impl<
    'channel,
    'txn: 'channel,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
> Iterator for PathChangeset<'channel, 'txn, T>
{
    type Item = Result<Hash, TxnErr<T::GraphError>>;
    /// For each change in the log, scan the files it touched; yield the
    /// change's external hash as soon as one of them lies under `self.key`.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(entry) = self.iter.next() {
            let changeid = match entry {
                Err(e) => return Some(Err(e)),
                Ok((_, (changeid, _))) => changeid,
            };
            let touched_iter = match self.txn.iter_rev_touched_files(changeid, None) {
                Err(e) => return Some(Err(e)),
                Ok(it) => it,
            };
            for touched_entry in touched_iter {
                let (candidate, touched) = match touched_entry {
                    Err(e) => return Some(Err(e)),
                    Ok(x) => x,
                };
                // Entries are keyed on the change id: skip earlier ids,
                // stop at the first later one.
                if candidate < changeid {
                    continue;
                } else if candidate > changeid {
                    break;
                }
                match is_ancestor_of(self.txn, T::graph(&self.channel), self.key, touched) {
                    Ok(true) => return self.txn.get_external(changeid).transpose(),
                    Ok(false) => {}
                    Err(e) => return Some(Err(e)),
                }
            }
        }
        None
    }
}
impl<
    'channel,
    'txn: 'channel,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
> Iterator for RevPathChangeset<'channel, 'txn, T>
{
    type Item = Result<Hash, TxnErr<T::GraphError>>;
    /// Same filtering as `PathChangeset`, but walking the log backwards.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // `?` ends the iteration when the log cursor is exhausted.
            let changeid = match self.iter.next()? {
                Err(e) => return Some(Err(e)),
                Ok((_, (changeid, _))) => changeid,
            };
            let touched_iter = match self.txn.iter_rev_touched_files(changeid, None) {
                Err(e) => return Some(Err(e)),
                Ok(it) => it,
            };
            for touched_entry in touched_iter {
                let (candidate, touched) = match touched_entry {
                    Err(e) => return Some(Err(e)),
                    Ok(x) => x,
                };
                // Entries are keyed on the change id: skip earlier ids,
                // stop at the first later one.
                if candidate < changeid {
                    continue;
                } else if candidate > changeid {
                    break;
                }
                match is_ancestor_of(self.txn, T::graph(&self.channel), self.key, touched) {
                    Ok(true) => return self.txn.get_external(changeid).transpose(),
                    Ok(false) => {}
                    Err(e) => return Some(Err(e)),
                }
            }
        }
    }
}
/// Is position `a` an ancestor of position `b` in the folder hierarchy?
///
/// Walks up from `b` through `FOLDER | PARENT` edges (two hops per level:
/// name vertex, then parent inode), allowing pseudo-edges.
fn is_ancestor_of<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    a: Position<ChangeId>,
    b: Position<ChangeId>,
) -> Result<bool, TxnErr<T::GraphError>> {
    let mut todo = vec![b];
    let mut seen = std::collections::HashSet::new();
    debug!("a = {:?}", a);
    while let Some(current) = todo.pop() {
        debug!("pop {:?}", current);
        if a == current {
            return Ok(true);
        }
        if !seen.insert(current) {
            continue;
        }
        for up in iter_adjacent(
            txn,
            channel,
            current.inode_vertex(),
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
        )? {
            let up = up?;
            // Ok, since `up` is in the channel.
            let parent = txn.find_block_end(channel, up.dest).unwrap();
            for grandparent in iter_adjacent(
                txn,
                channel,
                parent,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
            )? {
                let grandparent = grandparent?;
                if grandparent.dest == a {
                    return Ok(true);
                }
                todo.push(grandparent.dest)
            }
        }
    }
    Ok(false)
}
/// Iterator over all channels of a transaction, loading each one by name.
pub struct ChannelIterator<'txn, T: TxnT> {
    txn: &'txn T,
    // Cursor over the channels table.
    cursor: T::ChannelsCursor,
}
impl<'txn, T: TxnT> Iterator for ChannelIterator<'txn, T> {
    type Item = Result<ChannelRef<T>, TxnErr<T::GraphError>>;
    /// Step the underlying cursor and load each named channel in turn.
    fn next(&mut self) -> Option<Self::Item> {
        let step = self.txn.cursor_channels_next(&mut self.cursor);
        match step {
            Ok(None) => None,
            Ok(Some((name, _))) => self.txn.load_channel(name.as_str()).transpose(),
            Err(e) => Some(Err(e)),
        }
    }
}
/// Iterator over all remotes known to a transaction, loading each by name.
pub struct RemotesIterator<'txn, T: TxnT> {
    txn: &'txn T,
    // Cursor over the remotes table.
    cursor: T::RemotesCursor,
}
impl<'txn, T: TxnT> Iterator for RemotesIterator<'txn, T> {
    type Item = Result<RemoteRef<T>, TxnErr<T::GraphError>>;
    /// Step the underlying cursor and load each named remote in turn.
    fn next(&mut self) -> Option<Self::Item> {
        let step = self.txn.cursor_remotes_next(&mut self.cursor);
        match step {
            Err(e) => Some(Err(e)),
            Ok(Some((name, _))) => self.txn.load_remote(name.as_str()).transpose(),
            Ok(None) => None,
        }
    }
}
/// Mutable operations on the graph-related tables of a transaction.
pub trait GraphMutTxnT: GraphTxnT {
    // `put_del!` generates the `put_*`/`del_*` method pairs for a table.
    put_del!(internal, Hash, ChangeId, GraphError);
    put_del!(external, ChangeId, Hash, GraphError);
    /// Insert a key and a value to a graph. Returns `false` if and only if `(k, v)` was already in the graph, in which case no insertion happened.
    fn put_graph(
        &mut self,
        channel: &mut Self::Graph,
        k: Vertex<ChangeId>,
        v: Edge,
    ) -> Result<bool, TxnErr<Self::GraphError>>;
    /// Delete a key and a value from a graph. Returns `true` if and only if `(k, v)` was in the graph.
    ///
    /// A `v` of `None` presumably deletes by key alone — confirm against
    /// the backend implementations.
    fn del_graph(
        &mut self,
        channel: &mut Self::Graph,
        k: Vertex<ChangeId>,
        v: Option<Edge>,
    ) -> Result<bool, TxnErr<Self::GraphError>>;
    /// Split a key `[a, b[` at position `pos`, yielding two keys `[a,
    /// pos[` and `[pos, b[` linked by an edge.
    fn split_block(
        &mut self,
        graph: &mut Self::Graph,
        key: Vertex<ChangeId>,
        pos: ChangePosition,
        buf: &mut Vec<Edge>,
    ) -> Result<(), TxnErr<Self::GraphError>>;
}
/// Mutable operations on channels (a channel bundles a graph with its
/// change log).
pub trait ChannelMutTxnT: ChannelTxnT + GraphMutTxnT {
    /// Mutable access to the graph of a channel.
    fn graph_mut(channel: &mut Self::Channel) -> &mut Self::Graph;
    /// Update the channel's timestamp. The meaning of `t == None` is
    /// backend-defined (not visible here).
    fn touch_channel(&mut self, channel: &mut Self::Channel, t: Option<u64>);
    /// Add a change and a timestamp to a change table. Returns `None` if and only if `(p, t)` was already in the change table, in which case no insertion happened. Returns the new state else.
    fn put_changes(
        &mut self,
        channel: &mut Self::Channel,
        p: ChangeId,
        t: ApplyTimestamp,
        h: &Hash,
    ) -> Result<Option<Merkle>, TxnErr<Self::GraphError>>;
    /// Delete a change from a change table. Returns `true` if and only if `(p, t)` was in the change table.
    fn del_changes(
        &mut self,
        channel: &mut Self::Channel,
        p: ChangeId,
        t: ApplyTimestamp,
    ) -> Result<bool, TxnErr<Self::GraphError>>;
}
/// Mutable operations on the dependency and touched-files tables
/// (each `put_del!` invocation generates a `put_*`/`del_*` method pair).
pub trait DepsMutTxnT: DepsTxnT {
    put_del!(dep, ChangeId, ChangeId, DepsError);
    put_del!(revdep, ChangeId, ChangeId, DepsError);
    put_del!(touched_files, Position<ChangeId>, ChangeId, DepsError);
    put_del!(rev_touched_files, ChangeId, Position<ChangeId>, DepsError);
}
/// Mutable operations on the working-copy (tree/inode) tables.
pub trait TreeMutTxnT: TreeTxnT {
    put_del!(inodes, Inode, Position<ChangeId>, TreeError);
    put_del!(revinodes, Position<ChangeId>, Inode, TreeError);
    put_del!(tree, PathId, Inode, TreeError);
    put_del!(revtree, Inode, PathId, TreeError);
    /// Insert `(k, e)` into the partials table (keyed by path string).
    /// Presumably tracks partial checkouts — confirm at call sites.
    fn put_partials(
        &mut self,
        k: &str,
        e: Position<ChangeId>,
    ) -> Result<bool, TxnErr<Self::TreeError>>;
    /// Delete `(k, e)` (or, with `e == None`, presumably every entry for
    /// `k` — confirm) from the partials table.
    fn del_partials(
        &mut self,
        k: &str,
        e: Option<Position<ChangeId>>,
    ) -> Result<bool, TxnErr<Self::TreeError>>;
}
/// The trait of mutable transactions.
pub trait MutTxnT:
    GraphMutTxnT
    + ChannelMutTxnT
    + DepsMutTxnT<DepsError = <Self as GraphTxnT>::GraphError>
    + TreeMutTxnT<TreeError = <Self as GraphTxnT>::GraphError>
    + TxnT
{
    /// Open a channel, creating it if it is missing. The return type
    /// is a `Rc<RefCell<…>>` in order to avoid:
    /// - opening the same channel twice. Since a channel contains pointers, that could potentially lead to double-borrow issues. We absolutely have to check that at runtime (hence the `RefCell`).
    /// - writing the channel to disk (if the backend is written on the disk) for every minor operation on the channel.
    ///
    /// Additionally, the `Rc` is used to:
    /// - avoid having to commit channels explicitly (channels are
    /// committed automatically upon committing the transaction), and
    /// - to return a value that doesn't borrow the transaction, so
    /// that the channel can actually be used in a mutable transaction.
    fn open_or_create_channel(&mut self, name: &str) -> Result<ChannelRef<Self>, Self::GraphError>;
    /// Create a new channel named `name` with the same contents as `channel`.
    fn fork(
        &mut self,
        channel: &ChannelRef<Self>,
        name: &str,
    ) -> Result<ChannelRef<Self>, ForkError<Self::GraphError>>;
    /// Rename `channel` to `name`.
    fn rename_channel(
        &mut self,
        channel: &mut ChannelRef<Self>,
        name: &str,
    ) -> Result<(), ForkError<Self::GraphError>>;
    /// Delete the channel named `name`; the `bool` presumably reports
    /// whether it existed — confirm against implementations.
    fn drop_channel(&mut self, name: &str) -> Result<bool, Self::GraphError>;
    /// Commit this transaction.
    fn commit(self) -> Result<(), Self::GraphError>;
    /// Open a remote, creating it if it is missing (see
    /// `open_or_create_channel` for the rationale of the ref type).
    fn open_or_create_remote(&mut self, name: &str) -> Result<RemoteRef<Self>, Self::GraphError>;
    /// Insert `(k, v)` into a remote's table.
    fn put_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
        v: (Hash, Merkle),
    ) -> Result<bool, Self::GraphError>;
    /// Delete the entry at `k` from a remote's table.
    fn del_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
    ) -> Result<bool, Self::GraphError>;
    /// Delete an opened remote.
    fn drop_remote(&mut self, remote: RemoteRef<Self>) -> Result<bool, Self::GraphError>;
    /// Delete a remote by name.
    fn drop_named_remote(&mut self, remote: &str) -> Result<bool, Self::GraphError>;
}
/// Insert an inode↔position binding into both the forward (`inodes`) and
/// reverse (`revinodes`) tables, keeping them in sync.
pub(crate) fn put_inodes_with_rev<T: TreeMutTxnT>(
    txn: &mut T,
    inode: Inode,
    position: Position<ChangeId>,
) -> Result<(), TxnErr<T::TreeError>> {
    txn.put_inodes(inode, position)
        .and_then(|_| txn.put_revinodes(position, inode))
        .map(|_| ())
}
/// Delete an inode↔position binding from both the forward and reverse
/// tables. Returns `false` (touching nothing) when the forward binding
/// was absent; panics if the forward binding existed without its reverse.
pub(crate) fn del_inodes_with_rev<T: TreeMutTxnT>(
    txn: &mut T,
    inode: Inode,
    position: Position<ChangeId>,
) -> Result<bool, TxnErr<T::TreeError>> {
    if !txn.del_inodes(inode, Some(position))? {
        return Ok(false);
    }
    // The reverse binding must exist whenever the forward one did.
    assert!(txn.del_revinodes(position, Some(inode))?);
    Ok(true)
}
/// Insert a path↔inode binding into both the forward (`tree`) and reverse
/// (`revtree`) tables, keeping them in sync.
pub(crate) fn put_tree_with_rev<T: TreeMutTxnT>(
    txn: &mut T,
    file_id: PathId,
    inode: Inode,
) -> Result<(), TxnErr<T::TreeError>> {
    txn.put_tree(file_id, inode)
        .and_then(|_| txn.put_revtree(inode, file_id))
        .map(|_| ())
}
/// Delete a path↔inode binding from both the forward and reverse tables.
/// Returns `false` (touching nothing) when the forward binding was absent.
pub(crate) fn del_tree_with_rev<T: TreeMutTxnT>(
    txn: &mut T,
    file_id: PathId,
    inode: Inode,
) -> Result<bool, TxnErr<T::TreeError>> {
    if !txn.del_tree(file_id, Some(inode))? {
        return Ok(false);
    }
    // Entries with an empty basename carry no asserted reverse binding.
    if !file_id.basename.is_empty() {
        assert!(txn.del_revtree(inode, Some(file_id))?);
    }
    Ok(true)
}
/// Delete both halves of an edge (the forward edge stored under the
/// source vertex and the `PARENT` edge stored under the target).
/// The edge may be given in either orientation; it is normalised first.
/// Returns `true` iff both halves were present (and panics if only one was).
pub(crate) fn del_graph_with_rev<T: GraphMutTxnT>(
    txn: &mut T,
    graph: &mut T::Graph,
    mut flag: EdgeFlags,
    mut k0: Vertex<ChangeId>,
    mut k1: Vertex<ChangeId>,
    introduced_by: ChangeId,
) -> Result<bool, TxnErr<T::GraphError>> {
    // Normalise to the forward orientation: `k0 -> k1`, without PARENT.
    if flag.contains(EdgeFlags::PARENT) {
        std::mem::swap(&mut k0, &mut k1);
        flag -= EdgeFlags::PARENT
    }
    debug!("del_graph_with_rev {:?} {:?} {:?}", flag, k0, k1);
    let forward = Edge {
        flag,
        dest: Position {
            change: k1.change,
            pos: k1.start,
        },
        introduced_by,
    };
    let reverse = Edge {
        flag: flag | EdgeFlags::PARENT,
        dest: Position {
            change: k0.change,
            pos: k0.end,
        },
        introduced_by,
    };
    let deleted_forward = txn.del_graph(graph, k0, Some(forward))?;
    let deleted_reverse = txn.del_graph(graph, k1, Some(reverse))?;
    // Both halves must be present or absent together.
    assert!((deleted_forward && deleted_reverse) || (!deleted_forward && !deleted_reverse));
    Ok(deleted_forward && deleted_reverse)
}
/// Insert both halves of an edge: the forward edge under `k0` and the
/// derived `PARENT` edge under `k1`. `flag` must not contain `PARENT`
/// (checked in debug builds). Returns `true` iff both halves were new
/// (and panics if only one was).
pub(crate) fn put_graph_with_rev<T: GraphMutTxnT>(
    txn: &mut T,
    graph: &mut T::Graph,
    flag: EdgeFlags,
    k0: Vertex<ChangeId>,
    k1: Vertex<ChangeId>,
    introduced_by: ChangeId,
) -> Result<bool, TxnErr<T::GraphError>> {
    debug_assert!(!flag.contains(EdgeFlags::PARENT));
    // Self-loops within a change are forbidden.
    if k0.change == k1.change {
        assert_ne!(k0.start_pos(), k1.start_pos());
    }
    // Only pseudo-edges may be introduced by the root change.
    if introduced_by == ChangeId::ROOT {
        assert!(flag.contains(EdgeFlags::PSEUDO));
    }
    debug!("put_graph_with_rev {:?} {:?} {:?}", k0, k1, flag);
    let forward = Edge {
        flag,
        dest: Position {
            change: k1.change,
            pos: k1.start,
        },
        introduced_by,
    };
    let reverse = Edge {
        flag: flag ^ EdgeFlags::PARENT,
        dest: Position {
            change: k0.change,
            pos: k0.end,
        },
        introduced_by,
    };
    let inserted_forward = txn.put_graph(graph, k0, forward)?;
    let inserted_reverse = txn.put_graph(graph, k1, reverse)?;
    // Either both halves were new, or both were already present.
    assert!((inserted_forward && inserted_reverse) || (!inserted_forward && !inserted_reverse));
    Ok(inserted_forward && inserted_reverse)
}
/// Register a newly-applied change in the bookkeeping tables: the
/// external↔internal id maps, the dependency tables (both directions),
/// and the touched-files tables (both directions).
pub(crate) fn register_change<
    T: GraphMutTxnT + DepsMutTxnT<DepsError = <T as GraphTxnT>::GraphError>,
>(
    txn: &mut T,
    internal: ChangeId,
    hash: Hash,
    change: &Change,
) -> Result<(), TxnErr<T::GraphError>> {
    debug!("registering change {:?}", hash);
    // Record the external-hash <-> internal-id mapping in both directions.
    txn.put_external(internal, hash)?;
    txn.put_internal(hash, internal)?;
    for dep in change.dependencies.iter() {
        debug!("dep = {:?}", dep);
        // Dependencies must already be registered; `unwrap` panics otherwise.
        let dep_internal = txn.get_internal(*dep)?.unwrap();
        debug!("{:?} depends on {:?}", internal, dep_internal);
        txn.put_revdep(dep_internal, internal)?;
        txn.put_dep(internal, dep_internal)?;
    }
    // Record, in both directions, which file positions this change touches.
    for hunk in change.changes.iter().flat_map(|r| r.iter()) {
        let (inode, pos) = match *hunk {
            Atom::NewVertex(NewVertex {
                ref inode,
                ref flag,
                ref start,
                ref end,
                ..
            }) => {
                // An empty FOLDER vertex (start == end) — presumably a new
                // file/directory entry — also gets its own position
                // registered as touched below.
                if flag.contains(EdgeFlags::FOLDER) && start == end {
                    (inode, Some(*start))
                } else {
                    (inode, None)
                }
            }
            Atom::EdgeMap(EdgeMap { ref inode, .. }) => (inode, None),
        };
        // Resolve the inode's change id; `None` means "this change", and an
        // unregistered external id also falls back to `internal`.
        let change = if let Some(c) = inode.change {
            txn.get_internal(c)?.unwrap_or(internal)
        } else {
            internal
        };
        let inode = Position {
            change,
            pos: inode.pos,
        };
        debug!("touched: {:?} {:?}", inode, internal);
        txn.put_touched_files(inode, internal)?;
        txn.put_rev_touched_files(internal, inode)?;
        if let Some(pos) = pos {
            let inode = Position {
                change: internal,
                pos,
            };
            txn.put_touched_files(inode, internal)?;
            txn.put_rev_touched_files(internal, inode)?;
        }
    }
    Ok(())
}
/// Return the first `(position, state)` entry of the channel's log at a
/// position `>= pos`, or `None` when the log ends before `pos`.
fn first_state_after<T: ChannelTxnT>(
    txn: &T,
    c: &T::Channel,
    pos: u64,
) -> Result<Option<(u64, Merkle)>, TxnErr<T::GraphError>> {
    let cursor = T::cursor_revchangeset_ref(txn, T::rev_changes(&c), Some(pos))?;
    for entry in cursor {
        let (n, (_, merkle)) = entry?;
        if n >= pos {
            return Ok(Some((n, merkle)));
        }
    }
    Ok(None)
}
/// Return the last `(position, state)` entry of the channel's log, or
/// `None` when the log is empty.
fn last_state<T: ChannelTxnT>(
    txn: &T,
    c: &T::Channel,
) -> Result<Option<(u64, Merkle)>, TxnErr<T::GraphError>> {
    let last = txn
        .rev_cursor_revchangeset(T::rev_changes(&c), None)?
        .next()
        .transpose()?;
    Ok(last.map(|(n, (_, state))| (n, state)))
}
/// Find the last state of c1 that is also in c0.
///
/// Returns `(position in c0, position in c1, state)`. Performs a binary
/// search over positions in c1's log, testing membership in c0 with
/// `channel_has_state`.
pub fn last_common_state<T: ChannelTxnT>(
    txn: &T,
    c0: &T::Channel,
    c1: &T::Channel,
) -> Result<(u64, u64, Merkle), TxnErr<T::GraphError>> {
    let mut a = 0;
    // Start from the head of c1; an empty c1 shares only the zero state.
    let (mut b, mut state) = if let Some(x) = last_state(txn, c1)? {
        x
    } else {
        return Ok((0, 0, Merkle::zero()));
    };
    // Fast path: the head of c1 is already in c0.
    if let Some(aa) = txn.channel_has_state(c0, state)? {
        return Ok((aa, b, state));
    }
    let mut aa = 0;
    // Invariant: the state at `b` is known NOT to be in c0.
    // NOTE(review): when b == a + 1, mid == a; if the state at `mid` IS in
    // c0, `a = mid` makes no progress and this loop would not terminate.
    // Confirm that `first_state_after`'s semantics rule this case out.
    while a < b {
        let mid = (a + b) / 2;
        let (_, s) = first_state_after(txn, c1, mid)?.unwrap();
        state = s;
        if let Some(aa_) = txn.channel_has_state(c0, state)? {
            aa = aa_;
            a = mid
        } else {
            b = mid
        }
    }
    Ok((aa, a, state))
}
/// Check that each inode in the inodes table maps to an alive vertex,
/// and that each inode in the tree table is reachable by only one
/// path. Panics on any violation (debugging helper).
pub fn check_tree_inodes<T: GraphTxnT + TreeTxnT>(txn: &T, channel: &T::Graph) {
    // Sanity check: every inode must reach the root through the revtree
    // table, and its vertex must still be alive in the graph.
    for entry in txn.iter_inodes().unwrap() {
        let (inode, vertex) = entry.unwrap();
        let mut current = inode;
        while !current.is_root() {
            match txn.get_revtree(current, None).unwrap() {
                Some(parent) => current = parent.parent_inode,
                None => panic!("inode = {:?}, inode_ = {:?}", inode, current),
            }
        }
        if !is_alive(txn, &channel, vertex.inode_vertex()).unwrap() {
            for e in iter_adj_all(txn, channel, vertex.inode_vertex()).unwrap() {
                error!("{:?} {:?} {:?}", inode, vertex, e.unwrap())
            }
            panic!(
                "inode {:?}, vertex {:?}, is not alive, {:?}",
                inode,
                vertex,
                tree_path(txn, vertex)
            )
        }
    }
    // Each path id must map to exactly one inode.
    let mut seen = HashMap::new();
    let root_id = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: crate::small_string::SmallString::new(),
    };
    for entry in txn.iter_tree(root_id, None).unwrap() {
        let (id, inode) = entry.unwrap();
        if let Some(previous) = seen.insert(id.clone(), inode) {
            panic!("id {:?} maps to two inodes: {:?} {:?}", id, inode, previous);
        }
    }
}
/// Check that each alive vertex in the graph is reachable, and vice-versa.
///
/// On failure, writes one graphviz dump per offending file (plus a
/// `<name>_all` dump of the whole reachable subgraph) to the current
/// directory, then panics mentioning the caller-supplied `line`.
pub fn check_alive_debug<T: GraphIter + ChannelTxnT, C: crate::changestore::ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &T::Channel,
    line: u32,
) -> Result<(), std::io::Error> {
    let (alive, reachable) = crate::pristine::check_alive(txn, T::graph(channel));
    // Collect the distinct files containing an offending vertex.
    let mut files = HashSet::new();
    for (k, file) in alive.iter() {
        debug!("alive = {:?}, file = {:?}", k, file);
        files.insert(file);
    }
    for (k, file) in reachable.iter() {
        debug!("reachable = {:?}, file = {:?}", k, file);
        files.insert(file);
    }
    for file in files.iter() {
        let file_ = file.unwrap().start_pos();
        let (path, _) = crate::fs::find_path(changes, txn, channel, true, file_).unwrap();
        let path = path.replace("/", "_");
        let name = format!(
            "debug_{:?}_{}_{}",
            path,
            file_.change.to_base32(),
            file_.pos.0
        );
        let mut f = std::fs::File::create(&name)?;
        let graph = crate::alive::retrieve::retrieve(txn, T::graph(channel), file_).unwrap();
        graph.debug(changes, txn, T::graph(channel), false, false, &mut f)?;
        let mut f = std::fs::File::create(&format!("{}_all", name))?;
        debug_root(txn, T::graph(channel), file.unwrap(), &mut f, false)?;
    }
    if !files.is_empty() {
        if !alive.is_empty() {
            panic!("alive call line {}: {:?}", line, alive);
        } else {
            panic!("reachable: {:?}", reachable);
        }
    }
    Ok(())
}
use super::Base32;
use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;
#[doc(hidden)]
/// A Merkle state, implemented as a point on the Ed25519 elliptic curve
/// (see `Merkle::next` for how hashes are folded in).
#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Merkle {
    Ed25519(curve25519_dalek::edwards::EdwardsPoint),
}
#[doc(hidden)]
/// Algorithm tag for Merkle states; serialised as the trailing byte of
/// the base-32 encoding (see `Base32 for Merkle`).
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[repr(u8)]
pub enum MerkleAlgorithm {
    Ed25519 = 1,
}
impl std::fmt::Debug for Merkle {
    // Debug-print states as their (quoted) base-32 form rather than raw
    // curve-point bytes.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{:?}", self.to_base32())
    }
}
impl Merkle {
    /// The initial state: the Ed25519 base point (also used for channels
    /// with an empty log — see `last_common_state`).
    pub fn zero() -> Self {
        Merkle::Ed25519(ED25519_BASEPOINT_POINT)
    }
    /// Fold the hash `h` into this state, producing the next state.
    /// Panics (`unreachable!`) on non-Blake3 hashes.
    pub fn next(&self, h: &super::Hash) -> Self {
        let Merkle::Ed25519(ref point) = *self;
        let scalar = match *h {
            super::Hash::Blake3(bytes) => {
                curve25519_dalek::scalar::Scalar::from_bytes_mod_order(bytes)
            }
            _ => unreachable!(),
        };
        Merkle::Ed25519(point * scalar)
    }
    /// The 32-byte compressed encoding of the underlying curve point.
    pub fn to_bytes(&self) -> [u8; 32] {
        let Merkle::Ed25519(ref point) = *self;
        point.compress().to_bytes()
    }
}
impl super::Base32 for Merkle {
    /// Encode the 32 compressed-point bytes plus a trailing algorithm tag
    /// byte as unpadded base-32.
    fn to_base32(&self) -> String {
        let Merkle::Ed25519(ref point) = *self;
        let compressed = point.compress();
        let mut bytes = [0; 33];
        bytes[..32].clone_from_slice(compressed.as_bytes());
        bytes[32] = MerkleAlgorithm::Ed25519 as u8;
        data_encoding::BASE32_NOPAD.encode(&bytes)
    }
    /// Parses a base-32 string into a `Merkle`. Returns `None` on invalid
    /// base-32, wrong length, unknown algorithm tag, or a point that does
    /// not decompress.
    fn from_base32(s: &[u8]) -> Option<Self> {
        let bytes = data_encoding::BASE32_NOPAD.decode(s).ok()?;
        match bytes.split_last() {
            Some((&alg, point)) if bytes.len() == 33 && alg == MerkleAlgorithm::Ed25519 as u8 => {
                curve25519_dalek::edwards::CompressedEdwardsY::from_slice(point)
                    .decompress()
                    .map(Merkle::Ed25519)
            }
            _ => None,
        }
    }
}
impl std::str::FromStr for Merkle {
type Err = crate::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Some(b) = Self::from_base32(s.as_bytes()) {
Ok(b)
} else {
Err(crate::Error::ParseError { s: s.to_string() })
}
}
}
use super::vertex::*;
use super::inode_metadata::*;
use super::change_id::*;
/// An inode's metadata paired with the position of its vertex in the graph.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)]
#[doc(hidden)]
pub struct InodeVertex {
    // Unix-style permissions and directory bit.
    pub metadata: InodeMetadata,
    // Position of this inode's vertex.
    pub position: Position<ChangeId>,
}
/// Metadata about an inode, including unix-style permissions and
/// whether this inode is a directory.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)]
#[doc(hidden)]
pub struct InodeMetadata(pub u16);
// Directory bit: sits just above the 9 permission bits (0x1ff).
const DIR_BIT: u16 = 0x200;
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
impl InodeMetadata {
    /// Read the file metadata from the file name encoded in the
    /// repository.
    ///
    /// `p` must be the two big-endian bytes produced by
    /// [`InodeMetadata::write`] (length checked in debug builds only).
    ///
    /// NOTE(review): serialisation here is big-endian while the accessors
    /// below use `u16::from_le`/`to_le` (no-ops on little-endian hosts);
    /// confirm the intended behaviour on big-endian targets.
    pub fn from_basename(p: &[u8]) -> Self {
        debug_assert!(p.len() == 2);
        InodeMetadata(BigEndian::read_u16(p))
    }
    /// Create a new file metadata with the given Unix permissions,
    /// and "is directory" bit.
    pub fn new(perm: usize, is_dir: bool) -> Self {
        let mut m = InodeMetadata(0);
        // `perm as u16` silently truncates bits above the low 16.
        m.set_permissions(perm as u16);
        if is_dir {
            m.set_dir()
        } else {
            m.unset_dir()
        }
        m
    }
    /// Permissions of this inode (as in Unix): the low 9 bits.
    pub fn permissions(&self) -> u16 {
        u16::from_le(self.0) & 0x1ff
    }
    /// Set the permissions to the supplied parameters, leaving the
    /// directory bit untouched.
    ///
    /// Note: `perm` is not masked to 9 bits here; bits above 0x1ff would
    /// leak into the flag bits if ever passed.
    pub fn set_permissions(&mut self, perm: u16) {
        let bits = u16::from_le(self.0);
        let perm = (bits & !0x1ff) | perm;
        self.0 = perm.to_le()
    }
    /// Tell whether this `InodeMetadata` is a directory.
    pub fn is_dir(&self) -> bool {
        u16::from_le(self.0) & DIR_BIT != 0
    }
    /// Tell whether this `InodeMetadata` is a file.
    pub fn is_file(&self) -> bool {
        u16::from_le(self.0) & DIR_BIT == 0
    }
    /// Set the metadata to be a directory.
    pub fn set_dir(&mut self) {
        let bits = u16::from_le(self.0);
        self.0 = (bits | DIR_BIT).to_le()
    }
    /// Set the metadata to be a file.
    pub fn unset_dir(&mut self) {
        let bits = u16::from_le(self.0);
        self.0 = (bits & !DIR_BIT).to_le()
    }
    /// Serialise as two big-endian bytes (the inverse of
    /// [`InodeMetadata::from_basename`]).
    pub fn write<W: std::io::Write>(&self, mut w: W) -> std::io::Result<()> {
        w.write_u16::<BigEndian>(self.0)
    }
}
/// A unique identifier for files or directories in the actual
/// file system, to map "files from the graph" to real files.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
// The raw id; only constructible inside the `pristine` module.
pub struct Inode(pub(in crate::pristine) u64);
impl std::fmt::Debug for Inode {
    /// Render as `Inode(<base-32 of the little-endian bytes>)`.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        let bytes = self.0.to_le_bytes();
        write!(fmt, "Inode({})", data_encoding::BASE32_NOPAD.encode(&bytes))
    }
}
impl Inode {
    /// The inode of the repository root.
    pub const ROOT: Inode = Inode(0);
    /// Is this the root inode?
    pub fn is_root(&self) -> bool {
        self.0 == Inode::ROOT.0
    }
    /// Draw a fresh, random inode.
    pub(crate) fn random() -> Self {
        Inode(rand::random())
    }
}
use super::Base32;
// Size in bytes of a Blake3 digest.
pub(crate) const BLAKE3_BYTES: usize = 32;
// Length of the unpadded base-32 rendering of a tagged hash
// (33 bytes -> 53 characters).
pub(crate) const BASE32_BYTES: usize = 53;
/// The external hash of changes.
#[derive(Serialize, Deserialize, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum Hash {
    /// None is the hash of the "null change", which introduced a
    /// single root vertex at the beginning of the repository.
    None,
    /// A Blake3 digest (`BLAKE3_BYTES` = 32 bytes).
    Blake3([u8; BLAKE3_BYTES]),
}
/// Incremental hasher producing a [`Hash`]; currently Blake3 only.
pub(crate) enum Hasher {
    Blake3(blake3::Hasher),
}
impl Default for Hasher {
    /// A fresh Blake3 hasher with an empty state.
    fn default() -> Self {
        Hasher::Blake3(blake3::Hasher::new())
    }
}
impl Hasher {
    /// Feed `bytes` into the underlying hash state.
    pub(crate) fn update(&mut self, bytes: &[u8]) {
        let Hasher::Blake3(ref mut state) = *self;
        state.update(bytes);
    }
    /// Produce the hash of everything fed so far. Takes `&self`, so the
    /// hasher state is left intact.
    pub(crate) fn finish(&self) -> Hash {
        let Hasher::Blake3(ref state) = *self;
        let digest = state.finalize();
        let mut out = [0; BLAKE3_BYTES];
        out.clone_from_slice(digest.as_bytes());
        Hash::Blake3(out)
    }
}
impl std::fmt::Debug for Hash {
    // Debug-print hashes as their (quoted) base-32 form rather than raw bytes.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "{:?}", self.to_base32())
    }
}
/// Algorithm used to compute change hashes.
///
/// The discriminant is the tag byte used by `Hash::to_bytes` (leading)
/// and `Hash::to_base32` (trailing).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[repr(u8)]
pub enum HashAlgorithm {
    None = 0,
    Blake3 = 1,
}
impl Hash {
    /// Serialise as a 1-byte algorithm tag followed by the 32 digest bytes.
    /// Panics (`unimplemented!`) on `Hash::None`.
    pub fn to_bytes(&self) -> [u8; 1 + BLAKE3_BYTES] {
        match *self {
            Hash::None => unimplemented!(),
            Hash::Blake3(ref digest) => {
                let mut out = [0; 1 + BLAKE3_BYTES];
                out[0] = HashAlgorithm::Blake3 as u8;
                out[1..].clone_from_slice(digest);
                out
            }
        }
    }
    /// Inverse of [`Hash::to_bytes`]; extra trailing bytes are ignored.
    ///
    /// Fix: the previous version accepted `s.len() >= 33` in the guard but
    /// then called `clone_from_slice(&s[1..])` on a 32-byte buffer, which
    /// panics for any input longer than 33 bytes (`clone_from_slice`
    /// requires equal lengths). We now slice exactly `BLAKE3_BYTES`.
    pub fn from_bytes(s: &[u8]) -> Option<Self> {
        if s.len() < 1 + BLAKE3_BYTES || s[0] != HashAlgorithm::Blake3 as u8 {
            return None;
        }
        let mut digest = [0; BLAKE3_BYTES];
        digest.clone_from_slice(&s[1..1 + BLAKE3_BYTES]);
        Some(Hash::Blake3(digest))
    }
    /// Parse a hash from a (possibly partial) base-32 prefix: the missing
    /// tail is padded with `'A'` (zero bits) before decoding. Returns
    /// `None` when `s` is longer than a full encoding or is not valid
    /// base-32.
    pub fn from_prefix(s: &str) -> Option<Self> {
        if s.len() > BASE32_BYTES {
            return None;
        }
        let mut b32 = [b'A'; BASE32_BYTES];
        b32[..s.len()].clone_from_slice(s.as_bytes());
        let bytes = data_encoding::BASE32_NOPAD.decode(&b32).ok()?;
        let mut digest = [0; BLAKE3_BYTES];
        digest.clone_from_slice(&bytes[..BLAKE3_BYTES]);
        Some(Hash::Blake3(digest))
    }
}
impl super::Base32 for Hash {
    /// Returns the base-32 representation of a hash: `None` encodes as a
    /// single zero byte, Blake3 as the 32 digest bytes followed by a
    /// trailing algorithm tag.
    fn to_base32(&self) -> String {
        match *self {
            Hash::None => data_encoding::BASE32_NOPAD.encode(&[0]),
            Hash::Blake3(ref digest) => {
                let mut bytes = [0; 1 + BLAKE3_BYTES];
                bytes[..BLAKE3_BYTES].clone_from_slice(digest);
                bytes[BLAKE3_BYTES] = HashAlgorithm::Blake3 as u8;
                data_encoding::BASE32_NOPAD.encode(&bytes)
            }
        }
    }
    /// Parses a base-32 string into a hash; `None` on invalid base-32,
    /// wrong length or unknown algorithm tag.
    fn from_base32(s: &[u8]) -> Option<Self> {
        let bytes = data_encoding::BASE32_NOPAD.decode(s).ok()?;
        if bytes == [0] {
            return Some(Hash::None);
        }
        if bytes.len() != BLAKE3_BYTES + 1 || bytes[BLAKE3_BYTES] != HashAlgorithm::Blake3 as u8 {
            return None;
        }
        let mut digest = [0; BLAKE3_BYTES];
        digest.clone_from_slice(&bytes[..BLAKE3_BYTES]);
        Some(Hash::Blake3(digest))
    }
}
impl std::str::FromStr for Hash {
type Err = crate::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Some(b) = Self::from_base32(s.as_bytes()) {
Ok(b)
} else {
Err(crate::Error::ParseError { s: s.to_string() })
}
}
}
#[test]
fn from_to() {
    // A Blake3 hash must round-trip through base-32…
    let mut hasher = Hasher::default();
    hasher.update(b"blabla");
    let hash = hasher.finish();
    assert_eq!(Hash::from_base32(hash.to_base32().as_bytes()), Some(hash));
    // …and so must the special `None` hash.
    let none = Hash::None;
    assert_eq!(Hash::from_base32(none.to_base32().as_bytes()), Some(none));
    // A base-32 string of the wrong length must not parse.
    let bad = data_encoding::BASE32_NOPAD.encode(&[19, 18, 17]);
    assert_eq!(Hash::from_base32(bad.as_bytes()), None);
}
use super::change_id::*;
use super::vertex::*;
bitflags! {
    /// Possible flags of edges.
    ///
    /// Bit values 2, 8 and 64 are deliberately skipped in this definition.
    #[derive(Serialize, Deserialize)]
    pub struct EdgeFlags: u8 {
        // NOTE(review): BLOCK is undocumented here; its use alongside
        // `find_block`/`split_block` elsewhere in this file suggests it
        // marks block-contiguity edges — confirm.
        const BLOCK = 1;
        /// A pseudo-edge, computed when applying the change to
        /// restore connectivity, and/or mark conflicts.
        const PSEUDO = 4;
        /// An edge encoding file system hierarchy.
        const FOLDER = 16;
        /// A "reverse" edge (all edges in the graph have a reverse edge).
        const PARENT = 32;
        /// An edge whose target (if not also `PARENT`) or
        /// source (if also `PARENT`) is marked as deleted.
        const DELETED = 128;
    }
}
impl EdgeFlags {
    /// Shorthand for `DELETED | BLOCK`.
    #[inline]
    pub(crate) fn db() -> Self {
        EdgeFlags::DELETED | EdgeFlags::BLOCK
    }
    /// Shorthand for `BLOCK | PARENT`.
    #[inline]
    pub(crate) fn bp() -> Self {
        EdgeFlags::BLOCK | EdgeFlags::PARENT
    }
    /// Shorthand for `PSEUDO | FOLDER`.
    #[inline]
    pub(crate) fn pseudof() -> Self {
        EdgeFlags::PSEUDO | EdgeFlags::FOLDER
    }
    /// Shorthand for `BLOCK | PSEUDO | FOLDER`.
    #[inline]
    pub(crate) fn alive_children() -> Self {
        EdgeFlags::BLOCK | EdgeFlags::PSEUDO | EdgeFlags::FOLDER
    }
    /// Shorthand for `PARENT | FOLDER`.
    #[inline]
    pub(crate) fn parent_folder() -> Self {
        EdgeFlags::PARENT | EdgeFlags::FOLDER
    }
    /// Does this edge carry the `DELETED` flag?
    #[inline]
    pub(crate) fn is_deleted(&self) -> bool {
        self.contains(Self::DELETED)
    }
    /// Does this edge carry the `PARENT` flag?
    #[inline]
    pub(crate) fn is_parent(&self) -> bool {
        self.contains(Self::PARENT)
    }
    /// Does this edge carry the `FOLDER` flag?
    #[inline]
    pub(crate) fn is_folder(&self) -> bool {
        self.contains(Self::FOLDER)
    }
    /// Does this edge carry the `BLOCK` flag?
    #[inline]
    pub(crate) fn is_block(&self) -> bool {
        self.contains(Self::BLOCK)
    }
}
/// The target half of an edge in the repository graph. The source vertex
/// is the key under which this `Edge` is stored (see `put_graph_with_rev`).
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[doc(hidden)]
pub struct Edge {
    /// Flags of this edge.
    pub flag: EdgeFlags,
    /// Target of this edge.
    pub dest: Position<ChangeId>,
    /// Change that introduced this edge (possibly as a
    /// pseudo-edge, i.e. not explicitly in the change, but
    /// computed from it).
    pub introduced_by: ChangeId,
}
use super::*;
use byteorder::ByteOrder;
/// Errors raised while dumping or restoring a channel.
#[derive(Debug, Error)]
pub enum ChannelDumpError<T: std::error::Error + 'static> {
    /// A backend transaction error.
    #[error(transparent)]
    Txn(T),
    /// The target channel name is already taken.
    #[error("Channel name already exists: {0}")]
    ChannelNameExists(String),
}
impl<T: std::error::Error + 'static> std::convert::From<TxnErr<T>> for ChannelDumpError<T> {
    /// Unwrap the `TxnErr` newtype and wrap the inner backend error.
    fn from(e: TxnErr<T>) -> Self {
        ChannelDumpError::Txn(e.0)
    }
}
// State machine for `DumpChannel`: first stream the change log (with each
// change's dependencies interleaved), then stream the graph.
enum DumpChannelState<
    T: ChannelTxnT + GraphIter + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    RT: std::ops::Deref<Target = T>,
> {
    Changes {
        // Cursor over the channel's change log.
        log: crate::pristine::Cursor<T, RT, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        // Change whose dependencies are currently being emitted.
        current: ChangeId,
        // Dependency cursor for `current`, if any.
        deps: Option<crate::pristine::Cursor<T, RT, T::DepCursor, ChangeId, ChangeId>>,
    },
    Graph {
        // Cursor over the graph table.
        cursor: T::GraphCursor,
        // Last vertex emitted, used to detect vertex boundaries.
        current: Vertex<ChangeId>,
        // True until the first vertex has been emitted.
        is_first: bool,
    },
}
/// Streaming serialiser for a channel; implemented as an `Iterator`
/// yielding framed `DumpChunk`s (see the `Iterator` impl below).
pub struct DumpChannel<
    T: ChannelTxnT + GraphIter + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    RT: std::ops::Deref<Target = T>,
    C: std::ops::Deref<Target = T::Channel>,
> {
    // Current position in the dump; `None` when the dump is finished.
    state: Option<DumpChannelState<T, RT>>,
    txn: RT,
    channel: C,
}
/// One framed message of a channel dump; each variant wraps a fixed-size
/// buffer whose first byte is a `Msg` tag (or the chunk-specific framing).
pub enum DumpChunk {
    Dep([u8; 9]),
    Hash([u8; 42]),
    Edge([u8; 50]),
    End([u8; 25]),
}
impl std::ops::Deref for DumpChunk {
type Target = [u8];
fn deref(&self) -> &Self::Target {
match *self {
DumpChunk::Dep(ref h) => h,
DumpChunk::Hash(ref h) => h,
DumpChunk::Edge(ref e) => e,
DumpChunk::End(ref e) => e,
}
}
}
// Tag bytes framing the messages of the channel dump protocol.
#[repr(u8)]
enum Msg {
    Hash = 253,
    Dep = 254,
    Vertex = 255,
}
// Streams the channel as `DumpChunk` messages: the changeset log first
// (each change hash followed by its dependencies), then the graph (a
// vertex header followed by its PARENT edges), ending with the
// `Vertex::BOTTOM` sentinel.
impl<
    T: ChannelTxnT + GraphIter + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    RT: std::ops::Deref<Target = T> + Clone,
    C: std::ops::Deref<Target = T::Channel>,
> Iterator for DumpChannel<T, RT, C>
{
    type Item = Result<DumpChunk, TxnErr<T::GraphError>>;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // `state` is taken here and re-inserted before each `return`.
            match self.state.take() {
                Some(DumpChannelState::Changes {
                    mut log,
                    current,
                    deps,
                }) => {
                    // First drain the dependencies of `current`.
                    if let Some(mut deps) = deps {
                        while let Some(x) = deps.next() {
                            let (h, dep) = match x {
                                Ok(x) => x,
                                Err(e) => return Some(Err(e)),
                            };
                            // Skip keys below `current`, stop above it.
                            if h > current {
                                break;
                            } else if h < current {
                                continue;
                            }
                            let mut msg = [Msg::Dep as u8; 9];
                            byteorder::BigEndian::write_u64(&mut msg[1..], dep.0);
                            self.state = Some(DumpChannelState::Changes {
                                log,
                                current,
                                deps: Some(deps),
                            });
                            debug!("dep msg = {:?}", msg);
                            return Some(Ok(DumpChunk::Dep(msg)));
                        }
                    }
                    // Then send the next change hash from the log.
                    if let Some(x) = log.next() {
                        let (_, (h, _)) = match x {
                            Ok(e) => e,
                            Err(e) => return Some(Err(e)),
                        };
                        let deps = match T::iter_dep_ref(self.txn.clone(), h) {
                            Ok(e) => Some(e),
                            Err(e) => return Some(Err(e)),
                        };
                        self.state = Some(DumpChannelState::Changes {
                            log,
                            current: h,
                            deps,
                        });
                        let ext = match self.txn.get_external(h) {
                            Ok(ext) => ext.unwrap(),
                            Err(e) => return Some(Err(e)),
                        };
                        // Tag + 33-byte external hash + big-endian ChangeId.
                        let mut msg = [Msg::Hash as u8; 1 + 33 + 8];
                        (&mut msg[1..34]).clone_from_slice(&ext.to_bytes()[..]);
                        byteorder::BigEndian::write_u64(&mut msg[34..], h.0);
                        return Some(Ok(DumpChunk::Hash(msg)));
                    } else {
                        // Log exhausted: switch to dumping the graph.
                        self.state = Some(DumpChannelState::Graph {
                            cursor: match self.txn.iter_graph(T::graph(&self.channel)) {
                                Ok(c) => c,
                                Err(e) => return Some(Err(e)),
                            },
                            current: Vertex::ROOT,
                            is_first: true,
                        })
                    }
                }
                Some(DumpChannelState::Graph {
                    mut cursor,
                    mut current,
                    is_first,
                }) => {
                    if let Some(x) = self.txn.next_graph(T::graph(&self.channel), &mut cursor) {
                        let (v, e) = match x {
                            Err(e) => return Some(Err(e)),
                            Ok(x) => x,
                        };
                        // Only PARENT edges are dumped; the other
                        // direction is rebuilt on load (`finish_channel`).
                        if !e.flag.contains(EdgeFlags::PARENT) {
                            self.state = Some(DumpChannelState::Graph {
                                cursor,
                                current,
                                is_first,
                            });
                            continue;
                        }
                        if v != current || is_first {
                            // New source vertex: 50-byte header + edge chunk.
                            let mut buf = [Msg::Vertex as u8; 50];
                            byteorder::LittleEndian::write_u64(&mut buf[1..], v.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[9..], v.start.0);
                            byteorder::LittleEndian::write_u64(&mut buf[17..], v.end.0);
                            current = v;
                            buf[25] = e.flag.bits();
                            byteorder::LittleEndian::write_u64(&mut buf[26..], e.dest.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[34..], e.dest.pos.0);
                            byteorder::LittleEndian::write_u64(&mut buf[42..], e.introduced_by.0);
                            self.state = Some(DumpChannelState::Graph {
                                cursor,
                                current,
                                is_first: false,
                            });
                            debug!("sending {:?}", &buf[..]);
                            return Some(Ok(DumpChunk::Edge(buf)));
                        } else {
                            // Same vertex as before: bare 25-byte edge.
                            let mut buf = [0; 25];
                            buf[0] = e.flag.bits();
                            byteorder::LittleEndian::write_u64(&mut buf[1..], e.dest.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[9..], e.dest.pos.0);
                            byteorder::LittleEndian::write_u64(&mut buf[17..], e.introduced_by.0);
                            self.state = Some(DumpChannelState::Graph {
                                cursor,
                                current,
                                is_first: false,
                            });
                            debug!("sending {:?}", &buf[..]);
                            return Some(Ok(DumpChunk::End(buf)));
                        }
                    } else {
                        // Graph exhausted: send the BOTTOM sentinel and stop.
                        self.state = None;
                        let mut buf = [Msg::Vertex as u8; 25];
                        byteorder::LittleEndian::write_u64(&mut buf[1..], Vertex::BOTTOM.change.0);
                        byteorder::LittleEndian::write_u64(&mut buf[9..], Vertex::BOTTOM.start.0);
                        byteorder::LittleEndian::write_u64(&mut buf[17..], Vertex::BOTTOM.end.0);
                        debug!("sending {:?}", buf);
                        return Some(Ok(DumpChunk::End(buf)));
                    }
                }
                None => return None,
            }
        }
    }
}
/// Dump `channel` as an iterator of wire-format chunks, starting with
/// the changeset log (from position 0), then the graph.
pub fn dump_channel<
    T: ChannelTxnT + GraphIter + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    RT: std::ops::Deref<Target = T> + Clone,
    C: std::ops::Deref<Target = T::Channel>,
>(
    txn: RT,
    channel: C,
) -> Result<DumpChannel<T, RT, C>, TxnErr<T::GraphError>> {
    Ok(DumpChannel {
        state: Some(DumpChannelState::Changes {
            log: changeid_log_ref(txn.clone(), &channel, 0)?,
            current: ChangeId::ROOT,
            deps: None,
        }),
        txn,
        channel,
    })
}
/// Incrementally rebuilds a channel from the byte stream produced by
/// `DumpChannel`; feed chunks to `read` until it returns `Ok(true)`.
pub struct ChannelFromDump<'a, T: ChannelMutTxnT> {
    txn: &'a mut T,
    channel: ChannelRef<T>,
    // Reassembles messages that span chunk boundaries.
    buf: Buf,
    // Vertex whose edges are currently being received.
    current: Vertex<ChangeId>,
    // Edges received so far; their reverses are added by `finish_channel`.
    reverses: Vec<(Vertex<ChangeId>, Edge)>,
    // Maps a vertex's end position to its start position, used to
    // reconstruct source vertices when adding reverse edges.
    starts: HashMap<Position<ChangeId>, ChangePosition>,
    // Change whose dependencies are currently being received.
    current_changeid: Option<ChangeId>,
    // Remaps sender-side change ids that collide with local ones.
    local_changeid: HashMap<ChangeId, ChangeId>,
    pub alive: HashSet<ChangeId>,
}
/// The following type does zero-copy buffering: if there are enough
/// bytes in the part we just read, we just return these bytes. Else,
/// we copy the bytes in cache, and complete the cache once we have
/// enough bytes.
///
/// The size of the cache could be generic, but since edges and
/// vertices take 25 bytes, hashes 33, and [u8; 33] doesn't implement
/// AsMut<[u8]>, we just use a fixed-sized array of length 33.
pub struct Buf {
    /// Internal cache.
    buf: [u8; 68],
    /// Length of the internal cache that is currently used.
    buf_len: usize,
    /// Position in the last buffer that was read. The `read` method
    /// must be called with the same buffer until that method returns
    /// `None`.
    pos: usize,
}
impl Buf {
    /// Create a new buffer.
    fn new() -> Self {
        Buf {
            buf: [0; 68],
            buf_len: 0,
            pos: 0,
        }
    }
    /// Read `wanted` number of bytes from `bytes` using the internal
    /// cache if needed. This method must be called with the same
    /// `bytes` buffer until it returns `None`.
    ///
    /// Returns `Some(slice)` of exactly `wanted` bytes when available,
    /// or `None` after stashing the unread tail of `bytes` into the
    /// internal cache (both `None` branches reset `pos` to 0).
    fn read<'a>(&'a mut self, bytes: &'a [u8], wanted: usize) -> Option<&'a [u8]> {
        trace!(
            "bytes = {:?}, self.buf = {:?} {:?} {:?}",
            bytes,
            &self.buf[..],
            self.buf_len,
            self.pos
        );
        assert!(wanted < self.buf.len());
        if self.buf_len > 0 {
            // A partial message is cached; complete it from `bytes`.
            let needs = wanted - self.buf_len;
            if self.pos + needs > bytes.len() {
                // Not enough bytes to complete the internal buffer,
                // we need to save these extra bytes.
                let len = self.buf_len + bytes.len();
                (&mut self.buf[self.buf_len..len]).clone_from_slice(&bytes[self.pos..]);
                self.buf_len = len;
                self.pos = 0;
                None
            } else {
                // There are enough bytes, output them.
                (&mut self.buf[self.buf_len..wanted])
                    .clone_from_slice(&bytes[self.pos..self.pos + needs]);
                self.buf_len = 0;
                self.pos += needs;
                Some(&self.buf[..wanted])
            }
        } else if bytes.len() - self.pos >= wanted {
            // The internal buffer is empty, and `bytes` is long enough.
            let buf = &bytes[self.pos..self.pos + wanted];
            self.pos += wanted;
            Some(buf)
        } else {
            // The internal buffer is empty and `bytes` is too short,
            // save the extra bytes.
            self.buf_len = bytes.len() - self.pos;
            (&mut self.buf[..self.buf_len]).clone_from_slice(&bytes[self.pos..]);
            self.pos = 0;
            None
        }
    }
    /// Byte at the current read position of `bytes`; the caller uses it
    /// to dispatch on the message tag.
    ///
    /// NOTE(review): when a partial message is cached (`buf_len > 0`),
    /// the cached tag byte lives in `self.buf[0]`, yet this still reads
    /// from `bytes` — confirm that dumps are only ever split on message
    /// boundaries, or that callers account for this.
    fn current<'a>(&self, bytes: &'a [u8]) -> Option<&'a u8> {
        // debug!("self.pos = {:?}", self.pos);
        bytes.get(self.pos)
    }
}
impl<'a, T: ChannelMutTxnT + DepsMutTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>>
    ChannelFromDump<'a, T>
{
    /// Prepare to rebuild `channel` from a dump streamed into `read`.
    pub fn new(txn: &'a mut T, channel: ChannelRef<T>) -> Self {
        let mut starts = HashMap::with_capacity(4096);
        // The root vertex starts at its own position.
        starts.insert(Position::ROOT, Position::ROOT.pos);
        ChannelFromDump {
            txn,
            channel,
            buf: Buf::new(),
            current: Vertex::ROOT,
            reverses: Vec::with_capacity(4096),
            starts,
            current_changeid: None,
            local_changeid: HashMap::new(),
            alive: HashSet::new(),
        }
    }
    /// Feed one chunk of a channel dump.
    ///
    /// Returns `Ok(true)` once the terminating `Vertex::BOTTOM` message
    /// has been received (the channel is then complete), `Ok(false)`
    /// when more bytes are needed.
    ///
    /// Fix: the `Hash` and `Dep` branches now `break` when `Buf::read`
    /// returns `None` (partial message), exactly like the `Vertex` and
    /// edge branches already did. Previously they fell through and
    /// looped again, re-dispatching on an already-consumed byte and
    /// appending the same chunk to the internal cache a second time.
    pub fn read(&mut self, bytes: &[u8]) -> Result<bool, TxnErr<T::GraphError>> {
        let mut channel = self.channel.borrow_mut();
        while let Some(&cur) = self.buf.current(bytes) {
            debug!("cur = {:?}", cur);
            if cur == Msg::Hash as u8 {
                if let Some(buf) = self.buf.read(bytes, 42) {
                    let hash = Hash::from_bytes(&buf[1..34]).unwrap();
                    let mut p = ChangeId(byteorder::BigEndian::read_u64(&buf[34..]));
                    // Test if `p` is already taken for another hash.
                    if let Some(hh) = self.txn.get_external(p)? {
                        if hh != hash {
                            // Collision with a local id: allocate a fresh
                            // one and remember the remapping.
                            let pp = make_changeid(self.txn, &hash)?;
                            self.local_changeid.insert(p, pp);
                            p = pp
                        }
                    }
                    let t = T::apply_counter(&channel);
                    debug!("hash = {:?} {:?}", hash, p);
                    self.txn.put_external(p, hash)?;
                    self.txn.put_internal(hash, p)?;
                    self.txn.put_changes(&mut channel, p, t, &hash)?;
                    self.current_changeid = Some(p);
                } else {
                    // Partial message: wait for the next chunk.
                    break;
                }
            } else if cur == Msg::Dep as u8 {
                debug!("dep");
                if let Some(buf) = self.buf.read(bytes, 9) {
                    let mut a = ChangeId(byteorder::BigEndian::read_u64(&buf[1..9]));
                    if let Some(aa) = self.local_changeid.get(&a) {
                        a = *aa
                    }
                    // Dependencies belong to the change whose hash was
                    // received last.
                    if let Some(cur) = self.current_changeid {
                        self.txn.put_dep(cur, a)?;
                        self.txn.put_revdep(a, cur)?;
                    }
                    debug!("cur = {:?}, a = {:?}", self.current_changeid, a);
                } else {
                    // Partial message: wait for the next chunk.
                    break;
                }
            } else if cur == Msg::Vertex as u8 {
                // New vertex
                if let Some(buf) = self.buf.read(bytes, 25) {
                    self.current = read_vertex(buf);
                    debug!("new vertex {:?}", self.current);
                    if self.current == Vertex::BOTTOM {
                        // End of dump: add the reverse edges and stop.
                        finish_channel(self.txn, &mut channel, &self.reverses, &self.starts)?;
                        return Ok(true);
                    }
                    if let Some(aa) = self.local_changeid.get(&self.current.change) {
                        self.current.change = *aa
                    }
                } else {
                    break;
                }
            } else if let Some(buf) = self.buf.read(bytes, 25) {
                // Edge
                let mut edge = read_edge(buf);
                if let Some(aa) = self.local_changeid.get(&edge.dest.change) {
                    edge.dest.change = *aa
                }
                if let Some(aa) = self.local_changeid.get(&edge.introduced_by) {
                    edge.introduced_by = *aa
                }
                if !edge.flag.contains(EdgeFlags::DELETED) {
                    self.alive.insert(edge.dest.change);
                }
                debug!("put edge {:?} {:?}", self.current, edge);
                self.txn
                    .put_graph(T::graph_mut(&mut channel), self.current, edge)?;
                self.reverses.push((self.current, edge));
                self.starts
                    .insert(self.current.end_pos(), self.current.start);
            } else {
                break;
            }
        }
        // Reset the read position for the next chunk.
        self.buf.pos = 0;
        Ok(false)
    }
    /// The edges received so far (source vertex, edge), in arrival order.
    pub fn edges(&self) -> &[(Vertex<ChangeId>, Edge)] {
        &self.reverses[..]
    }
}
/// Complete a loaded channel by inserting, for every received edge, the
/// corresponding edge in the opposite direction (its `PARENT` flag
/// toggled), reconstructing the source vertex from the `ends` map.
fn finish_channel<T: super::ChannelMutTxnT>(
    txn: &mut T,
    channel: &mut T::Channel,
    reverses: &[(Vertex<ChangeId>, Edge)],
    ends: &HashMap<Position<ChangeId>, ChangePosition>,
) -> Result<(), TxnErr<T::GraphError>> {
    debug!("ends: {:?}", ends);
    for &(v, e) in reverses {
        debug!("{:?}", e);
        // The reverse edge starts at the vertex ending at `e.dest`.
        let start = *ends.get(&e.dest).unwrap();
        let source = Vertex {
            change: e.dest.change,
            start,
            end: e.dest.pos,
        };
        let mut rev = e;
        rev.flag ^= EdgeFlags::PARENT;
        rev.dest = v.start_pos();
        txn.put_graph(T::graph_mut(channel), source, rev)?;
    }
    Ok(())
}
/// Decode a vertex message: a tag byte followed by three little-endian
/// `u64`s (change, start, end).
fn read_vertex(bytes: &[u8]) -> Vertex<ChangeId> {
    // Byte 0 is the message tag; the payload starts at offset 1.
    Vertex {
        change: ChangeId(byteorder::LittleEndian::read_u64(&bytes[1..])),
        start: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[9..])),
        end: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[17..])),
    }
}
/// Decode a 25-byte edge record: a flags byte followed by three
/// little-endian `u64`s (dest change, dest position, introduced_by).
fn read_edge(bytes: &[u8]) -> Edge {
    // Panics if the flags byte contains unknown bits.
    let flag = EdgeFlags::from_bits(bytes[0]).unwrap();
    Edge {
        flag,
        dest: Position {
            change: ChangeId(byteorder::LittleEndian::read_u64(&bytes[1..])),
            pos: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[9..])),
        },
        introduced_by: ChangeId(byteorder::LittleEndian::read_u64(&bytes[17..])),
    }
}
use super::Base32;
use byteorder::{ByteOrder, LittleEndian};
/// A 64-bit change identifier local to a pristine; mapped to and from
/// the change's external hash via the `external`/`internal` tables.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[doc(hidden)]
pub struct ChangeId(pub u64);
// `Debug` shows the base32 form, matching the user-visible spelling.
impl std::fmt::Debug for ChangeId {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "ChangeId({})", self.to_base32())
    }
}
impl ChangeId {
    /// The identifier of the root (id 0).
    pub const ROOT: ChangeId = ChangeId(0);
    /// Whether this identifier is `ChangeId::ROOT`.
    pub fn is_root(&self) -> bool {
        self.0 == Self::ROOT.0
    }
}
impl super::Base32 for ChangeId {
    /// Encode the 8-byte little-endian representation as unpadded base32.
    fn to_base32(&self) -> String {
        let mut b = [0; 8];
        LittleEndian::write_u64(&mut b, self.0);
        data_encoding::BASE32_NOPAD.encode(&b)
    }
    /// Decode an unpadded base32 string produced by `to_base32`.
    ///
    /// Returns `None` on invalid base32, or if the input decodes to more
    /// than 8 bytes (a `ChangeId` is exactly a `u64`).
    fn from_base32(b: &[u8]) -> Option<Self> {
        let mut dec = [0; 8];
        let len = if let Ok(len) = data_encoding::BASE32_NOPAD.decode_len(b.len()) {
            len
        } else {
            return None;
        };
        // Fix: an oversized input used to panic when slicing `dec[..len]`
        // with `len > 8`; reject it instead, since `b` may be user input.
        if len > dec.len() {
            return None;
        }
        if data_encoding::BASE32_NOPAD
            .decode_mut(b, &mut dec[..len])
            .is_ok()
        {
            Some(ChangeId(LittleEndian::read_u64(&dec)))
        } else {
            None
        }
    }
}
//! Treating strings as paths. For portability reasons, paths must
//! internally be treated as strings, and converted to paths only by
//! the backend, if required (in-memory backends will typically not
//! need that conversion).
/// Returns the parent of the path, if it exists. This function tries
/// to replicate the behaviour of `std::path::Path::parent`, but with
/// `&str` instead of `Path`.
///
/// ```ignore
/// use libpijul::path::parent;
/// assert_eq!(parent("/foo/bar"), Some("/foo"));
/// assert_eq!(parent("foo"), Some(""));
/// assert_eq!(parent("/"), None);
/// assert_eq!(parent(""), None);
/// ```
pub fn parent(mut path: &str) -> Option<&str> {
    // Strip a trailing separator, then cut at the last remaining one.
    while !(path.is_empty() || path == "/") {
        match path.rfind('/') {
            // A single relative component: its parent is the empty path.
            None => return Some(""),
            Some(i) => {
                let (head, tail) = path.split_at(i);
                if tail == "/" {
                    // Trailing slash only: drop it and try again.
                    path = head
                } else {
                    return Some(head);
                }
            }
        }
    }
    // The root and the empty path have no parent.
    None
}
/// Returns the file name of the path. if it exists. This function
/// tries to replicate the behaviour of `std::path::Path::file_name`,
/// but with `&str` instead of `Path`.
///
/// Like the original, returns `None` if the path terminates in `..`.
///
/// ```ignore
/// use libpijul::path::file_name;
/// assert_eq!(file_name("/usr/bin/"), Some("bin"));
/// assert_eq!(file_name("tmp/foo.txt"), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/."), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/.//"), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/.."), None);
/// assert_eq!(file_name("/"), None);
/// ```
pub fn file_name(mut path: &str) -> Option<&str> {
    if path.is_empty() || path == "/" {
        return None;
    }
    // Walk backwards, skipping trailing separators and `.` components.
    while let Some(i) = path.rfind('/') {
        let candidate = &path[i + 1..];
        match candidate {
            // Like `Path::file_name`, a path ending in `..` has no name.
            ".." => return None,
            // Trailing separators and `.` components are transparent.
            "" | "." => path = &path[..i],
            _ => return Some(candidate),
        }
    }
    // No separator left: the whole remainder is the file name.
    Some(path)
}
// Unit test covering the examples from `file_name`'s documentation.
#[test]
fn test_file_name() {
    assert_eq!(file_name("/usr/bin/"), Some("bin"));
    assert_eq!(file_name("tmp/foo.txt"), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/."), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/.//"), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/.."), None);
    assert_eq!(file_name("/"), None);
}
/// Returns an iterator of the non-empty components of a path,
/// delimited by `/`. Note that `.` and `..` are treated as
/// components.
#[cfg(not(windows))]
pub fn components(path: &str) -> Components {
    Components(path.split('/'))
}
// NOTE(review): the Windows variant splits on `\\` even though this
// module's docs say paths are treated internally as `/`-separated
// strings — confirm callers really pass backslash paths on Windows.
#[cfg(windows)]
pub fn components(path: &str) -> Components {
    Components(path.split('\\'))
}
/// Iterator over the non-empty components of a `/`-separated path
/// (see `components`).
#[derive(Clone)]
pub struct Components<'a>(std::str::Split<'a, char>);
impl<'a> std::fmt::Debug for Components<'a> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        fmt.write_str("Components { .. }")
    }
}
impl<'a> Iterator for Components<'a> {
    type Item = &'a str;
    fn next(&mut self) -> Option<Self::Item> {
        // Skip the empty segments produced by consecutive separators.
        self.0.find(|component| !component.is_empty())
    }
}
/// Push a path component on an existing path. Only works if `extra`
/// is a relative path.
/// ```ignore
/// use libpijul::path::push;
/// let mut s = "a".to_string();
/// push(&mut s, "b");
/// assert_eq!(s, "a/b");
/// push(&mut s, "c");
/// assert_eq!(s, "a/b/c");
/// ```
pub fn push(path: &mut String, extra: &str) {
    // An absolute `extra` would not be a component; forbid it.
    assert!(!extra.starts_with('/'));
    // Insert a separator unless `path` is empty or already ends in one.
    let needs_separator = !(path.is_empty() || path.ends_with('/'));
    if needs_separator {
        path.push('/');
    }
    path.push_str(extra)
}
/// Pop the last component off an existing path.
/// ```ignore
/// use libpijul::path::pop;
/// let mut s = "a/b/c".to_string();
/// pop(&mut s);
/// assert_eq!(s, "a/b");
/// pop(&mut s);
/// assert_eq!(s, "a");
/// pop(&mut s);
/// assert_eq!(s, "");
/// ```
pub fn pop(path: &mut String) {
    // Cut at the last separator; a single component leaves the empty path.
    match path.rfind('/') {
        Some(i) => path.truncate(i),
        None => path.clear(),
    }
}
//! Output the pristine to the working copy, synchronising file
//! changes (file additions, deletions and renames) in the process.
use super::{collect_children, OutputError, OutputItem, PristineOutputError};
use crate::alive::retrieve;
use crate::changestore::ChangeStore;
use crate::fs::{create_new_inode, inode_filename};
use crate::pristine::*;
use crate::small_string::SmallString;
use crate::working_copy::WorkingCopy;
use crate::{alive, path, vertex_buffer};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
/// A structure representing a file with conflicts.
#[derive(Debug)]
pub enum Conflict {
    /// Multiple candidate names for the same file (see `output_repository`).
    Name { path: String },
    /// A file that still has a deleted folder parent edge (see `is_zombie`).
    ZombieFile { path: String },
    /// The same graph position was reached under two different names.
    MultipleNames { pos: Position<ChangeId> },
    /// Line-level zombie conflict reported while writing file contents.
    Zombie { path: String, line: usize },
    /// Line-level cycle conflict reported while writing file contents.
    Cyclic { path: String, line: usize },
    /// Line-level ordering conflict reported while writing file contents.
    Order { path: String, line: usize },
}
/// Output updates the working copy after applying changes, including
/// the graph-file correspondence.
///
/// **WARNING:** This overwrites the working copy, cancelling any
/// unrecorded change.
///
/// `done` records which graph positions were already output and under
/// which name vertex (used to detect multiple-name conflicts);
/// `prefix` restricts the output to a single path.
pub fn output_repository_no_pending<T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    done: &mut HashMap<Position<ChangeId>, Vertex<ChangeId>>,
    prefix: &str,
    output_name_conflicts: bool,
    if_modified_since: Option<std::time::SystemTime>,
) -> Result<Vec<Conflict>, OutputError<P::Error, T::GraphError, R::Error>> {
    // `ChangeId::ROOT` means "no pending change" here.
    output_repository(
        repo,
        changes,
        txn,
        &mut channel.r.borrow_mut(),
        ChangeId::ROOT,
        done,
        &mut crate::path::components(prefix),
        output_name_conflicts,
        if_modified_since,
    )
}
/// Walk the graph from the root, one path-component depth per loop
/// iteration, creating/moving/writing the files of the working copy.
/// `prefix` restricts the output to one path; files introduced by
/// `pending_change_id` are never collected as dead.
fn output_repository<
    'a,
    T: TreeMutTxnT + ChannelMutTxnT + GraphMutTxnT<GraphError = <T as TreeTxnT>::TreeError>,
    R: WorkingCopy,
    P: ChangeStore,
    I: Iterator<Item = &'a str>,
>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut T::Channel,
    pending_change_id: ChangeId,
    done: &mut HashMap<Position<ChangeId>, Vertex<ChangeId>>,
    prefix: &mut I,
    output_name_conflicts: bool,
    if_modified_after: Option<std::time::SystemTime>,
) -> Result<Vec<Conflict>, OutputError<P::Error, T::TreeError, R::Error>> {
    let mut conflicts = Vec::new();
    // `files` is the current level (name -> candidate items); `next_files`
    // accumulates the next level.
    let mut files = HashMap::new();
    let mut next_files = HashMap::new();
    let mut next_prefix_basename = prefix.next();
    collect_children(
        txn,
        changes,
        T::graph(channel),
        Position::ROOT,
        Inode::ROOT,
        "",
        next_prefix_basename,
        &mut files,
    )?;
    // Dead files are collected once, at the depth where the prefix stops
    // filtering.
    let mut is_first_none = true;
    if next_prefix_basename.is_none() {
        let dead = collect_dead_files(txn, T::graph(channel), pending_change_id, Inode::ROOT)?;
        debug!("dead (line {}) = {:?}", line!(), dead);
        kill_dead_files::<T, R, P>(txn, repo, &dead)?;
        is_first_none = false;
    }
    debug!("done collecting: {:?}", files);
    let mut done_inodes = HashSet::new();
    // Actual moves is used to avoid a situation where have two files
    // a and b, first rename a -> b, and then b -> c.
    let mut actual_moves = Vec::new();
    while !files.is_empty() {
        debug!("files {:?}", files.len());
        next_files.clear();
        next_prefix_basename = prefix.next();
        for (a, mut b) in files.drain() {
            debug!("files: {:?} {:?}", a, b);
            // Process candidates in the order their changes were applied
            // to the channel.
            b.sort_unstable_by(|u, v| {
                txn.get_changeset(T::changes(&channel), u.0.change)
                    .unwrap()
                    .cmp(&txn.get_changeset(T::changes(&channel), v.0.change).unwrap())
            });
            let mut is_first_name = true;
            for (name_key, mut output_item) in b {
                // A position already output under a different name vertex
                // is a multiple-names conflict.
                match done.entry(output_item.pos) {
                    Entry::Occupied(e) => {
                        debug!("pos already visited: {:?} {:?}", a, output_item.pos);
                        if *e.get() != name_key {
                            conflicts.push(Conflict::MultipleNames {
                                pos: output_item.pos,
                            });
                        }
                        continue;
                    }
                    Entry::Vacant(e) => {
                        e.insert(name_key);
                    }
                }
                let output_item_inode =
                    if let Some(inode) = txn.get_revinodes(output_item.pos, None)? {
                        Some((inode, txn.get_inodes(inode, None)?.unwrap()))
                    } else {
                        None
                    };
                if let Some((inode, _)) = output_item_inode {
                    if !done_inodes.insert(inode) {
                        debug!("inode already visited: {:?} {:?}", a, inode);
                        continue;
                    }
                }
                let name = if !is_first_name {
                    // Several names for the same file: either output a
                    // disambiguated name, or skip and report.
                    if output_name_conflicts {
                        let name = make_conflicting_name(&a, name_key);
                        conflicts.push(Conflict::Name { path: name.clone() });
                        name
                    } else {
                        debug!("not outputting {:?} {:?}", a, name_key);
                        conflicts.push(Conflict::Name {
                            path: a.to_string(),
                        });
                        break;
                    }
                } else {
                    is_first_name = false;
                    a.clone()
                };
                let file_name = path::file_name(&name).unwrap();
                path::push(&mut output_item.path, file_name);
                let path = std::mem::replace(&mut output_item.path, String::new());
                let inode = move_or_create::<T, R, P>(
                    txn,
                    repo,
                    &output_item,
                    output_item_inode,
                    &path,
                    &file_name,
                    &mut actual_moves,
                )?;
                if next_prefix_basename.is_none() && is_first_none {
                    let dead =
                        collect_dead_files(txn, T::graph(channel), pending_change_id, inode)?;
                    debug!("dead (line {}) = {:?}", line!(), dead);
                    kill_dead_files::<T, R, P>(txn, repo, &dead)?;
                    is_first_none = false;
                }
                if output_item.meta.is_dir() {
                    repo.create_dir_all(&path)
                        .map_err(OutputError::WorkingCopy)?;
                    // Queue the directory's children for the next level.
                    collect_children(
                        txn,
                        changes,
                        T::graph(channel),
                        output_item.pos,
                        inode,
                        &path,
                        next_prefix_basename,
                        &mut next_files,
                    )?;
                } else {
                    if needs_output(repo, if_modified_after, &path) {
                        repo.write_file(&path, |w: &mut dyn std::io::Write| {
                            output_file::<_, _, R>(
                                txn,
                                channel,
                                changes,
                                &output_item,
                                &mut conflicts,
                                w,
                            )
                        })
                        .map_err(OutputError::from)?
                    } else {
                        debug!("Not outputting {:?}", path)
                    }
                }
                if output_item.is_zombie {
                    conflicts.push(Conflict::ZombieFile {
                        path: name.to_string(),
                    })
                }
                repo.set_permissions(&path, output_item.meta.permissions())
                    .map_err(OutputError::WorkingCopy)?;
            }
        }
        std::mem::swap(&mut files, &mut next_files);
    }
    // Deferred renames, applied last so chains and swaps don't clobber
    // each other.
    for (a, b) in actual_moves.iter() {
        repo.rename(a, b).map_err(OutputError::WorkingCopy)?
    }
    Ok(conflicts)
}
/// Build a disambiguated name for a name conflict by appending the
/// base32 form of the introducing change to the file's basename.
fn make_conflicting_name(name: &str, name_key: Vertex<ChangeId>) -> String {
    let dir = path::parent(name).unwrap();
    let base = path::file_name(name).unwrap();
    let mut result = dir.to_string();
    let suffixed = format!("{}.{}", base, &name_key.change.to_base32());
    path::push(&mut result, &suffixed);
    result
}
/// Whether the file at `path` should be (re)written: `true` unless it
/// is known to be older than `if_modified_after` (files whose mtime
/// cannot be read are always output).
fn needs_output<R: WorkingCopy>(
    repo: &R,
    if_modified_after: Option<std::time::SystemTime>,
    path: &str,
) -> bool {
    let reference = match if_modified_after {
        Some(m) => m,
        None => return true,
    };
    let last = match repo.modified_time(path) {
        Ok(last) => last,
        Err(_) => return true,
    };
    debug!("modified_after: {:?} {:?}", reference, last);
    // Only output if the file is at least one full second newer.
    match last.duration_since(reference) {
        Ok(d) => d.as_secs() > 0,
        Err(_) => false,
    }
}
/// Ensure the file described by `output_item` exists at `path`, in both
/// the tree tables and the working copy: rename it if it already exists
/// under another name, otherwise create a fresh inode. Returns the
/// file's inode.
///
/// Renames go through a random temporary name; the final rename is
/// recorded in `actual_moves` and applied later by the caller, so that
/// chains/swaps of renames don't clobber files.
///
/// Fix: `repo.rename(¤t_name, ...)` contained a mis-encoded token
/// (`&curren` turned into the `¤` HTML entity); restored to
/// `&current_name`.
fn move_or_create<T: TreeMutTxnT, R: WorkingCopy, C: ChangeStore>(
    txn: &mut T,
    repo: &mut R,
    output_item: &OutputItem,
    output_item_inode: Option<(Inode, Position<ChangeId>)>,
    path: &str,
    file_name: &str,
    actual_moves: &mut Vec<(String, String)>,
) -> Result<Inode, OutputError<C::Error, T::TreeError, R::Error>> {
    let file_id = OwnedPathId {
        parent_inode: output_item.parent,
        basename: SmallString::from_str(&file_name),
    };
    let file_id_ = file_id.as_file_id();
    debug!("move_or_create {:?}", file_id_);
    if let Some((inode, _)) = output_item_inode {
        // If the file already exists, find its
        // current name and rename it if that name
        // is different.
        if let Some(ref current_name) = inode_filename(txn, inode)? {
            debug!("current_name = {:?}, path = {:?}", current_name, path);
            if current_name != path {
                let parent = txn.get_revtree(inode, None)?.unwrap().to_owned();
                debug!("parent = {:?}, inode = {:?}", parent, inode);
                del_tree_with_rev(txn, parent.as_file_id(), inode)?;
                // Move to a random temporary name in the target directory
                // first; the final rename is deferred to `actual_moves`.
                let mut tmp_path = path.to_string();
                crate::path::pop(&mut tmp_path);
                use rand::Rng;
                let s: String = rand::thread_rng()
                    .sample_iter(&rand::distributions::Alphanumeric)
                    .take(30)
                    .map(|c| c as char)
                    .collect();
                crate::path::push(&mut tmp_path, &s);
                repo.rename(&current_name, &tmp_path)
                    .map_err(OutputError::WorkingCopy)?;
                actual_moves.push((tmp_path, path.to_string()));
                // If the new location is overwriting an existing one,
                // actually overwrite.
                if let Some(inode) = txn.get_tree(file_id_, None)? {
                    crate::fs::rec_delete(txn, file_id.clone(), inode, true)
                        .map_err(PristineOutputError::Fs)?;
                }
                put_inodes_with_rev(txn, inode, output_item.pos)?;
                put_tree_with_rev(txn, file_id_, inode)?;
            }
        } else {
            debug!("no current name, inserting {:?} {:?}", file_id_, inode);
            // Overwrite any existing entry at the target path.
            if let Some(inode) = txn.get_tree(file_id_, None)? {
                crate::fs::rec_delete(txn, file_id.clone(), inode, true)
                    .map_err(PristineOutputError::Fs)?;
            }
            put_inodes_with_rev(txn, inode, output_item.pos)?;
            put_tree_with_rev(txn, file_id_, inode)?;
        }
        Ok(inode)
    } else {
        // No inode yet: overwrite any existing entry, then create one.
        if let Some(inode) = txn.get_tree(file_id_, None)? {
            crate::fs::rec_delete(txn, file_id.clone(), inode, true).map_err(PristineOutputError::Fs)?;
        }
        let inode = create_new_inode(txn)?;
        debug!(
            "created new inode {:?} {:?} {:?}",
            inode, output_item.pos, file_id_
        );
        put_inodes_with_rev(txn, inode, output_item.pos)?;
        put_tree_with_rev(txn, file_id_, inode)?;
        if output_item.meta.is_dir() {
            // Directories get an empty-basename self entry.
            let path_id = OwnedPathId {
                parent_inode: inode,
                basename: SmallString::new(),
            };
            txn.put_tree(path_id.as_file_id(), inode)?;
        }
        Ok(inode)
    }
}
/// Write the contents of the file at `output_item.pos` into `w`,
/// collecting line-level conflicts, then delete the forward edges
/// reported by the graph traversal.
fn output_file<T: ChannelMutTxnT + GraphMutTxnT, P: ChangeStore, W: WorkingCopy>(
    txn: &mut T,
    channel: &mut T::Channel,
    changes: &P,
    output_item: &OutputItem,
    conflicts: &mut Vec<Conflict>,
    w: &mut dyn std::io::Write,
) -> Result<(), OutputError<P::Error, T::GraphError, W::Error>> {
    let mut l = retrieve(txn, T::graph(channel), output_item.pos)?;
    // The conflicts writer records conflicts into `conflicts` as it writes.
    let mut f = vertex_buffer::ConflictsWriter::new(w, &output_item.path, conflicts);
    let mut forward = Vec::new();
    alive::output_graph(changes, txn, channel, &mut f, &mut l, &mut forward)
        .map_err(PristineOutputError::from)?;
    for &(vertex, edge) in forward.iter() {
        // Unwrap ok since `edge` is in the channel.
        let dest = txn.find_block(T::graph(channel), edge.dest).unwrap();
        debug!("deleting forward edge {:?} {:?} {:?}", vertex, dest, edge);
        del_graph_with_rev(
            txn,
            T::graph_mut(channel),
            edge.flag,
            vertex,
            dest,
            edge.introduced_by,
        )?;
    }
    Ok(())
}
/// Find the tree entries under `inode` whose files are no longer alive
/// in the graph (and were not introduced by `pending_change_id`),
/// returning them with their inode and current file name.
fn collect_dead_files<T: TreeMutTxnT + GraphTxnT<GraphError = <T as TreeTxnT>::TreeError>>(
    txn: &mut T,
    channel: &T::Graph,
    pending_change_id: ChangeId,
    inode: Inode,
) -> Result<HashMap<OwnedPathId, (Inode, Option<String>)>, TxnErr<T::GraphError>> {
    // Level-by-level traversal of the tree, propagating deadness from a
    // parent to its descendants.
    let mut inodes = vec![(inode, false)];
    let mut next_inodes = Vec::new();
    let mut dead = HashMap::new();
    while !inodes.is_empty() {
        for (inode, parent_is_dead) in inodes.drain(..) {
            // Iterate from the start of this inode's key range (empty
            // basename); entries for other parents are skipped below.
            for x in txn.iter_tree(
                OwnedPathId {
                    parent_inode: inode,
                    basename: SmallString::from_str(""),
                },
                None,
            )? {
                let (id, inode_) = x?;
                if id.parent_inode > inode {
                    break;
                } else if id.parent_inode < inode {
                    continue;
                }
                let is_dead = parent_is_dead || {
                    if let Some(vertex) = txn.get_inodes(inode_, None)? {
                        vertex.change != pending_change_id
                            && !is_alive(txn, channel, vertex.inode_vertex())?
                    } else {
                        // No graph vertex for this inode: stale entry.
                        true
                    }
                };
                if is_dead {
                    dead.insert(id.to_owned(), (inode_, inode_filename(txn, inode_)?));
                }
                if inode_ != inode {
                    next_inodes.push((inode_, is_dead))
                }
            }
        }
        std::mem::swap(&mut inodes, &mut next_inodes)
    }
    Ok(dead)
}
/// Remove the files found by `collect_dead_files` from the tree and
/// inode tables, and delete them from the working copy when they still
/// have a name on disk.
fn kill_dead_files<T: TreeMutTxnT, W: WorkingCopy, C: ChangeStore>(
    txn: &mut T,
    repo: &mut W,
    dead: &HashMap<OwnedPathId, (Inode, Option<String>)>,
) -> Result<(), OutputError<C::Error, T::TreeError, W::Error>> {
    for (fileid, (inode, name)) in dead.iter() {
        debug!("killing {:?} {:?} {:?}", fileid, inode, name);
        del_tree_with_rev(txn, fileid.as_file_id(), *inode)?;
        match txn.get_inodes(*inode, None)? {
            Some(vertex) => {
                debug!("kill_dead_files {:?} {:?}", inode, vertex);
                del_inodes_with_rev(txn, *inode, vertex)?;
                if let Some(ref name) = *name {
                    repo.remove_path(name).map_err(OutputError::WorkingCopy)?
                }
            }
            None => {}
        }
    }
    Ok(())
}
use crate::changestore::ChangeStore;
use crate::path;
use crate::pristine::*;
use std::collections::HashMap;
mod output;
pub use output::*;
mod archive;
pub use archive::*;
/// Errors produced while outputting the pristine to the working copy.
#[derive(Debug, Error)]
pub enum OutputError<
    ChangestoreError: std::error::Error + 'static,
    Txn: std::error::Error + 'static,
    W: std::error::Error + 'static,
> {
    /// Error coming from the working copy itself.
    #[error("Working copy error: {0}")]
    WorkingCopy(W),
    /// Error coming from the pristine or the changestore.
    #[error(transparent)]
    Pristine(#[from] PristineOutputError<ChangestoreError, Txn>),
}
/// Errors produced while reading the pristine during output.
#[derive(Debug, Error)]
pub enum PristineOutputError<ChangestoreError: std::error::Error, Txn: std::error::Error + 'static>
{
    #[error(transparent)]
    Txn(Txn),
    #[error("Changestore error: {0}")]
    Changestore(ChangestoreError),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Fs(#[from] crate::fs::FsError<Txn>),
}
use crate::working_copy::WriteError;
impl<C: std::error::Error, T: std::error::Error + 'static, W: std::error::Error>
    OutputError<C, T, W>
{
    /// Flatten a `WriteError` produced while writing a file: unwrap a
    /// nested `OutputError`, or wrap a raw I/O error.
    fn from(e: WriteError<Self>) -> Self {
        match e {
            WriteError::E(inner) => inner,
            WriteError::Io(io) => OutputError::Pristine(PristineOutputError::Io(io)),
        }
    }
}
// A bare transaction error becomes the `Txn` variant of
// `PristineOutputError`.
impl<C: std::error::Error, T: std::error::Error + 'static> From<TxnErr<T>>
    for PristineOutputError<C, T>
{
    fn from(e: TxnErr<T>) -> Self {
        PristineOutputError::Txn(e.0)
    }
}
// A transaction error can also be wrapped directly into `OutputError`,
// going through `PristineOutputError`.
impl<C: std::error::Error, T: std::error::Error + 'static, W: std::error::Error> From<TxnErr<T>>
    for OutputError<C, T, W>
{
    fn from(e: TxnErr<T>) -> Self {
        OutputError::Pristine(e.into())
    }
}
/// Errors produced while reading a file's contents from the pristine
/// and the changestore.
#[derive(Debug, Error)]
pub enum FileError<ChangestoreError: std::error::Error + 'static, T: std::error::Error + 'static> {
    #[error(transparent)]
    Changestore(ChangestoreError),
    #[error(transparent)]
    Txn(T),
    #[error(transparent)]
    Io(#[from] std::io::Error),
}
impl<C: std::error::Error, T: std::error::Error + 'static> From<FileError<C, T>>
    for PristineOutputError<C, T>
{
    /// Re-wrap each `FileError` variant into its `PristineOutputError` twin.
    fn from(e: FileError<C, T>) -> Self {
        match e {
            FileError::Io(io) => PristineOutputError::Io(io),
            FileError::Txn(txn) => PristineOutputError::Txn(txn),
            FileError::Changestore(c) => PristineOutputError::Changestore(c),
        }
    }
}
// Unwrap the `TxnErr` newtype into the `Txn` variant.
impl<C: std::error::Error, T: std::error::Error + 'static> From<TxnErr<T>> for FileError<C, T> {
    fn from(e: TxnErr<T>) -> Self {
        FileError::Txn(e.0)
    }
}
/// Everything needed to output one file or directory.
#[derive(Debug)]
struct OutputItem {
    /// Inode of the parent directory.
    parent: Inode,
    /// Path of the parent directory (the file name is pushed onto it
    /// later, in `output_repository`).
    path: String,
    /// Permissions / file-type metadata decoded from the name vertex.
    meta: InodeMetadata,
    /// Position of the file's inode vertex in the graph.
    pos: Position<ChangeId>,
    /// Whether the file still has a deleted folder parent edge.
    is_zombie: bool,
}
/// Collect the children of the directory at `inode_pos`/`inode` into
/// `files`, keyed by their full path. When `prefix_basename` is `Some`,
/// only the child with that basename is collected.
fn collect_children<T: GraphTxnT, P: ChangeStore>(
    txn: &T,
    changes: &P,
    channel: &T::Graph,
    inode_pos: Position<ChangeId>,
    inode: Inode,
    path: &str,
    prefix_basename: Option<&str>,
    files: &mut HashMap<String, Vec<(Vertex<ChangeId>, OutputItem)>>,
) -> Result<(), PristineOutputError<P::Error, T::GraphError>> {
    debug!("path = {:?}", path);
    // Folder edges of an inode vertex lead to its name vertices.
    for e in iter_adjacent(
        txn,
        channel,
        inode_pos.inode_vertex(),
        EdgeFlags::FOLDER,
        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
    )? {
        let e = e?;
        let name_vertex = txn.find_block(channel, e.dest).unwrap();
        let mut name_buf = Vec::new();
        changes
            .get_contents(|h| txn.get_external(h).unwrap(), name_vertex, &mut name_buf)
            .map_err(PristineOutputError::Changestore)?;
        // A name vertex's contents are 2 bytes of permissions followed
        // by the UTF-8 basename.
        let (perms, basename) = name_buf.as_slice().split_at(2);
        let (perms, basename) = (
            InodeMetadata::from_basename(perms),
            std::str::from_utf8(basename).unwrap(),
        );
        debug!("filename: {:?} {:?}", perms, basename);
        let mut name = path.to_string();
        if let Some(next) = prefix_basename {
            if next != basename {
                continue;
            }
        }
        path::push(&mut name, basename);
        debug!("name_vertex: {:?} {:?}", e, name_vertex);
        // The child's inode vertex follows the name vertex.
        let child = if let Some(child) = iter_adjacent(
            txn,
            channel,
            name_vertex,
            EdgeFlags::FOLDER,
            EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::PSEUDO,
        )?
        .next()
        {
            child?
        } else {
            // No child found: dump the graph for post-mortem inspection
            // and abort.
            // NOTE(review): writing `debug_output` to the current
            // directory and panicking in library code looks like
            // leftover debugging — confirm whether this should return an
            // error instead.
            let mut edge = None;
            for e in iter_adjacent(
                txn,
                channel,
                name_vertex,
                EdgeFlags::FOLDER,
                EdgeFlags::all(),
            )? {
                let e = e?;
                if !e.flag.contains(EdgeFlags::PARENT) {
                    edge = Some(e);
                    break;
                }
            }
            let e = edge.unwrap();
            let mut f = std::fs::File::create("debug_output").unwrap();
            debug_root(txn, channel, e.dest.inode_vertex(), &mut f, false).unwrap();
            panic!("no child");
        };
        debug!("child: {:?}", child);
        let v = files.entry(name).or_insert_with(Vec::new);
        v.push((
            name_vertex,
            OutputItem {
                parent: inode,
                path: path.to_string(),
                meta: perms,
                pos: child.dest,
                is_zombie: is_zombie(txn, channel, child.dest)?,
            },
        ));
    }
    Ok(())
}
/// A position is a zombie iff its inode vertex has at least one
/// adjacent edge flagged FOLDER | PARENT | DELETED.
fn is_zombie<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    pos: Position<ChangeId>,
) -> Result<bool, TxnErr<T::GraphError>> {
    let f = EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::DELETED;
    let mut adjacent = iter_adjacent(txn, channel, pos.inode_vertex(), f, f | EdgeFlags::BLOCK)?;
    match adjacent.next() {
        Some(e) => {
            // Propagate a transaction error, otherwise report a zombie.
            e?;
            Ok(true)
        }
        None => Ok(false),
    }
}
use super::*;
use crate::changestore::ChangeStore;
use crate::Conflict;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
/// Abstraction over archive formats: create an entry, fill it through
/// `std::io::Write`, then close it.
pub trait Archive {
    type File: std::io::Write;
    type Error: std::error::Error;
    fn create_file(&mut self, path: &str, mtime: u64, perm: u16) -> Self::File;
    fn close_file(&mut self, f: Self::File) -> Result<(), Self::Error>;
}
/// `Archive` implementation producing a gzip-compressed tarball.
#[cfg(feature = "tarball")]
pub struct Tarball<W: std::io::Write> {
    pub archive: tar::Builder<flate2::write::GzEncoder<W>>,
    /// Optional prefix prepended to every entry path.
    pub prefix: Option<String>,
    /// Scratch buffer recycled between entries (see `close_file`).
    pub buffer: Vec<u8>,
}
/// An in-memory pending tar entry, written out on `close_file`.
#[cfg(feature = "tarball")]
pub struct File {
    buf: Vec<u8>,
    path: String,
    permissions: u16,
    mtime: u64,
}
#[cfg(feature = "tarball")]
impl std::io::Write for File {
    /// Buffer the data in memory; it is emitted on `close_file`.
    fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> {
        // Writing to a `Vec` always succeeds and takes the whole buffer.
        self.buf.extend_from_slice(buf);
        Ok(buf.len())
    }
    fn flush(&mut self) -> Result<(), std::io::Error> {
        // Nothing to flush: the data already lives in memory.
        Ok(())
    }
}
#[cfg(feature = "tarball")]
impl<W: std::io::Write> Tarball<W> {
    /// Wrap `w` in a best-compression gzip encoder and an empty tar
    /// builder; `prefix`, if any, is prepended to every entry path.
    pub fn new(w: W, prefix: Option<String>) -> Self {
        let encoder = flate2::write::GzEncoder::new(w, flate2::Compression::best());
        Tarball {
            archive: tar::Builder::new(encoder),
            buffer: Vec::new(),
            prefix,
        }
    }
}
#[cfg(feature = "tarball")]
impl<W: std::io::Write> Archive for Tarball<W> {
    type File = File;
    type Error = std::io::Error;
    /// Start a new in-memory entry, reusing this archive's scratch
    /// buffer to avoid a fresh allocation per file.
    fn create_file(&mut self, path: &str, mtime: u64, permissions: u16) -> Self::File {
        // Clear first so the moved-out buffer is empty but keeps its
        // capacity; `take` leaves a fresh empty Vec behind.
        self.buffer.clear();
        File {
            buf: std::mem::take(&mut self.buffer),
            path: if let Some(ref prefix) = self.prefix {
                prefix.clone() + path
            } else {
                path.to_string()
            },
            mtime,
            permissions,
        }
    }
    /// Append the buffered entry to the tar archive, then reclaim its
    /// buffer for the next `create_file`.
    fn close_file(&mut self, file: Self::File) -> Result<(), Self::Error> {
        let mut header = tar::Header::new_gnu();
        header.set_size(file.buf.len() as u64);
        header.set_mode(file.permissions as u32);
        header.set_mtime(file.mtime);
        header.set_cksum();
        // `append_data` sets the path on the header (updating the
        // checksum) before writing the entry.
        self.archive
            .append_data(&mut header, &file.path, &file.buf[..])?;
        self.buffer = file.buf;
        Ok(())
    }
}
/// Errors that can occur while archiving a channel. `P` is the change
/// store's error, `T` the transaction's, `A` the archive writer's.
#[derive(Debug, Error)]
pub enum ArchiveError<
    P: std::error::Error + 'static,
    T: std::error::Error + 'static,
    A: std::error::Error + 'static,
> {
    /// Error from the archive writer.
    #[error(transparent)]
    A(A),
    /// Error from the change store.
    #[error(transparent)]
    P(P),
    /// Error from the pristine transaction.
    #[error(transparent)]
    Txn(T),
    #[error(transparent)]
    Unrecord(#[from] crate::unrecord::UnrecordError<P, T>),
    #[error(transparent)]
    Apply(#[from] crate::apply::ApplyError<P, T>),
    /// The requested state was not found in the channel's log.
    #[error("State not found: {:?}", state)]
    StateNotFound { state: crate::pristine::Merkle },
    #[error(transparent)]
    File(#[from] crate::output::FileError<P, T>),
    #[error(transparent)]
    Output(#[from] crate::output::PristineOutputError<P, T>),
}
impl<
P: std::error::Error + 'static,
T: std::error::Error + 'static,
A: std::error::Error + 'static,
> std::convert::From<TxnErr<T>> for ArchiveError<P, T, A>
{
fn from(e: TxnErr<T>) -> Self {
ArchiveError::Txn(e.0)
}
}
/// Archive the state of `channel` into `arch`, restricted to the
/// sub-tree designated by the path components yielded by `prefix`.
///
/// Walks the folder graph one directory level per iteration of the
/// outer loop (`files` is the current level, `next_files` the next),
/// writing regular files into the archive and queueing directory
/// children. Returns the conflicts encountered (multiple names, name
/// conflicts, zombie files, and content conflicts found while writing).
pub(crate) fn archive<
    'a,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    P: ChangeStore,
    I: Iterator<Item = &'a str>,
    A: Archive,
>(
    changes: &P,
    txn: &T,
    channel: &ChannelRef<T>,
    prefix: &mut I,
    arch: &mut A,
) -> Result<Vec<Conflict>, ArchiveError<P::Error, T::GraphError, A::Error>> {
    let channel = channel.borrow();
    let mut conflicts = Vec::new();
    let mut files = HashMap::new();
    let mut next_files = HashMap::new();
    let mut next_prefix_basename = prefix.next();
    // Seed the walk with the children of the repository root.
    collect_children(
        txn,
        changes,
        T::graph(&channel),
        Position::ROOT,
        Inode::ROOT,
        "",
        next_prefix_basename,
        &mut files,
    )?;
    // `done` maps an output position to the first name vertex seen for
    // it (detects multiple-name conflicts); `done_inodes` prevents
    // outputting the same inode twice.
    let mut done = HashMap::new();
    let mut done_inodes = HashSet::new();
    while !files.is_empty() {
        debug!("files {:?}", files.len());
        next_files.clear();
        next_prefix_basename = prefix.next();
        for (a, mut b) in files.drain() {
            debug!("files: {:?} {:?}", a, b);
            // Order candidates by their position in the channel log, so
            // the earliest-applied name wins.
            b.sort_by(|u, v| {
                txn.get_changeset(T::changes(&channel), u.0.change)
                    .unwrap()
                    .cmp(&txn.get_changeset(T::changes(&channel), v.0.change).unwrap())
            });
            let mut is_first_name = true;
            for (name_key, mut output_item) in b {
                match done.entry(output_item.pos) {
                    Entry::Occupied(e) => {
                        debug!("pos already visited: {:?} {:?}", a, output_item.pos);
                        if *e.get() != name_key {
                            // Same position reachable under two
                            // different names: report, output once.
                            conflicts.push(Conflict::MultipleNames {
                                pos: output_item.pos,
                            });
                        }
                        continue;
                    }
                    Entry::Vacant(e) => {
                        e.insert(name_key);
                    }
                }
                if !done_inodes.insert(output_item.pos) {
                    debug!("inode already visited: {:?} {:?}", a, output_item.pos);
                    continue;
                }
                // A second candidate for the same path is a name
                // conflict; only the first is archived.
                let name = if !is_first_name {
                    conflicts.push(Conflict::Name {
                        path: a.to_string(),
                    });
                    break;
                } else {
                    is_first_name = false;
                    a.clone()
                };
                let file_name = path::file_name(&name).unwrap();
                path::push(&mut output_item.path, file_name);
                let path = std::mem::replace(&mut output_item.path, String::new());
                // The archived mtime is the timestamp of the latest
                // change that touched this position.
                let (_, latest_touch) =
                    crate::fs::get_latest_touch(txn, &channel, output_item.pos)?;
                let latest_touch = {
                    let ext = txn.get_external(latest_touch)?.unwrap();
                    let c = changes.get_header(&ext).map_err(ArchiveError::P)?;
                    c.timestamp.timestamp() as u64
                };
                if output_item.meta.is_dir() {
                    // Directories are not written; queue their children
                    // for the next level of the walk.
                    collect_children(
                        txn,
                        changes,
                        T::graph(&channel),
                        output_item.pos,
                        Inode::ROOT, // unused
                        &path,
                        next_prefix_basename,
                        &mut next_files,
                    )?;
                } else {
                    debug!("latest_touch: {:?}", latest_touch);
                    let mut l = crate::alive::retrieve(txn, T::graph(&channel), output_item.pos)?;
                    let mut f =
                        arch.create_file(&path, latest_touch, output_item.meta.permissions());
                    {
                        // Wrap the archive file so content conflicts
                        // are collected while the graph is output.
                        let mut f = crate::vertex_buffer::ConflictsWriter::new(
                            &mut f,
                            &output_item.path,
                            &mut conflicts,
                        );
                        crate::alive::output_graph(
                            changes,
                            txn,
                            &channel,
                            &mut f,
                            &mut l,
                            &mut Vec::new(),
                        )?;
                    }
                    arch.close_file(f).map_err(ArchiveError::A)?;
                }
                if output_item.is_zombie {
                    conflicts.push(Conflict::ZombieFile {
                        path: name.to_string(),
                    })
                }
            }
        }
        // Descend one directory level.
        std::mem::swap(&mut files, &mut next_files);
    }
    Ok(conflicts)
}
use crate::alive::{Graph, VertexId};
use crate::change::*;
use crate::find_alive::*;
use crate::pristine::*;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
/// Errors that can occur while repairing the context of an applied
/// change (missing up/down contexts, zombies, pseudo-edges).
#[derive(Debug, Error)]
pub enum MissingError<TxnError: std::error::Error + 'static> {
    /// Underlying transaction error.
    #[error(transparent)]
    Txn(TxnError),
    /// A block expected in the graph could not be found.
    #[error(transparent)]
    Block(#[from] BlockError<TxnError>),
    /// The change references data inconsistently.
    #[error(transparent)]
    Inconsistent(#[from] InconsistentChange<TxnError>),
}
impl<T: std::error::Error + 'static> std::convert::From<TxnErr<T>> for MissingError<T> {
fn from(e: TxnErr<T>) -> Self {
MissingError::Txn(e.0)
}
}
impl Workspace {
    /// Load (or return the cached copy of) the alive graph of the file
    /// at `inode`, together with a map from each vertex to its index in
    /// that graph.
    ///
    /// Returns `Ok(None)` when `inode` has no change component, and
    /// `Err(InconsistentChange::UndeclaredDep)` when the change is not
    /// known to this transaction.
    pub(crate) fn load_graph<T: GraphTxnT>(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        inode: Position<Option<Hash>>,
    ) -> Result<
        Option<&(Graph, HashMap<Vertex<ChangeId>, VertexId>)>,
        InconsistentChange<T::GraphError>,
    > {
        if let Some(change) = inode.change {
            match self.graphs.0.entry(inode) {
                Entry::Occupied(e) => Ok(Some(e.into_mut())),
                Entry::Vacant(v) => {
                    // Translate the external hash into this pristine's
                    // internal change id.
                    let pos = Position {
                        change: if let Some(i) = txn.get_internal(change)? {
                            i
                        } else {
                            return Err(InconsistentChange::UndeclaredDep);
                        },
                        pos: inode.pos,
                    };
                    let mut graph = crate::alive::retrieve(txn, channel, pos)?;
                    graph.tarjan();
                    // Index every line of the graph by its vertex.
                    let mut ids = HashMap::new();
                    for (i, l) in graph.lines.iter().enumerate() {
                        ids.insert(l.vertex, VertexId(i));
                    }
                    Ok(Some(v.insert((graph, ids))))
                }
            }
        } else {
            Ok(None)
        }
    }
}
/// Reconnect the alive ancestors of `c` (its "up context") to each
/// vertex in `d` with `PSEUDO` edges, so the `d` vertices stay
/// reachable even though part of their context is deleted.
pub(crate) fn repair_missing_up_context<
    'a,
    T: GraphMutTxnT,
    I: IntoIterator<Item = &'a Vertex<ChangeId>>,
>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    inode: Position<Option<Hash>>,
    c: Vertex<ChangeId>,
    d: I,
) -> Result<(), MissingError<T::GraphError>> {
    // Find the alive ancestors of `c`, timing the search.
    let now = std::time::Instant::now();
    let mut alive = find_alive_up(txn, channel, &mut ws.files, c, change_id)?;
    crate::TIMERS.lock().unwrap().find_alive += now.elapsed();
    // Make sure the inode's graph is cached, so redundant parents can
    // be pruned below.
    ws.load_graph(txn, channel, inode)?;
    debug!("repair_missing_up_context, alive = {:?}", alive);
    for &d in d {
        if let Some((graph, vids)) = ws.graphs.0.get(&inode) {
            crate::alive::remove_redundant_parents(
                graph,
                vids,
                &mut alive,
                &mut ws.covered_parents,
                d,
            );
        }
        repair_regular_up(txn, channel, &alive, d, EdgeFlags::PSEUDO)?;
    }
    Ok(())
}
/// Add an edge with `flag` from every alive ancestor to `d`, skipping
/// the degenerate case where the ancestor is `d` itself.
fn repair_regular_up<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    alive: &HashSet<Vertex<ChangeId>>,
    d: Vertex<ChangeId>,
    flag: EdgeFlags,
) -> Result<(), TxnErr<T::GraphError>> {
    for &ancestor in alive {
        debug!("put_graph_with_rev {:?} -> {:?}", ancestor, d);
        if ancestor == d {
            // Never add a self-loop.
            info!(
                "repair_missing_up_context, alive: {:?} == {:?}",
                ancestor, d
            );
        } else {
            debug!("repair_missing_up {:?} {:?}", ancestor, d);
            put_graph_with_rev(txn, channel, flag, ancestor, d, ChangeId::ROOT)?;
        }
    }
    Ok(())
}
/// Reconnect each vertex in `d` to the alive descendants of `c` (its
/// "down context") with `PSEUDO` edges, keeping the graph connected
/// below a partially deleted context.
pub(crate) fn repair_missing_down_context<
    'a,
    T: GraphMutTxnT,
    I: IntoIterator<Item = &'a Vertex<ChangeId>>,
>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    c: Vertex<ChangeId>,
    d: I,
) -> Result<(), MissingError<T::GraphError>> {
    // Find the alive descendants of `c`, timing the search.
    let now = std::time::Instant::now();
    let mut alive = find_alive_down(txn, channel, c)?;
    crate::TIMERS.lock().unwrap().find_alive += now.elapsed();
    // Cache the inode's graph so redundant children can be pruned.
    ws.load_graph(txn, channel, inode)?;
    if let Some((graph, vids)) = ws.graphs.0.get(&inode) {
        crate::alive::remove_redundant_children(graph, vids, &mut alive, c);
    }
    if !alive.is_empty() {
        debug!("repair_missing_down_context alive = {:#?}", alive);
    }
    for &d in d {
        for &desc in alive.iter() {
            // Never add a self-loop.
            if d == desc {
                info!("repair_missing_down_context, alive: {:?} == {:?}", d, desc);
                continue;
            }
            debug!("repair_missing_down {:?} {:?}", d, desc);
            put_graph_with_rev(txn, channel, EdgeFlags::PSEUDO, d, desc, ChangeId::ROOT)?;
        }
    }
    Ok(())
}
/// Repair the context of a non-deleting edge `e` introduced by
/// `change_id`: if the edge's source was deleted by changes unknown to
/// `known`, or the target's parents were, reconnect them with
/// pseudo-edges. `known` tells whether a change hash is known.
pub(crate) fn repair_context_nondeleted<T: GraphMutTxnT, K>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
    e: &NewEdge<Option<Hash>>,
) -> Result<(), MissingError<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    // Folder edges are not repaired here.
    if e.flag.contains(EdgeFlags::FOLDER) {
        return Ok(());
    }
    // Resolve the edge's endpoints to actual blocks in the channel.
    let source = txn.find_block_end(&channel, internal_pos(txn, &e.from, change_id)?)?;
    let target = txn.find_block(&channel, internal_pos(txn, &e.to.start_pos(), change_id)?)?;
    if deleted_by_unknown(txn, channel, source, change_id, &mut known)? {
        debug!("deleted by unknown, repairing {:?}", source);
        repair_missing_up_context(txn, channel, ws, change_id, inode, source, &[target])?;
    }
    reconnect_target_up(txn, channel, ws, inode, target, change_id)?;
    if e.flag.contains(EdgeFlags::BLOCK) {
        repair_missing_down_context(txn, channel, ws, inode, target, &[target])?;
    } else if is_alive(txn, channel, source)? {
        repair_missing_down_context(txn, channel, ws, inode, target, &[source])?;
    }
    Ok(())
}
/// Collect `target`'s parents that were deleted by changes other than
/// `change_id`, and repair the up context through each of them.
///
/// If `change_id` itself deleted one of the parents, the whole set is
/// discarded: no reconnection is needed.
fn reconnect_target_up<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    target: Vertex<ChangeId>,
    change_id: ChangeId,
) -> Result<(), MissingError<T::GraphError>> {
    let mut unknown = HashSet::new();
    for v in iter_deleted_parents(txn, channel, target)? {
        let v = v?;
        // Root edges are ignored.
        if v.dest.change.is_root() || v.introduced_by.is_root() {
            continue;
        }
        if v.introduced_by == change_id {
            // The change itself deleted this parent: nothing to repair.
            unknown.clear();
            break;
        }
        // Else change ~v.introduced_by~ is a change we don't know,
        // since no change can create a conflict with itself.
        unknown.insert(txn.find_block_end(channel, v.dest)?);
    }
    for up in unknown.drain() {
        repair_missing_up_context(txn, channel, ws, change_id, inode, up, &[target])?;
    }
    Ok(())
}
/// Returns whether `source` was deleted only by changes unknown to
/// `known`. If any known change (or `change_id` itself) also deletes
/// it, returns `false` immediately — the deletion is accounted for.
fn deleted_by_unknown<T: GraphTxnT, K>(
    txn: &T,
    channel: &T::Graph,
    source: Vertex<ChangeId>,
    change_id: ChangeId,
    known: &mut K,
) -> Result<bool, TxnErr<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    let mut found_unknown = false;
    for edge in iter_deleted_parents(txn, channel, source)? {
        let edge = edge?;
        // Root edges never count as deletions.
        if edge.dest.change.is_root() || edge.introduced_by.is_root() {
            continue;
        }
        let by_this_change = edge.introduced_by == change_id;
        if by_this_change || known(txn.get_external(edge.introduced_by)?.unwrap()) {
            // If a known change also delete the context, we're good.
            return Ok(false);
        }
        // If an unknown change deletes the context, wait: maybe a
        // known change will delete it too.
        found_unknown = true;
    }
    Ok(found_unknown)
}
/// Repair the context of a deleting edge `e` introduced by `change_id`:
/// walk every block of the deleted target interval and reconnect the
/// alive children introduced by changes unknown to `known`.
pub(crate) fn repair_context_deleted<T: GraphMutTxnT, K>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
    e: &NewEdge<Option<Hash>>,
) -> Result<(), MissingError<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    // Folder edges are not repaired here.
    if e.flag.contains(EdgeFlags::FOLDER) {
        return Ok(());
    }
    debug!("repair_context_deleted {:?}", e);
    // The target interval may have been split into several blocks:
    // iterate block by block until the end of the interval.
    let mut pos = internal_pos(txn, &e.to.start_pos(), change_id)?;
    while let Ok(dest_vertex) = txn.find_block(&channel, pos) {
        debug!("repair_context_deleted, dest_vertex = {:?}", dest_vertex);
        repair_children_of_deleted(txn, channel, ws, inode, &mut known, change_id, dest_vertex)?;
        if dest_vertex.end < e.to.end {
            pos.pos = dest_vertex.end
        } else {
            break;
        }
    }
    Ok(())
}
/// Scratch state shared by the context-repair functions of this module,
/// reused across calls to avoid reallocating the buffers.
#[derive(Default)]
pub struct Workspace {
    // Deleted parents introduced by unknown changes, consumed by
    // `repair_parents_of_deleted` (filled in `zombify`).
    unknown_parents: Vec<(
        Vertex<ChangeId>,
        Vertex<ChangeId>,
        Position<Option<Hash>>,
        EdgeFlags,
    )>,
    // Alive children introduced by unknown changes
    // (see `collect_unknown_children`).
    unknown: Vec<Edge>,
    // Non-pseudo parent edges collected while zombifying a vertex.
    parents: HashSet<Edge>,
    // Pseudo-edge candidates, drained by `delete_pseudo_edges`.
    pseudo: Vec<(Vertex<ChangeId>, Edge)>,
    // Vertices already repaired, skipped by `delete_pseudo_edges`.
    repaired: HashSet<Vertex<ChangeId>>,
    // Per-inode cache of alive graphs (see `Workspace::load_graph`).
    pub(crate) graphs: Graphs,
    // Parent pairs handed to `remove_redundant_parents` — presumably
    // pairs already covered by an edge; confirm in `crate::alive`.
    pub(crate) covered_parents: HashSet<(Vertex<ChangeId>, Vertex<ChangeId>)>,
    // Passed to `find_alive_up`; assumed to collect FOLDER vertices met
    // during the search — TODO confirm against `find_alive`.
    pub(crate) files: HashSet<Vertex<ChangeId>>,
}
/// Cache mapping each inode to its retrieved alive [`Graph`] and a map
/// from each vertex to its `VertexId` index inside that graph.
#[derive(Debug, Default)]
pub(crate) struct Graphs(
    pub HashMap<Position<Option<Hash>>, (Graph, HashMap<Vertex<ChangeId>, crate::alive::VertexId>)>,
);
impl Graphs {
    /// Borrow the cached (graph, vertex-id map) pair for `inode`, if it
    /// has been loaded.
    pub(crate) fn get(
        &self,
        inode: Position<Option<Hash>>,
    ) -> Option<&(Graph, HashMap<Vertex<ChangeId>, VertexId>)> {
        self.0.get(&inode)
    }
    /// After `vertex` has been split at position `mid`, update the
    /// vertex-id map cached for `inode`: both halves inherit the id of
    /// the original vertex.
    pub fn split(
        &mut self,
        inode: Position<Option<Hash>>,
        vertex: Vertex<ChangeId>,
        mid: ChangePosition,
    ) {
        let vids = match self.0.get_mut(&inode) {
            Some((_, vids)) => vids,
            None => return,
        };
        if let Some(vid) = vids.remove(&vertex) {
            let left = Vertex { end: mid, ..vertex };
            let right = Vertex {
                start: mid,
                ..vertex
            };
            vids.insert(left, vid);
            vids.insert(right, vid);
        }
    }
}
impl Workspace {
    /// Empty every buffer so the workspace can be reused for another
    /// change, keeping the allocated capacity.
    // NOTE(review): `self.files` is neither cleared nor asserted below;
    // confirm its lifecycle is managed by the callers.
    pub fn clear(&mut self) {
        self.unknown.clear();
        self.unknown_parents.clear();
        self.pseudo.clear();
        self.parents.clear();
        self.graphs.0.clear();
        self.repaired.clear();
        self.covered_parents.clear();
    }
    /// Debug helper: panic if any buffer still holds data, i.e. some
    /// repair pass did not fully drain the workspace.
    pub fn assert_empty(&self) {
        assert!(self.unknown.is_empty());
        assert!(self.unknown_parents.is_empty());
        assert!(self.pseudo.is_empty());
        assert!(self.parents.is_empty());
        assert!(self.graphs.0.is_empty());
        assert!(self.repaired.is_empty());
        assert!(self.covered_parents.is_empty());
    }
}
/// Collect into `ws.unknown` the alive children of `dest_vertex` that
/// were introduced by changes not recognized by `known`.
///
/// Children introduced by the root change are recorded as pseudo-edge
/// candidates (`ws.pseudo`) instead, and children whose introducing
/// change also deleted `dest_vertex` are skipped.
fn collect_unknown_children<T: GraphTxnT, K>(
    txn: &T,
    channel: &T::Graph,
    ws: &mut Workspace,
    dest_vertex: Vertex<ChangeId>,
    change_id: ChangeId,
    known: &mut K,
) -> Result<(), TxnErr<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    for v in iter_alive_children(txn, channel, dest_vertex)? {
        let v = v?;
        debug!(
            "collect_unknown_children dest_vertex = {:?}, v = {:?}",
            dest_vertex, v
        );
        if v.introduced_by == change_id || v.dest.change.is_root() {
            continue;
        }
        if v.introduced_by.is_root() {
            // Pseudo-edge: candidate for later deletion.
            ws.pseudo.push((dest_vertex, v));
            continue;
        }
        // Check whether the change introducing this child also deleted
        // `dest_vertex`; if it did, the child needs no repair.
        let mut not_del_by_change = true;
        for e in iter_adjacent(
            txn,
            channel,
            dest_vertex,
            EdgeFlags::PARENT | EdgeFlags::DELETED,
            EdgeFlags::all(),
        )? {
            let e = e?;
            if e.introduced_by == v.introduced_by {
                not_del_by_change = false;
                break;
            }
        }
        if not_del_by_change {
            // Unwrap ok: `v` is in the channel, so its `introduced_by`
            // has an external hash.
            let intro = txn.get_external(v.introduced_by)?.unwrap();
            if !known(intro) {
                ws.unknown.push(v);
            }
        }
    }
    Ok(())
}
/// For a deleted `dest_vertex`, reconnect each alive child introduced
/// by an unknown change: link the child's blocks together, then repair
/// its up context through `dest_vertex`.
fn repair_children_of_deleted<T: GraphMutTxnT, K>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    mut known: K,
    change_id: ChangeId,
    dest_vertex: Vertex<ChangeId>,
) -> Result<(), MissingError<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    trace!("repair_children_of_deleted {:?}", dest_vertex);
    collect_unknown_children(txn, channel, ws, dest_vertex, change_id, &mut known)?;
    // Move the collected edges out of the workspace so `ws` can keep
    // being borrowed mutably below; the buffer is put back at the end.
    let mut unknown = std::mem::take(&mut ws.unknown);
    debug!("dest_vertex = {:?}, unknown = {:?}", dest_vertex, unknown);
    for edge in unknown.drain(..) {
        let p = txn.find_block(channel, edge.dest)?;
        assert!(!edge.flag.contains(EdgeFlags::FOLDER));
        debug!("dest_vertex {:?}, p {:?}", dest_vertex, p);
        put_graph_with_rev(txn, channel, EdgeFlags::db(), dest_vertex, p, change_id)?;
        // Chain every subsequent block of the (possibly split) child.
        let mut u = p;
        while let Ok(v) = txn.find_block(channel, u.end_pos()) {
            if u != v {
                debug!("repair_children_of_deleted: {:?} -> {:?}", u, v);
                put_graph_with_rev(txn, channel, EdgeFlags::db(), u, v, change_id)?;
                u = v
            } else {
                break;
            }
        }
        // Repair the up context: through `p` directly if it is alive,
        // else through its alive descendants.
        if is_alive(txn, channel, p)? {
            repair_missing_up_context(txn, channel, ws, change_id, inode, dest_vertex, &[p])?;
        } else {
            let alive = find_alive_down(txn, channel, p)?;
            repair_missing_up_context(txn, channel, ws, change_id, inode, dest_vertex, &alive)?;
        }
    }
    // Give the (now empty) buffer back to reuse its allocation.
    ws.unknown = unknown;
    Ok(())
}
/// Delete the pseudo-edge candidates accumulated in `ws.pseudo` whose
/// endpoints are both dead (and the vertex was not just repaired).
pub(crate) fn delete_pseudo_edges<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
) -> Result<(), MissingError<T::GraphError>> {
    if ws.pseudo.is_empty() {
        debug!("no pseudo edges")
    }
    for (dest_vertex, mut e) in ws.pseudo.drain(..) {
        debug!("repair_context_deleted, deleting {:?} {:?}", dest_vertex, e);
        if !is_alive(txn, channel, dest_vertex)? && !ws.repaired.contains(&dest_vertex) {
            if e.flag.contains(EdgeFlags::PARENT) {
                // `e` points upwards: its other endpoint is a block end.
                let p = txn.find_block_end(channel, e.dest)?;
                if !is_alive(txn, channel, p)? {
                    debug!("delete {:?} {:?}", p, dest_vertex);
                    // Normalize to the forward direction for deletion.
                    e.flag -= EdgeFlags::PARENT;
                    del_graph_with_rev(txn, channel, e.flag, p, dest_vertex, e.introduced_by)?;
                }
            } else {
                let p = txn.find_block(channel, e.dest)?;
                if !is_alive(txn, channel, p)? {
                    debug!("delete (2) {:?} {:?}", dest_vertex, p);
                    del_graph_with_rev(txn, channel, e.flag, dest_vertex, p, e.introduced_by)?;
                }
            }
        }
    }
    Ok(())
}
/// Collect the zombie context of edge `n` (introduced by `change_id`):
/// for each block of the target interval, find non-deleted parent
/// edges introduced by changes unknown to `known`, and zombify the
/// corresponding vertices.
pub(crate) fn collect_zombie_context<T: GraphMutTxnT, K>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    n: &NewEdge<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
) -> Result<(), MissingError<T::GraphError>>
where
    K: FnMut(Hash) -> bool,
{
    // Folder edges are handled elsewhere.
    if n.flag.contains(EdgeFlags::FOLDER) {
        return Ok(());
    }
    // Walk the target interval block by block.
    let mut pos = internal_pos(txn, &n.to.start_pos(), change_id)?;
    let end_pos = internal_pos(txn, &n.to.end_pos(), change_id)?;
    // NOTE(review): `unknown_parents` is never cleared inside the loop,
    // so entries from earlier blocks are passed to `zombify` again on
    // every iteration — confirm this re-processing is intended.
    let mut unknown_parents = Vec::new();
    while let Ok(dest_vertex) = txn.find_block(&channel, pos) {
        debug!("collect zombie context: {:?}", dest_vertex);
        for v in iter_adjacent(
            txn,
            channel,
            dest_vertex,
            EdgeFlags::empty(),
            EdgeFlags::all() - EdgeFlags::DELETED,
        )? {
            let v = v?;
            if v.introduced_by == change_id || v.dest.change.is_root() {
                continue;
            }
            if v.introduced_by.is_root() {
                // Pseudo-edge: candidate for later deletion.
                ws.pseudo.push((dest_vertex, v));
                continue;
            }
            if v.flag.contains(EdgeFlags::PARENT) {
                // Unwrap ok, since `v` is in the channel.
                let intro = txn.get_external(v.introduced_by)?.unwrap();
                if !known(intro) {
                    debug!("unknown: {:?}", v);
                    unknown_parents.push((dest_vertex, v))
                }
            }
        }
        zombify(txn, channel, ws, change_id, inode, n.flag, &unknown_parents)?;
        if dest_vertex.end < end_pos.pos {
            pos.pos = dest_vertex.end
        } else {
            break;
        }
    }
    Ok(())
}
/// Zombify the parent vertex of each `(dest_vertex, edge)` pair: add
/// `DELETED | BLOCK` edges between all blocks of the (possibly split)
/// parent and from the parent's own non-pseudo parents, so the vertex
/// stays connected until the conflict is resolved. Also records the
/// pair in `ws.unknown_parents` for `repair_parents_of_deleted`.
fn zombify<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    unknown: &[(Vertex<ChangeId>, Edge)],
) -> Result<(), MissingError<T::GraphError>> {
    for &(dest_vertex, edge) in unknown.iter() {
        let p = txn.find_block_end(channel, edge.dest)?;
        ws.unknown_parents.push((dest_vertex, p, inode, edge.flag));
        // Preserve the FOLDER bit, if any, on the edges we add.
        let fold = flag & EdgeFlags::FOLDER;
        debug!("zombify p {:?}, dest_vertex {:?}", p, dest_vertex);
        // Walk backwards through all blocks of the split vertex,
        // chaining them with DELETED | BLOCK edges.
        let mut v = p;
        while let Ok(u) = txn.find_block_end(channel, v.start_pos()) {
            if u != v {
                debug!("u = {:?}, v = {:?}", u, v);
                put_graph_with_rev(
                    txn,
                    channel,
                    EdgeFlags::DELETED | EdgeFlags::BLOCK | fold,
                    u,
                    v,
                    change_id,
                )?;
                v = u
            } else {
                break;
            }
        }
        // Zombify the first chunk of the split.
        for parent in iter_adjacent(
            txn,
            channel,
            v,
            EdgeFlags::PARENT,
            EdgeFlags::all() - EdgeFlags::DELETED,
        )? {
            let parent = parent?;
            if !parent.flag.contains(EdgeFlags::PSEUDO) {
                ws.parents.insert(parent);
            }
        }
        debug!("ws.parents = {:?}", ws.parents);
        for parent in ws.parents.drain() {
            let parent_dest = txn.find_block_end(channel, parent.dest)?;
            let mut flag = EdgeFlags::DELETED | EdgeFlags::BLOCK;
            if parent.flag.contains(EdgeFlags::FOLDER) {
                flag |= EdgeFlags::FOLDER
            }
            put_graph_with_rev(txn, channel, flag, parent_dest, v, change_id)?;
        }
    }
    Ok(())
}
/// Repair the down context of the vertices whose deleted parents were
/// introduced by unknown changes (pairs collected by `zombify`).
pub(crate) fn repair_parents_of_deleted<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
) -> Result<(), MissingError<T::GraphError>> {
    debug!("repair_parents_of_deleted");
    // Move the buffer out so `ws` can keep being borrowed mutably in
    // the loop; the buffer is put back at the end.
    let mut unknown = std::mem::take(&mut ws.unknown_parents);
    for (dest_vertex, p, inode, flag) in unknown.drain(..) {
        if flag.contains(EdgeFlags::FOLDER) {
            repair_missing_down_context(txn, channel, ws, inode, dest_vertex, &[dest_vertex])?
        } else {
            repair_missing_down_context(txn, channel, ws, inode, dest_vertex, &[p])?
        }
    }
    // Give the (now empty) buffer back to reuse its allocation.
    ws.unknown_parents = unknown;
    Ok(())
}
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate pijul_macros;
#[macro_use]
extern crate thiserror;
#[macro_use]
extern crate lazy_static;
pub mod alive;
mod apply;
pub mod change;
pub mod changestore;
mod diff;
mod find_alive;
pub mod fs;
mod missing_context;
pub mod output;
pub mod path;
pub mod pristine;
pub mod record;
pub mod small_string;
mod unrecord;
mod vector2;
pub mod vertex_buffer;
pub mod working_copy;
#[cfg(test)]
mod tests;
/// Name of the directory holding Pijul's metadata at the root of a
/// repository.
pub const DOT_DIR: &str = ".pijul";
/// Top-level error type of this crate, covering working-copy tracking,
/// channel/change management and pristine integrity. Each variant's
/// `#[error]` attribute is its user-facing message.
#[derive(Debug, Error)]
pub enum Error {
    #[error("File {:?} already in repository", path)]
    FileAlreadyInRepo { path: String },
    #[error("File {:?} not in repository", path)]
    FileNotInRepo { path: String },
    #[error("Cannot record from unrecorded prefix {:?}", path)]
    UnrecordedPath { path: String },
    #[error("File {:?} not found", path)]
    FileNotFound { path: String },
    #[error("Change not on channel {:?}", change_id)]
    ChangeNotOnChannel { change_id: pristine::ChangeId },
    #[error("Change is depended upon {:?}", change_id)]
    ChangeIsDependedUpon { change_id: pristine::ChangeId },
    #[error("Change not found: {:?}", hash)]
    ChangeNotFound { hash: String },
    #[error("State not found: {:?}", state)]
    StateNotFound { state: pristine::Merkle },
    #[error("Change hash mismatch: {:?} != {:?}", claimed, computed)]
    ChangeHashMismatch {
        claimed: pristine::Hash,
        computed: pristine::Hash,
    },
    #[error("Contents hash mismatch: {:?} != {:?}", claimed, computed)]
    ContentsHashMismatch {
        claimed: pristine::Hash,
        computed: pristine::Hash,
    },
    #[error("Change already on channel: {:?}", hash)]
    ChangeAlreadyOnChannel { hash: pristine::Hash },
    #[error("Dependency missing: {:?}", hash)]
    DependencyMissing { hash: pristine::Hash },
    #[error("Channel name already taken: {:?}", name)]
    ChannelNameExists { name: String },
    #[error("Parse error: {:?}", s)]
    ParseError { s: String },
    #[error("Verify error, public key = {:?}", pk)]
    VerifyError { pk: String },
    #[error("Ambiguous hash prefix: {}", prefix)]
    AmbiguousHashPrefix { prefix: String },
    #[error("Inconsistent references in change")]
    InconsistentChange,
    #[error("Missing change contents: {}", hash)]
    MissingContents { hash: String },
    #[error("Wrong block: {:?}", block)]
    WrongBlock {
        block: pristine::Position<pristine::ChangeId>,
    },
    #[error("Pristine corrupt")]
    PristineCorrupt,
    #[error("Change version mismatch, please run `pijul upgrade`.")]
    VersionMismatch,
    #[error("The repository is locked by another process.")]
    PristineLocked,
}
pub use crate::apply::Workspace as ApplyWorkspace;
pub use crate::apply::{ApplyError, LocalApplyError};
pub use crate::fs::{FsError, WorkingCopyIterator};
pub use crate::output::{Archive, Conflict};
pub use crate::pristine::{
Base32, ChangeId, ChannelRef, ChannelTxnT, DepsTxnT, EdgeFlags, GraphTxnT, Hash, Inode, Merkle,
MutTxnT, OwnedPathId, RemoteRef, TreeTxnT, TxnT, Vertex,
};
pub use crate::record::Builder as RecordBuilder;
pub use crate::record::{Algorithm, InodeUpdate};
use std::collections::HashMap;
// Give the sanakirja-backed transaction types the extension methods of
// `MutTxnTExt` / `TxnTExt` (all methods have default implementations).
impl MutTxnTExt for pristine::sanakirja::MutTxn<()> {}
impl TxnTExt for pristine::sanakirja::MutTxn<()> {}
impl TxnTExt for pristine::sanakirja::Txn {}
/// Extension methods on mutable pristine transactions, providing a
/// convenient object-like API over the free functions of the `apply`,
/// `record`, `unrecord`, `output` and `fs` modules.
pub trait MutTxnTExt: pristine::MutTxnT {
    /// Apply the change `hash` to `channel`, reusing a caller-supplied
    /// workspace. Returns the new log position and channel state.
    fn apply_change_ws<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: crate::pristine::Hash,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(u64, pristine::Merkle), crate::apply::ApplyError<C::Error, Self::GraphError>> {
        crate::apply::apply_change_ws(changes, self, channel, hash, workspace)
    }
    /// Recursive variant of [`MutTxnTExt::apply_change_ws`] (final flag
    /// of the underlying call is `false`; see `crate::apply`).
    fn apply_change_rec_ws<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: crate::pristine::Hash,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(), crate::apply::ApplyError<C::Error, Self::GraphError>> {
        crate::apply::apply_change_rec_ws(changes, self, channel, hash, workspace, false)
    }
    /// Apply the change `hash` to `channel`. Returns the new log
    /// position and channel state.
    fn apply_change<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(u64, pristine::Merkle), crate::apply::ApplyError<C::Error, Self::GraphError>> {
        crate::apply::apply_change(changes, self, channel, hash)
    }
    /// Recursive variant of [`MutTxnTExt::apply_change`] (final flag of
    /// the underlying call is `false`; see `crate::apply`).
    fn apply_change_rec<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(), crate::apply::ApplyError<C::Error, Self::GraphError>> {
        crate::apply::apply_change_rec(changes, self, channel, hash, false)
    }
    /// Like [`MutTxnTExt::apply_change_rec`], but with the final flag
    /// of `crate::apply::apply_change_rec` set to `true` — presumably
    /// "dependencies only"; confirm against `crate::apply`.
    fn apply_deps_rec<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(), crate::apply::ApplyError<C::Error, Self::GraphError>> {
        crate::apply::apply_change_rec(changes, self, channel, hash, true)
    }
    /// Apply a change created locally, updating the inode tables from
    /// `inode_updates`, reusing a caller-supplied workspace.
    fn apply_local_change_ws(
        &mut self,
        channel: &mut pristine::ChannelRef<Self>,
        change: &change::Change,
        hash: pristine::Hash,
        inode_updates: &HashMap<usize, InodeUpdate>,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(u64, pristine::Merkle), crate::apply::LocalApplyError<Self::GraphError>> {
        crate::apply::apply_local_change_ws(self, channel, change, hash, inode_updates, workspace)
    }
    /// Apply a change created locally, updating the inode tables from
    /// `inode_updates`.
    fn apply_local_change(
        &mut self,
        channel: &mut crate::pristine::ChannelRef<Self>,
        change: &crate::change::Change,
        hash: pristine::Hash,
        inode_updates: &HashMap<usize, InodeUpdate>,
    ) -> Result<(u64, pristine::Merkle), crate::apply::LocalApplyError<Self::GraphError>> {
        crate::apply::apply_local_change(self, channel, change, hash, inode_updates)
    }
    /// Record into `builder` the differences between `channel` and
    /// `working_copy` under `prefix`, using `diff_algorithm`.
    fn record<W: crate::working_copy::WorkingCopy, C: crate::changestore::ChangeStore>(
        &mut self,
        builder: &mut RecordBuilder,
        diff_algorithm: Algorithm,
        channel: &mut pristine::ChannelRef<Self>,
        working_copy: &mut W,
        changes: &C,
        prefix: &str,
    ) -> Result<(), crate::record::RecordError<C::Error, W::Error, Self::GraphError>>
    where
        <W as crate::working_copy::WorkingCopy>::Error: 'static,
    {
        builder.record(
            self,
            diff_algorithm,
            &mut channel.borrow_mut(),
            working_copy,
            changes,
            prefix,
        )
    }
    /// Like [`MutTxnTExt::record`], with a fresh builder; returns the
    /// finished recording directly.
    fn record_all<W: crate::working_copy::WorkingCopy, C: crate::changestore::ChangeStore>(
        &mut self,
        diff_algorithm: Algorithm,
        channel: &mut pristine::ChannelRef<Self>,
        working_copy: &mut W,
        changes: &C,
        prefix: &str,
    ) -> Result<record::Recorded, crate::record::RecordError<C::Error, W::Error, Self::GraphError>>
    where
        <W as crate::working_copy::WorkingCopy>::Error: 'static,
    {
        let mut builder = crate::record::Builder::new();
        builder.record(
            self,
            diff_algorithm,
            &mut channel.borrow_mut(),
            working_copy,
            changes,
            prefix,
        )?;
        Ok(builder.finish())
    }
    /// Turn a recording into a change, save it to `changestore`, apply
    /// it to `channel`, and return its hash.
    fn apply_recorded<C: changestore::ChangeStore>(
        &mut self,
        channel: &mut pristine::ChannelRef<Self>,
        recorded: record::Recorded,
        changestore: &C,
    ) -> Result<pristine::Hash, crate::apply::ApplyError<C::Error, Self::GraphError>> {
        // Hash the contents before they are moved into the change.
        let contents_hash = {
            let mut hasher = pristine::Hasher::default();
            hasher.update(&recorded.contents);
            hasher.finish()
        };
        let change = change::LocalChange {
            offsets: change::Offsets::default(),
            hashed: change::Hashed {
                version: change::VERSION,
                contents_hash,
                // Globalize each recorded action (turn internal ids
                // into external references).
                changes: recorded
                    .actions
                    .into_iter()
                    .map(|rec| rec.globalize(self).unwrap())
                    .collect(),
                metadata: Vec::new(),
                dependencies: Vec::new(),
                extra_known: Vec::new(),
                header: change::ChangeHeader::default(),
            },
            unhashed: None,
            contents: recorded.contents,
        };
        let hash = changestore
            .save_change(&change)
            .map_err(apply::ApplyError::Changestore)?;
        apply::apply_local_change(self, channel, &change, hash, &recorded.updatables)?;
        Ok(hash)
    }
    /// Unrecord (roll back) the change `hash` on `channel`.
    fn unrecord<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: &pristine::Hash,
    ) -> Result<bool, unrecord::UnrecordError<C::Error, Self::GraphError>> {
        unrecord::unrecord(self, channel, changes, hash)
    }
    /// Output `channel` into the working copy `repo` under `prefix`,
    /// without recording a pending change first.
    fn output_repository_no_pending<R: working_copy::WorkingCopy, C: changestore::ChangeStore>(
        &mut self,
        repo: &mut R,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        done: &mut HashMap<pristine::Position<ChangeId>, Vertex<ChangeId>>,
        prefix: &str,
        output_name_conflicts: bool,
        if_modified_since: Option<std::time::SystemTime>,
    ) -> Result<Vec<output::Conflict>, output::OutputError<C::Error, Self::GraphError, R::Error>>
    {
        output::output_repository_no_pending(
            repo,
            changes,
            self,
            channel,
            done,
            prefix,
            output_name_conflicts,
            if_modified_since,
        )
    }
    /// Register a file in the working copy, where the file is given by
    /// its path from the root of the repository, where the components of
    /// the path are separated by `/` (example path: `a/b/c`).
    fn add_file(&mut self, path: &str) -> Result<(), fs::FsError<Self::GraphError>> {
        fs::add_inode(self, None, path, false)
    }
    /// Register a directory in the working copy, where the directory is
    /// given by its path from the root of the repository, where the
    /// components of the path are separated by `/` (example path:
    /// `a/b/c`).
    fn add_dir(&mut self, path: &str) -> Result<(), fs::FsError<Self::GraphError>> {
        fs::add_inode(self, None, path, true)
    }
    /// Register a file or directory in the working copy, given by its
    /// path from the root of the repository, where the components of the
    /// path are separated by `/` (example path: `a/b/c`).
    fn add(&mut self, path: &str, is_dir: bool) -> Result<(), fs::FsError<Self::GraphError>> {
        fs::add_inode(self, None, path, is_dir)
    }
    /// Rename tracked path `a` to `b` (delegates to `fs::move_file`).
    fn move_file(&mut self, a: &str, b: &str) -> Result<(), fs::FsError<Self::GraphError>> {
        fs::move_file(self, a, b)
    }
    /// Stop tracking path `a` (delegates to `fs::remove_file`).
    fn remove_file(&mut self, a: &str) -> Result<(), fs::FsError<Self::GraphError>> {
        fs::remove_file(self, a)
    }
    /// Create a channel named `name` to be loaded from a dump; fails
    /// with `ChannelNameExists` when a channel of that name exists.
    #[cfg(feature = "dump")]
    fn channel_from_dump<'a>(
        &'a mut self,
        name: &str,
    ) -> Result<
        pristine::channel_dump::ChannelFromDump<'a, Self>,
        pristine::channel_dump::ChannelDumpError<Self::GraphError>,
    > {
        use pristine::channel_dump::*;
        if self.load_channel(name)?.is_none() {
            let channel = pristine::MutTxnT::open_or_create_channel(self, name)
                .map_err(ChannelDumpError::Txn)?;
            Ok(ChannelFromDump::new(self, channel))
        } else {
            Err(ChannelDumpError::ChannelNameExists(name.to_string()))
        }
    }
    /// Archive the whole channel at state `state` (no path prefix); see
    /// [`MutTxnTExt::archive_prefix_with_state`] for the semantics.
    fn archive_with_state<P: changestore::ChangeStore, A: Archive>(
        &mut self,
        changes: &P,
        channel: &mut pristine::ChannelRef<Self>,
        state: pristine::Merkle,
        extra: &[pristine::Hash],
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, output::ArchiveError<P::Error, Self::GraphError, A::Error>>
    {
        self.archive_prefix_with_state(
            changes,
            channel,
            state,
            extra,
            &mut std::iter::empty(),
            arch,
        )
    }
    /// Warning: this method unrecords changes until finding the
    /// state. If this is not wanted, please fork the channel before
    /// calling.
    fn archive_prefix_with_state<
        'a,
        P: changestore::ChangeStore,
        A: Archive,
        I: Iterator<Item = &'a str>,
    >(
        &mut self,
        changes: &P,
        channel: &mut pristine::ChannelRef<Self>,
        state: pristine::Merkle,
        extra: &[pristine::Hash],
        prefix: &mut I,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, output::ArchiveError<P::Error, Self::GraphError, A::Error>>
    {
        // Walk the log backwards, collecting the changes applied after
        // `state`.
        let mut unrecord = Vec::new();
        let mut found = false;
        for x in pristine::changeid_rev_log(self, &channel.borrow(), None)? {
            let (_, (h, m)) = x?;
            if m == state {
                found = true;
                break;
            } else {
                unrecord.push(h)
            }
        }
        debug!("unrecord = {:?}", unrecord);
        if found {
            // Roll the channel back to `state`, apply the `extra`
            // changes, then archive the result.
            for h in unrecord.drain(..) {
                let h = self.get_external(h)?.unwrap();
                self.unrecord(changes, channel, &h)?;
            }
            for app in extra.iter() {
                self.apply_change_rec(changes, channel, *app)?
            }
            output::archive(changes, self, channel, prefix, arch)
        } else {
            Err(output::ArchiveError::StateNotFound { state })
        }
    }
}
/// Extension trait over [`pristine::TxnT`] providing convenience
/// read-only queries: working-copy lookups, change-log iteration,
/// path resolution, and archiving.
pub trait TxnTExt: pristine::TxnT {
    /// Returns whether `inode` is a directory in the working copy.
    fn is_directory(&self, inode: pristine::Inode) -> Result<bool, Self::TreeError> {
        fs::is_directory(self, inode).map_err(|e| e.0)
    }
    /// Returns whether `path` is registered in the working copy.
    fn is_tracked(&self, path: &str) -> Result<bool, Self::TreeError> {
        fs::is_tracked(self, path).map_err(|e| e.0)
    }
    /// Iterates over the working copy, starting at the root inode.
    fn iter_working_copy(&self) -> WorkingCopyIterator<Self> {
        fs::iter_working_copy(self, pristine::Inode::ROOT)
    }
    /// Returns the position of `hash` in `channel`'s log, or `None`
    /// if that change is not on the channel.
    fn has_change(
        &self,
        channel: &pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<Option<u64>, Self::GraphError> {
        if let Some(cid) = pristine::GraphTxnT::get_internal(self, hash).map_err(|e| e.0)? {
            self.get_changeset(Self::changes(&channel.borrow()), cid)
                .map_err(|e| e.0)
        } else {
            Ok(None)
        }
    }
    /// Returns whether vertex `a` is alive in `channel`'s graph.
    fn is_alive(
        &self,
        channel: &Self::Channel,
        a: pristine::Vertex<pristine::ChangeId>,
    ) -> Result<bool, Self::GraphError> {
        pristine::is_alive(self, Self::graph(channel), a).map_err(|e| e.0)
    }
    /// Returns the current Merkle state of `channel`.
    fn current_state(&self, channel: &Self::Channel) -> Result<pristine::Merkle, Self::GraphError> {
        pristine::current_state(self, channel).map_err(|e| e.0)
    }
    /// Iterates `channel`'s change log, oldest first, starting at
    /// position `from`, yielding external hashes.
    fn log<'channel, 'txn>(
        &'txn self,
        channel: &'channel Self::Channel,
        from: u64,
    ) -> Result<Log<'txn, Self>, Self::GraphError> {
        Ok(Log {
            txn: self,
            iter: pristine::changeid_log(self, channel, from).map_err(|e| e.0)?,
        })
    }
    /// Iterates the changes touching the file at `pos`, starting at
    /// log position `from`, oldest first.
    fn log_for_path<'channel, 'txn>(
        &'txn self,
        channel: &'channel Self::Channel,
        pos: pristine::Position<pristine::ChangeId>,
        from: u64,
    ) -> Result<pristine::PathChangeset<'channel, 'txn, Self>, Self::GraphError> {
        pristine::log_for_path(self, channel, pos, from).map_err(|e| e.0)
    }
    /// Same as [`Self::log_for_path`], newest first.
    fn rev_log_for_path<'channel, 'txn>(
        &'txn self,
        channel: &'channel Self::Channel,
        pos: pristine::Position<pristine::ChangeId>,
        from: u64,
    ) -> Result<pristine::RevPathChangeset<'channel, 'txn, Self>, Self::DepsError> {
        pristine::rev_log_for_path(self, channel, pos, from).map_err(|e| e.0)
    }
    /// Iterates `channel`'s change log in reverse (newest first),
    /// starting at `from` (or the end if `None`).
    fn reverse_log<'channel, 'txn>(
        &'txn self,
        channel: &'channel Self::Channel,
        from: Option<u64>,
    ) -> Result<RevLog<'txn, Self>, Self::GraphError> {
        Ok(RevLog {
            txn: self,
            iter: pristine::changeid_rev_log(self, channel, from).map_err(|e| e.0)?,
        })
    }
    /// Like [`Self::reverse_log`], but yields internal `ChangeId`s
    /// instead of external hashes.
    fn changeid_reverse_log<'txn>(
        &'txn self,
        channel: &Self::Channel,
        from: Option<u64>,
    ) -> Result<
        pristine::RevCursor<
            Self,
            &'txn Self,
            Self::RevchangesetCursor,
            u64,
            (pristine::ChangeId, pristine::Merkle),
        >,
        Self::GraphError,
    > {
        pristine::changeid_rev_log(self, channel, from).map_err(|e| e.0)
    }
    /// Returns the hash and Merkle state at log position `n`, or
    /// `None` if the log is shorter than `n`.
    fn get_changes(
        &self,
        channel: &pristine::ChannelRef<Self>,
        n: u64,
    ) -> Result<Option<(pristine::Hash, pristine::Merkle)>, Self::GraphError> {
        if let Some((h, m)) = self
            .get_revchangeset(Self::rev_changes(&channel.borrow()), n)
            .map_err(|e| e.0)?
        {
            Ok(Some((self.get_external(h).map_err(|e| e.0)?.unwrap(), m)))
        } else {
            Ok(None)
        }
    }
    /// Returns the log position of change `h` on `channel`, if any.
    fn get_revchanges(
        &self,
        channel: &pristine::ChannelRef<Self>,
        h: pristine::Hash,
    ) -> Result<Option<u64>, Self::GraphError> {
        if let Some(h) = pristine::GraphTxnT::get_internal(self, h).map_err(|e| e.0)? {
            self.get_changeset(Self::changes(&channel.borrow()), h)
                .map_err(|e| e.0)
        } else {
            Ok(None)
        }
    }
    /// Returns an iterator over the files touched by change `h`, or
    /// `None` if `h` has no internal id in this pristine.
    fn touched_files(&self, h: pristine::Hash) -> Result<Option<Touched<Self>>, Self::DepsError> {
        if let Some(id) = pristine::GraphTxnT::get_internal(self, h).map_err(|e| e.0)? {
            Ok(Some(Touched {
                txn: self,
                iter: self.iter_rev_touched_files(id, None).map_err(|e| e.0)?,
                id,
            }))
        } else {
            Ok(None)
        }
    }
    /// Finds a path from the root to `position`, preferring the
    /// oldest name in case of a conflict. The `bool` in the result
    /// reflects whether the whole path is alive.
    fn find_oldest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        position: pristine::Position<pristine::Hash>,
    ) -> Result<(String, bool), output::FileError<C::Error, Self::GraphError>> {
        let position = pristine::Position {
            change: pristine::GraphTxnT::get_internal(self, position.change)?.unwrap(),
            pos: position.pos,
        };
        fs::find_path(changes, self, &channel.borrow(), false, position)
    }
    /// Same as [`Self::find_oldest_path`], preferring the youngest
    /// name in case of a conflict.
    fn find_youngest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        position: pristine::Position<pristine::Hash>,
    ) -> Result<(String, bool), output::FileError<C::Error, Self::GraphError>> {
        let position = pristine::Position {
            change: pristine::GraphTxnT::get_internal(self, position.change)?.unwrap(),
            pos: position.pos,
        };
        fs::find_path(changes, self, &channel.borrow(), true, position)
    }
    /// Resolves `path` down the graph to an inode position, following
    /// the oldest name at each level. The `bool` is `true` if the
    /// resolution was ambiguous (name conflict).
    fn follow_oldest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        path: &str,
    ) -> Result<
        (pristine::Position<pristine::ChangeId>, bool),
        fs::FsErrorC<C::Error, Self::GraphError>,
    > {
        fs::follow_oldest_path(changes, self, &channel.borrow(), path)
    }
    /// Outputs the file rooted at `v0` into `out`, resolving its
    /// alive graph first.
    fn output_file<C: changestore::ChangeStore, V: vertex_buffer::VertexBuffer>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        v0: pristine::Position<pristine::ChangeId>,
        out: &mut V,
    ) -> Result<(), output::FileError<C::Error, Self::GraphError>> {
        let mut forward = Vec::new();
        let channel = channel.borrow();
        let mut graph = alive::retrieve(self, Self::graph(&channel), v0)?;
        alive::output_graph(changes, self, &channel, out, &mut graph, &mut forward)?;
        Ok(())
    }
    /// Archives the whole channel into `arch`, returning the list of
    /// unresolved conflicts.
    fn archive<C: changestore::ChangeStore, A: Archive>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, output::ArchiveError<C::Error, Self::GraphError, A::Error>>
    {
        output::archive(changes, self, channel, &mut std::iter::empty(), arch)
    }
    /// Archives only the paths under `prefix` into `arch`.
    fn archive_prefix<'a, C: changestore::ChangeStore, I: Iterator<Item = &'a str>, A: Archive>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        prefix: &mut I,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, output::ArchiveError<C::Error, Self::GraphError, A::Error>>
    {
        output::archive(changes, self, channel, prefix, arch)
    }
    /// Iterates the edges adjacent to `key` whose flags fall between
    /// `min_flag` and `max_flag`.
    fn iter_adjacent<'txn>(
        &'txn self,
        graph: &'txn Self::Channel,
        key: Vertex<pristine::ChangeId>,
        min_flag: pristine::EdgeFlags,
        max_flag: pristine::EdgeFlags,
    ) -> Result<pristine::AdjacentIterator<'txn, Self>, pristine::TxnErr<Self::GraphError>> {
        pristine::iter_adjacent(self, Self::graph(graph), key, min_flag, max_flag)
    }
}
/// Iterator over a channel's change log, oldest first, yielding
/// `(position, (hash, merkle))` pairs with external hashes.
pub struct Log<'txn, T: pristine::ChannelTxnT> {
    // Transaction used to translate internal change ids to hashes.
    txn: &'txn T,
    // Underlying cursor over the channel's changeset table.
    iter: pristine::Cursor<
        T,
        &'txn T,
        T::RevchangesetCursor,
        u64,
        (pristine::ChangeId, pristine::Merkle),
    >,
}
impl<'txn, T: pristine::ChannelTxnT> Iterator for Log<'txn, T> {
    type Item = Result<(u64, (pristine::Hash, pristine::Merkle)), T::GraphError>;
    /// Advances the underlying cursor and converts the internal
    /// change id of each entry to its external hash.
    fn next(&mut self) -> Option<Self::Item> {
        let entry = self.iter.next()?;
        Some(match entry {
            Ok((n, (internal, merkle))) => match self.txn.get_external(internal) {
                Ok(Some(hash)) => Ok((n, (hash, merkle))),
                // A change registered in the log must have a hash.
                Ok(None) => panic!("Unknown change {:?}", internal),
                Err(pristine::TxnErr(e)) => Err(e),
            },
            Err(e) => Err(e.0),
        })
    }
}
/// Iterator over a channel's change log, newest first, yielding
/// `(position, (hash, merkle))` pairs with external hashes.
pub struct RevLog<'txn, T: pristine::ChannelTxnT> {
    // Transaction used to translate internal change ids to hashes.
    txn: &'txn T,
    // Underlying reverse cursor over the channel's changeset table.
    iter: pristine::RevCursor<
        T,
        &'txn T,
        T::RevchangesetCursor,
        u64,
        (pristine::ChangeId, pristine::Merkle),
    >,
}
impl<'txn, T: pristine::ChannelTxnT> Iterator for RevLog<'txn, T> {
    type Item = Result<(u64, (pristine::Hash, pristine::Merkle)), T::GraphError>;
    /// Advances the reverse cursor and converts each internal change
    /// id to its external hash.
    fn next(&mut self) -> Option<Self::Item> {
        let (n, (internal, merkle)) = match self.iter.next()? {
            Ok(entry) => entry,
            Err(e) => return Some(Err(e.0)),
        };
        match self.txn.get_external(internal) {
            Ok(Some(hash)) => Some(Ok((n, (hash, merkle)))),
            // A change registered in the log must have a hash.
            Ok(None) => panic!("Unknown change {:?}", internal),
            Err(e) => Some(Err(e.0)),
        }
    }
}
/// Iterator over the file positions touched by one change.
///
/// Constructed by [`TxnTExt::touched_files`].
pub struct Touched<'txn, T: pristine::DepsTxnT> {
    // Transaction used to translate internal change ids to hashes.
    txn: &'txn T,
    // Cursor over the (change, touched position) table, positioned at `id`.
    iter: pristine::Cursor<
        T,
        &'txn T,
        T::Rev_touched_filesCursor,
        pristine::ChangeId,
        pristine::Position<pristine::ChangeId>,
    >,
    // The change whose touched files are being enumerated.
    id: pristine::ChangeId,
}
impl<
        'txn,
        T: pristine::DepsTxnT + pristine::GraphTxnT<GraphError = <T as pristine::DepsTxnT>::DepsError>,
    > Iterator for Touched<'txn, T>
{
    type Item = Result<pristine::Position<pristine::Hash>, T::DepsError>;
    /// Yields the positions touched by `self.id`, translating internal
    /// change ids into external hashes.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(entry) = self.iter.next() {
            let (cid, file) = match entry {
                Ok(kv) => kv,
                Err(e) => return Some(Err(e.0)),
            };
            if cid < self.id {
                // Not yet at our change: keep scanning forward.
                continue;
            }
            if cid > self.id {
                // Past our change: the table is sorted, we're done.
                return None;
            }
            let change = match self.txn.get_external(file.change) {
                Ok(ext) => ext.unwrap(),
                Err(e) => return Some(Err(e.0)),
            };
            return Some(Ok(pristine::Position {
                change,
                pos: file.pos,
            }));
        }
        None
    }
}
#[doc(hidden)]
#[derive(Debug, Default, Clone)]
/// Wall-clock profiling accumulators for the main phases of the
/// library (alive-graph handling, record, apply, …). Accessed
/// globally through [`TIMERS`].
pub struct Timers {
    pub alive_output: std::time::Duration,
    pub alive_graph: std::time::Duration,
    pub alive_retrieve: std::time::Duration,
    pub alive_contents: std::time::Duration,
    pub alive_write: std::time::Duration,
    pub record: std::time::Duration,
    pub apply: std::time::Duration,
    pub repair_context: std::time::Duration,
    pub check_cyclic_paths: std::time::Duration,
    pub find_alive: std::time::Duration,
}
use std::sync::Mutex;
lazy_static! {
    /// Global accumulator for the profiling timers in [`Timers`].
    // `Timers` derives `Default` and `Duration::default()` is the zero
    // duration, so the previous field-by-field `from_secs(0)`
    // initialization was redundant.
    pub static ref TIMERS: Mutex<Timers> = Mutex::new(Timers::default());
}
#[doc(hidden)]
pub fn reset_timers() {
    // Swap in a freshly-zeroed struct while holding the lock.
    let mut timers = TIMERS.lock().unwrap();
    *timers = Timers::default();
}
#[doc(hidden)]
pub fn get_timers() -> Timers {
    // Clone under the lock so the caller gets a consistent snapshot.
    let timers = TIMERS.lock().unwrap();
    timers.clone()
}
//! Manipulating the internal representation of files and directories
//! tracked by Pijul (i.e. adding files, removing files, getting file
//! names…).
//!
//! Pijul tracks files in two different ways: one is the *graph*,
//! where changes are applied. The other one is the *working copy*,
//! where some filesystem changes are not yet recorded. The purpose of
//! this double representation is to be able to compare a file from
//! the graph with its version in the working copy, even if its name
//! has changed in the working copy.
//!
//! The functions of this module work at exactly one of these two
//! levels. Changing the graph is done by recording and applying a
//! change, and changing the working copy is done either by some of the
//! functions in this module, or by outputting the graph to the
//! working copy (using the [output module](../output/index.html)).
use crate::changestore::*;
use crate::pristine::*;
use crate::small_string::*;
use std::collections::HashSet;
use std::iter::Iterator;
/// Errors returned by the file-manipulation functions of this module.
#[derive(Debug, Error)]
pub enum FsError<T: std::error::Error + 'static> {
    /// The path is not registered in the working copy.
    #[error(transparent)]
    NotFound(#[from] FsNotFound),
    /// Attempted to add a path that is already tracked.
    #[error("File already in repository: {0}")]
    AlreadyInRepo(String),
    /// An underlying transaction error.
    #[error(transparent)]
    Txn(T),
}
/// Like [`FsError`], for operations that also read from a change
/// store and can therefore fail with a changestore error `C`.
#[derive(Debug, Error)]
pub enum FsErrorC<C: std::error::Error + 'static, T: std::error::Error + 'static> {
    /// An underlying transaction error.
    #[error(transparent)]
    Txn(T),
    /// An error from the change store.
    #[error(transparent)]
    Changestore(C),
    /// The path is not registered in the working copy.
    #[error(transparent)]
    NotFound(#[from] FsNotFound),
}
/// Error raised when a path cannot be resolved in the working copy
/// or in the graph; carries the offending path.
#[derive(Debug, Error)]
#[error("Path not found: {0}")]
pub struct FsNotFound(String);
impl<T: std::error::Error + 'static> std::convert::From<TxnErr<T>> for FsError<T> {
fn from(e: TxnErr<T>) -> Self {
FsError::Txn(e.0)
}
}
impl<C: std::error::Error + 'static, T: std::error::Error + 'static> std::convert::From<TxnErr<T>>
for FsErrorC<C, T>
{
fn from(e: TxnErr<T>) -> Self {
FsErrorC::Txn(e.0)
}
}
/// Draws random inodes until one is found that is not yet present in
/// the reverse tree table, and returns it.
pub(crate) fn create_new_inode<T: TreeMutTxnT>(txn: &mut T) -> Result<Inode, TxnErr<T::TreeError>> {
    loop {
        let candidate = Inode::random();
        if txn.get_revtree(candidate, None)?.is_none() {
            return Ok(candidate);
        }
    }
}
/// Test whether `inode` is the inode of a directory (as opposed to a
/// file).
pub fn is_directory<T: TreeTxnT>(txn: &T, inode: Inode) -> Result<bool, TxnErr<T::TreeError>> {
    // The root is always a directory.
    if inode == Inode::ROOT {
        return Ok(true);
    }
    // Directories have a marker entry (inode, "") in the tree table.
    let pathid = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    for entry in txn.iter_tree(pathid.clone(), None)? {
        let (pid, _) = entry?;
        if pid > pathid {
            // Past the key range: no marker entry exists.
            break;
        }
        if pid == pathid {
            return Ok(true);
        }
        // pid < pathid: not at the key yet, keep scanning.
    }
    Ok(false)
}
/// Walks `path` component by component from the root, returning the
/// inode of the deepest tracked ancestor together with a peekable
/// iterator over the components that were NOT found in the tree.
fn closest_in_repo_ancestor<'a, T: TreeTxnT>(
    txn: &T,
    path: &'a str,
) -> Result<(Inode, std::iter::Peekable<crate::path::Components<'a>>), TxnErr<T::TreeError>> {
    let mut components = crate::path::components(path).peekable();
    // (parent inode, basename) key used to look up each component;
    // starts at the repository root.
    let mut fileid = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::new(),
    };
    while let Some(c) = components.peek() {
        trace!("component {:?}", c);
        fileid.basename.clone_from_str(c);
        trace!("{:?}", fileid);
        let mut found = false;
        // The cursor starts at the closest key >= fileid: skip smaller
        // keys, stop past larger ones, record an exact match.
        for x in txn.iter_tree(fileid.clone(), None)? {
            let (id, inode) = x?;
            trace!(
                "id = {:?}, inode = {:?}, cmp = {:?}",
                id,
                inode,
                id > fileid
            );
            if id > fileid {
                break;
            } else if id < fileid {
                continue;
            }
            found = true;
            // Descend: the matched child becomes the next parent.
            fileid.parent_inode = inode;
            break;
        }
        if found {
            // Component resolved; consume it and continue descending.
            components.next();
        } else {
            break;
        }
    }
    Ok((fileid.parent_inode, components))
}
/// Find the inode corresponding to that path, if it exists.
pub fn find_inode<T: TreeTxnT>(txn: &T, path: &str) -> Result<Inode, FsError<T::TreeError>> {
    debug!("find_inode");
    let (inode, mut rest) = closest_in_repo_ancestor(txn, path)?;
    debug!("/find_inode");
    // The path exists only if every component was resolved.
    match rest.next() {
        None => Ok(inode),
        Some(c) => {
            debug!("c = {:?}", c);
            Err(FsNotFound(path.to_string()).into())
        }
    }
}
/// Returns whether a path is registered in the working copy.
pub fn is_tracked<T: TreeTxnT>(txn: &T, path: &str) -> Result<bool, TxnErr<T::TreeError>> {
    debug!("is_tracked {:?}", path);
    let (_, mut remaining) = closest_in_repo_ancestor(txn, path)?;
    debug!("/is_tracked {:?}", path);
    // Tracked iff every path component resolved to a tree entry.
    let fully_resolved = remaining.next().is_none();
    Ok(fully_resolved)
}
/// Find the filename leading from the root to ~inode~.
pub fn inode_filename<T: TreeTxnT>(
    txn: &T,
    inode: Inode,
) -> Result<Option<String>, TxnErr<T::TreeError>> {
    // Collect basenames from `inode` up to the root.
    let mut names = Vec::new();
    let mut cursor = inode;
    loop {
        if let Some(entry) = txn.get_revtree(cursor, None)? {
            names.push(entry.basename);
            cursor = entry.parent_inode;
            if cursor == Inode::ROOT {
                break;
            }
        } else {
            // Not registered in the tree at all.
            debug!("filename_of_inode: not in tree");
            return Ok(None);
        }
    }
    // The components were collected leaf-first; reverse and join.
    let path = names
        .iter()
        .rev()
        .map(|name| name.as_str())
        .collect::<Vec<&str>>()
        .join("/");
    Ok(Some(path))
}
/// Record the information that `parent_inode` is now a parent of
/// file `filename`, and `filename` has inode `child_inode`.
///
/// If the name already maps to a different inode and `child_inode`
/// is supplied, the old binding (and its graph association) is
/// replaced; if `child_inode` is `None`, this is an error. If the
/// name is free, a new inode is allocated when needed, and for
/// directories a marker entry (child, "") is added.
fn make_new_child<T: TreeMutTxnT>(
    txn: &mut T,
    parent_inode: Inode,
    filename: &str,
    is_dir: bool,
    child_inode: Option<Inode>,
) -> Result<Inode, FsError<T::TreeError>> {
    let parent_id = OwnedPathId {
        parent_inode,
        basename: SmallString::from_str(filename),
    };
    if let Some(inode) = txn.get_tree(parent_id.as_file_id(), None)? {
        debug!("inode = {:?}", inode);
        if let Some(child) = child_inode {
            if child == inode {
                // No need to do anything.
                Ok(inode)
            } else {
                // Rebind the name to `child`: drop the old tree entry
                // and any inode→vertex association of the old inode.
                del_tree_with_rev(txn, parent_id.as_file_id(), inode)?;
                if let Some(vertex) = txn.get_inodes(inode, None)? {
                    del_inodes_with_rev(txn, inode, vertex)?;
                }
                put_tree_with_rev(txn, parent_id.as_file_id(), child)?;
                Ok(child)
            }
        } else {
            // No explicit inode requested and the name is taken.
            Err(FsError::AlreadyInRepo(filename.to_string()))
        }
    } else {
        // Name is free: use the given inode or allocate a fresh one.
        let child_inode = match child_inode {
            None => create_new_inode(txn)?,
            Some(i) => i,
        };
        debug!("make_new_child: {:?} {:?}", parent_id, child_inode);
        put_tree_with_rev(txn, parent_id.as_file_id(), child_inode)?;
        if is_dir {
            // Directories carry a (child, "") marker entry; see
            // `is_directory`.
            let dir_id = OwnedPathId {
                parent_inode: child_inode,
                basename: SmallString::new(),
            };
            txn.put_tree(dir_id.as_file_id(), child_inode)?;
        };
        Ok(child_inode)
    }
}
/// Registers `path` in the working copy under `inode` (allocating a
/// fresh inode when `None`), creating any missing intermediate
/// directories along the way.
///
/// NOTE(review): a path with no parent (per `crate::path::parent`)
/// is silently a no-op here — presumably the root; confirm against
/// callers.
pub(crate) fn add_inode<T: TreeMutTxnT>(
    txn: &mut T,
    inode: Option<Inode>,
    path: &str,
    is_dir: bool,
) -> Result<(), FsError<T::TreeError>> {
    debug!("add_inode");
    if let Some(parent) = crate::path::parent(path) {
        let (current_inode, unrecorded_path) = closest_in_repo_ancestor(txn, parent)?;
        let mut current_inode = current_inode;
        debug!("add_inode: closest = {:?}", current_inode);
        // Create every untracked intermediate component as a directory.
        for c in unrecorded_path {
            debug!("unrecorded: {:?}", c);
            current_inode = make_new_child(txn, current_inode, c, true, None)?;
        }
        let file_name = crate::path::file_name(path).unwrap();
        debug!("add_inode: file_name = {:?}", file_name);
        // Finally attach the leaf itself, with the requested inode.
        make_new_child(txn, current_inode, file_name, is_dir, inode)?;
    }
    Ok(())
}
/// Move an inode (file or directory) from `origin` to `destination`,
/// (in the working copy).
///
/// **Warning**: both `origin` and `destination` must be full paths to
/// the inode being moved (unlike e.g. in the `mv` Unix command).
pub fn move_file<T: TreeMutTxnT>(
    txn: &mut T,
    origin: &str,
    destination: &str,
) -> Result<(), FsError<T::TreeError>> {
    debug!("move_file: {},{}", origin, destination);
    // Resolve the source path first, then delegate to the by-inode form.
    let inode = find_inode(txn, origin)?;
    move_file_by_inode(txn, inode, destination)?;
    Ok(())
}
/// Moves the file or directory with inode `inode` to the full path
/// `destination` in the working copy, by deleting its current tree
/// binding and re-adding it at the new location.
pub fn move_file_by_inode<T: TreeMutTxnT>(
    txn: &mut T,
    inode: Inode,
    destination: &str,
) -> Result<(), FsError<T::TreeError>> {
    // NOTE(review): unwrap assumes `inode` is registered in the
    // reverse tree table — confirm callers always guarantee this.
    let fileref = txn.get_revtree(inode, None)?.unwrap().to_owned();
    debug!("fileref = {:?}", fileref);
    del_tree_with_rev(txn, fileref.as_file_id(), inode)?;
    debug!("inode={:?} destination={}", inode, destination);
    // Directories are recognized by their (inode, "") marker entry.
    let is_dir = txn
        .get_tree(
            (OwnedPathId {
                parent_inode: inode,
                basename: SmallString::new(),
            })
            .as_file_id(),
            None,
        )?
        .is_some();
    add_inode(txn, Some(inode), destination, is_dir)?;
    Ok(())
}
/// Recursively deletes `inode` (looked up under the tree key
/// `parent`) and all of its children from the working-copy tables.
/// When `delete_inodes` is true, the inode→vertex associations are
/// removed as well.
pub(crate) fn rec_delete<T: TreeMutTxnT>(
    txn: &mut T,
    parent: OwnedPathId,
    inode: Inode,
    delete_inodes: bool,
) -> Result<(), FsError<T::TreeError>> {
    let file_id = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    // Collect children first, because the transaction cannot be
    // mutated while iterating.
    let mut children = Vec::new();
    let mut is_dir = false;
    for x in txn.iter_tree(file_id.clone(), None)? {
        let (k, inode) = x?;
        if k.parent_inode > file_id.parent_inode {
            break;
        } else if k.parent_inode < file_id.parent_inode {
            continue;
        }
        debug!("iter_tree: {:?} {:?}", k, inode);
        // Any entry keyed on this inode (including the "" marker)
        // means it is a directory.
        is_dir = true;
        if !k.basename.is_empty() {
            children.push((k, inode))
        }
    }
    // Depth-first: delete the subtrees before this node.
    for (k, inode_) in children {
        assert_ne!(inode, inode_);
        rec_delete(txn, k, inode_, delete_inodes)?;
    }
    debug!(
        "rec_delete: {:?}, {:?}, {:?}, {:?}",
        parent, file_id, inode, is_dir
    );
    if is_dir {
        // Remove the directory's (inode, "") marker entry.
        assert!(inode.is_root() || txn.del_tree(file_id.as_file_id(), Some(inode))?);
    }
    if !inode.is_root() && del_tree_with_rev(txn, parent.as_file_id(), inode)? {
        if delete_inodes {
            if let Some(vertex) = txn.get_inodes(inode, None)? {
                del_inodes_with_rev(txn, inode, vertex)?;
            }
        }
    } else {
        debug!(
            "rec_delete: {:?} {:?} not present",
            parent.as_file_id(),
            inode
        );
    }
    Ok(())
}
/// Removes a file from the repository.
pub fn remove_file<T: TreeMutTxnT>(txn: &mut T, path: &str) -> Result<(), FsError<T::TreeError>> {
    debug!("remove file {:?}", path);
    let inode = find_inode(txn, path)?;
    // The root has no reverse-tree entry; synthesize its key.
    let parent = if !inode.is_root() {
        txn.get_revtree(inode, None)?.unwrap().to_owned()
    } else {
        OwnedPathId {
            parent_inode: Inode::ROOT,
            basename: SmallString::new(),
        }
    };
    debug!("remove file {:?} {:?}", parent, inode);
    rec_delete(txn, parent, inode, false)?;
    Ok(())
}
/// An iterator over the children (i.e. one level down) of an inode in
/// the working copy.
///
/// Constructed using
/// [`working_copy_children`](fn.working_copy_children.html).
pub struct WorkingCopyChildren<'txn, T: TreeTxnT> {
    // Cursor over the tree table, positioned at (inode, "").
    iter: crate::pristine::Cursor<T, &'txn T, T::TreeCursor, OwnedPathId, Inode>,
    // The parent whose children are being enumerated.
    inode: Inode,
}
impl<'txn, T: TreeTxnT> Iterator for WorkingCopyChildren<'txn, T> {
    type Item = Result<(SmallString, Inode), T::TreeError>;
    /// Yields `(basename, inode)` for each direct child of
    /// `self.inode`, skipping the directory's own marker entry.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.iter.next() {
                Some(Ok((k, v))) => {
                    if k.parent_inode == self.inode {
                        // Skip the (inode, "") directory marker entry;
                        // `!is_empty()` replaces the non-idiomatic
                        // `len() > 0` (clippy::len_zero).
                        if !k.basename.is_empty() {
                            return Some(Ok((k.basename, v)));
                        }
                    } else if k.parent_inode > self.inode {
                        // The table is sorted: past our parent, done.
                        return None;
                    }
                }
                None => return None,
                Some(Err(e)) => return Some(Err(e.0)),
            }
        }
    }
}
/// Returns a list of the children of an inode, in the working copy.
pub fn working_copy_children<T: TreeTxnT>(
    txn: &T,
    inode: Inode,
) -> Result<WorkingCopyChildren<T>, T::TreeError> {
    // Start the cursor at the (inode, "") marker key.
    let start = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    let iter = txn.iter_tree(start, None).map_err(|e| e.0)?;
    Ok(WorkingCopyChildren { iter, inode })
}
/// An iterator over all the paths in the working copy.
///
/// Constructed using [`iter_working_copy`](fn.iter_working_copy.html).
pub struct WorkingCopyIterator<'txn, T: TreeTxnT> {
    // DFS stack of (inode, full path from the root) still to visit.
    stack: Vec<(Inode, String)>,
    txn: &'txn T,
}
impl<'txn, T: TreeTxnT> Iterator for WorkingCopyIterator<'txn, T> {
    type Item = Result<(Inode, String), T::TreeError>;
    /// Depth-first traversal: pops a node, pushes its children, and
    /// yields `(inode, path)` for every node with a non-empty path
    /// (i.e. everything but the starting root).
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if let Some((inode, name)) = self.stack.pop() {
                // Key (inode, "") positions the cursor at this
                // inode's entries in the tree table.
                let fileid = OwnedPathId {
                    parent_inode: inode,
                    basename: SmallString::from_str(""),
                };
                let len = self.stack.len();
                let iter = match self.txn.iter_tree(fileid, None) {
                    Ok(iter) => iter,
                    Err(e) => return Some(Err(e.0)),
                };
                for x in iter {
                    let (k, v) = match x {
                        Ok(x) => x,
                        Err(e) => return Some(Err(e.0)),
                    };
                    // Only keys belonging to this inode are children.
                    if k.parent_inode < inode {
                        continue;
                    } else if k.parent_inode > inode {
                        break;
                    }
                    if !k.basename.is_empty() {
                        let mut name = name.clone();
                        crate::path::push(&mut name, k.basename.as_str());
                        self.stack.push((v, name))
                    }
                }
                // Children were pushed in table order; reverse the
                // newly-pushed slice so they pop in that same order.
                (&mut self.stack[len..]).reverse();
                if !name.is_empty() {
                    return Some(Ok((inode, name)));
                }
            } else {
                return None;
            }
        }
    }
}
/// Returns an iterator over all the files in the working copy.
pub fn iter_working_copy<T: TreeTxnT>(txn: &T, root: Inode) -> WorkingCopyIterator<T> {
    // Seed the traversal with the root inode and an empty path.
    let stack = vec![(root, String::new())];
    WorkingCopyIterator { stack, txn }
}
/// An iterator over the descendants of an
/// inode key in the graph.
///
/// Constructed using
/// [`iter_graph_descendants`](fn.iter_graph_descendants.html).
pub struct GraphDescendants<'txn, T: GraphTxnT> {
    txn: &'txn T,
    channel: &'txn T::Graph,
    // DFS stack of pending folder-edge iterators.
    stack: Vec<AdjacentIterator<'txn, T>>,
    // Inode positions already yielded, to avoid revisiting.
    visited: HashSet<Position<ChangeId>>,
}
impl<'txn, T: GraphTxnT> Iterator for GraphDescendants<'txn, T> {
    type Item = Result<Position<ChangeId>, T::GraphError>;
    /// Depth-first walk over folder edges: each name vertex leads to
    /// a grandchild inode position, which is yielded and (if unseen)
    /// pushed for further descent.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(mut adj) = self.stack.pop() {
            match adj.next() {
                Some(Ok(child)) => {
                    // The iterator still has entries: keep it on the stack.
                    self.stack.push(adj);
                    // Resolve the name vertex this folder edge points to.
                    let dest = match self.txn.find_block(self.channel, child.dest) {
                        Ok(dest) => dest,
                        Err(BlockError::Txn(t)) => return Some(Err(t)),
                        Err(e) => panic!("{}", e),
                    };
                    // Follow the name vertex to the actual inode
                    // (grandchild) position.
                    let grandchild = match iter_adjacent(
                        self.txn,
                        self.channel,
                        dest,
                        EdgeFlags::FOLDER,
                        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
                    ) {
                        Ok(mut x) => match x.next().unwrap() {
                            Ok(x) => x,
                            Err(e) => return Some(Err(e.0)),
                        },
                        Err(e) => return Some(Err(e.0)),
                    };
                    if self.visited.insert(grandchild.dest) {
                        // First visit: queue the grandchild's own
                        // folder edges for descent.
                        match iter_adjacent(
                            self.txn,
                            self.channel,
                            grandchild.dest.inode_vertex(),
                            EdgeFlags::FOLDER,
                            EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
                        ) {
                            Ok(adj) => self.stack.push(adj),
                            Err(e) => return Some(Err(e.0)),
                        }
                    }
                    return Some(Ok(grandchild.dest));
                }
                Some(Err(e)) => return Some(Err(e.0)),
                None => {
                    // No child left, actually pop.
                }
            }
        }
        None
    }
}
/// Returns a list of files under the given key. The root key is
/// [`pristine::Vertex::ROOT`](../pristine/constant.Vertex::ROOT.html).
pub fn iter_graph_descendants<'txn, T: GraphTxnT>(
    txn: &'txn T,
    channel: &'txn T::Graph,
    key: Position<ChangeId>,
) -> Result<GraphDescendants<'txn, T>, T::GraphError> {
    // Seed the DFS with the folder edges of the starting inode.
    let first = iter_adjacent(
        txn,
        channel,
        key.inode_vertex(),
        EdgeFlags::FOLDER,
        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
    )
    .map_err(|e| e.0)?;
    Ok(GraphDescendants {
        txn,
        channel,
        stack: vec![first],
        visited: HashSet::new(),
    })
}
/// An iterator over the children (i.e. a single level down) of an
/// inode key in the graph.
///
/// Constructed using
/// [`iter_graph_children`](fn.iter_graph_children.html).
pub struct GraphChildren<'txn, 'changes, T: GraphTxnT, P: ChangeStore + 'changes> {
    txn: &'txn T,
    channel: &'txn T::Graph,
    // Folder edges of the parent inode still to visit.
    adj: AdjacentIterator<'txn, T>,
    changes: &'changes P,
    // Scratch buffer reused across iterations for vertex contents.
    buf: Vec<u8>,
}
impl<'txn, 'changes, T: GraphTxnT, P: ChangeStore + 'changes> Iterator
    for GraphChildren<'txn, 'changes, T, P>
{
    type Item = Result<(Position<ChangeId>, ChangeId, InodeMetadata, String), T::GraphError>;
    /// Yields, for each folder edge of the parent, the child inode
    /// position, the id of the change that introduced it, and the
    /// (metadata, basename) decoded from the name vertex.
    fn next(&mut self) -> Option<Self::Item> {
        let child = match self.adj.next()? {
            Ok(child) => child,
            Err(e) => return Some(Err(e.0)),
        };
        let dest = self.txn.find_block(self.channel, child.dest).unwrap();
        // Move the buffer out of `self` so the closure below can
        // borrow `self.txn` without conflicting with `self.buf`;
        // `mem::take` is the idiomatic form of
        // `mem::replace(&mut self.buf, Vec::new())`.
        let mut buf = std::mem::take(&mut self.buf);
        self.changes
            .get_contents(|p| self.txn.get_external(p).unwrap(), dest, &mut buf)
            .unwrap();
        self.buf = buf;
        // Name vertices store 2 bytes of permissions followed by the
        // UTF-8 basename.
        let (perms, basename) = self.buf.split_at(2);
        let perms = InodeMetadata::from_basename(perms);
        let basename = std::str::from_utf8(basename).unwrap();
        // Follow the name vertex to the child inode position.
        let grandchild = match iter_adjacent(
            self.txn,
            self.channel,
            dest,
            EdgeFlags::FOLDER,
            EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
        ) {
            Ok(mut adj) => match adj.next() {
                Some(Ok(n)) => n,
                None => unreachable!(),
                Some(Err(e)) => return Some(Err(e.0)),
            },
            Err(e) => return Some(Err(e.0)),
        };
        Some(Ok((
            grandchild.dest,
            grandchild.introduced_by,
            perms,
            basename.to_string(),
        )))
    }
}
/// Returns a list of files under the given key. The root key is
/// [`pristine::Vertex::ROOT`](../pristine/constant.Vertex::ROOT.html).
pub fn iter_graph_children<'txn, 'changes, T, P>(
    txn: &'txn T,
    changes: &'changes P,
    channel: &'txn T::Graph,
    key: Position<ChangeId>,
) -> Result<GraphChildren<'txn, 'changes, T, P>, T::GraphError>
where
    T: GraphTxnT,
    P: ChangeStore,
{
    // The children are reached through the parent's folder edges.
    let adj = iter_adjacent(
        txn,
        channel,
        key.inode_vertex(),
        EdgeFlags::FOLDER,
        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
    )
    .map_err(|e| e.0)?;
    Ok(GraphChildren {
        txn,
        channel,
        adj,
        changes,
        buf: Vec::new(),
    })
}
/// An iterator over the basenames of an "inode key" in the graph.
///
/// See [`iter_basenames`](fn.iter_basenames.html).
pub struct GraphBasenames<'txn, 'changes, T: GraphTxnT, P: ChangeStore + 'changes> {
    txn: &'txn T,
    channel: &'txn T::Graph,
    // Parent folder edges of the inode still to visit.
    adj: AdjacentIterator<'txn, T>,
    changes: &'changes P,
    // Scratch buffer reused across iterations for vertex contents.
    buf: Vec<u8>,
}
impl<'txn, 'changes, T: GraphTxnT, P: ChangeStore + 'changes> Iterator
    for GraphBasenames<'txn, 'changes, T, P>
{
    type Item = Result<(Position<ChangeId>, InodeMetadata, String), T::GraphError>;
    /// For each parent folder edge of the inode, yields the parent's
    /// inode position together with the (metadata, basename) decoded
    /// from the name vertex. More than one item means a name conflict.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let parent = match self.adj.next() {
                Some(Ok(n)) => n,
                Some(Err(e)) => return Some(Err(e.0)),
                None => return None,
            };
            let dest = self.txn.find_block_end(&self.channel, parent.dest).unwrap();
            // Move the buffer out of `self` so the closure below can
            // borrow `self.txn` without conflicting with `self.buf`;
            // `mem::take` is the idiomatic form of
            // `mem::replace(&mut self.buf, Vec::new())`.
            let mut buf = std::mem::take(&mut self.buf);
            self.changes
                .get_contents(|p| self.txn.get_external(p).unwrap(), dest, &mut buf)
                .unwrap();
            self.buf = buf;
            // Name vertices store 2 bytes of permissions followed by
            // the UTF-8 basename.
            let (perms, basename) = self.buf.split_at(2);
            let perms = InodeMetadata::from_basename(perms);
            let basename = std::str::from_utf8(basename).unwrap().to_string();
            // Climb from the name vertex to the grandparent inode.
            match iter_adjacent(
                self.txn,
                &self.channel,
                dest,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
            ) {
                Ok(mut adj) => match adj.next() {
                    Some(Ok(grandparent)) => return Some(Ok((grandparent.dest, perms, basename))),
                    Some(Err(e)) => return Some(Err(e.0)),
                    None => {}
                },
                Err(e) => return Some(Err(e.0)),
            }
        }
    }
}
/// List all the basenames of an "inode key" in the graph (more than
/// one name means a conflict).
///
/// See also [`iter_paths`](fn.iter_paths.html).
pub fn iter_basenames<'txn, 'changes, T, P>(
    txn: &'txn T,
    changes: &'changes P,
    channel: &'txn T::Graph,
    pos: Position<ChangeId>,
) -> Result<GraphBasenames<'txn, 'changes, T, P>, T::GraphError>
where
    T: GraphTxnT,
    P: ChangeStore,
{
    // Names are found by following the inode's parent folder edges.
    let adj = iter_adjacent(
        txn,
        channel,
        pos.inode_vertex(),
        EdgeFlags::FOLDER | EdgeFlags::PARENT,
        EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
    )
    .map_err(|e| e.0)?;
    Ok(GraphBasenames {
        txn,
        channel,
        adj,
        changes,
        buf: Vec::new(),
    })
}
/// Traverse the paths in the graph to a key. **Warning:** there might
/// be a number of paths exponential in the number of conflicts.
///
/// This function takes a closure `f`, which gets called on each path
/// with an iterator over the keys from the root to `key`. This
/// function stops when `f` returns `false` (or when all the paths
/// have been traversed).
///
/// See also [`iter_basenames`](fn.iter_basenames.html).
pub fn iter_paths<T: GraphTxnT, F: FnMut(&mut dyn Iterator<Item = Position<ChangeId>>) -> bool>(
    txn: &T,
    graph: &T::Graph,
    key: Position<ChangeId>,
    mut f: F,
) -> Result<(), TxnErr<T::GraphError>> {
    // Each stack entry is (key, on_path): `on_path` marks entries that
    // are part of the path currently being built (pushed twice: once
    // unexpanded, once expanded).
    let mut stack: Vec<(Position<ChangeId>, bool)> = vec![(key, true)];
    while let Some((cur_key, on_stack)) = stack.pop() {
        if cur_key.is_root() {
            // Reached the root: the on-path entries on the stack form
            // a complete path; hand it to `f`.
            if !f(&mut stack
                .iter()
                .filter_map(|(key, on_path)| if *on_path { Some(*key) } else { None }))
            {
                break;
            }
        } else if !on_stack {
            // First visit: mark as on-path and push all parents.
            stack.push((cur_key, true));
            let len = stack.len();
            let f = EdgeFlags::parent_folder();
            // Parents are two hops up: name vertex, then its parents.
            for parent in iter_adjacent(
                txn,
                &graph,
                cur_key.inode_vertex(),
                f,
                f | EdgeFlags::PSEUDO,
            )? {
                let parent_dest = txn.find_block_end(&graph, parent?.dest).unwrap();
                for grandparent in
                    iter_adjacent(txn, &graph, parent_dest, f, f | EdgeFlags::PSEUDO)?
                {
                    stack.push((grandparent?.dest, false))
                }
            }
            if stack.len() == len {
                // No parent found: dead end, drop this entry.
                stack.pop();
            }
        }
    }
    Ok(())
}
/// Resolves `path` down the graph, component by component, choosing
/// at each level the name introduced by the oldest change when
/// several candidates exist. Returns the final inode position and
/// whether any level was ambiguous.
pub(crate) fn follow_oldest_path<T: ChannelTxnT, C: ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &T::Channel,
    path: &str,
) -> Result<(Position<ChangeId>, bool), FsErrorC<C::Error, T::GraphError>> {
    use crate::pristine::*;
    debug!("follow_oldest_path = {:?}", path);
    let mut current = Position::ROOT;
    let flag0 = EdgeFlags::FOLDER;
    let flag1 = flag0 | EdgeFlags::BLOCK | EdgeFlags::PSEUDO;
    let mut name_buf = Vec::new();
    let mut ambiguous = false;
    for c in crate::path::components(path) {
        // Best candidate so far for this component: (name vertex, age).
        let mut next = None;
        for name in iter_adjacent(txn, T::graph(channel), current.inode_vertex(), flag0, flag1)? {
            let name = name?;
            let name_dest = txn.find_block(T::graph(channel), name.dest).unwrap();
            name_buf.clear();
            debug!("getting contents {:?}", name);
            changes
                .get_contents(|h| txn.get_external(h).unwrap(), name_dest, &mut name_buf)
                .map_err(FsErrorC::Changestore)?;
            // Skip the 2 metadata bytes; compare the basename.
            if std::str::from_utf8(&name_buf[2..]) == Ok(c) {
                // Age = log position of the change introducing the name.
                let age = txn
                    .get_changeset(T::changes(&channel), name.dest.change)
                    .unwrap();
                if let Some((ref mut next, ref mut next_age)) = next {
                    // Several names match: conflict; keep the oldest.
                    ambiguous = true;
                    if age < *next_age {
                        *next = name_dest;
                        *next_age = age;
                    }
                } else {
                    next = Some((name_dest, age));
                }
            }
        }
        if let Some((next, _)) = next {
            // Descend from the name vertex to the child inode.
            current = iter_adjacent(txn, T::graph(channel), next, flag0, flag1)?
                .next()
                .unwrap()?
                .dest
        } else {
            return Err(FsErrorC::NotFound(FsNotFound(path.to_string())));
        }
    }
    Ok((current, ambiguous))
}
/// Reconstructs a path from position `v` up to the root, at each
/// level choosing the oldest (or, if `youngest`, the youngest) name
/// edge. Returns the path and whether every selected name was alive.
pub fn find_path<T: ChannelTxnT, C: ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &T::Channel,
    youngest: bool,
    mut v: Position<ChangeId>,
) -> Result<(String, bool), crate::output::FileError<C::Error, T::GraphError>> {
    debug!("oldest_path = {:?}", v);
    let mut path = Vec::new();
    let mut name_buf = Vec::new();
    let flag0 = EdgeFlags::FOLDER | EdgeFlags::PARENT;
    let flag1 = EdgeFlags::all();
    let mut all_alive = true;
    // Climb towards the root, one parent level per iteration.
    while !v.change.is_root() {
        // Selected parent so far: (name vertex, age, grandparent pos).
        let mut next_v = None;
        let mut alive = false;
        let inode_vertex = txn.find_block_end(T::graph(channel), v).unwrap();
        assert_eq!(inode_vertex, v.inode_vertex());
        for name in iter_adjacent(txn, T::graph(channel), v.inode_vertex(), flag0, flag1)? {
            let name = name?;
            if !name.flag.contains(EdgeFlags::PARENT) {
                continue;
            }
            debug!("oldest_path, name = {:?}", name);
            // Age = log position of the change introducing the edge.
            let age = txn
                .get_changeset(T::changes(&channel), name.dest.change)?
                .unwrap();
            let name_dest = txn.find_block_end(T::graph(channel), name.dest).unwrap();
            debug!("name_dest = {:?}", name_dest);
            // Climb from the name vertex to the parent inode edge.
            let mut next = None;
            for e in iter_adjacent(txn, T::graph(channel), name_dest, flag0, flag1)? {
                let e = e?;
                if e.flag.contains(EdgeFlags::PARENT | EdgeFlags::FOLDER) {
                    next = Some(e);
                    break;
                }
            }
            if let Some(next) = next {
                debug!("oldest_path, next = {:?}", next);
                if !next.flag.contains(EdgeFlags::DELETED) {
                    alive = true;
                } else if alive {
                    // Already have a live candidate; ignore dead ones.
                    break;
                } else {
                    all_alive = false
                }
                if let Some((_, p_age, _)) = next_v {
                    // Keep the older (or younger) of the candidates.
                    if (age > p_age) ^ youngest {
                        continue;
                    }
                }
                next_v = Some((name_dest, age, next.dest));
            }
        }
        let (name, _, next) = next_v.unwrap();
        if alive {
            name_buf.clear();
            debug!("getting contents {:?}", name);
            changes
                .get_contents(|h| txn.get_external(h).unwrap(), name, &mut name_buf)
                .map_err(crate::output::FileError::Changestore)?;
            // Skip the 2 metadata bytes; the rest is the basename.
            path.push(std::str::from_utf8(&name_buf[2..]).unwrap().to_string());
        }
        debug!("next = {:?}", next);
        v = next;
    }
    // Components were collected leaf-first.
    path.reverse();
    Ok((path.join("/"), all_alive))
}
/// Finds the most recent change on `channel` that touched the file at
/// `pos`, returning its log position and internal id.
///
/// Two searches are interleaved and the first to hit wins: a forward
/// scan of the touched-files table for `pos`, and a reverse scan of
/// the channel log checking each change against the touched table.
pub fn get_latest_touch<'a, T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>>(
    txn: &T,
    channel: &T::Channel,
    pos: Position<ChangeId>,
) -> Result<(u64, ChangeId), TxnErr<T::GraphError>> {
    let mut latest_change = 0;
    let mut id = ChangeId::ROOT;
    let mut touch_iter = Some(txn.iter_touched(pos)?);
    let mut log_iter = changeid_rev_log(txn, &channel, None)?;
    let mut continue_ = true;
    while continue_ {
        continue_ = false;
        // One step of the forward scan over touched files.
        if let Some(ref mut it) = touch_iter {
            if let Some(c) = it.next() {
                debug!("inode, change = {:?}", c);
                let (inode, change) = c?;
                if inode > pos {
                    // Past `pos` in the sorted table: stop this scan.
                    touch_iter = None;
                } else if inode == pos {
                    // Keep the change with the highest log position.
                    if let Some(t) = txn.get_changeset(T::changes(&channel), change)? {
                        if t >= latest_change {
                            latest_change = t;
                            id = change;
                        }
                    }
                    continue_ = true;
                }
            }
        }
        // One step of the reverse scan over the channel log.
        if let Some(l) = log_iter.next() {
            debug!("int = {:?}", l);
            let (n, (int, _)) = l?;
            if txn.get_touched_files(pos, Some(int))?.is_some() {
                // Newest-first scan: the first hit is the answer.
                id = int;
                latest_change = n;
                break;
            }
            continue_ = true;
        }
    }
    Ok((latest_change, id))
}
use crate::pristine::*;
use std::collections::HashSet;
/// Starting from `vertex0`, walks the graph downwards (following
/// non-parent, non-folder edges) and collects the alive descendant
/// vertices closest to `vertex0`.
pub(crate) fn find_alive_down<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    vertex0: Vertex<ChangeId>,
) -> Result<HashSet<Vertex<ChangeId>>, BlockError<T::GraphError>> {
    // Seed the DFS with a synthetic edge to `vertex0` itself.
    let mut stack = vec![Edge {
        dest: vertex0.start_pos(),
        introduced_by: ChangeId::ROOT,
        flag: EdgeFlags::empty(),
    }];
    let mut visited = HashSet::new();
    let mut alive = HashSet::new();
    while let Some(elt) = stack.pop() {
        if !visited.insert(elt.dest) {
            continue;
        }
        let vertex = txn.find_block(&channel, elt.dest)?;
        debug!("elt = {:?}, vertex = {:?}", elt, vertex);
        // Remember the stack depth so we can discard this vertex's
        // pending children once it is known to be alive.
        let elt_index = stack.len();
        for v in iter_adj_all(txn, &channel, vertex)? {
            let v = v?;
            if v.flag.contains(EdgeFlags::FOLDER) {
                continue;
            }
            debug!("v = {:?}", v);
            if v.flag.contains(EdgeFlags::PARENT) {
                // A live, real (non-pseudo) BLOCK parent edge means
                // `vertex` is alive.
                if v.flag.contains(EdgeFlags::BLOCK)
                    && !v.flag.contains(EdgeFlags::DELETED)
                    && vertex != vertex0
                    && !v.flag.contains(EdgeFlags::PSEUDO)
                {
                    alive.insert(vertex);
                    // Don't descend further below an alive vertex.
                    stack.truncate(elt_index);
                    break;
                } else {
                    continue;
                }
            }
            // Non-parent edge: descend.
            stack.push(v)
        }
    }
    Ok(alive)
}
/// Collect the set of "alive" vertices reachable upwards (towards
/// ancestors) from `vertex0`, additionally recording in `files` the
/// alive FOLDER vertices found to have non-folder children.
///
/// `change` is the change being built: an alive vertex introduced by a
/// different change stops the upward exploration of its ancestors.
pub(crate) fn find_alive_up<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    files: &mut HashSet<Vertex<ChangeId>>,
    vertex0: Vertex<ChangeId>,
    change: ChangeId,
) -> Result<HashSet<Vertex<ChangeId>>, BlockError<T::GraphError>> {
    let mut alive = HashSet::new();
    // Depth-first traversal seeded with a synthetic edge to `vertex0`.
    let mut stack = vec![Edge {
        dest: vertex0.end_pos(),
        introduced_by: ChangeId::ROOT,
        flag: EdgeFlags::empty(),
    }];
    let mut visited = HashSet::new();
    while let Some(elt) = stack.pop() {
        if elt.dest.is_root() {
            continue;
        }
        if !visited.insert(elt.dest) {
            continue;
        }
        let vertex = txn.find_block_end(&channel, elt.dest)?;
        debug!("find_alive_up: elt = {:?}, vertex = {:?}", elt, vertex);
        let elt_index = stack.len();
        // Whether `vertex` has at least one non-folder child edge.
        let mut is_file = false;
        let mut it = iter_adj_all(txn, &channel, vertex)?;
        while let Some(v) = it.next() {
            let v = v?;
            debug!("find_alive_up: v = {:?}", v);
            if !v.flag.is_parent() {
                // Child edge: only used to detect file content below.
                is_file |= !v.flag.is_folder();
                continue;
            }
            if v.flag & EdgeFlags::pseudof() == EdgeFlags::PSEUDO {
                continue;
            }
            if !v.flag.is_deleted() {
                if v.flag.is_folder() {
                    // Alive folder edge: scan the remaining edges for
                    // non-folder children before stopping at this vertex.
                    for e in it {
                        let e = e?;
                        is_file |= !e.flag.intersects(EdgeFlags::parent_folder())
                    }
                    if is_file {
                        alive.insert(vertex);
                        files.insert(vertex);
                    }
                    break;
                } else {
                    alive.insert(vertex);
                    // Alive via another change: no need to look further up.
                    if v.introduced_by != change {
                        stack.truncate(elt_index);
                        break;
                    }
                }
            }
            if v.flag.is_folder() {
                if is_file {
                    alive.insert(vertex);
                    files.insert(vertex);
                }
                break;
            } else {
                stack.push(v)
            }
        }
        debug!("is_file = {:?}", is_file);
    }
    Ok(alive)
}
use crate::pristine::*;
use crate::vertex_buffer;
use std::collections::{HashMap, HashSet};
/// Working state for diffing one file: the rendered contents of side A
/// (the pristine graph, conflict markers included), plus the bookkeeping
/// needed to map byte offsets back to graph vertices and conflicts.
pub(super) struct Diff {
    // Scratch buffer reused for each output line.
    pub buf: Vec<u8>,
    // Position of the file's inode in the graph.
    pub inode: Position<Option<ChangeId>>,
    pub path: String,
    // Rendered bytes of side A, conflict markers included.
    pub contents_a: Vec<u8>,
    // One entry per rendered chunk: byte offset in `contents_a` + vertex.
    pub pos_a: Vec<Vertex>,
    // Byte offsets where a line lacks a real final newline.
    pub missing_eol: HashSet<usize>,
    // Byte offset -> conflict marker rendered at that offset.
    pub marker: HashMap<usize, ConflictMarker>,
    // Stack of currently-open conflicts while rendering.
    conflict_stack: Vec<Conflict>,
    // Indexed by conflict counter; entry 0 is the `Root` sentinel.
    pub conflict_ends: Vec<ConflictEnds>,
    pub solved_conflicts: HashSet<usize>,
    // Byte ranges of cyclic conflicts in `contents_a`.
    pub cyclic_conflict_bytes: Vec<(usize, usize)>,
}
#[derive(Debug, Clone)]
/// An open conflict while rendering: its index into `Diff::conflict_ends`
/// (`counter`) and which side (0-based) is currently being rendered.
pub struct Conflict {
    pub counter: usize,
    pub side: usize,
    pub conflict_type: ConflictType,
}
#[derive(Debug, Clone, Copy)]
/// Kind of conflict a marker pair delimits. `Root` is the sentinel used
/// for the implicit outermost "conflict" covering the whole file.
pub enum ConflictType {
    Root,
    Order,
    Zombie,
    Cyclic,
}
#[derive(Debug)]
/// Extent of one conflict: `start`/`end` are chunk indices into
/// `Diff::pos_a`, `end_pos` is a byte offset into `Diff::contents_a`.
pub struct ConflictEnds {
    pub start: usize,
    pub end: usize,
    pub end_pos: usize,
    pub conflict_type: ConflictType,
}
#[derive(Debug, PartialEq, Eq)]
/// The three kinds of conflict marker lines rendered into `contents_a`.
pub enum ConflictMarker {
    Begin,
    Next,
    End,
}
#[derive(Debug)]
/// One rendered chunk of side A: the graph vertex it came from and its
/// byte offset (`pos`) in `Diff::contents_a`.
pub struct Vertex {
    pub pos: usize,
    pub vertex: crate::pristine::Vertex<ChangeId>,
    // Set on the chunk immediately preceding a conflict marker.
    pub before_conflict: bool,
    // Counter of the innermost conflict this chunk belongs to.
    pub conflict: usize,
}
impl Diff {
    /// Create an empty `Diff` buffer for the file at `inode`/`path`,
    /// pre-sizing the byte and vertex buffers from `graph`, and seeding
    /// both the conflict-end table and the conflict stack with a `Root`
    /// sentinel entry (counter 0).
    pub fn new(
        inode: Position<Option<ChangeId>>,
        path: String,
        graph: &crate::alive::Graph,
    ) -> Self {
        let bytes = graph.len_bytes();
        let root_end = ConflictEnds {
            start: 0,
            end: 0,
            end_pos: 0,
            conflict_type: ConflictType::Root,
        };
        let root = Conflict {
            counter: 0,
            side: 0,
            conflict_type: ConflictType::Root,
        };
        Diff {
            buf: Vec::with_capacity(bytes),
            inode,
            path,
            contents_a: Vec::with_capacity(bytes),
            pos_a: Vec::with_capacity(2 * graph.len_vertices()),
            missing_eol: HashSet::new(),
            marker: HashMap::new(),
            conflict_stack: vec![root],
            conflict_ends: vec![root_end],
            solved_conflicts: HashSet::new(),
            cyclic_conflict_bytes: Vec::new(),
        }
    }
}
impl Diff {
    /// Return the sub-vertex of chunk `i` covering bytes `pos..end_pos`
    /// of `contents_a`, shifting the vertex's start/end positions by the
    /// corresponding offsets into the chunk.
    pub fn vertex(
        &self,
        i: usize,
        pos: usize,
        end_pos: usize,
    ) -> crate::pristine::Vertex<ChangeId> {
        let mut v = self.pos_a[i].vertex;
        assert!(!v.is_root());
        if pos > self.pos_a[i].pos {
            // `pos` starts inside the chunk: move the vertex start forward.
            v.start =
                ChangePosition(self.pos_a[i].vertex.start.0 + (pos - self.pos_a[i].pos) as u64)
        }
        if i + 1 >= self.pos_a.len() || end_pos < self.pos_a[i + 1].pos {
            // `end_pos` ends inside the chunk: pull the vertex end back.
            v.end =
                ChangePosition(self.pos_a[i].vertex.start.0 + (end_pos - self.pos_a[i].pos) as u64)
        }
        v
    }
    /// Graph position of byte `pos` within chunk `i` of `contents_a`.
    pub fn position(&self, i: usize, pos: usize) -> crate::pristine::Position<ChangeId> {
        let mut v = self.pos_a[i].vertex.start_pos();
        if pos > self.pos_a[i].pos {
            v.pos = ChangePosition(self.pos_a[i].vertex.start.0 + (pos - self.pos_a[i].pos) as u64)
        }
        v
    }
}
impl Diff {
    /// Open a new conflict of `conflict_type`: push it on the conflict
    /// stack and append a provisional `ConflictEnds` entry (its `end`
    /// fields are fixed up later by `end_conflict`), recording a `Begin`
    /// marker at the byte offset where the marker line will start.
    fn begin_conflict_(&mut self, conflict_type: ConflictType) {
        self.conflict_stack.push(Conflict {
            counter: self.conflict_ends.len(),
            side: 0,
            conflict_type,
        });
        // If the previous line has no trailing newline, the marker will
        // start one byte later (after an inserted newline); remember that
        // the line at the current offset is missing its EOL.
        let len = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        self.conflict_ends.push(ConflictEnds {
            start: self.pos_a.len(),
            end: self.pos_a.len(),
            end_pos: len,
            conflict_type,
        });
        self.marker.insert(len, ConflictMarker::Begin);
    }
}
impl vertex_buffer::VertexBuffer for Diff {
    /// Append one vertex's contents to `contents_a`, recording the chunk's
    /// byte offset and owning conflict in `pos_a`.
    fn output_line<E, C>(&mut self, v: crate::pristine::Vertex<ChangeId>, c: C) -> Result<(), E>
    where
        E: From<std::io::Error>,
        C: FnOnce(&mut Vec<u8>) -> Result<(), E>,
    {
        if v == crate::pristine::Vertex::BOTTOM {
            return Ok(());
        }
        self.buf.clear();
        c(&mut self.buf)?;
        self.pos_a.push(Vertex {
            pos: self.contents_a.len(),
            vertex: v,
            before_conflict: false,
            conflict: self.conflict_stack.last().unwrap().counter,
        });
        self.contents_a.extend(&self.buf);
        Ok(())
    }
    fn begin_conflict(&mut self) -> Result<(), std::io::Error> {
        self.begin_conflict_(ConflictType::Order);
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    fn begin_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        // Record the byte range of the cyclic conflict; the end offset is
        // patched in `end_cyclic_conflict`.
        let len = self.contents_a.len();
        self.begin_conflict_(ConflictType::Cyclic);
        self.cyclic_conflict_bytes.push((len, len));
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    fn begin_zombie_conflict(&mut self) -> Result<(), std::io::Error> {
        self.begin_conflict_(ConflictType::Zombie);
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    /// Close the innermost conflict, fixing up its `ConflictEnds` entry
    /// with the final chunk index and byte offset.
    fn end_conflict(&mut self) -> Result<(), std::io::Error> {
        let len = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        let chunk = self.pos_a.len();
        self.output_conflict_marker(vertex_buffer::END_MARKER)?;
        let conflict = self.conflict_stack.pop().unwrap();
        self.marker.insert(len, ConflictMarker::End);
        self.conflict_ends[conflict.counter].end_pos = len;
        self.conflict_ends[conflict.counter].end = chunk;
        Ok(())
    }
    fn end_cyclic_conflict(&mut self) -> Result<(), std::io::Error> {
        debug!("end_cyclic_conflict");
        self.end_conflict()?;
        // Patch the end of the range opened in `begin_cyclic_conflict`.
        self.cyclic_conflict_bytes.last_mut().unwrap().1 = self.contents_a.len();
        Ok(())
    }
    /// Switch to the next side of the innermost conflict.
    fn conflict_next(&mut self) -> Result<(), std::io::Error> {
        let len = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        self.conflict_stack.last_mut().unwrap().side += 1;
        self.marker.insert(len, ConflictMarker::Next);
        self.output_conflict_marker(vertex_buffer::SEPARATOR)
    }
    /// Append a conflict marker line, registering it in `pos_a` as a
    /// ROOT-vertex chunk so byte offsets still map to some chunk.
    fn output_conflict_marker(&mut self, marker: &str) -> Result<(), std::io::Error> {
        if let Some(line) = self.pos_a.last_mut() {
            line.before_conflict = true
        }
        debug!(
            "output_conflict_marker {:?} {:?}",
            self.contents_a.last(),
            marker
        );
        // Markers begin with a newline: skip it if the previous line
        // already ends with one; otherwise keep it and point `pos` at the
        // marker's first real character.
        let pos = match self.contents_a.last() {
            Some(&b'\n') | None => {
                let len = self.contents_a.len();
                self.contents_a.extend(marker.as_bytes().iter().skip(1));
                len
            }
            _ => {
                let len = self.contents_a.len() + 1;
                self.contents_a.extend(marker.as_bytes().iter());
                len
            }
        };
        self.pos_a.push(Vertex {
            pos,
            vertex: crate::pristine::Vertex::ROOT,
            before_conflict: false,
            conflict: self.conflict_stack.last().unwrap().counter,
        });
        Ok(())
    }
}
impl Diff {
    /// Index of the last entry of `pos_a` whose byte offset is `<= pos`;
    /// among entries sharing the same offset, the last one.
    pub fn last_vertex_containing(&self, pos: usize) -> usize {
        match self.pos_a.binary_search_by(|l| l.pos.cmp(&pos)) {
            Ok(mut i) => {
                // Skip forward over entries with the same byte offset.
                while i + 1 < self.pos_a.len() && self.pos_a[i + 1].pos == self.pos_a[i].pos {
                    i += 1
                }
                i
            }
            Err(i) => {
                assert!(i > 0);
                i - 1
            }
        }
    }
    /// Index of the entry of `pos_a` containing byte `pos`; among entries
    /// sharing the same offset, the first one.
    pub fn first_vertex_containing(&self, pos: usize) -> usize {
        match self.pos_a.binary_search_by(|l| l.pos.cmp(&pos)) {
            Ok(mut i) => {
                // Skip backward over entries with the same byte offset.
                while i > 0 && self.pos_a[i - 1].pos == self.pos_a[i].pos {
                    i -= 1
                }
                i
            }
            Err(i) => {
                assert!(i > 0);
                i - 1
            }
        }
    }
}
use std::collections::HashSet;
/// Iterator over the lines of `buf`; when `missing_eol` is present, byte
/// offsets recorded there mark artificial trailing newlines to be trimmed
/// from the yielded line.
pub struct LineSplit<'a> {
    buf: &'a [u8],
    missing_eol: Option<&'a HashSet<usize>>,
    // Byte offset of the next line to yield.
    current: usize,
}
impl super::vertex_buffer::Diff {
    /// Iterate over the lines of `contents_a`, trimming the artificial
    /// newlines recorded in `missing_eol`.
    pub fn lines(&self) -> LineSplit {
        LineSplit {
            current: 0,
            missing_eol: Some(&self.missing_eol),
            buf: &self.contents_a,
        }
    }
}
impl<'a> std::convert::From<&'a [u8]> for LineSplit<'a> {
    /// Plain byte-slice line splitter, with no `missing_eol` trimming.
    fn from(buf: &'a [u8]) -> LineSplit<'a> {
        LineSplit {
            missing_eol: None,
            current: 0,
            buf,
        }
    }
}
impl<'a> Iterator for LineSplit<'a> {
    type Item = &'a [u8];
    /// Yield the next line including its `\n` terminator when present,
    /// except that a terminator whose offset is recorded in `missing_eol`
    /// is artificial and gets trimmed off.
    fn next(&mut self) -> Option<Self::Item> {
        let start = self.current;
        if start >= self.buf.len() {
            return None;
        }
        // Advance to just past the next '\n' (or to the buffer's end).
        let mut end = start;
        while end < self.buf.len() && self.buf[end] != b'\n' {
            end += 1
        }
        if end < self.buf.len() {
            end += 1
        }
        self.current = end;
        // Drop the final byte if it is an artificial newline.
        let trim = match self.missing_eol {
            Some(miss) => miss.contains(&(end - 1)),
            None => false,
        };
        Some(&self.buf[start..if trim { end - 1 } else { end }])
    }
}
use super::diff::*;
use super::vertex_buffer::{ConflictMarker, Diff};
use super::{bytes_len, bytes_pos, Line};
use crate::change::{Atom, Hunk, Local, NewVertex};
use crate::pristine::{ChangeId, ChangePosition, EdgeFlags, Position};
use crate::record::Builder;
use std::collections::{HashMap, HashSet};
/// State shared across hunks while translating diff output into change
/// operations, tracking context positions around conflicts.
pub struct ConflictContexts {
    // Conflict counter -> position (in the change's new contents) used as
    // the up context for edits inside that conflict.
    pub up: HashMap<usize, ChangePosition>,
    // Chunk index -> positions (in the change's new contents) of vertices
    // ending a conflict side at that chunk.
    pub side_ends: HashMap<usize, Vec<ChangePosition>>,
    // Conflict counters considered active during a context scan.
    pub active: HashSet<usize>,
    // Old-line index -> position of the vertex solving an order conflict
    // at that line.
    pub reorderings: HashMap<usize, ChangePosition>,
}
impl ConflictContexts {
    /// Fresh, empty conflict-context state.
    pub fn new() -> Self {
        ConflictContexts {
            up: HashMap::new(),
            side_ends: HashMap::new(),
            reorderings: HashMap::new(),
            active: HashSet::new(),
        }
    }
}
impl Builder {
    /// Record replacement `r` of `dd`: insert the bytes of
    /// `lines_b[new..new + new_len]` as a new vertex between the up and
    /// down contexts computed around the replaced old lines. If this
    /// replacement immediately follows the `Edit` recorded for the
    /// deletion of the same lines, merge both into one `Hunk::Replacement`.
    pub(super) fn replace(
        &mut self,
        diff: &Diff,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        dd: &D,
        r: usize,
    ) {
        let old = dd[r].old;
        let old_len = dd[r].old_len;
        let from_new = dd[r].new;
        let len = dd[r].new_len;
        let up_context = get_up_context(diff, conflict_contexts, lines_a, old);
        let down_context = get_down_context(
            diff,
            conflict_contexts,
            dd,
            lines_a,
            lines_b,
            old,
            old_len,
            from_new,
            len,
            self.rec.contents.len(),
        );
        debug!("old {:?}", &lines_a[old..(old + old_len)]);
        debug!("new {:?}", &lines_b[from_new..(from_new + len)]);
        // Append the inserted bytes to the change contents, followed by a
        // 0 separator byte.
        let start = self.rec.contents.len();
        for &line in &lines_b[from_new..(from_new + len)] {
            self.rec.contents.extend(line.l);
        }
        let end = self.rec.contents.len();
        self.rec.contents.push(0);
        let change = NewVertex {
            up_context,
            down_context,
            flag: EdgeFlags::BLOCK,
            start: ChangePosition(start as u64),
            end: ChangePosition(end as u64),
            inode: diff.inode,
        };
        if old_len > 0 {
            // If the previous action is the deletion at this same line,
            // upgrade it to a `Replacement` carrying this new vertex.
            match self.rec.actions.pop() {
                Some(Hunk::Edit { change: c, local }) => {
                    if local.line == from_new + 1 {
                        self.rec.actions.push(Hunk::Replacement {
                            change: c,
                            local,
                            replacement: Atom::NewVertex(change),
                        });
                        return;
                    } else {
                        self.rec.actions.push(Hunk::Edit { change: c, local })
                    }
                }
                Some(c) => self.rec.actions.push(c),
                None => {}
            }
        }
        self.rec.actions.push(Hunk::Edit {
            local: Local {
                line: from_new + 1,
                path: diff.path.clone(),
            },
            change: Atom::NewVertex(change),
        });
    }
}
/// Compute the up context (graph positions immediately above the edit)
/// for an edit whose first old line is `old`.
///
/// Walks backwards from the byte just before `old`, skipping over whole
/// conflicts (via `conflict_ends`) until a regular chunk supplies the
/// context, or delegating to `get_up_context_conflict` when the edit
/// starts at a conflict `End` marker.
pub(super) fn get_up_context(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    lines_a: &[Line],
    old: usize,
) -> Vec<Position<Option<ChangeId>>> {
    // An order-conflict solution recorded at this line supplies the context.
    if let Some(&pos) = conflict_contexts.reorderings.get(&old) {
        return vec![Position { change: None, pos }];
    }
    let old_bytes = if old == 0 {
        // Start of file: the context is the first chunk's end position.
        return vec![diff.pos_a[0].vertex.end_pos().to_option()];
    } else if old < lines_a.len() {
        bytes_pos(lines_a, old)
    } else {
        diff.contents_a.len()
    };
    debug!("old_bytes {:?}", old_bytes);
    let mut up_context_idx = diff.last_vertex_containing(old_bytes - 1);
    let mut seen_conflict_markers = false;
    loop {
        debug!("up_context_idx = {:?}", up_context_idx);
        debug!("{:?}", diff.marker.get(&diff.pos_a[up_context_idx].pos));
        match diff.marker.get(&diff.pos_a[up_context_idx].pos) {
            // After skipping conflicts: a regular chunk's end position.
            None if seen_conflict_markers => {
                return vec![diff.pos_a[up_context_idx].vertex.end_pos().to_option()]
            }
            // Regular chunk directly above the edit: the exact byte position.
            None => {
                let change = diff.pos_a[up_context_idx].vertex.change;
                let pos = diff.pos_a[up_context_idx].vertex.start;
                let offset = old_bytes - diff.pos_a[up_context_idx].pos;
                debug!("offset {:?} {:?}", pos.0, offset);
                return vec![Position {
                    change: Some(change),
                    pos: ChangePosition(pos.0 + offset as u64),
                }];
            }
            Some(ConflictMarker::End) => {
                debug!("get_up_context_conflict");
                return get_up_context_conflict(diff, conflict_contexts, up_context_idx);
            }
            // `Begin` or `Next`: jump to just before this conflict.
            _ => {
                let conflict = diff.pos_a[up_context_idx].conflict;
                debug!(
                    "conflict = {:?} {:?}",
                    conflict, diff.conflict_ends[conflict]
                );
                if let Some(&pos) = conflict_contexts.up.get(&conflict) {
                    return vec![Position { change: None, pos }];
                }
                seen_conflict_markers = true;
                if diff.conflict_ends[conflict].start > 0 {
                    up_context_idx = diff.conflict_ends[conflict].start - 1
                } else {
                    return vec![diff.pos_a[0].vertex.end_pos().to_option()];
                }
            }
        }
    }
}
/// Up context when the edit begins at a conflict `End` marker: collect
/// the last chunk of every side of that conflict (scanning backwards to
/// its `Begin`, including sides of nested conflicts made active by their
/// own `End` markers), plus any side-end positions previously recorded
/// for this chunk.
fn get_up_context_conflict(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    mut up_context_idx: usize,
) -> Vec<Position<Option<ChangeId>>> {
    let conflict = diff.pos_a[up_context_idx].conflict;
    let conflict_start = diff.conflict_ends[conflict].start;
    let mut up_context = Vec::new();
    if let Some(ref up) = conflict_contexts.side_ends.get(&up_context_idx) {
        up_context.extend(up.iter().map(|&pos| Position { change: None, pos }));
    }
    // `on` is true while the last chunk of the current side has not been
    // collected yet.
    let mut on = true;
    conflict_contexts.active.clear();
    conflict_contexts.active.insert(conflict);
    while up_context_idx > conflict_start {
        match diff.marker.get(&diff.pos_a[up_context_idx].pos) {
            None if on => {
                // Last regular chunk of this side: take its end position.
                let change = diff.pos_a[up_context_idx].vertex.change;
                let pos = diff.pos_a[up_context_idx].vertex.end;
                up_context.push(Position {
                    change: Some(change),
                    pos,
                });
                on = false
            }
            Some(ConflictMarker::End) if on => {
                // Nested conflict: its sides are also candidates.
                conflict_contexts
                    .active
                    .insert(diff.pos_a[up_context_idx].conflict);
            }
            Some(ConflictMarker::Next)
                if conflict_contexts
                    .active
                    .contains(&diff.pos_a[up_context_idx].conflict) =>
            {
                // Side separator of an active conflict: new side's tail.
                on = true
            }
            _ => {}
        }
        up_context_idx -= 1;
    }
    assert!(!up_context.is_empty());
    up_context
}
/// Compute the down context (graph positions immediately below the edit)
/// for a replacement of old lines `old..old + old_len` by new lines
/// `from_new..from_new + new_len`. Empty when the edit reaches the end of
/// the file or only deleted/replaced content follows.
pub(super) fn get_down_context(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    dd: &D,
    lines_a: &[Line],
    lines_b: &[Line],
    old: usize,
    old_len: usize,
    from_new: usize,
    new_len: usize,
    contents_len: usize,
) -> Vec<Position<Option<ChangeId>>> {
    if old + old_len >= lines_a.len() {
        // Edit reaches the end of the file: no down context.
        return Vec::new();
    }
    let mut down_context_idx = 1;
    let mut pos_bytes = if old + old_len == 0 {
        0
    } else {
        // First chunk at or after the end of the replaced span.
        let pos_bytes = bytes_pos(lines_a, old) + bytes_len(lines_a, old, old_len);
        down_context_idx = diff.first_vertex_containing(pos_bytes);
        pos_bytes
    };
    while down_context_idx < diff.pos_a.len() {
        match diff.marker.get(&(diff.pos_a[down_context_idx].pos)) {
            Some(ConflictMarker::Begin) => {
                return get_down_context_conflict(
                    diff,
                    dd,
                    conflict_contexts,
                    lines_a,
                    lines_b,
                    from_new,
                    new_len,
                    down_context_idx,
                )
            }
            Some(marker) => {
                // `Next` jumps to its conflict's end; in both cases record
                // the new vertex's end position as a side end at this
                // chunk, for use as an up context by later edits.
                if let ConflictMarker::Next = marker {
                    let conflict = diff.pos_a[down_context_idx].conflict;
                    down_context_idx = diff.conflict_ends[conflict].end;
                }
                let e = conflict_contexts
                    .side_ends
                    .entry(down_context_idx)
                    .or_default();
                let b_len_bytes = bytes_len(lines_b, from_new, new_len);
                e.push(ChangePosition((contents_len + b_len_bytes) as u64));
                down_context_idx += 1
            }
            None => {
                // Regular chunk: skip over deleted bytes; the first
                // surviving byte becomes the down context.
                pos_bytes = pos_bytes.max(diff.pos_a[down_context_idx].pos);
                let next_vertex_pos = if down_context_idx + 1 >= diff.pos_a.len() {
                    diff.contents_a.len()
                } else {
                    diff.pos_a[down_context_idx + 1].pos
                };
                while pos_bytes < next_vertex_pos {
                    match dd.is_deleted(lines_a, pos_bytes) {
                        Some(Deleted { replaced: true, .. }) => return Vec::new(),
                        Some(Deleted {
                            replaced: false,
                            next,
                        }) => pos_bytes = next,
                        None => {
                            return vec![diff.position(down_context_idx, pos_bytes).to_option()]
                        }
                    }
                }
                down_context_idx += 1;
            }
        }
    }
    Vec::new()
}
/// Down context when the chunk below the edit is a conflict `Begin`
/// marker: collect the first surviving chunk of every side of that
/// conflict (and of nested conflicts), recording the new vertex's byte
/// length as the conflict's up context for later edits inside it.
fn get_down_context_conflict(
    diff: &Diff,
    dd: &D,
    conflict_contexts: &mut ConflictContexts,
    lines_a: &[Line],
    lines_b: &[Line],
    from_new: usize,
    new_len: usize,
    mut down_context_idx: usize,
) -> Vec<Position<Option<ChangeId>>> {
    let conflict = diff.pos_a[down_context_idx].conflict;
    let len_bytes = bytes_len(lines_b, from_new, new_len);
    conflict_contexts
        .up
        .insert(conflict, ChangePosition(len_bytes as u64));
    conflict_contexts.active.clear();
    conflict_contexts.active.insert(conflict);
    assert!(!diff.pos_a.is_empty());
    let conflict_end = diff.conflict_ends[conflict].end.min(diff.pos_a.len() - 1);
    let mut down_context = Vec::new();
    // `on` is true while the first surviving chunk of the current side
    // has not been found yet.
    let mut on = true;
    let mut pos = diff.pos_a[down_context_idx].pos;
    loop {
        match diff.marker.get(&pos) {
            None if on => match dd.is_deleted(lines_a, pos) {
                // Replaced content: this side contributes no context.
                Some(Deleted { replaced: true, .. }) => on = false,
                Some(Deleted { next, .. }) => {
                    // Deleted (not replaced): try the next line if it is
                    // still within this chunk.
                    pos = next;
                    let next_pos = if down_context_idx + 1 < diff.pos_a.len() {
                        diff.pos_a[down_context_idx + 1].pos
                    } else {
                        diff.contents_a.len()
                    };
                    if pos < next_pos {
                        continue;
                    }
                }
                None => {
                    down_context.push(diff.position(down_context_idx, pos).to_option());
                    on = false;
                }
            },
            Some(ConflictMarker::Begin) if on => {
                // Nested conflict: its sides are also candidates.
                conflict_contexts
                    .active
                    .insert(diff.pos_a[down_context_idx].conflict);
            }
            Some(ConflictMarker::Next)
                if conflict_contexts
                    .active
                    .contains(&diff.pos_a[down_context_idx].conflict) =>
            {
                on = true
            }
            _ => {}
        }
        down_context_idx += 1;
        if down_context_idx > conflict_end {
            break;
        } else {
            pos = diff.pos_a[down_context_idx].pos
        }
    }
    down_context
}
use crate::alive::{output_graph, Graph};
use crate::changestore::*;
use crate::pristine::*;
use crate::record::Builder;
mod diff;
mod split;
mod vertex_buffer;
pub use diff::Algorithm;
mod delete;
mod replace;
#[derive(Debug, Hash, Clone, Copy)]
/// One line of a diffed file, with flags used to fine-tune equality
/// around conflict markers.
struct Line<'a> {
    l: &'a [u8],
    // Line lies inside a cyclic-conflict byte range.
    cyclic: bool,
    // Line has no trailing newline and the next byte position holds a
    // conflict `End` marker.
    before_end_marker: bool,
    // This is the file's final line.
    last: bool,
}
impl<'a> PartialEq for Line<'a> {
    /// Two lines are equal if their bytes match, modulo the missing final
    /// newline of a line that sits just before a conflict `End` marker
    /// (such a line compares equal to its newline-terminated counterpart,
    /// unless that counterpart is the file's last line). The `cyclic`
    /// flag only participates in the exact-bytes case.
    fn eq(&self, b: &Self) -> bool {
        if self.before_end_marker && !b.last && b.l.last() == Some(&b'\n') {
            return &b.l[..b.l.len() - 1] == self.l;
        }
        if b.before_end_marker && !self.last && self.l.last() == Some(&b'\n') {
            return &self.l[..self.l.len() - 1] == b.l;
        }
        self.l == b.l && self.cyclic == b.cyclic
    }
}
impl<'a> Eq for Line<'a> {}
#[derive(Debug, Error)]
/// Errors returned by `Builder::diff`: either an error while rendering
/// the pristine file, or a transaction error.
pub enum DiffError<P: std::error::Error + 'static, T: std::error::Error + 'static> {
    #[error(transparent)]
    Output(#[from] crate::output::FileError<P, T>),
    #[error(transparent)]
    Txn(T),
}
impl<T: std::error::Error + 'static, C: std::error::Error + 'static> std::convert::From<TxnErr<T>>
for DiffError<C, T>
{
fn from(e: TxnErr<T>) -> Self {
DiffError::Txn(e.0)
}
}
impl Builder {
/// Diff the rendered pristine graph `a` against the on-disk contents
/// `b`, appending the resulting hunks to `self.rec`.
///
/// If either side is not valid UTF-8 (and the contents differ), the file
/// is treated as binary and wholly replaced via `diff_binary`.
pub(crate) fn diff<T: ChannelTxnT, P: ChangeStore>(
    &mut self,
    changes: &P,
    txn: &T,
    channel: &T::Channel,
    algorithm: Algorithm,
    path: String,
    inode: Position<Option<ChangeId>>,
    a: &mut Graph,
    b: &[u8],
) -> Result<(), DiffError<P::Error, T::GraphError>> {
    self.rec.largest_file = self.rec.largest_file.max(b.len() as u64);
    // Render side A (the pristine, with conflict markers) into `d`.
    let mut d = vertex_buffer::Diff::new(inode, path.clone(), a);
    output_graph(changes, txn, channel, &mut d, a, &mut self.redundant)?;
    if (std::str::from_utf8(&d.contents_a).is_err() || std::str::from_utf8(&b).is_err())
        && d.contents_a != b
    {
        self.diff_binary(changes, txn, T::graph(channel), path, inode, a, &b)?;
        return Ok(());
    }
    let lines_a: Vec<Line> = d
        .lines()
        .map(|l| {
            // Byte offset of this line inside `d.contents_a`.
            let old_bytes = l.as_ptr() as usize - d.contents_a.as_ptr() as usize;
            // Whether the line falls inside a cyclic-conflict byte range.
            let cyclic = if let Err(n) = d
                .cyclic_conflict_bytes
                .binary_search(&(old_bytes, std::usize::MAX))
            {
                n > 0 && {
                    let (a, b) = d.cyclic_conflict_bytes[n - 1];
                    a <= old_bytes && old_bytes < b
                }
            } else {
                false
            };
            // A line with no final newline sitting just before an `End`
            // marker compares equal to its terminated counterpart.
            let before_end_marker = if l.last() != Some(&b'\n') {
                let next_index =
                    l.as_ptr() as usize + l.len() - d.contents_a.as_ptr() as usize + 1;
                d.marker.get(&next_index) == Some(&vertex_buffer::ConflictMarker::End)
            } else {
                false
            };
            debug!("old = {:?}", l);
            Line {
                l,
                cyclic,
                before_end_marker,
                last: l.as_ptr() as usize + l.len() - d.contents_a.as_ptr() as usize
                    >= d.contents_a.len(),
            }
        })
        .collect();
    let lines_b: Vec<Line> = split::LineSplit::from(&b[..])
        .map(|l| {
            debug!("new: {:?}", l);
            let next_index = l.as_ptr() as usize + l.len() - b.as_ptr() as usize;
            Line {
                l,
                cyclic: false,
                before_end_marker: false,
                last: next_index >= b.len(),
            }
        })
        .collect();
    debug!("pos = {:?}", d.pos_a);
    debug!("{:?} {:?}", lines_a, lines_b);
    let dd = diff::diff(&lines_a, &lines_b, algorithm);
    let mut conflict_contexts = replace::ConflictContexts::new();
    // Translate each replacement into deletion and/or insertion hunks.
    for r in 0..dd.len() {
        if dd[r].old_len > 0 {
            self.delete(
                txn,
                T::graph(channel),
                &d,
                &dd,
                &mut conflict_contexts,
                &lines_a,
                &lines_b,
                r,
            )?;
        }
        if dd[r].new_len > 0 {
            self.replace(&d, &mut conflict_contexts, &lines_a, &lines_b, &dd, r);
        }
    }
    debug!("Diff ended");
    Ok(())
}
/// Record a binary-file change: mark every edge of the current graph
/// `ret` deleted and insert `b` wholesale as a single new vertex hanging
/// from the file's inode.
fn diff_binary<T: GraphTxnT, C: ChangeStore>(
    &mut self,
    changes: &C,
    txn: &T,
    channel: &T::Graph,
    path: String,
    inode: Position<Option<ChangeId>>,
    ret: &crate::alive::Graph,
    b: &[u8],
) -> Result<(), TxnErr<T::GraphError>> {
    self.rec.has_binary_files = true;
    use crate::change::{Atom, EdgeMap, Hunk, Local, NewEdge, NewVertex};
    // Append the whole new contents, followed by a 0 separator byte.
    let pos = self.rec.contents.len();
    self.rec.contents.extend_from_slice(&b[..]);
    let pos_end = self.rec.contents.len();
    self.rec.contents.push(0);
    let mut edges = Vec::new();
    let mut deleted = Vec::new();
    for v in ret.lines.iter() {
        debug!("v.vertex = {:?}, inode = {:?}", v.vertex, inode);
        // Skip the inode vertex itself.
        if Some(v.vertex.change) == inode.change && v.vertex.end == inode.pos {
            continue;
        }
        for e in iter_adjacent(txn, channel, v.vertex, EdgeFlags::PARENT, EdgeFlags::all())? {
            let e = e?;
            if e.flag.contains(EdgeFlags::PSEUDO) {
                continue;
            }
            if e.flag.contains(EdgeFlags::FOLDER) {
                // A folder edge here is a broken invariant: dump the graph
                // for debugging, then panic.
                if log_enabled!(log::Level::Debug) {
                    let f = std::fs::File::create("debug_diff_binary").unwrap();
                    ret.debug(changes, txn, channel, false, true, f).unwrap();
                }
                panic!("e.flag.contains(EdgeFlags::FOLDER)");
            }
            if e.flag.contains(EdgeFlags::PARENT) {
                if e.flag.contains(EdgeFlags::DELETED) {
                    // Already deleted: re-record the deletion as-is.
                    deleted.push(NewEdge {
                        previous: e.flag - EdgeFlags::PARENT,
                        flag: e.flag - EdgeFlags::PARENT,
                        from: e.dest.to_option(),
                        to: v.vertex.to_option(),
                        introduced_by: Some(e.introduced_by),
                    })
                } else {
                    // Alive edge: mark it deleted.
                    let previous = e.flag - EdgeFlags::PARENT;
                    edges.push(NewEdge {
                        previous,
                        flag: previous | EdgeFlags::DELETED,
                        from: e.dest.to_option(),
                        to: v.vertex.to_option(),
                        introduced_by: Some(e.introduced_by),
                    })
                }
            }
        }
    }
    // Kill all of `ret`, add `b` instead.
    if !deleted.is_empty() {
        self.rec.actions.push(Hunk::Edit {
            local: Local {
                line: 0,
                path: path.clone(),
            },
            change: Atom::EdgeMap(EdgeMap {
                edges: deleted,
                inode,
            }),
        })
    }
    self.rec.actions.push(Hunk::Replacement {
        local: Local { line: 0, path },
        change: Atom::EdgeMap(EdgeMap { edges, inode }),
        replacement: Atom::NewVertex(NewVertex {
            up_context: vec![inode],
            down_context: Vec::new(),
            flag: EdgeFlags::empty(),
            start: ChangePosition(pos as u64),
            end: ChangePosition(pos_end as u64),
            inode,
        }),
    });
    Ok(())
}
}
/// Byte offset of line `old` from the start of the file, computed by
/// pointer arithmetic on the contiguous line slices.
fn bytes_pos(chunks: &[Line], old: usize) -> usize {
    debug!("bytes pos {:?} {:?}", old, chunks[old]);
    let base = chunks[0].l.as_ptr() as usize;
    chunks[old].l.as_ptr() as usize - base
}
/// Byte length of lines `old..old + len`, via pointer arithmetic on the
/// contiguous line slices.
fn bytes_len(chunks: &[Line], old: usize, len: usize) -> usize {
    if let Some(p) = chunks.get(old + len) {
        // A following line exists: measure up to its start.
        p.l.as_ptr() as usize - chunks[old].l.as_ptr() as usize
    } else if old + len > 0 {
        // The span reaches the last line: measure to the end of its bytes.
        chunks[old + len - 1].l.as_ptr() as usize + chunks[old + len - 1].l.len()
            - chunks[old].l.as_ptr() as usize
    } else {
        // old == 0 && len == 0: always 0, but the indexing deliberately
        // keeps the original panic on an empty `chunks`.
        chunks[old + len].l.as_ptr() as usize - chunks[old].l.as_ptr() as usize
    }
}
use super::Line;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
/// Algorithm used to compute the diff.
pub enum Algorithm {
    Myers,
    Patience,
}
impl Default for Algorithm {
fn default() -> Self {
Algorithm::Myers
}
}
/// Run the selected diff `algorithm` over `lines_a`/`lines_b`, through a
/// `Replace` adapter that merges adjacent delete+insert pairs, and return
/// the collected replacement list.
pub(super) fn diff(lines_a: &[Line], lines_b: &[Line], algorithm: Algorithm) -> D {
    let sink = D(Vec::with_capacity(lines_a.len() + lines_b.len()));
    let mut dd = diffs::Replace::new(sink);
    let outcome = match algorithm {
        Algorithm::Patience => {
            diffs::patience::diff(&mut dd, lines_a, 0, lines_a.len(), lines_b, 0, lines_b.len())
        }
        Algorithm::Myers => {
            diffs::myers::diff(&mut dd, lines_a, 0, lines_a.len(), lines_b, 0, lines_b.len())
        }
    };
    // `D`'s error type is `()` and it never fails.
    outcome.unwrap();
    dd.into_inner()
}
#[derive(Debug)]
/// The ordered list of replacement operations produced by a diff run.
pub struct D(pub Vec<Replacement>);
impl D {
    /// Number of recorded replacement operations.
    pub fn len(&self) -> usize {
        self.0.len()
    }
}
impl std::ops::Index<usize> for D {
    type Output = Replacement;
    fn index(&self, i: usize) -> &Replacement {
        &self.0[i]
    }
}
impl std::ops::IndexMut<usize> for D {
    fn index_mut(&mut self, i: usize) -> &mut Replacement {
        &mut self.0[i]
    }
}
#[derive(Debug)]
/// One diff operation: `old_len` lines at `old` are replaced by `new_len`
/// lines at `new` (either length may be 0 for a pure delete/insert).
pub struct Replacement {
    pub old: usize,
    pub old_len: usize,
    pub new: usize,
    pub new_len: usize,
    pub is_cyclic: bool,
}
impl diffs::Diff for D {
    type Error = ();
    /// Record a pure deletion of `old_len` lines at `old` (aligned with
    /// position `new` in the new file).
    fn delete(&mut self, old: usize, old_len: usize, new: usize) -> std::result::Result<(), ()> {
        debug!("Diff::delete {:?} {:?} {:?}", old, old_len, new);
        self.0.push(Replacement {
            old,
            old_len,
            new,
            new_len: 0,
            is_cyclic: false,
        });
        Ok(())
    }
    /// Record a pure insertion of `new_len` lines at `new` (aligned with
    /// position `old` in the old file).
    fn insert(&mut self, old: usize, new: usize, new_len: usize) -> std::result::Result<(), ()> {
        debug!("Diff::insert {:?} {:?} {:?}", old, new, new_len);
        self.0.push(Replacement {
            old,
            old_len: 0,
            new,
            new_len,
            is_cyclic: false,
        });
        Ok(())
    }
    /// Record a replacement of `old_len` lines at `old` by `new_len`
    /// lines at `new`.
    fn replace(
        &mut self,
        old: usize,
        old_len: usize,
        new: usize,
        new_len: usize,
    ) -> std::result::Result<(), ()> {
        debug!(
            "Diff::replace {:?} {:?} {:?} {:?}",
            old, old_len, new, new_len
        );
        self.0.push(Replacement {
            old,
            old_len,
            new,
            new_len,
            is_cyclic: false,
        });
        Ok(())
    }
}
/// Index of the line starting exactly at byte offset `pos_bytes`.
///
/// Panics if `pos_bytes` is not a line start — callers only pass offsets
/// produced from line boundaries (`bytes_pos` / `Deleted::next`).
fn line_index(lines_a: &[Line], pos_bytes: usize) -> usize {
    lines_a
        .binary_search_by(|line| {
            (line.l.as_ptr() as usize - lines_a[0].l.as_ptr() as usize).cmp(&pos_bytes)
        })
        .unwrap()
}
/// Deletion status of a line, as reported by `D::is_deleted`.
pub struct Deleted {
    // True when the deleting replacement also inserts new content.
    pub replaced: bool,
    // Byte offset of the following line.
    pub next: usize,
}
impl D {
    /// If the line containing byte `pos` is deleted by some replacement,
    /// return whether it was also replaced (`new_len > 0`) and the byte
    /// offset of the following line; `None` if the line survives.
    pub(super) fn is_deleted(&self, lines_a: &[Line], pos: usize) -> Option<Deleted> {
        let line = line_index(lines_a, pos);
        // Replacements are ordered by `old`, so binary search applies;
        // a miss may still fall inside the previous replacement's span.
        match self.0.binary_search_by(|repl| repl.old.cmp(&line)) {
            Ok(i) if self.0[i].old_len > 0 => Some(Deleted {
                replaced: self.0[i].new_len > 0,
                next: pos + lines_a[line].l.len(),
            }),
            Err(i) if i == 0 => None,
            Err(i) if line < self.0[i - 1].old + self.0[i - 1].old_len => Some(Deleted {
                replaced: self.0[i - 1].new_len > 0,
                next: pos + lines_a[line].l.len(),
            }),
            _ => None,
        }
    }
}
use super::diff::*;
use super::replace::ConflictContexts;
use super::vertex_buffer::{ConflictMarker, ConflictType, Diff};
use super::{bytes_len, bytes_pos, Line};
use crate::change;
use crate::change::{Atom, EdgeMap, Hunk, Local, NewVertex};
use crate::pristine::*;
use crate::record::Builder;
impl Builder {
    /// Record the deletion part of replacement `r`: emit edge deletions
    /// for the old lines, then, if the deleted span removes only conflict
    /// `Next` separators, additionally emit a `SolveOrderConflict` vertex
    /// fixing the order of the conflict's sides.
    pub(super) fn delete<T: GraphTxnT>(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        diff: &Diff,
        d: &super::diff::D,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        r: usize,
    ) -> Result<(), TxnErr<T::GraphError>> {
        debug!("delete {:?}", r);
        self.delete_lines(txn, channel, diff, d, lines_a, r)?;
        let old = d[r].old;
        let len = d[r].old_len;
        self.order_conflict_sides(
            diff,
            d,
            conflict_contexts,
            lines_a,
            lines_b,
            old,
            len,
            d[r].new,
            d[r].new_len > 0,
        );
        Ok(())
    }
}
/// Edge operations produced while deleting lines: `edges` marks edges as
/// deleted, `resurrect` un-deletes zombie edges solved by the deletion.
struct Deletion {
    edges: Vec<crate::change::NewEdge<Option<ChangeId>>>,
    resurrect: Vec<crate::change::NewEdge<Option<ChangeId>>>,
}
impl Builder {
    /// Compute the edge operations for replacement `r` and push them as
    /// hunks: an `Edit` for plain deletions and a `ResurrectZombies` for
    /// zombie edges brought back alive.
    fn delete_lines<T: GraphTxnT>(
        &mut self,
        txn: &T,
        channel: &T::Graph,
        diff: &Diff,
        d: &super::diff::D,
        lines_a: &[Line],
        r: usize,
    ) -> Result<(), TxnErr<T::GraphError>> {
        let deletion = delete_lines(txn, channel, diff, d, lines_a, r)?;
        if !deletion.edges.is_empty() {
            self.rec.actions.push(Hunk::Edit {
                change: Atom::EdgeMap(EdgeMap {
                    edges: deletion.edges,
                    inode: diff.inode,
                }),
                local: Local {
                    line: d[r].new + 1,
                    path: diff.path.clone(),
                },
            })
        }
        if !deletion.resurrect.is_empty() {
            self.rec.actions.push(Hunk::ResurrectZombies {
                change: Atom::EdgeMap(EdgeMap {
                    edges: deletion.resurrect,
                    inode: diff.inode,
                }),
                local: Local {
                    line: d[r].new + 1,
                    path: diff.path.clone(),
                },
            })
        }
        Ok(())
    }
}
/// Walk the chunks covered by the deleted byte span of replacement `r`,
/// emitting parent-edge deletions for regular chunks. Deleting a zombie
/// conflict's `Begin` marker counts as solving it, which schedules the
/// zombie vertices up to the conflict's end for resurrection.
fn delete_lines<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    diff: &Diff,
    d: &super::diff::D,
    lines_a: &[Line],
    r: usize,
) -> Result<Deletion, TxnErr<T::GraphError>> {
    let old = d[r].old;
    let len = d[r].old_len;
    let mut deletion = Deletion {
        edges: Vec::new(),
        resurrect: Vec::new(),
    };
    let mut pos = bytes_pos(lines_a, old);
    let end_pos = pos + bytes_len(lines_a, old, len);
    let first_vertex = diff.last_vertex_containing(pos);
    // Byte offset up to which a solved zombie conflict extends.
    let mut solved_conflict_end = 0;
    let mut i = first_vertex;
    while pos < end_pos {
        debug!("pos = {:?} {:?}", diff.pos_a[i].pos, diff.pos_a[i].vertex);
        let marker = diff.marker.get(&diff.pos_a[i].pos);
        if marker.is_none() || (!diff.pos_a[i].vertex.is_root() && diff.pos_a[i].vertex.is_empty())
        {
            // Regular chunk (or empty non-root vertex): delete its parents.
            debug!("{:?}", diff.vertex(i, pos, end_pos));
            delete_parents(
                txn,
                channel,
                diff.pos_a[i].vertex,
                diff.vertex(i, pos, end_pos),
                &mut deletion,
            )?
        } else if let Some(ConflictMarker::Begin) = marker {
            debug!(
                "conflict type = {:#?}",
                diff.conflict_ends[diff.pos_a[i].conflict].conflict_type
            );
            // Deleting a zombie conflict's markers solves it: remember how
            // far the conflict extends for the resurrection pass below.
            if let ConflictType::Zombie = diff.conflict_ends[diff.pos_a[i].conflict].conflict_type {
                solved_conflict_end =
                    solved_conflict_end.max(diff.conflict_ends[diff.pos_a[i].conflict].end_pos)
            }
        } else {
            debug!("conflict: {:?}", marker);
        }
        i += 1;
        if i < diff.pos_a.len() {
            pos = diff.pos_a[i].pos
        } else {
            break;
        }
    }
    if solved_conflict_end > 0 && i < diff.pos_a.len() {
        resurrect_zombies(
            txn,
            channel,
            diff,
            d,
            lines_a,
            r,
            i,
            end_pos,
            solved_conflict_end,
            &mut deletion,
        )?
    }
    Ok(deletion)
}
/// Push deletions of `del_key`'s parent edges onto `deletion.edges`.
///
/// `graph_key` is the full chunk vertex and `del_key` the (possibly
/// shorter) sub-vertex actually deleted. When only a suffix of the chunk
/// is deleted, non-BLOCK parent edges are skipped and edge sources are
/// rewritten to point at the split position.
fn delete_parents<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    graph_key: Vertex<ChangeId>,
    del_key: Vertex<ChangeId>,
    deletion: &mut Deletion,
) -> Result<(), TxnErr<T::GraphError>> {
    for e in iter_adjacent(
        txn,
        &channel,
        graph_key,
        EdgeFlags::PARENT,
        EdgeFlags::all(),
    )? {
        let e = e?;
        if !e.flag.contains(EdgeFlags::PARENT) || e.flag.contains(EdgeFlags::PSEUDO) {
            continue;
        }
        let previous = e.flag - EdgeFlags::PARENT;
        if graph_key.start != del_key.start
            && !graph_key.is_empty()
            && !e.flag.contains(EdgeFlags::BLOCK)
        {
            continue;
        }
        deletion.edges.push(change::NewEdge {
            previous,
            flag: previous | EdgeFlags::DELETED,
            from: if graph_key.start == del_key.start {
                e.dest.to_option()
            } else {
                del_key.start_pos().to_option()
            },
            to: del_key.to_option(),
            introduced_by: Some(e.introduced_by),
        })
    }
    Ok(())
}
/// True iff the byte span `[old_bytes, old_bytes + len_bytes)` contains
/// at least one conflict `Next` separator and no other kind of conflict
/// marker: deleting such a span merely reorders the conflict's sides.
fn is_conflict_reordering(diff: &Diff, old_bytes: usize, len_bytes: usize) -> bool {
    debug!("conflict reordering {:?} {:?}", old_bytes, len_bytes);
    trace!("markers: {:#?}", diff.marker);
    let mut result = false;
    for marker in (old_bytes..old_bytes + len_bytes).filter_map(|i| diff.marker.get(&i)) {
        if let ConflictMarker::Next = marker {
            result = true
        } else {
            // Any `Begin`/`End` marker in the span disqualifies it.
            return false;
        }
    }
    debug!("is_conflict_reordering: {:?}", result);
    result
}
impl Builder {
    /// If the deleted old span removes only conflict `Next` separators,
    /// record a `SolveOrderConflict` hunk: an empty new vertex wired
    /// between the up and down contexts, fixing the relative order of the
    /// conflict's sides.
    fn order_conflict_sides(
        &mut self,
        diff: &Diff,
        dd: &D,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        old: usize,
        len: usize,
        new: usize,
        is_replaced: bool,
    ) {
        let old_bytes = bytes_pos(lines_a, old);
        let len_bytes = bytes_len(lines_a, old, len);
        if !is_conflict_reordering(diff, old_bytes, len_bytes) {
            return;
        }
        let up_context = super::replace::get_up_context(diff, conflict_contexts, lines_a, old);
        // Reserve an empty (zero-length) position in the contents for the
        // new vertex.
        self.rec.contents.push(0);
        let pos = ChangePosition(self.rec.contents.len() as u64);
        self.rec.contents.push(0);
        let down_context = if is_replaced {
            // The accompanying replacement will attach below this vertex.
            conflict_contexts.reorderings.insert(old, pos);
            Vec::new()
        } else {
            super::replace::get_down_context(
                diff,
                conflict_contexts,
                dd,
                lines_a,
                lines_b,
                old,
                len,
                0,
                0,
                self.rec.contents.len(),
            )
        };
        debug!("Conflict reordering {:?} {:?}", up_context, down_context);
        self.rec.actions.push(Hunk::SolveOrderConflict {
            change: Atom::NewVertex(NewVertex {
                up_context,
                down_context,
                flag: EdgeFlags::empty(),
                start: pos,
                end: pos,
                inode: diff.inode,
            }),
            local: Local {
                line: new + 1,
                path: diff.path.clone(),
            },
        });
    }
}
/// After a deletion solved a zombie conflict ending at
/// `solved_conflict_end`, resurrect the zombie vertices between `end_pos`
/// and that offset which are not themselves deleted by the remaining
/// replacements of `d` (scanned from `r` onwards).
fn resurrect_zombies<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    diff: &Diff,
    d: &super::diff::D,
    lines_a: &[Line],
    mut r: usize,
    mut i: usize,
    end_pos: usize,
    solved_conflict_end: usize,
    deletion: &mut Deletion,
) -> Result<(), TxnErr<T::GraphError>> {
    debug!(
        "resurrect_zombies {:?} {:?} {:?} {:?}",
        r, i, end_pos, solved_conflict_end
    );
    let mut pos = end_pos;
    if diff.pos_a[i].pos > pos {
        i -= 1;
    }
    while pos < solved_conflict_end {
        // Skip pure insertions occurring before the conflict end.
        r += 1;
        while r < d.len() && d[r].old_len == 0 && bytes_pos(lines_a, d[r].old) < solved_conflict_end
        {
            r += 1
        }
        // Resurrect up to the next deletion (or the conflict end).
        let next_pos = if r >= d.len() {
            solved_conflict_end
        } else {
            bytes_pos(lines_a, d[r].old).min(solved_conflict_end)
        };
        while i < diff.pos_a.len() {
            if diff.pos_a[i].pos > next_pos {
                break;
            }
            // Skip marker chunks and chunks entirely before `pos`.
            if diff.pos_a[i].vertex.is_root()
                || (i + 1 < diff.pos_a.len() && diff.pos_a[i + 1].pos <= pos)
            {
                i += 1;
                continue;
            }
            resurrect_zombie(
                txn,
                channel,
                diff.pos_a[i].vertex,
                diff.vertex(i, pos, next_pos),
                deletion,
            )?;
            i += 1
        }
        if r >= d.len() {
            break;
        } else {
            // Resume after the deletion we stopped at.
            pos = bytes_pos(lines_a, d[r].old) + bytes_len(lines_a, d[r].old, d[r].old_len)
        }
    }
    Ok(())
}
/// For each real (non-pseudo) parent edge of `v`, push onto
/// `deletion.resurrect` a `NewEdge` that re-creates the edge towards
/// `target` with its `DELETED` flag cleared.
fn resurrect_zombie<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    v: Vertex<ChangeId>,
    target: Vertex<ChangeId>,
    deletion: &mut Deletion,
) -> Result<(), TxnErr<T::GraphError>> {
    debug!("resurrect zombie {:?} {:?}", v, target);
    // Parent edges of `v`, with flags between PARENT and
    // PARENT|DELETED|BLOCK.
    for e in iter_adjacent(
        txn,
        &channel,
        v,
        EdgeFlags::PARENT,
        EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK,
    )? {
        let e = e?;
        // Skip pseudo-edges and anything that is not a parent edge.
        if e.flag.contains(EdgeFlags::PSEUDO) || !e.flag.contains(EdgeFlags::PARENT) {
            continue;
        }
        // Flags of the edge seen from the child's side (PARENT removed).
        let previous = e.flag - EdgeFlags::PARENT;
        let newedge = change::NewEdge {
            previous,
            // The resurrected edge keeps the old flags, minus DELETED.
            flag: previous - EdgeFlags::DELETED,
            // If `target` starts where `v` starts, keep the edge's
            // original source; otherwise start from `target` itself.
            from: if target.start_pos() == v.start_pos() {
                e.dest.to_option()
            } else {
                target.start_pos().to_option()
            },
            to: target.to_option(),
            introduced_by: Some(e.introduced_by),
        };
        deletion.resurrect.push(newedge)
    }
    Ok(())
}
//! A change store is a trait for change storage facilities. Even though
//! changes are normally stored on disk, there are situations (such as
//! an embedded Pijul) where one might want changes in-memory, in a
//! database, or something else.
use crate::change::{Change, ChangeHeader};
use crate::pristine::{ChangeId, Hash, InodeMetadata, Position, Vertex};
#[cfg(feature = "ondisk-repos")]
/// If this crate is compiled with the `ondisk-repos` feature (the
/// default), this module stores changes on the file system, under
/// `.pijul/changes`.
pub mod filesystem;
/// A change store entirely in memory.
pub mod memory;
/// A trait for storing changes and reading from them.
pub trait ChangeStore {
    /// Error type of the store; must be convertible from UTF-8 and
    /// change-format errors (used by the default methods below).
    type Error: std::error::Error
        + std::fmt::Debug
        + Send
        + Sync
        + From<std::str::Utf8Error>
        + From<crate::change::ChangeError>
        + 'static;
    /// Does change `hash` carry non-empty contents? `change_id` is an
    /// optional internal id implementations may use as a cache key.
    fn has_contents(&self, hash: Hash, change_id: Option<ChangeId>) -> bool;
    /// Copy the bytes of vertex `key` into `buf`, resolving internal ids
    /// to hashes through `hash`. Returns the number of bytes read.
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error>;
    /// The header (metadata) of change `h`. The default implementation
    /// loads the whole change.
    fn get_header(&self, h: &Hash) -> Result<ChangeHeader, Self::Error> {
        Ok(self.get_change(h)?.hashed.header)
    }
    /// Like `get_contents`, for vertices keyed by external hash.
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error>;
    /// The direct dependencies of change `hash`.
    fn get_dependencies(&self, hash: &Hash) -> Result<Vec<Hash>, Self::Error> {
        Ok(self.get_change(hash)?.hashed.dependencies)
    }
    /// The extra "known" changes recorded in `hash` (context recovery).
    fn get_extra_known(&self, hash: &Hash) -> Result<Vec<Hash>, Self::Error> {
        Ok(self.get_change(hash)?.hashed.extra_known)
    }
    /// The hunks of change `hash`, without their contents.
    fn get_changes(
        &self,
        hash: &Hash,
    ) -> Result<Vec<crate::change::Hunk<Option<Hash>, crate::change::Local>>, Self::Error> {
        Ok(self.get_change(hash)?.hashed.changes)
    }
    /// Does `hash0` know `hash1`, as a dependency or extra-known change?
    fn knows(&self, hash0: &Hash, hash1: &Hash) -> Result<bool, Self::Error> {
        Ok(self.get_change(hash0)?.knows(hash1))
    }
    /// Does `change` introduce an edge `from -> to` with flags `flags`?
    fn has_edge(
        &self,
        change: Hash,
        from: Position<Option<Hash>>,
        to: Position<Option<Hash>>,
        flags: crate::pristine::EdgeFlags,
    ) -> Result<bool, Self::Error> {
        let change_ = self.get_change(&change)?;
        Ok(change_.has_edge(change, from, to, flags))
    }
    /// Hashes of the changes that introduced the edges targeting `pos`
    /// in the hunks of `change` (see `Atom::deletes_pos`).
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, Self::Error>;
    /// Store `p`, returning its hash.
    fn save_change(&self, p: &Change) -> Result<Hash, Self::Error>;
    /// Delete change `h`; `Ok(true)` if it was present.
    fn del_change(&self, h: &Hash) -> Result<bool, Self::Error>;
    /// Load the full change `h`.
    fn get_change(&self, h: &Hash) -> Result<Change, Self::Error>;
    /// Read a file-name vertex: the first two bytes encode the inode
    /// metadata, the rest is the UTF-8 base name.
    /// Panics if the vertex holds fewer than two bytes.
    fn get_file_name<'a, F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        vertex: Vertex<ChangeId>,
        buf: &'a mut Vec<u8>,
    ) -> Result<(InodeMetadata, &'a str), Self::Error> {
        buf.clear();
        self.get_contents(hash, vertex, buf)?;
        assert!(buf.len() >= 2);
        let (a, b) = buf.as_slice().split_at(2);
        Ok((InodeMetadata::from_basename(a), std::str::from_utf8(b)?))
    }
}
impl crate::change::Atom<Option<Hash>> {
    /// Hashes of the changes that introduced the edges of this atom
    /// whose target vertex covers `pos`. `NewVertex` atoms contribute
    /// nothing, so they yield an empty vector.
    pub(crate) fn deletes_pos(&self, pos: Position<Option<Hash>>) -> Vec<Hash> {
        match self {
            crate::change::Atom::EdgeMap(ref n) => n
                .edges
                .iter()
                .filter(|edge| {
                    edge.to.change == pos.change
                        && edge.to.start <= pos.pos
                        && pos.pos < edge.to.end
                })
                .filter_map(|edge| edge.introduced_by)
                .collect(),
            _ => Vec::new(),
        }
    }
}
use super::*;
use crate::change::Change;
use crate::pristine::{ChangeId, Hash, Vertex};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
#[derive(Clone, Default)]
/// A change store in memory, i.e. basically a hash table.
pub struct Memory {
    // Shared, thread-safe map from a change's hash to the full change.
    changes: Arc<RwLock<HashMap<Hash, Change>>>,
}
impl Memory {
    /// Create an empty in-memory change store.
    pub fn new() -> Self {
        Self::default()
    }
}
/// Errors of the in-memory change store.
#[derive(Debug, Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Utf8(#[from] std::str::Utf8Error),
    #[error(transparent)]
    Change(#[from] crate::change::ChangeError),
    #[error("Change not found: {:?}", hash)]
    ChangeNotFound { hash: crate::Hash },
    #[error(transparent)]
    Bincode(#[from] bincode::Error),
}
impl ChangeStore for Memory {
    type Error = Error;
    /// A change has contents iff its `contents` section is non-empty.
    /// Panics if `hash` is not in the store.
    fn has_contents(&self, hash: Hash, _: Option<ChangeId>) -> bool {
        let changes = self.changes.read().unwrap();
        let p = changes.get(&hash).unwrap();
        !p.contents.is_empty()
    }
    /// Copy the bytes of vertex `key` into `buf`, returning the number
    /// of bytes copied.
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error> {
        // Reject empty/inverted keys *before* any `end - start`
        // arithmetic: the previous code resized `buf` with
        // `key.end.0 - key.start.0` first, which underflows (panicking
        // in debug builds) when `end < start`. The resize was also
        // redundant, since the buffer is cleared and re-extended below.
        if key.end <= key.start {
            buf.clear();
            return Ok(0);
        }
        let changes = self.changes.read().unwrap();
        let p = changes.get(&hash(key.change).unwrap()).unwrap();
        let start = key.start.0 as usize;
        let end = key.end.0 as usize;
        buf.clear();
        buf.extend(&p.contents[start..end]);
        Ok(end - start)
    }
    /// Like `get_contents`, for keys referring to changes by hash.
    /// Returns 0 without touching `buf` when the key has no change.
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error> {
        if let Some(change) = key.change {
            // Same ordering fix as in `get_contents`: guard before
            // computing `end - start`.
            if key.end <= key.start {
                buf.clear();
                return Ok(0);
            }
            let changes = self.changes.read().unwrap();
            let p = changes.get(&change).unwrap();
            let start = key.start.0 as usize;
            let end = key.end.0 as usize;
            buf.clear();
            buf.extend(&p.contents[start..end]);
            Ok(end - start)
        } else {
            Ok(0)
        }
    }
    /// Hashes of changes that introduced edges targeting `pos` in the
    /// hunks of `change` (see `Atom::deletes_pos`).
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, Self::Error> {
        let changes = self.changes.read().unwrap();
        let change = changes.get(&hash(change).unwrap()).unwrap();
        let mut v = Vec::new();
        for c in change.changes.iter() {
            for c in c.iter() {
                v.extend(c.deletes_pos(pos).into_iter())
            }
        }
        Ok(v)
    }
    /// Insert (or overwrite) `p` under its hash.
    fn save_change(&self, p: &Change) -> Result<Hash, Self::Error> {
        let mut w = self.changes.write().unwrap();
        let hash = p.hash()?;
        w.insert(hash, p.clone());
        Ok(hash)
    }
    /// Remove change `h`; `Ok(true)` if it was present.
    fn del_change(&self, h: &Hash) -> Result<bool, Self::Error> {
        let mut w = self.changes.write().unwrap();
        Ok(w.remove(h).is_some())
    }
    /// Clone the stored change `h`, or `Error::ChangeNotFound`.
    fn get_change(&self, h: &Hash) -> Result<Change, Self::Error> {
        let w = self.changes.read().unwrap();
        if let Some(p) = w.get(h) {
            Ok(p.clone())
        } else {
            Err(Error::ChangeNotFound { hash: *h })
        }
    }
}
use super::*;
use crate::change::{Change, ChangeFile};
use crate::pristine::{Base32, ChangeId, Hash, Vertex};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, MutexGuard};
/// Capacity of the LRU cache of open change files.
const CHANGE_CACHE_SIZE: usize = 100;
/// A file system change store.
#[derive(Clone)]
pub struct FileSystem(Arc<FileSystem_>);
struct FileSystem_ {
    /// LRU cache of open change files, keyed by internal change id.
    change_cache: Mutex<lru_cache::LruCache<ChangeId, Arc<Mutex<ChangeFile<'static>>>>>,
    /// Directory holding the change files (normally `.pijul/changes`).
    changes_dir: PathBuf,
}
/// Errors of the on-disk change store.
#[derive(Debug, Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Utf8(#[from] std::str::Utf8Error),
    #[error(transparent)]
    ChangeFile(#[from] crate::change::ChangeError),
    #[error(transparent)]
    Persist(#[from] tempfile::PersistError),
}
/// Append to `changes_dir` the relative path of the file storing `hash`:
/// the first two base32 characters form a subdirectory, the remaining
/// ones the file name, with a `.change` extension.
pub fn push_filename(changes_dir: &mut PathBuf, hash: &Hash) {
    let base32 = hash.to_base32();
    let (prefix, rest) = base32.split_at(2);
    changes_dir.push(prefix);
    changes_dir.push(rest);
    changes_dir.set_extension("change");
}
/// Undo `push_filename`: drop the file name and the two-character
/// subdirectory from `changes_dir`.
pub fn pop_filename(changes_dir: &mut PathBuf) {
    for _ in 0..2 {
        changes_dir.pop();
    }
}
impl FileSystem {
    /// Full on-disk path of the file storing change `hash`.
    pub fn filename(&self, hash: &Hash) -> PathBuf {
        let mut path = self.0.changes_dir.clone();
        push_filename(&mut path, hash);
        path
    }
    /// Is change `hash` present on disk?
    pub fn has_change(&self, hash: &Hash) -> bool {
        std::fs::metadata(&self.filename(hash)).is_ok()
    }
    /// Construct a `FileSystem`, starting from the root of the
    /// repository (i.e. the parent of the `.pijul` directory).
    pub fn from_root<P: AsRef<Path>>(root: P) -> Self {
        let dot_pijul = root.as_ref().join(crate::DOT_DIR);
        let changes_dir = dot_pijul.join("changes");
        Self::from_changes(changes_dir)
    }
    /// Construct a `FileSystem` directly from a changes directory,
    /// creating the directory if needed (panics on I/O failure).
    pub fn from_changes(changes_dir: PathBuf) -> Self {
        std::fs::create_dir_all(&changes_dir).unwrap();
        FileSystem(Arc::new(FileSystem_ {
            changes_dir,
            change_cache: Mutex::new(lru_cache::LruCache::new(CHANGE_CACHE_SIZE)),
        }))
    }
    /// Make sure the change file for `change` is in the cache (opening
    /// it from disk if missing) and return the locked cache.
    fn load<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
    ) -> Result<
        MutexGuard<lru_cache::LruCache<ChangeId, Arc<Mutex<ChangeFile<'static>>>>>,
        crate::change::ChangeError,
    > {
        let mut cache = self.0.change_cache.lock().unwrap();
        if !cache.contains_key(&change) {
            let h = hash(change).unwrap();
            let path = self.filename(&h);
            let p = crate::change::ChangeFile::open(h, &path.to_str().unwrap())?;
            cache.insert(change, Arc::new(Mutex::new(p)));
        }
        Ok(cache)
    }
    /// Verify `buf` against `hash`, then write it to disk.
    pub fn save_from_buf(
        &self,
        buf: &[u8],
        hash: &Hash,
        change_id: Option<ChangeId>,
    ) -> Result<(), crate::change::ChangeError> {
        Change::check_from_buffer(buf, hash)?;
        self.save_from_buf_unchecked(buf, hash, change_id)?;
        Ok(())
    }
    /// Write `buf` as the change file for `hash` without verifying it.
    /// The bytes go through a temporary file in the same directory,
    /// moved into place with `persist`; any cached handle for
    /// `change_id` is invalidated afterwards.
    pub fn save_from_buf_unchecked(
        &self,
        buf: &[u8],
        hash: &Hash,
        change_id: Option<ChangeId>,
    ) -> Result<(), std::io::Error> {
        let mut f = tempfile::NamedTempFile::new_in(&self.0.changes_dir)?;
        let file_name = self.filename(hash);
        use std::io::Write;
        f.write_all(buf)?;
        debug!("file_name = {:?}", file_name);
        std::fs::create_dir_all(file_name.parent().unwrap())?;
        f.persist(file_name)?;
        if let Some(ref change_id) = change_id {
            // Drop any stale cached handle to the previous file.
            let mut cache = self.0.change_cache.lock().unwrap();
            cache.remove(change_id);
        }
        Ok(())
    }
}
impl ChangeStore for FileSystem {
    type Error = Error;
    /// Check whether change `hash` has contents, preferring a cached
    /// file handle when `change_id` is known, and falling back to
    /// opening the file from disk.
    fn has_contents(&self, hash: Hash, change_id: Option<ChangeId>) -> bool {
        if let Some(ref change_id) = change_id {
            let mut cache = self.0.change_cache.lock().unwrap();
            let mut poisoned = false;
            if let Some(c) = cache.get_mut(change_id) {
                if let Ok(l) = c.lock() {
                    return l.has_contents();
                } else {
                    // A thread panicked while holding this handle; evict
                    // it below and fall back to the on-disk file.
                    poisoned = true
                }
            }
            if poisoned {
                cache.remove(change_id);
            }
        }
        let path = self.filename(&hash);
        if let Ok(p) = crate::change::ChangeFile::open(hash, &path.to_str().unwrap()) {
            p.has_contents()
        } else {
            false
        }
    }
    /// Read only the hashed header from the change file, without
    /// deserializing the whole change.
    fn get_header(&self, h: &Hash) -> Result<ChangeHeader, Self::Error> {
        let path = self.filename(h);
        let p = crate::change::ChangeFile::open(*h, &path.to_str().unwrap())?;
        Ok(p.hashed().header.clone())
    }
    /// Read the bytes of vertex `key` into `buf` from the (cached)
    /// change file, returning the number of bytes read.
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error> {
        // Reject inverted keys *before* sizing `buf`: the previous code
        // computed `key.end.0 - key.start.0` first, which underflows
        // (panicking in debug builds) when `end < start`.
        if key.end <= key.start {
            buf.clear();
            return Ok(0);
        }
        buf.resize((key.end.0 - key.start.0) as usize, 0);
        if key.is_root() {
            // The root vertex has no stored contents; `buf` stays
            // zero-filled, as before.
            return Ok(0);
        }
        let mut cache = self.load(hash, key.change)?;
        let p = cache.get_mut(&key.change).unwrap();
        let mut p = p.lock().unwrap();
        let n = p.read_contents(key.start.0, buf)?;
        Ok(n)
    }
    /// Like `get_contents`, for keys referring to changes by hash.
    /// Returns 0 without touching `buf` when the key has no change.
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, Self::Error> {
        if let Some(change) = key.change {
            // Same ordering fix as in `get_contents`: guard before the
            // `end - start` subtraction.
            if key.end <= key.start {
                buf.clear();
                return Ok(0);
            }
            buf.resize((key.end.0 - key.start.0) as usize, 0);
            let path = self.filename(&change);
            let mut p = crate::change::ChangeFile::open(change, &path.to_str().unwrap())?;
            let n = p.read_contents(key.start.0, buf)?;
            Ok(n)
        } else {
            Ok(0)
        }
    }
    /// Hashes of changes that introduced edges targeting `pos` in the
    /// hunks of `change` (see `Atom::deletes_pos`).
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, Self::Error> {
        let mut cache = self.load(hash, change)?;
        let p = cache.get_mut(&change).unwrap();
        let p = p.lock().unwrap();
        let mut v = Vec::new();
        for c in p.hashed().changes.iter() {
            for c in c.iter() {
                v.extend(c.deletes_pos(pos).into_iter())
            }
        }
        Ok(v)
    }
    /// Serialize `p` to a temporary file in the changes directory, then
    /// move it into place with `persist`. Returns the change's hash.
    fn save_change(&self, p: &Change) -> Result<Hash, Self::Error> {
        let mut f = tempfile::NamedTempFile::new_in(&self.0.changes_dir)?;
        let hash = {
            let w = std::io::BufWriter::new(&mut f);
            p.serialize(w)?
        };
        let file_name = self.filename(&hash);
        std::fs::create_dir_all(file_name.parent().unwrap())?;
        debug!("file_name = {:?}", file_name);
        f.persist(file_name)?;
        Ok(hash)
    }
    /// Delete the change file; `Ok(true)` if the file existed.
    fn del_change(&self, hash: &Hash) -> Result<bool, Self::Error> {
        let file_name = self.filename(hash);
        debug!("file_name = {:?}", file_name);
        let result = std::fs::remove_file(&file_name).is_ok();
        std::fs::remove_dir(file_name.parent().unwrap()).unwrap_or(()); // fails silently if there are still changes with the same 2-letter prefix.
        Ok(result)
    }
    /// Deserialize the full change `h` from disk.
    fn get_change(&self, h: &Hash) -> Result<Change, Self::Error> {
        let file_name = self.filename(h);
        let file_name = file_name.to_str().unwrap();
        debug!("file_name = {:?}", file_name);
        Ok(Change::deserialize(&file_name, Some(h))?)
    }
}
use crate::pristine::*;
use chrono::{DateTime, Utc};
use std::collections::{BTreeSet, HashSet};
#[cfg(feature = "zstd")]
use std::io::Write;
#[cfg(feature = "text-changes")]
mod text_changes;
mod change_file;
pub use change_file::*;
/// Errors that can occur while reading, writing or verifying a change.
#[derive(Debug, Error)]
pub enum ChangeError {
    #[error("Version mismatch")]
    VersionMismatch,
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    Bincode(#[from] bincode::Error),
    #[error(transparent)]
    Zstd(#[from] zstd_seekable::Error),
    #[error(transparent)]
    TomlDe(#[from] toml::de::Error),
    #[error(transparent)]
    TomlSer(#[from] toml::ser::Error),
    #[error("Missing contents for change {:?}", hash)]
    MissingContents { hash: crate::pristine::Hash },
    #[error("Change hash mismatch, claimed {:?}, computed {:?}", claimed, computed)]
    ChangeHashMismatch {
        claimed: crate::pristine::Hash,
        computed: crate::pristine::Hash,
    },
    #[error(
        "Change contents hash mismatch, claimed {:?}, computed {:?}",
        claimed,
        computed
    )]
    ContentsHashMismatch {
        claimed: crate::pristine::Hash,
        computed: crate::pristine::Hash,
    },
}
/// An elementary operation of a change: either the insertion of new
/// vertices, or a set of edge modifications.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub enum Atom<Change> {
    NewVertex(NewVertex<Change>),
    EdgeMap(EdgeMap<Change>),
}
/// Insertion of a new vertex into the graph.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NewVertex<Change> {
    /// Positions the new vertex is attached below.
    pub up_context: Vec<Position<Change>>,
    /// Positions the new vertex is attached above.
    pub down_context: Vec<Position<Change>>,
    /// Flags of the edges binding the new vertex to its contexts.
    pub flag: EdgeFlags,
    /// Start of the vertex's bytes in the change's contents.
    pub start: ChangePosition,
    /// End (exclusive) of the vertex's bytes in the change's contents.
    pub end: ChangePosition,
    /// Inode (file) this vertex belongs to.
    pub inode: Position<Change>,
}
/// A set of edge modifications, all on the same inode.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct EdgeMap<Change> {
    pub edges: Vec<NewEdge<Change>>,
    /// Inode (file) these edges belong to.
    pub inode: Position<Change>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NewEdge<Change> {
    /// Flags of the edge before this modification.
    pub previous: EdgeFlags,
    /// Flags of the edge after this modification.
    pub flag: EdgeFlags,
    /// The origin of the edge, i.e. if a vertex split is needed, the
    /// left-hand side of the split will include `from.pos`. This
    /// means that splitting vertex `[a, b[` to apply this edge
    /// modification will yield vertices `[a, from.pos+1[` and
    /// `[from.pos+1, b[`.
    pub from: Position<Change>,
    /// The destination of the edge, i.e. the last byte affected by
    /// this change.
    pub to: Vertex<Change>,
    /// The change that introduced the previous version of the edge
    /// (the one being replaced by this `NewEdge`).
    pub introduced_by: Change,
}
impl<T: Clone> NewEdge<T> {
pub(crate) fn reverse(&self, introduced_by: T) -> Self {
NewEdge {
previous: self.flag,
flag: self.previous,
from: self.from.clone(),
to: self.to.clone(),
introduced_by,
}
}
}
/// The header of a change contains all the metadata about a change
/// (but not the actual contents of a change).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct ChangeHeader {
    /// Commit message.
    pub message: String,
    /// Optional longer description.
    pub description: Option<String>,
    /// Time the change was recorded.
    pub timestamp: DateTime<Utc>,
    /// The authors of the change.
    pub authors: Vec<Author>,
}
/// One author of a change, with optional full name and e-mail.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Author {
    pub name: String,
    // `#[serde(default)]` keeps deserialization working when absent.
    #[serde(default)]
    pub full_name: Option<String>,
    #[serde(default)]
    pub email: Option<String>,
}
impl std::fmt::Display for Author {
    /// Render the author as just the quoted name when no optional field
    /// is set, and as an inline-table-like record otherwise.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        if self.full_name.is_none() && self.email.is_none() {
            return write!(fmt, "{:?}", self.name);
        }
        write!(fmt, "{{ name = {:?}", self.name)?;
        if let Some(ref full_name) = self.full_name {
            write!(fmt, ", full_name = {:?}", full_name)?;
        }
        if let Some(ref email) = self.email {
            write!(fmt, ", email = {:?}", email)?;
        }
        write!(fmt, " }}")
    }
}
impl Default for ChangeHeader {
    /// An empty header, timestamped with the current UTC time.
    fn default() -> Self {
        ChangeHeader {
            message: String::new(),
            description: None,
            timestamp: Utc::now(),
            authors: Vec::new(),
        }
    }
}
/// A change as loaded in memory: the hashed part plus everything that
/// does not contribute to the change's hash.
#[derive(Clone, Debug, PartialEq)]
pub struct LocalChange<Local> {
    /// File-layout offsets (see `Offsets`).
    pub offsets: Offsets,
    /// The part of the change covered by its hash.
    pub hashed: Hashed<Local>,
    /// unhashed TOML extra contents.
    pub unhashed: Option<toml::Value>,
    /// The contents.
    pub contents: Vec<u8>,
}
// Deref to the hashed part, so `change.dependencies` etc. work directly.
impl std::ops::Deref for LocalChange<Local> {
    type Target = Hashed<Local>;
    fn deref(&self) -> &Self::Target {
        &self.hashed
    }
}
impl std::ops::DerefMut for LocalChange<Local> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.hashed
    }
}
/// Current version of the change format.
pub const VERSION: u64 = 4;
/// The hashed part of a change: everything the change's hash covers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Hashed<Local> {
    /// Version, again (in order to hash it).
    pub version: u64,
    /// Header part, containing the metadata.
    pub header: ChangeHeader,
    /// The dependencies of this change.
    pub dependencies: Vec<Hash>,
    /// Extra known "context" changes to recover from deleted contexts.
    pub extra_known: Vec<Hash>,
    /// Some space to write application-specific data.
    pub metadata: Vec<u8>,
    /// The changes, without the contents.
    pub changes: Vec<Hunk<Option<Hash>, Local>>,
    /// Hash of the contents, so that the "contents" field is
    /// verifiable independently from the actions in this change.
    pub contents_hash: Hash,
}
/// A change whose hunks carry local (path/line) information.
pub type Change = LocalChange<Local>;
/// Compute the dependencies of the hunks in `changes` relative to
/// `channel`: returns `(deps, zombie_deps)`, the minimized direct
/// dependencies and the extra changes needed around deleted contexts,
/// both sorted by their position in the channel's log.
pub fn dependencies<
    'a,
    Local: 'a,
    I: Iterator<Item = &'a Hunk<Option<Hash>, Local>>,
    T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
>(
    txn: &T,
    channel: &T::Channel,
    changes: I,
) -> Result<(Vec<Hash>, Vec<Hash>), TxnErr<T::DepsError>> {
    let mut deps = BTreeSet::new();
    let mut zombie_deps = BTreeSet::new();
    for ch in changes.flat_map(|r| r.iter()) {
        match *ch {
            Atom::NewVertex(NewVertex {
                ref up_context,
                ref down_context,
                ..
            }) => {
                // A new vertex depends on every change its contexts
                // refer to.
                for up in up_context.iter().chain(down_context.iter()) {
                    match up.change {
                        None | Some(Hash::None) => {}
                        Some(ref dep) => {
                            deps.insert(*dep);
                        }
                    }
                }
            }
            Atom::EdgeMap(EdgeMap { ref edges, .. }) => {
                for e in edges {
                    assert!(!e.flag.contains(EdgeFlags::PARENT));
                    assert!(e.introduced_by != Some(Hash::None));
                    // An edge depends on the changes owning its source,
                    // itself, and its target.
                    if let Some(p) = e.from.change {
                        deps.insert(p);
                    }
                    if let Some(p) = e.introduced_by {
                        deps.insert(p);
                    }
                    if let Some(p) = e.to.change {
                        deps.insert(p);
                    }
                    // Changes adjacent to the endpoints may be needed
                    // to resolve deleted contexts ("zombies").
                    add_zombie_deps_from(txn, T::graph(channel), &mut zombie_deps, e.from)?;
                    add_zombie_deps_to(txn, T::graph(channel), &mut zombie_deps, e.to)?
                }
            }
        }
    }
    let deps = minimize_deps(txn, &channel, &deps)?;
    // Direct dependencies need not be repeated as zombie dependencies.
    for d in deps.iter() {
        zombie_deps.remove(d);
    }
    // Sort both lists by the changes' order in the channel log.
    let mut deps: Vec<Hash> = deps.into_iter().collect();
    deps.sort_by(|a, b| {
        let a = txn.get_internal(*a).unwrap().unwrap();
        let b = txn.get_internal(*b).unwrap().unwrap();
        txn.get_changeset(T::changes(&channel), a)
            .unwrap()
            .cmp(&txn.get_changeset(T::changes(&channel), b).unwrap())
    });
    let mut zombie_deps: Vec<Hash> = zombie_deps.into_iter().collect();
    zombie_deps.sort_by(|a, b| {
        let a = txn.get_internal(*a).unwrap().unwrap();
        let b = txn.get_internal(*b).unwrap().unwrap();
        txn.get_changeset(T::changes(&channel), a)
            .unwrap()
            .cmp(&txn.get_changeset(T::changes(&channel), b).unwrap())
    });
    Ok((deps, zombie_deps))
}
/// Dependencies of an entire channel: every change currently on it,
/// minimized. The zombie-dependency list is always empty here.
pub fn full_dependencies<T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>>(
    txn: &T,
    channel: &ChannelRef<T>,
) -> Result<(Vec<Hash>, Vec<Hash>), TxnErr<T::DepsError>> {
    let mut deps = BTreeSet::new();
    let channel = channel.borrow();
    // Collect the external hash of every change in the channel log.
    for x in changeid_log(txn, &channel, 0)? {
        let (_, (ch, _)) = x?;
        let h = txn.get_external(ch)?.unwrap();
        deps.insert(h);
    }
    let deps = minimize_deps(txn, &channel, &deps)?;
    Ok((deps, Vec::new()))
}
/// Record as zombie dependencies the changes touching the edges adjacent
/// to the vertex containing `e_from`: both the change that introduced
/// each edge and the one owning its destination, skipping `Hash::None`.
fn add_zombie_deps_from<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    zombie_deps: &mut BTreeSet<Hash>,
    e_from: Position<Option<Hash>>,
) -> Result<(), TxnErr<T::GraphError>> {
    // Positions inside the current change (`change == None`) add no
    // dependencies.
    let change = match e_from.change {
        Some(p) => p,
        None => return Ok(()),
    };
    let e_from = Position {
        change: txn.get_internal(change)?.unwrap(),
        pos: e_from.pos,
    };
    let from = txn.find_block_end(channel, e_from).unwrap();
    for edge in iter_adj_all(txn, channel, from)? {
        let edge = edge?;
        // Both the edge's introducer and its destination's owner count.
        for &id in [edge.introduced_by, edge.dest.change].iter() {
            match txn.get_external(id)? {
                Some(Hash::None) | None => {}
                Some(ext) => {
                    zombie_deps.insert(ext.to_owned());
                }
            }
        }
    }
    Ok(())
}
/// Like `add_zombie_deps_from`, but for the target vertex of an edge:
/// walks every block covering `e_to` and records the changes touching
/// the adjacent edges as zombie dependencies.
fn add_zombie_deps_to<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    zombie_deps: &mut BTreeSet<Hash>,
    e_to: Vertex<Option<Hash>>,
) -> Result<(), TxnErr<T::GraphError>> {
    // Vertices in the current change (`change == None`) add nothing.
    let to_pos = if let Some(p) = e_to.change {
        Position {
            change: txn.get_internal(p)?.unwrap(),
            pos: e_to.start,
        }
    } else {
        return Ok(());
    };
    let mut to = txn.find_block(channel, to_pos).unwrap();
    loop {
        for edge in iter_adj_all(txn, channel, to)? {
            let edge = edge?;
            // The change that introduced the edge, unless `Hash::None`.
            if let Some(ext) = txn.get_external(edge.introduced_by)? {
                if let Hash::None = ext {
                } else {
                    zombie_deps.insert(ext.to_owned());
                }
            }
            // Likewise the change owning the edge's destination.
            if let Some(ext) = txn.get_external(edge.dest.change)? {
                if let Hash::None = ext {
                } else {
                    zombie_deps.insert(ext.to_owned());
                }
            }
        }
        // `e_to` may span several blocks; keep going until it is covered.
        if to.end >= e_to.end {
            break;
        }
        to = txn.find_block(channel, to.end_pos()).unwrap();
    }
    Ok(())
}
/// Remove from `deps` every change that is a transitive dependency of
/// another element of `deps`. The traversal is pruned at changes older
/// on the channel than the oldest element of `deps`.
fn minimize_deps<T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>>(
    txn: &T,
    channel: &T::Channel,
    deps: &BTreeSet<Hash>,
) -> Result<Vec<Hash>, TxnErr<T::DepsError>> {
    let mut min_time = std::u64::MAX;
    // Work stack of (internal id, is_root); `internal_deps_` is the
    // candidate set, from which covered dependencies are removed.
    let mut internal_deps = Vec::new();
    let mut internal_deps_ = HashSet::new();
    for h in deps.iter() {
        if let Hash::None = h {
            continue;
        }
        debug!("h = {:?}", h);
        let id = txn.get_internal(*h)?.unwrap();
        debug!("id = {:?}", id);
        let time = txn.get_changeset(T::changes(&channel), id)?.unwrap();
        debug!("time = {:?}", time);
        min_time = min_time.min(time);
        internal_deps.push((id, true));
        internal_deps_.insert(id);
    }
    // NOTE(review): every entry pushed above has `true` as its second
    // component, so this sort is a no-op as written.
    internal_deps.sort_by(|a, b| a.1.cmp(&b.1));
    let mut visited = HashSet::new();
    while let Some((id, is_root)) = internal_deps.pop() {
        if is_root {
            // A root already removed by another root's cone needs no
            // traversal of its own? No: it does, but only if still a
            // candidate — TODO confirm intent; this skips it entirely.
            if !internal_deps_.contains(&id) {
                continue;
            }
        } else if internal_deps_.remove(&id) {
            // `id` is reachable from another candidate: drop it.
            debug!("removing dep {:?}", id);
        }
        if !visited.insert(id) {
            continue;
        }
        // Push the direct dependencies of `id` onto the stack. The
        // cursor iterates (id0, dep) pairs; only rows with id0 == id
        // belong to `id`.
        let mut cursor = txn.iter_dep(id)?;
        while let Some(x) = txn.cursor_dep_next(&mut cursor.cursor)? {
            let (id0, dep) = x;
            trace!("minimize loop = {:?} {:?}", id0, dep);
            if id0 < id {
                continue;
            } else if id0 > id {
                break;
            }
            let time = if let Some(time) = txn.get_changeset(T::changes(&channel), dep)? {
                time
            } else {
                panic!(
                    "not found in channel {:?}: id = {:?} depends on {:?}",
                    T::name(channel),
                    id,
                    dep
                );
            };
            trace!("time = {:?}", time);
            // Changes older than every candidate cannot cover one.
            if time >= min_time {
                internal_deps.push((dep, false))
            }
        }
    }
    Ok(internal_deps_
        .into_iter()
        .map(|id| txn.get_external(id).unwrap().unwrap())
        .collect())
}
impl Change {
    /// Does this change know about `hash`, either as a direct dependency
    /// or as an extra-known change?
    pub fn knows(&self, hash: &Hash) -> bool {
        self.extra_known.contains(hash) || self.dependencies.contains(&hash)
    }
    /// Does this change (whose own hash is `hash`) introduce an edge
    /// from `from` to `to` with exactly the flags `flags`?
    pub fn has_edge(
        &self,
        hash: Hash,
        from: Position<Option<Hash>>,
        to: Position<Option<Hash>>,
        flags: crate::pristine::EdgeFlags,
    ) -> bool {
        debug!("has_edge: {:?} {:?} {:?} {:?}", hash, from, to, flags);
        // Scan every atom of every hunk until a match decides the answer.
        for change_ in self.changes.iter() {
            for change_ in change_.iter() {
                match change_ {
                    Atom::NewVertex(n) => {
                        debug!("has_edge: {:?}", n);
                        if from.change == Some(hash) && from.pos >= n.start && from.pos <= n.end {
                            if to.change == Some(hash) {
                                // internal
                                return flags | EdgeFlags::FOLDER
                                    == EdgeFlags::BLOCK | EdgeFlags::FOLDER;
                            } else {
                                // down context
                                if n.down_context.iter().any(|d| *d == to) {
                                    return flags.is_empty();
                                } else {
                                    return false;
                                }
                            }
                        } else if to.change == Some(hash) && to.pos >= n.start && to.pos <= n.end {
                            // up context
                            if n.up_context.iter().any(|d| *d == from) {
                                return flags | EdgeFlags::FOLDER
                                    == EdgeFlags::BLOCK | EdgeFlags::FOLDER;
                            } else {
                                return false;
                            }
                        }
                    }
                    Atom::EdgeMap(e) => {
                        debug!("has_edge: {:?}", e);
                        // Explicit edge introduced by this change.
                        if e.edges
                            .iter()
                            .any(|e| e.from == from && e.to.start_pos() == to && e.flag == flags)
                        {
                            return true;
                        }
                    }
                }
            }
        }
        debug!("not found");
        false
    }
}
impl Atom<Option<Hash>> {
    /// The inode (file) this atom applies to.
    pub fn inode(&self) -> Position<Option<Hash>> {
        match self {
            Atom::NewVertex(ref n) => n.inode,
            Atom::EdgeMap(ref n) => n.inode,
        }
    }
    /// The atom undoing this one. `hash` is the hash of the change being
    /// inverted, so `None` (self-) references are resolved to
    /// `Some(*hash)` in the result.
    pub fn inverse(&self, hash: &Hash) -> Self {
        match *self {
            // Undoing an insertion: add a DELETED edge to the inserted
            // vertex from each of its up-context positions.
            Atom::NewVertex(NewVertex {
                ref up_context,
                flag,
                start,
                end,
                ref inode,
                ..
            }) => {
                let mut edges = Vec::new();
                for up in up_context {
                    edges.push(NewEdge {
                        previous: flag,
                        flag: flag | EdgeFlags::DELETED,
                        from: Position {
                            change: Some(if let Some(ref h) = up.change {
                                *h
                            } else {
                                *hash
                            }),
                            pos: up.pos,
                        },
                        to: Vertex {
                            change: Some(*hash),
                            start,
                            end,
                        },
                        introduced_by: Some(*hash),
                    })
                }
                Atom::EdgeMap(EdgeMap {
                    edges,
                    inode: Position {
                        change: Some(if let Some(p) = inode.change { p } else { *hash }),
                        pos: inode.pos,
                    },
                })
            }
            // Undoing an edge map: swap `previous` and `flag` on each
            // edge, stamped as introduced by the inverted change.
            Atom::EdgeMap(EdgeMap {
                ref edges,
                ref inode,
            }) => Atom::EdgeMap(EdgeMap {
                inode: Position {
                    change: Some(if let Some(p) = inode.change { p } else { *hash }),
                    pos: inode.pos,
                },
                edges: edges
                    .iter()
                    .map(|e| {
                        let mut e = e.clone();
                        e.introduced_by = Some(*hash);
                        std::mem::swap(&mut e.flag, &mut e.previous);
                        e
                    })
                    .collect(),
            }),
        }
    }
}
impl EdgeMap<Option<Hash>> {
    /// Merge two edge maps into one, keeping `self`'s edges first.
    /// Panics if the two maps are not on the same inode.
    fn concat(mut self, other: EdgeMap<Option<Hash>>) -> Self {
        assert_eq!(self.inode, other.inode);
        self.edges.extend(other.edges);
        self
    }
}
impl<L: Clone> Hunk<Option<Hash>, L> {
    /// The hunk undoing this one, where `hash` is the hash of the change
    /// being inverted: each variant maps to its opposite (del <-> undel,
    /// solve <-> unsolve, ...), with every atom inverted through
    /// `Atom::inverse`.
    pub fn inverse(&self, hash: &Hash) -> Self {
        match self {
            // Moving back: the two atoms swap roles.
            Hunk::FileMove { del, add, path } => Hunk::FileMove {
                del: add.inverse(hash),
                add: del.inverse(hash),
                path: path.clone(),
            },
            Hunk::FileDel {
                del,
                contents,
                path,
            } => Hunk::FileUndel {
                undel: del.inverse(hash),
                contents: contents.as_ref().map(|c| c.inverse(hash)),
                path: path.clone(),
            },
            Hunk::FileUndel {
                undel,
                contents,
                path,
            } => Hunk::FileDel {
                del: undel.inverse(hash),
                contents: contents.as_ref().map(|c| c.inverse(hash)),
                path: path.clone(),
            },
            Hunk::FileAdd {
                add_name,
                add_inode,
                contents,
                path,
            } => {
                // Undoing an addition deletes both the name and inode
                // atoms; their inverses are edge maps on the same inode,
                // merged into one.
                let del = match (add_name.inverse(hash), add_inode.inverse(hash)) {
                    (Atom::EdgeMap(e0), Atom::EdgeMap(e1)) => Atom::EdgeMap(e0.concat(e1)),
                    _ => unreachable!(),
                };
                Hunk::FileDel {
                    del,
                    contents: contents.as_ref().map(|c| c.inverse(hash)),
                    path: path.clone(),
                }
            }
            Hunk::SolveNameConflict { name, path } => Hunk::UnsolveNameConflict {
                name: name.inverse(hash),
                path: path.clone(),
            },
            Hunk::UnsolveNameConflict { name, path } => Hunk::SolveNameConflict {
                name: name.inverse(hash),
                path: path.clone(),
            },
            Hunk::Edit { change, local } => Hunk::Edit {
                change: change.inverse(hash),
                local: local.clone(),
            },
            // A replacement is undone by swapping old and new text.
            Hunk::Replacement {
                change,
                replacement,
                local,
            } => Hunk::Replacement {
                change: replacement.inverse(hash),
                replacement: change.inverse(hash),
                local: local.clone(),
            },
            Hunk::SolveOrderConflict { change, local } => Hunk::UnsolveOrderConflict {
                change: change.inverse(hash),
                local: local.clone(),
            },
            Hunk::UnsolveOrderConflict { change, local } => Hunk::SolveOrderConflict {
                change: change.inverse(hash),
                local: local.clone(),
            },
            // Resurrections are undone by a plain edit.
            Hunk::ResurrectZombies { change, local } => Hunk::Edit {
                change: change.inverse(hash),
                local: local.clone(),
            },
        }
    }
}
impl Change {
    /// Build the change undoing `self`, whose hash is `hash`. The
    /// inverse depends exactly on `hash`, inverts every hunk in order,
    /// and carries empty contents.
    pub fn inverse(&self, hash: &Hash, header: ChangeHeader, metadata: Vec<u8>) -> Self {
        let dependencies = vec![*hash];
        // Hash of the empty contents: the inverse adds no text of its own.
        let contents_hash = Hasher::default().finish();
        Change {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header,
                dependencies,
                extra_known: self.extra_known.clone(),
                metadata,
                changes: self.changes.iter().map(|r| r.inverse(hash)).collect(),
                contents_hash,
            },
            contents: Vec::new(),
            unhashed: None,
        }
    }
}
/// Local (working-copy) coordinates of a hunk: path of the file and the
/// line the hunk applies to.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Local {
    pub path: String,
    pub line: usize,
}
/// A high-level operation of a change, made of one to three atoms.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Hunk<Hash, Local> {
    /// Rename/move of a file: delete the old name, add the new one.
    FileMove {
        del: Atom<Hash>,
        add: Atom<Hash>,
        path: String,
    },
    /// Deletion of a file, with its contents when it had any.
    FileDel {
        del: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// Un-deletion of a file (inverse of `FileDel`).
    FileUndel {
        undel: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// Addition of a file: its name, its inode, and optional contents.
    FileAdd {
        add_name: Atom<Hash>,
        add_inode: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// Resolution of a name conflict.
    SolveNameConflict {
        name: Atom<Hash>,
        path: String,
    },
    /// Re-introduction of a name conflict (inverse of the above).
    UnsolveNameConflict {
        name: Atom<Hash>,
        path: String,
    },
    /// A plain content edit inside a file.
    Edit {
        change: Atom<Hash>,
        local: Local,
    },
    /// Replacement of some content by some other content.
    Replacement {
        change: Atom<Hash>,
        replacement: Atom<Hash>,
        local: Local,
    },
    /// Resolution of an order conflict.
    SolveOrderConflict {
        change: Atom<Hash>,
        local: Local,
    },
    /// Re-introduction of an order conflict (inverse of the above).
    UnsolveOrderConflict {
        change: Atom<Hash>,
        local: Local,
    },
    /// Re-aliving of deleted ("zombie") vertices.
    ResurrectZombies {
        change: Atom<Hash>,
        local: Local,
    },
}
#[doc(hidden)]
/// Iterator over the (one to three) atoms of a hunk, in forward order.
pub struct HunkIter<R, C> {
    /// The hunk, consumed on the first call to `next`.
    rec: Option<R>,
    /// Second and third atoms, stashed by the first `next` call.
    extra: Option<C>,
    extra2: Option<C>,
}
impl<Context, Local> IntoIterator for Hunk<Context, Local> {
    type IntoIter = HunkIter<Hunk<Context, Local>, Atom<Context>>;
    type Item = Atom<Context>;
    /// Iterate over this hunk's atoms by value.
    fn into_iter(self) -> Self::IntoIter {
        HunkIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
}
impl<Context, Local> Hunk<Context, Local> {
    /// Iterate over this hunk's atoms by reference, in forward order.
    pub fn iter(&self) -> HunkIter<&Hunk<Context, Local>, &Atom<Context>> {
        HunkIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
    /// Iterate over this hunk's atoms by reference, in reverse order.
    pub fn rev_iter(&self) -> RevHunkIter<&Hunk<Context, Local>, &Atom<Context>> {
        RevHunkIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
}
impl<Context, Local> Iterator for HunkIter<Hunk<Context, Local>, Atom<Context>> {
    type Item = Atom<Context>;
    /// Yield the hunk's atoms in forward order: the first call
    /// decomposes the hunk, stashing any second/third atom in
    /// `extra`/`extra2`; later calls drain those.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(extra) = self.extra.take() {
            Some(extra)
        } else if let Some(extra) = self.extra2.take() {
            Some(extra)
        } else if let Some(rec) = self.rec.take() {
            match rec {
                Hunk::FileMove { del, add, .. } => {
                    self.extra = Some(add);
                    Some(del)
                }
                Hunk::FileDel { del, contents, .. } => {
                    self.extra = contents;
                    Some(del)
                }
                Hunk::FileUndel {
                    undel, contents, ..
                } => {
                    self.extra = contents;
                    Some(undel)
                }
                Hunk::FileAdd {
                    add_name,
                    add_inode,
                    contents,
                    ..
                } => {
                    // Order: name, then inode, then optional contents.
                    self.extra = Some(add_inode);
                    self.extra2 = contents;
                    Some(add_name)
                }
                Hunk::SolveNameConflict { name, .. } => Some(name),
                Hunk::UnsolveNameConflict { name, .. } => Some(name),
                Hunk::Edit { change, .. } => Some(change),
                Hunk::Replacement {
                    change,
                    replacement,
                    ..
                } => {
                    self.extra = Some(replacement);
                    Some(change)
                }
                Hunk::SolveOrderConflict { change, .. } => Some(change),
                Hunk::UnsolveOrderConflict { change, .. } => Some(change),
                Hunk::ResurrectZombies { change, .. } => Some(change),
            }
        } else {
            None
        }
    }
}
impl<'a, Context, Local> Iterator for HunkIter<&'a Hunk<Context, Local>, &'a Atom<Context>> {
    type Item = &'a Atom<Context>;
    /// By-reference version of the owned iterator above, with the same
    /// forward yield order.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(extra) = self.extra.take() {
            Some(extra)
        } else if let Some(extra) = self.extra2.take() {
            Some(extra)
        } else if let Some(rec) = self.rec.take() {
            match *rec {
                Hunk::FileMove {
                    ref del, ref add, ..
                } => {
                    self.extra = Some(add);
                    Some(del)
                }
                Hunk::FileDel {
                    ref del,
                    ref contents,
                    ..
                } => {
                    self.extra = contents.as_ref();
                    Some(del)
                }
                Hunk::FileUndel {
                    ref undel,
                    ref contents,
                    ..
                } => {
                    self.extra = contents.as_ref();
                    Some(undel)
                }
                Hunk::FileAdd {
                    ref add_name,
                    ref add_inode,
                    ref contents,
                    ..
                } => {
                    // Order: name, then inode, then optional contents.
                    self.extra = Some(add_inode);
                    self.extra2 = contents.as_ref();
                    Some(&add_name)
                }
                Hunk::SolveNameConflict { ref name, .. } => Some(&name),
                Hunk::UnsolveNameConflict { ref name, .. } => Some(&name),
                Hunk::Edit { change: ref c, .. } => Some(c),
                Hunk::Replacement {
                    replacement: ref r,
                    change: ref c,
                    ..
                } => {
                    self.extra = Some(r);
                    Some(c)
                }
                Hunk::SolveOrderConflict { ref change, .. } => Some(change),
                Hunk::UnsolveOrderConflict { ref change, .. } => Some(change),
                Hunk::ResurrectZombies { ref change, .. } => Some(change),
            }
        } else {
            None
        }
    }
}
/// Like `HunkIter`, but yields a hunk's atoms in reverse order.
pub struct RevHunkIter<R, C> {
    /// The hunk, consumed on the first call to `next`.
    rec: Option<R>,
    /// Remaining atoms, stashed by the first `next` call.
    extra: Option<C>,
    extra2: Option<C>,
}
impl<'a, Context, Local> Iterator for RevHunkIter<&'a Hunk<Context, Local>, &'a Atom<Context>> {
    type Item = &'a Atom<Context>;
    /// Yield the atoms in the reverse of `HunkIter`'s order, e.g.
    /// contents (when present) before inode before name for `FileAdd`.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(extra) = self.extra.take() {
            Some(extra)
        } else if let Some(extra) = self.extra2.take() {
            Some(extra)
        } else if let Some(rec) = self.rec.take() {
            match *rec {
                Hunk::FileMove {
                    ref del, ref add, ..
                } => {
                    self.extra = Some(del);
                    Some(add)
                }
                Hunk::FileDel {
                    ref del,
                    ref contents,
                    ..
                } => {
                    if let Some(ref c) = contents {
                        self.extra = Some(del);
                        Some(c)
                    } else {
                        Some(del)
                    }
                }
                Hunk::FileUndel {
                    ref undel,
                    ref contents,
                    ..
                } => {
                    if let Some(ref c) = contents {
                        self.extra = Some(undel);
                        Some(c)
                    } else {
                        Some(undel)
                    }
                }
                Hunk::FileAdd {
                    ref add_name,
                    ref add_inode,
                    ref contents,
                    ..
                } => {
                    if let Some(ref c) = contents {
                        self.extra = Some(add_inode);
                        self.extra2 = Some(add_name);
                        Some(c)
                    } else {
                        self.extra = Some(add_name);
                        Some(add_inode)
                    }
                }
                Hunk::SolveNameConflict { ref name, .. } => Some(&name),
                Hunk::UnsolveNameConflict { ref name, .. } => Some(&name),
                Hunk::Edit { change: ref c, .. } => Some(c),
                Hunk::Replacement {
                    replacement: ref r,
                    change: ref c,
                    ..
                } => {
                    self.extra = Some(c);
                    Some(r)
                }
                Hunk::SolveOrderConflict { ref change, .. } => Some(change),
                Hunk::UnsolveOrderConflict { ref change, .. } => Some(change),
                Hunk::ResurrectZombies { ref change, .. } => Some(change),
            }
        } else {
            None
        }
    }
}
impl Atom<Option<ChangeId>> {
    /// Replace every internal `ChangeId` in this atom by its external
    /// `Hash`, looked up with `txn.get_external`. `None` change ids
    /// (positions inside the change itself) stay `None` via `and_then`.
    ///
    /// NOTE(review): every `get_external(..).unwrap()` below panics if
    /// the transaction lookup fails, rather than returning `GraphError`;
    /// `introduced_by` additionally panics when the id has no external
    /// hash (the explicit `panic!` branch).
    fn globalize<T: GraphTxnT>(&self, txn: &T) -> Result<Atom<Option<Hash>>, T::GraphError> {
        match self {
            Atom::NewVertex(NewVertex {
                up_context,
                down_context,
                start,
                end,
                flag,
                inode,
            }) => Ok(Atom::NewVertex(NewVertex {
                up_context: up_context
                    .iter()
                    .map(|&up| Position {
                        change: up.change.and_then(|a| txn.get_external(a).unwrap()),
                        pos: up.pos,
                    })
                    .collect(),
                down_context: down_context
                    .iter()
                    .map(|&down| Position {
                        change: down.change.and_then(|a| txn.get_external(a).unwrap()),
                        pos: down.pos,
                    })
                    .collect(),
                start: *start,
                end: *end,
                flag: *flag,
                inode: Position {
                    change: inode.change.and_then(|a| txn.get_external(a).unwrap()),
                    pos: inode.pos,
                },
            })),
            Atom::EdgeMap(EdgeMap { edges, inode }) => Ok(Atom::EdgeMap(EdgeMap {
                edges: edges
                    .iter()
                    .map(|edge| NewEdge {
                        previous: edge.previous,
                        flag: edge.flag,
                        from: Position {
                            change: edge.from.change.and_then(|a| txn.get_external(a).unwrap()),
                            pos: edge.from.pos,
                        },
                        to: Vertex {
                            change: edge.to.change.and_then(|a| txn.get_external(a).unwrap()),
                            start: edge.to.start,
                            end: edge.to.end,
                        },
                        // An edge's introducer must always have an external
                        // hash; a missing one is a broken invariant.
                        introduced_by: edge.introduced_by.map(|a| {
                            if let Some(a) = txn.get_external(a).unwrap() {
                                a
                            } else {
                                panic!("introduced by {:?}", a);
                            }
                        }),
                    })
                    .collect(),
                inode: Position {
                    change: inode.change.and_then(|a| txn.get_external(a).unwrap()),
                    pos: inode.pos,
                },
            })),
        }
    }
}
impl<H> Hunk<H, Local> {
    /// The repository path this hunk applies to.
    ///
    /// File-level hunks carry the path directly; line-level hunks store
    /// it inside their `Local` metadata.
    pub fn path(&self) -> &str {
        match self {
            Hunk::Edit { local, .. }
            | Hunk::Replacement { local, .. }
            | Hunk::SolveOrderConflict { local, .. }
            | Hunk::UnsolveOrderConflict { local, .. }
            | Hunk::ResurrectZombies { local, .. } => &local.path,
            Hunk::FileMove { path, .. }
            | Hunk::FileDel { path, .. }
            | Hunk::FileUndel { path, .. }
            | Hunk::SolveNameConflict { path, .. }
            | Hunk::UnsolveNameConflict { path, .. }
            | Hunk::FileAdd { path, .. } => path,
        }
    }

    /// The line number recorded in this hunk's `Local` metadata, if any.
    ///
    /// Only line-level hunks carry one; file-level hunks (moves,
    /// deletions, additions, name conflicts) return `None`.
    pub fn line(&self) -> Option<usize> {
        match self {
            Hunk::Edit { local, .. }
            | Hunk::Replacement { local, .. }
            | Hunk::SolveOrderConflict { local, .. }
            | Hunk::UnsolveOrderConflict { local, .. }
            | Hunk::ResurrectZombies { local, .. } => Some(local.line),
            Hunk::FileMove { .. }
            | Hunk::FileDel { .. }
            | Hunk::FileUndel { .. }
            | Hunk::SolveNameConflict { .. }
            | Hunk::UnsolveNameConflict { .. }
            | Hunk::FileAdd { .. } => None,
        }
    }
}
impl<Local> Hunk<Option<ChangeId>, Local> {
    /// Convert all internal `ChangeId`s in this hunk to external
    /// `Hash`es, producing the globally-addressable form of the hunk.
    ///
    /// Each atom is converted with `Atom::globalize`. Optional
    /// `contents` atoms are converted through `Option::transpose` so
    /// that lookup errors are propagated with `?` instead of panicking
    /// (the previous code called `.unwrap()` on those results).
    pub fn globalize<T: GraphTxnT>(
        self,
        txn: &T,
    ) -> Result<Hunk<Option<Hash>, Local>, T::GraphError> {
        Ok(match self {
            Hunk::FileMove { del, add, path } => Hunk::FileMove {
                del: del.globalize(txn)?,
                add: add.globalize(txn)?,
                path,
            },
            Hunk::FileDel {
                del,
                contents,
                path,
            } => Hunk::FileDel {
                del: del.globalize(txn)?,
                // Option<Result<..>> -> Result<Option<..>> so `?` applies.
                contents: contents.as_ref().map(|c| c.globalize(txn)).transpose()?,
                path,
            },
            Hunk::FileUndel {
                undel,
                contents,
                path,
            } => Hunk::FileUndel {
                undel: undel.globalize(txn)?,
                contents: contents.as_ref().map(|c| c.globalize(txn)).transpose()?,
                path,
            },
            Hunk::SolveNameConflict { name, path } => Hunk::SolveNameConflict {
                name: name.globalize(txn)?,
                path,
            },
            Hunk::UnsolveNameConflict { name, path } => Hunk::UnsolveNameConflict {
                name: name.globalize(txn)?,
                path,
            },
            Hunk::FileAdd {
                add_inode,
                add_name,
                contents,
                path,
            } => Hunk::FileAdd {
                add_name: add_name.globalize(txn)?,
                add_inode: add_inode.globalize(txn)?,
                contents: contents.as_ref().map(|c| c.globalize(txn)).transpose()?,
                path,
            },
            Hunk::Edit { change, local } => Hunk::Edit {
                change: change.globalize(txn)?,
                local,
            },
            Hunk::Replacement {
                change,
                replacement,
                local,
            } => Hunk::Replacement {
                change: change.globalize(txn)?,
                replacement: replacement.globalize(txn)?,
                local,
            },
            Hunk::SolveOrderConflict { change, local } => Hunk::SolveOrderConflict {
                change: change.globalize(txn)?,
                local,
            },
            Hunk::UnsolveOrderConflict { change, local } => Hunk::UnsolveOrderConflict {
                change: change.globalize(txn)?,
                local,
            },
            Hunk::ResurrectZombies { change, local } => Hunk::ResurrectZombies {
                change: change.globalize(txn)?,
                local,
            },
        })
    }
}
/// A table of contents of a change, indicating where each section is,
/// to allow seeking inside a change file.
///
/// Offsets are byte positions into the serialized (compressed) change;
/// lengths are the *uncompressed* section lengths (see `serialize`).
/// The bincode-encoded size of this struct is `LocalChange::OFFSETS_SIZE`
/// (7 × 8 = 56 bytes).
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq)]
pub struct Offsets {
    /// Format version; must equal `VERSION` when deserializing.
    version: u64,
    hashed_len: u64, // length of the hashed contents
    /// Offset of the compressed unhashed section.
    unhashed_off: u64,
    unhashed_len: u64, // length of the unhashed contents
    /// Offset of the compressed contents section.
    contents_off: u64,
    /// Uncompressed length of the contents.
    contents_len: u64,
    /// Total size of the serialized change.
    total: u64,
}
impl<L> LocalChange<L> {
    /// Serialized size in bytes of the bincode-encoded `Offsets` header
    /// (7 `u64` fields × 8 bytes).
    #[cfg(feature = "zstd")]
    const OFFSETS_SIZE: u64 = 56;
    /// Assemble a change from its hunks and contents: compute the
    /// dependencies of the hunks on the given channel, hash the
    /// contents, and fill in the header and metadata. `offsets` is left
    /// at its default and only filled in on (de)serialization.
    pub fn make_change<T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>>(
        txn: &T,
        channel: &ChannelRef<T>,
        changes: Vec<Hunk<Option<Hash>, L>>,
        contents: Vec<u8>,
        header: ChangeHeader,
        metadata: Vec<u8>,
    ) -> Result<Self, TxnErr<T::DepsError>> {
        let (dependencies, extra_known) = dependencies(txn, &channel.borrow(), changes.iter())?;
        trace!("make_change, contents = {:?}", contents);
        let contents_hash = {
            let mut hasher = Hasher::default();
            hasher.update(&contents);
            hasher.finish()
        };
        debug!("make_change, contents_hash = {:?}", contents_hash);
        Ok(LocalChange {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header,
                changes,
                contents_hash,
                metadata,
                dependencies,
                extra_known,
            },
            contents,
            unhashed: None,
        })
    }
    /// An empty change: default header, no hunks, and the hash of empty
    /// contents.
    pub fn new() -> Self {
        LocalChange {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header: ChangeHeader::default(),
                changes: Vec::new(),
                contents_hash: Hasher::default().finish(),
                metadata: Vec::new(),
                dependencies: Vec::new(),
                extra_known: Vec::new(),
            },
            unhashed: None,
            contents: Vec::new(),
        }
    }
}
/// Zstd compression level used for change files (1 = fastest).
#[cfg(feature = "zstd")]
const LEVEL: usize = 1;
/// Frame size for seekable zstd streams, which allows decompressing a
/// section of a change file without decompressing everything before it.
#[cfg(feature = "zstd")]
const FRAME_SIZE: usize = 256;
/// Compress `input` into `w` as a seekable zstd stream (level `LEVEL`,
/// frame size `FRAME_SIZE`).
///
/// Fix: the previous version ended the stream with
/// `while let Ok(n) = cstream.end_stream(..)`, which silently dropped
/// any error from `end_stream` and returned `Ok(())` on a truncated
/// stream. Errors are now propagated with `?`, like the errors from
/// `compress` above.
#[cfg(feature = "zstd")]
fn compress<W: Write>(input: &[u8], mut w: W) -> Result<(), ChangeError> {
    let mut cstream = zstd_seekable::SeekableCStream::new(LEVEL, FRAME_SIZE).unwrap();
    let mut output = [0; 4096];
    let mut input_pos = 0;
    // Feed the input through the compressor, writing each produced chunk.
    while input_pos < input.len() {
        let (out_pos, inp_pos) = cstream.compress(&mut output, &input[input_pos..])?;
        w.write_all(&output[..out_pos])?;
        input_pos += inp_pos;
    }
    // Flush the compressor's internal buffers; `end_stream` returns the
    // number of bytes written, 0 once the stream is fully terminated.
    loop {
        let n = cstream.end_stream(&mut output)?;
        if n == 0 {
            break;
        }
        w.write_all(&output[..n])?;
    }
    Ok(())
}
impl Change {
    /// Return the byte offset at which the contents section starts,
    /// i.e. the size of the change without its contents. The reader's
    /// position is restored before returning.
    pub fn size_no_contents<R: std::io::Read + std::io::Seek>(
        r: &mut R,
    ) -> Result<u64, ChangeError> {
        // Remember the current position so we can seek back afterwards.
        let pos = r.seek(std::io::SeekFrom::Current(0))?;
        let mut off = [0u8; Self::OFFSETS_SIZE as usize];
        r.read_exact(&mut off)?;
        let off: Offsets = bincode::deserialize(&off)?;
        if off.version != VERSION {
            return Err(ChangeError::VersionMismatch);
        }
        r.seek(std::io::SeekFrom::Start(pos))?;
        Ok(off.contents_off)
    }
    /// Serialise the change to the writer `w`: a bincode `Offsets`
    /// header followed by the zstd-compressed hashed, unhashed and
    /// contents sections. Returns the hash of the (uncompressed)
    /// hashed section, which identifies the change.
    ///
    /// NOTE(review): the previous doc comment described writing a
    /// "<hash>.change" file into a directory, but the signature takes a
    /// writer; callers choose the destination.
    #[cfg(feature = "zstd")]
    pub fn serialize<W: Write>(&self, mut w: W) -> Result<Hash, ChangeError> {
        // Hashed part.
        let mut hashed = Vec::new();
        bincode::serialize_into(&mut hashed, &self.hashed)?;
        trace!("hashed = {:?}", hashed);
        let mut hasher = Hasher::default();
        hasher.update(&hashed);
        let hash = hasher.finish();
        debug!("{:?}", hash);
        // Unhashed part (TOML): not covered by the change's hash.
        let unhashed = if let Some(ref un) = self.unhashed {
            let s = toml::ser::to_string(un).unwrap();
            s.into()
        } else {
            Vec::new()
        };
        // Compress the change.
        let mut hashed_comp = Vec::new();
        compress(&hashed, &mut hashed_comp)?;
        // Offsets are measured on the compressed sections; lengths (below)
        // are the uncompressed lengths.
        let unhashed_off = Self::OFFSETS_SIZE + hashed_comp.len() as u64;
        let mut unhashed_comp = Vec::new();
        compress(&unhashed, &mut unhashed_comp)?;
        let contents_off = unhashed_off + unhashed_comp.len() as u64;
        let mut contents_comp = Vec::new();
        compress(&self.contents, &mut contents_comp)?;
        let offsets = Offsets {
            version: VERSION,
            hashed_len: hashed.len() as u64,
            unhashed_off,
            unhashed_len: unhashed.len() as u64,
            contents_off,
            contents_len: self.contents.len() as u64,
            total: contents_off + contents_comp.len() as u64,
        };
        bincode::serialize_into(&mut w, &offsets)?;
        w.write_all(&hashed_comp)?;
        w.write_all(&unhashed_comp)?;
        w.write_all(&contents_comp)?;
        Ok(hash)
    }
    /// Check the integrity of a serialized change held in `buf`: the
    /// hashed section must hash to `hash`, and the contents section must
    /// hash to the `contents_hash` recorded inside the hashed section.
    #[cfg(feature = "zstd")]
    pub fn check_from_buffer(buf: &[u8], hash: &Hash) -> Result<(), ChangeError> {
        let offsets: Offsets = bincode::deserialize_from(&buf[..Self::OFFSETS_SIZE as usize])?;
        if offsets.version != VERSION {
            return Err(ChangeError::VersionMismatch);
        }
        debug!("check_from_buffer, offsets = {:?}", offsets);
        // Decompress the hashed section and verify the change hash.
        let mut s = zstd_seekable::Seekable::init_buf(
            &buf[Self::OFFSETS_SIZE as usize..offsets.unhashed_off as usize],
        )?;
        let mut buf_ = Vec::new();
        buf_.resize(offsets.hashed_len as usize, 0);
        s.decompress(&mut buf_[..], 0)?;
        debug!("check_from_buffer, buf_ = {:?}", buf_);
        let mut hasher = Hasher::default();
        hasher.update(&buf_);
        let computed_hash = hasher.finish();
        debug!("{:?} {:?}", computed_hash, hash);
        if &computed_hash != hash {
            return Err((ChangeError::ChangeHashMismatch {
                claimed: *hash,
                computed: computed_hash,
            })
            .into());
        }
        // Deserialized only to read `contents_hash` below.
        let hashed: Hashed<Local> = bincode::deserialize(&buf_)?;
        buf_.clear();
        buf_.resize(offsets.contents_len as usize, 0);
        let mut s = zstd_seekable::Seekable::init_buf(&buf[offsets.contents_off as usize..])?;
        // NOTE(review): this resize is redundant — same length as the one
        // two lines above.
        buf_.resize(offsets.contents_len as usize, 0);
        s.decompress(&mut buf_[..], 0)?;
        let mut hasher = Hasher::default();
        debug!("contents = {:?}", buf_);
        hasher.update(&buf_);
        let computed_hash = hasher.finish();
        debug!(
            "contents hash: {:?}, computed: {:?}",
            hashed.contents_hash, computed_hash
        );
        if computed_hash != hashed.contents_hash {
            return Err(ChangeError::ContentsHashMismatch {
                claimed: hashed.contents_hash,
                computed: computed_hash,
            });
        }
        Ok(())
    }
    /// Deserialise a change from the file at path `file`, verifying the
    /// hashed section against `hash` when one is supplied.
    #[cfg(feature = "zstd")]
    pub fn deserialize(file: &str, hash: Option<&Hash>) -> Result<Self, ChangeError> {
        use std::io::Read;
        let mut r = std::fs::File::open(file)?;
        let mut buf = vec![0u8; Self::OFFSETS_SIZE as usize];
        r.read_exact(&mut buf)?;
        let offsets: Offsets = bincode::deserialize(&buf)?;
        if offsets.version != VERSION {
            return Err(ChangeError::VersionMismatch);
        }
        debug!("offsets = {:?}", offsets);
        buf.clear();
        // Hashed section: compressed bytes between the header and
        // `unhashed_off`.
        buf.resize((offsets.unhashed_off - Self::OFFSETS_SIZE) as usize, 0);
        r.read_exact(&mut buf)?;
        let hashed: Hashed<Local> = {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.hashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            let mut hasher = Hasher::default();
            hasher.update(&out);
            let computed_hash = hasher.finish();
            if let Some(hash) = hash {
                if &computed_hash != hash {
                    return Err(ChangeError::ChangeHashMismatch {
                        claimed: *hash,
                        computed: computed_hash,
                    });
                }
            }
            bincode::deserialize_from(&out[..])?
        };
        buf.clear();
        // Unhashed section: may be empty, in which case it is `None`.
        buf.resize((offsets.contents_off - offsets.unhashed_off) as usize, 0);
        let unhashed = if buf.is_empty() {
            None
        } else {
            r.read_exact(&mut buf)?;
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.unhashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            Some(toml::de::from_slice(&out)?)
        };
        debug!("unhashed = {:?}", unhashed);
        buf.clear();
        buf.resize((offsets.total - offsets.contents_off) as usize, 0);
        // Contents are optional on disk: a short read yields empty contents.
        let contents = if r.read_exact(&mut buf).is_ok() {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut contents = vec![0u8; offsets.contents_len as usize];
            s.decompress(&mut contents[..], 0)?;
            contents
        } else {
            Vec::new()
        };
        debug!("contents = {:?}", contents);
        Ok(LocalChange {
            offsets,
            hashed,
            unhashed,
            contents,
        })
    }
    /// Compute the hash of this change. If the `zstd` feature is
    /// enabled, it is probably more efficient to serialise the change
    /// (using the `serialize` method) at the same time, which also
    /// returns the hash.
    pub fn hash(&self) -> Result<Hash, bincode::Error> {
        let input = bincode::serialize(&self.hashed)?;
        let mut hasher = Hasher::default();
        hasher.update(&input);
        Ok(hasher.finish())
    }
}
use super::*;
use crate::changestore::*;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::io::BufRead;
/// Errors that can occur while parsing the textual representation of a
/// change (see `Change::read`).
#[derive(Debug, Error)]
pub enum TextDeError {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    TomlDe(#[from] toml::de::Error),
    /// A hunk referenced dependency number `[n]` that was not declared
    /// in the "# Dependencies" section.
    #[error("Missing dependency [{0}]")]
    MissingChange(usize),
    /// A context referenced a byte position of this change that no
    /// parsed hunk defines.
    #[error("Byte position {0} from this change missing")]
    MissingPosition(u64),
}
/// Errors that can occur while writing the textual representation of a
/// change, parameterised over the `ChangeStore` error type `C`.
#[derive(Debug, Error)]
pub enum TextSerError<C: std::error::Error + 'static> {
    /// Error from the underlying change store.
    #[error(transparent)]
    C(C),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    TomlSer(#[from] toml::ser::Error),
    /// The change's contents are not fully loaded (its contents hash
    /// does not match — see `LocalChange::write`).
    #[error("Missing contents in change {:?}", h)]
    MissingContents { h: Hash },
    #[error(transparent)]
    Change(#[from] ChangeError),
    #[error("Invalid change")]
    InvalidChange,
}
impl LocalChange<Local> {
    // Section marker lines of the textual change format.
    const DEPS_LINE: &'static str = "# Dependencies\n";
    const HUNKS_LINE: &'static str = "# Hunks\n";
    /// Call `f` on every non-`Hash::None` hash referenced by the atoms
    /// of this change: up/down contexts and inodes of `NewVertex` atoms,
    /// and the endpoints, `introduced_by` and inode of `EdgeMap` edges.
    /// The same hash may be visited more than once.
    pub fn write_all_deps<F: FnMut(Hash) -> Result<(), ChangeError>>(
        &self,
        mut f: F,
    ) -> Result<(), ChangeError> {
        for c in self.changes.iter() {
            // Inner loop: the atoms of each hunk.
            for c in c.iter() {
                match *c {
                    Atom::NewVertex(ref n) => {
                        for change in n
                            .up_context
                            .iter()
                            .chain(n.down_context.iter())
                            .map(|c| c.change)
                            .chain(std::iter::once(n.inode.change))
                        {
                            if let Some(change) = change {
                                if let Hash::None = change {
                                    continue;
                                }
                                f(change)?
                            }
                        }
                    }
                    Atom::EdgeMap(ref e) => {
                        for edge in e.edges.iter() {
                            for change in &[
                                edge.from.change,
                                edge.to.change,
                                edge.introduced_by,
                                e.inode.change,
                            ] {
                                if let Some(change) = *change {
                                    if let Hash::None = change {
                                        continue;
                                    }
                                    f(change)?
                                }
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }
    /// Write the textual representation of this change to `w`: an
    /// optional TOML header, a "# Dependencies" section numbering every
    /// referenced hash, then a "# Hunks" section with one numbered entry
    /// per hunk.
    ///
    /// If `hash` is `Some`, first verify that `self.contents` matches
    /// `contents_hash`, returning `MissingContents` otherwise.
    /// `file_name` maps a hunk's `Local` metadata and inode position to
    /// the name displayed for it.
    pub fn write<W: Write, C: ChangeStore, F: FnMut(&Local, Position<Option<Hash>>) -> String>(
        &self,
        changes: &C,
        hash: Option<Hash>,
        mut file_name: F,
        write_header: bool,
        mut w: W,
    ) -> Result<(), TextSerError<C::Error>> {
        if let Some(h) = hash {
            // Check if we have the full contents
            let mut hasher = Hasher::default();
            hasher.update(&self.contents);
            let hash = hasher.finish();
            if hash != self.contents_hash {
                return Err((TextSerError::MissingContents { h }).into());
            }
        }
        if write_header {
            w.write_all(toml::ser::to_string_pretty(&self.header)?.as_bytes())?;
            w.write_all(b"\n")?;
        }
        let mut hashes = HashMap::new();
        // Dependency numbering starts at 2 — presumably lower numbers are
        // reserved; TODO confirm against the parser's conventions.
        let mut i = 2;
        let mut needs_newline = false;
        if !self.dependencies.is_empty() {
            w.write_all(Self::DEPS_LINE.as_bytes())?;
            needs_newline = true;
            // Direct dependencies: "[n] HASH".
            for dep in self.dependencies.iter() {
                hashes.insert(*dep, i);
                writeln!(w, "[{}] {}", i, dep.to_base32())?;
                i += 1;
            }
        }
        // Hashes referenced by atoms but not listed above: "[n]+HASH".
        self.write_all_deps(|change| {
            if let Entry::Vacant(e) = hashes.entry(change) {
                e.insert(i);
                if !needs_newline {
                    w.write_all(Self::DEPS_LINE.as_bytes())?;
                    needs_newline = true;
                }
                writeln!(w, "[{}]+{}", i, change.to_base32())?;
                i += 1;
            }
            Ok(())
        })?;
        if !self.extra_known.is_empty() {
            needs_newline = true;
            // Extra known changes: "[*] HASH".
            for dep in self.extra_known.iter() {
                writeln!(w, "[*] {}", dep.to_base32())?;
                // NOTE(review): `i` is incremented here but "[*]" entries
                // are not numbered, so this has no visible effect.
                i += 1;
            }
        }
        if !self.changes.is_empty() {
            if needs_newline {
                w.write_all(b"\n")?
            }
            w.write_all(Self::HUNKS_LINE.as_bytes())?;
            // Hunks are numbered from 1 in the textual format.
            for (n, rec) in self.changes.iter().enumerate() {
                write!(w, "\n{}. ", n + 1)?;
                rec.write(changes, &mut file_name, &hashes, &self.contents, &mut w)?
            }
        }
        Ok(())
    }
}
impl Change {
    /// Read a change from its textual representation, then recompute its
    /// dependencies from the parsed hunks on `channel`, merging in the
    /// extra dependencies declared explicitly in the text.
    pub fn read_and_deps<
        R: BufRead,
        T: ChannelTxnT + DepsTxnT<DepsError = <T as GraphTxnT>::GraphError>,
    >(
        r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
        txn: &T,
        channel: &ChannelRef<T>,
    ) -> Result<Self, TextDeError> {
        let (mut change, extra_dependencies) = Self::read_(r, updatables)?;
        let (mut deps, extra) =
            dependencies(txn, &channel.borrow(), change.hashed.changes.iter()).unwrap();
        deps.extend(extra_dependencies.into_iter());
        change.hashed.dependencies = deps;
        change.hashed.extra_known = extra;
        Ok(change)
    }
    /// Read a change from its textual representation, keeping the
    /// dependency list exactly as parsed.
    pub fn read<R: BufRead>(
        r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
    ) -> Result<Self, TextDeError> {
        Ok(Self::read_(r, updatables)?.0)
    }
    /// Line-by-line parser of the textual format. Returns the parsed
    /// change and the set of extra (unnumbered "+") dependencies found
    /// in the "# Dependencies" section.
    ///
    /// Fix: the `has_newvertices` call in `conclude_section` contained a
    /// mangled HTML entity (`&curren;` + `t`) in place of `&current`;
    /// the argument is restored to `&current`.
    fn read_<R: BufRead>(
        mut r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
    ) -> Result<(Self, HashSet<Hash>), TextDeError> {
        use self::text_changes::*;
        // Parsing starts in the header section; marker lines switch sections.
        let mut section = Section::Header(String::new());
        let mut change = Change {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header: ChangeHeader {
                    authors: Vec::new(),
                    message: String::new(),
                    description: None,
                    timestamp: chrono::Utc::now(),
                },
                dependencies: Vec::new(),
                extra_known: Vec::new(),
                metadata: Vec::new(),
                changes: Vec::new(),
                contents_hash: Hasher::default().finish(),
            },
            unhashed: None,
            contents: Vec::new(),
        };
        // Finalize the section being left: parse the accumulated header
        // TOML, or flush the last pending hunk into `change.changes`.
        let conclude_section = |change: &mut Change,
                                section: Section,
                                contents: &mut Vec<u8>|
         -> Result<(), TextDeError> {
            match section {
                Section::Header(ref s) => {
                    debug!("header = {:?}", s);
                    change.header = toml::de::from_str(&s)?;
                    Ok(())
                }
                Section::Deps => Ok(()),
                Section::Changes {
                    mut changes,
                    current,
                    ..
                } => {
                    // Terminate a trailing NewVertex's contents with a 0
                    // byte, like the per-hunk parsing does.
                    if has_newvertices(&current) {
                        contents.push(0)
                    }
                    if let Some(c) = current {
                        debug!("next action = {:?}", c);
                        changes.push(c)
                    }
                    change.changes = changes;
                    Ok(())
                }
            }
        };
        let mut h = String::new();
        let mut contents = Vec::new();
        let mut deps = HashMap::new();
        let mut extra_dependencies = HashSet::new();
        while r.read_line(&mut h)? > 0 {
            debug!("h = {:?}", h);
            if h == Self::DEPS_LINE {
                let section = std::mem::replace(&mut section, Section::Deps);
                conclude_section(&mut change, section, &mut contents)?;
            } else if h == Self::HUNKS_LINE {
                let section = std::mem::replace(
                    &mut section,
                    Section::Changes {
                        changes: Vec::new(),
                        current: None,
                        offsets: HashMap::new(),
                    },
                );
                conclude_section(&mut change, section, &mut contents)?;
            } else {
                use regex::Regex;
                lazy_static! {
                    static ref DEPS: Regex = Regex::new(r#"\[(\d*|\*)\](\+| ) *(\S*)"#).unwrap();
                    static ref KNOWN: Regex = Regex::new(r#"(\S*)"#).unwrap();
                }
                match section {
                    Section::Header(ref mut s) => s.push_str(&h),
                    Section::Deps => {
                        if let Some(d) = DEPS.captures(&h) {
                            let hash = Hash::from_base32(d[3].as_bytes()).unwrap();
                            if let Ok(n) = d[1].parse() {
                                // "[n] HASH" is a direct dependency;
                                // "[n]+HASH" is only recorded in `deps`
                                // for resolving hunk references.
                                if &d[2] == " " {
                                    change.hashed.dependencies.push(hash);
                                }
                                deps.insert(n, hash);
                            } else if &d[1] == "*" {
                                // "[*] HASH": extra known change.
                                change.hashed.extra_known.push(hash);
                            } else {
                                extra_dependencies.insert(hash);
                            }
                        }
                    }
                    Section::Changes {
                        ref mut current,
                        ref mut changes,
                        ref mut offsets,
                    } => {
                        // `Hunk::read` returns the previously-pending hunk
                        // whenever the line starts a new one.
                        if let Some(next) =
                            Hunk::read(updatables, current, &mut contents, &deps, offsets, &h)?
                        {
                            debug!("next action = {:?}", next);
                            changes.push(next)
                        }
                    }
                }
            }
            h.clear();
        }
        // EOF: conclude whatever section we were in.
        conclude_section(&mut change, section, &mut contents)?;
        change.contents = contents;
        change.contents_hash = {
            let mut hasher = Hasher::default();
            hasher.update(&change.contents);
            hasher.finish()
        };
        Ok((change, extra_dependencies))
    }
}
impl Hunk<Option<Hash>, Local> {
    /// Write the textual representation of this hunk to `w`.
    ///
    /// `file_name` maps a hunk's `Local` metadata and inode position to
    /// the displayed file name; `hashes` maps dependency hashes to the
    /// numbers assigned in the "# Dependencies" section;
    /// `change_contents` is the contents buffer of the enclosing change,
    /// which `NewVertex` atoms index into by byte position.
    fn write<
        W: std::io::Write,
        C: ChangeStore,
        F: FnMut(&Local, Position<Option<Hash>>) -> String,
    >(
        &self,
        changes: &C,
        mut file_name: F,
        hashes: &HashMap<Hash, usize>,
        change_contents: &[u8],
        mut w: W,
    ) -> Result<(), TextSerError<C::Error>> {
        use self::text_changes::*;
        match self {
            // A move writes the deletion atom, plus either the new-name
            // vertex (NewVertex) or replacement edges (EdgeMap).
            Hunk::FileMove { del, add, path } => match add {
                Atom::NewVertex(ref add) => {
                    // Basenames are stored as 2 bytes of permission
                    // metadata followed by the name itself.
                    let name = std::str::from_utf8(
                        &change_contents[add.start.0 as usize + 2..add.end.0 as usize],
                    )
                    .unwrap();
                    let perms = crate::pristine::InodeMetadata::from_basename(
                        &change_contents[add.start.0 as usize..add.start.0 as usize + 2],
                    );
                    write!(w, "Moved: {:?} {:?} {:o} ", path, name, perms.0)?;
                    write_pos(&mut w, hashes, del.inode())?;
                    writeln!(w)?;
                    write_atom(&mut w, hashes, &del)?;
                    write!(w, "up")?;
                    for c in add.up_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    write!(w, ", down")?;
                    for c in add.down_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    w.write_all(b"\n")?;
                }
                Atom::EdgeMap(_) => {
                    write!(w, "Moved: {:?} ", path)?;
                    write_pos(&mut w, hashes, del.inode())?;
                    writeln!(w)?;
                    write_atom(&mut w, hashes, &add)?;
                    write_atom(&mut w, hashes, &del)?;
                }
            },
            Hunk::FileDel {
                del,
                contents,
                path,
            } => {
                debug!("file del");
                write!(w, "File deletion: {:?} ", path)?;
                write_pos(&mut w, hashes, del.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &del)?;
                if let Some(ref contents) = contents {
                    write_atom(&mut w, hashes, &contents)?;
                    writeln!(w)?;
                    print_change_contents(&mut w, changes, contents, change_contents)?;
                } else {
                    writeln!(w)?;
                }
            }
            Hunk::FileUndel {
                undel,
                contents,
                path,
            } => {
                debug!("file undel");
                write!(w, "File un-deletion: {:?} ", path)?;
                write_pos(&mut w, hashes, undel.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &undel)?;
                if let Some(ref contents) = contents {
                    write_atom(&mut w, hashes, &contents)?;
                    print_change_contents(&mut w, changes, contents, change_contents)?;
                } else {
                    writeln!(w)?;
                }
            }
            Hunk::FileAdd {
                add_name,
                contents,
                path,
                ..
            } => {
                if let Atom::NewVertex(ref n) = add_name {
                    // Skip the 2-byte permission prefix to get the name.
                    let name = std::str::from_utf8(
                        &change_contents[n.start.0 as usize + 2..n.end.0 as usize],
                    )
                    .unwrap();
                    let perms = crate::pristine::InodeMetadata::from_basename(
                        &change_contents[n.start.0 as usize..n.start.0 as usize + 2],
                    );
                    // Display the parent directory, with "" rendered as "/".
                    let parent = if let Some(p) = crate::path::parent(&path) {
                        if p.is_empty() {
                            "/"
                        } else {
                            p
                        }
                    } else {
                        "/"
                    };
                    write!(
                        w,
                        "File addition: {:?} in {:?} {:o}\n up",
                        name, parent, perms.0
                    )?;
                    // A file addition never has a down context.
                    assert!(n.down_context.is_empty());
                    for c in n.up_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    writeln!(w, ", new {}:{}", n.start.0, n.end.0)?;
                }
                if let Some(Atom::NewVertex(ref n)) = contents {
                    let c = &change_contents[n.start.0 as usize..n.end.0 as usize];
                    print_contents(&mut w, "+", c)?;
                    // A trailing "\" marks contents with no final newline.
                    if !c.ends_with(b"\n") {
                        writeln!(w, "\\")?
                    }
                }
            }
            Hunk::Edit { change, local } => {
                debug!("edit");
                write!(w, "Edit in {} ", file_name(&local, change.inode()))?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &change)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
            }
            Hunk::Replacement {
                change,
                replacement,
                local,
            } => {
                debug!("replacement");
                write!(w, "Replacement in {} ", file_name(&local, change.inode()))?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &change)?;
                write_atom(&mut w, hashes, &replacement)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
                print_change_contents(&mut w, changes, replacement, change_contents)?;
            }
            Hunk::SolveNameConflict { name, path } => {
                write!(w, "Solving a name conflict in {:?} ", path)?;
                write_pos(&mut w, hashes, name.inode())?;
                write!(w, ": ")?;
                write_deleted_names(&mut w, changes, name)?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &name)?;
            }
            Hunk::UnsolveNameConflict { name, path } => {
                write!(w, "Un-solving a name conflict in {:?} ", path)?;
                write_pos(&mut w, hashes, name.inode())?;
                write!(w, ": ")?;
                write_deleted_names(&mut w, changes, name)?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &name)?;
            }
            Hunk::SolveOrderConflict { change, local } => {
                debug!("solve order conflict");
                write!(
                    w,
                    "Solving an order conflict in {} ",
                    file_name(&local, change.inode())
                )?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &change)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
            }
            Hunk::UnsolveOrderConflict { change, local } => {
                debug!("unsolve order conflict");
                write!(
                    w,
                    "Un-solving an order conflict in {} ",
                    file_name(&local, change.inode())
                )?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &change)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
            }
            Hunk::ResurrectZombies { change, local } => {
                debug!("resurrect zombies");
                write!(
                    w,
                    "Resurrecting zombie lines in {:?}:{} ",
                    local.path, local.line
                )?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w)?;
                write_atom(&mut w, hashes, &change)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
            }
        }
        Ok(())
    }
}
impl Hunk<Option<Hash>, Local> {
fn read(
updatables: &mut HashMap<usize, crate::InodeUpdate>,
current: &mut Option<Self>,
mut contents_: &mut Vec<u8>,
changes: &HashMap<usize, Hash>,
offsets: &mut HashMap<u64, ChangePosition>,
h: &str,
) -> Result<Option<Self>, TextDeError> {
use self::text_changes::*;
use regex::Regex;
lazy_static! {
static ref FILE_ADDITION: Regex =
Regex::new(r#"^(?P<n>\d+)\. File addition: "(?P<name>[^"]*)" in "(?P<parent>[^"]*)" (?P<perm>\d+)"#).unwrap();
static ref EDIT: Regex =
Regex::new(r#"^([0-9]+)\. Edit in ([^:]+):(\d+) (\d+\.\d+)"#).unwrap();
static ref REPLACEMENT: Regex =
Regex::new(r#"^([0-9]+)\. Replacement in ([^:]+):(\d+) (\d+\.\d+)"#).unwrap();
static ref FILE_DELETION: Regex =
Regex::new(r#"^([0-9]+)\. File deletion: "([^"]*)" (\d+\.\d+)"#).unwrap();
static ref FILE_UNDELETION: Regex =
Regex::new(r#"^([0-9]+)\. File un-deletion: "([^"]*)" (\d+\.\d+)"#).unwrap();
static ref MOVE: Regex =
Regex::new(r#"^([0-9]+)\. Moved: "(?P<former>[^"]*)" "(?P<new>[^"]*)" (?P<perm>\d+) (?P<inode>.*)"#).unwrap();
static ref MOVE_: Regex = Regex::new(r#"^([0-9]+)\. Moved: "([^"]*)" (.*)"#).unwrap();
static ref NAME_CONFLICT: Regex = Regex::new(
r#"^([0-9]+)\. ((Solving)|(Un-solving)) a name conflict in "([^"]*)" (.*): .*"#
)
.unwrap();
static ref ORDER_CONFLICT: Regex = Regex::new(
r#"^([0-9]+)\. ((Solving)|(Un-solving)) an order conflict in (.*):(\d+) (\d+\.\d+)"#
)
.unwrap();
static ref ZOMBIE: Regex =
Regex::new(r#"^([0-9]+)\. Resurrecting zombie lines in (?P<path>"[^"]+"):(?P<line>\d+) (?P<inode>\d+\.\d+)"#)
.unwrap();
static ref CONTEXT: Regex = Regex::new(
r#"up ((\d+\.\d+ )*\d+\.\d+)(, new (\d+):(\d+))?(, down ((\d+\.\d+ )*\d+\.\d+))?"#
)
.unwrap();
}
if let Some(cap) = FILE_ADDITION.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut add_name = default_newvertex();
add_name.start = ChangePosition(contents_.len() as u64);
add_name.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
let name = &cap.name("name").unwrap().as_str();
let path = {
let parent = cap.name("parent").unwrap().as_str();
(if parent == "/" {
String::new()
} else {
parent.to_string() + "/"
}) + name
};
let meta = cap
.name("perm")
.unwrap()
.as_str()
.chars()
.fold(0, |x, c| x * 8 + (c as u16 - b'0' as u16));
let meta = InodeMetadata(meta);
meta.write(&mut contents_).unwrap();
contents_.extend(name.as_bytes());
add_name.end = ChangePosition(contents_.len() as u64);
let mut add_inode = default_newvertex();
add_inode.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
add_inode.up_context.push(Position {
change: None,
pos: ChangePosition(contents_.len() as u64),
});
contents_.push(0);
add_inode.start = ChangePosition(contents_.len() as u64);
add_inode.end = ChangePosition(contents_.len() as u64);
contents_.push(0);
let n = cap.name("n").unwrap().as_str().parse().unwrap();
if let Entry::Occupied(mut e) = updatables.entry(n) {
if let crate::InodeUpdate::Add { ref mut pos, .. } = e.get_mut() {
*pos = add_inode.start
}
}
Ok(std::mem::replace(
current,
Some(Hunk::FileAdd {
add_name: Atom::NewVertex(add_name),
add_inode: Atom::NewVertex(add_inode),
contents: None,
path,
}),
))
} else if let Some(cap) = EDIT.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut v = default_newvertex();
v.inode = parse_pos(changes, &cap[4])?;
v.flag = EdgeFlags::BLOCK;
Ok(std::mem::replace(
current,
Some(Hunk::Edit {
change: Atom::NewVertex(v),
local: Local {
path: cap[2].to_string(),
line: cap[3].parse().unwrap(),
},
}),
))
} else if let Some(cap) = REPLACEMENT.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut v = default_newvertex();
v.inode = parse_pos(changes, &cap[4])?;
v.flag = EdgeFlags::BLOCK;
Ok(std::mem::replace(
current,
Some(Hunk::Replacement {
change: Atom::NewVertex(v.clone()),
replacement: Atom::NewVertex(v),
local: Local {
path: cap[2].to_string(),
line: cap[3].parse().unwrap(),
},
}),
))
} else if let Some(cap) = FILE_DELETION.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut del = default_edgemap();
del.inode = parse_pos(changes, &cap[3])?;
Ok(std::mem::replace(
current,
Some(Hunk::FileDel {
del: Atom::EdgeMap(del),
contents: None,
path: cap[2].to_string(),
}),
))
} else if let Some(cap) = FILE_UNDELETION.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut undel = default_edgemap();
undel.inode = parse_pos(changes, &cap[3])?;
Ok(std::mem::replace(
current,
Some(Hunk::FileUndel {
undel: Atom::EdgeMap(undel),
contents: None,
path: cap[2].to_string(),
}),
))
} else if let Some(cap) = NAME_CONFLICT.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut name = default_edgemap();
debug!("cap = {:?}", cap);
name.inode = parse_pos(changes, &cap[6])?;
Ok(std::mem::replace(
current,
if &cap[2] == "Solving" {
Some(Hunk::SolveNameConflict {
name: Atom::EdgeMap(name),
path: cap[5].to_string(),
})
} else {
Some(Hunk::UnsolveNameConflict {
name: Atom::EdgeMap(name),
path: cap[5].to_string(),
})
},
))
} else if let Some(cap) = MOVE.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut add = default_newvertex();
add.start = ChangePosition(contents_.len() as u64);
add.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
let name = cap.name("new").unwrap().as_str();
let meta = cap
.name("perm")
.unwrap()
.as_str()
.chars()
.fold(0, |x, c| x * 8 + (c as u16 - b'0' as u16));
let meta = InodeMetadata(meta);
meta.write(&mut contents_).unwrap();
contents_.extend(name.as_bytes());
add.end = ChangePosition(contents_.len() as u64);
let mut del = default_edgemap();
del.inode = parse_pos(changes, cap.name("inode").unwrap().as_str())?;
Ok(std::mem::replace(
current,
Some(Hunk::FileMove {
del: Atom::EdgeMap(del),
add: Atom::NewVertex(add),
path: cap[2].to_string(),
}),
))
} else if let Some(cap) = MOVE_.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut add = default_edgemap();
let mut del = default_edgemap();
add.inode = parse_pos(changes, &cap[3])?;
del.inode = add.inode;
Ok(std::mem::replace(
current,
Some(Hunk::FileMove {
del: Atom::EdgeMap(del),
add: Atom::EdgeMap(add),
path: cap[2].to_string(),
}),
))
} else if let Some(cap) = ORDER_CONFLICT.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
Ok(std::mem::replace(
current,
Some(if &cap[2] == "Solving" {
let mut v = default_newvertex();
v.inode = parse_pos(changes, &cap[7])?;
Hunk::SolveOrderConflict {
change: Atom::NewVertex(v),
local: Local {
path: cap[5].to_string(),
line: cap[6].parse().unwrap(),
},
}
} else {
let mut v = default_edgemap();
v.inode = parse_pos(changes, &cap[7])?;
Hunk::UnsolveOrderConflict {
change: Atom::EdgeMap(v),
local: Local {
path: cap[5].to_string(),
line: cap[6].parse().unwrap(),
},
}
}),
))
} else if let Some(cap) = ZOMBIE.captures(h) {
if has_newvertices(current) {
contents_.push(0)
}
let mut v = default_edgemap();
v.inode = parse_pos(changes, &cap.name("inode").unwrap().as_str())?;
Ok(std::mem::replace(
current,
Some(Hunk::ResurrectZombies {
change: Atom::EdgeMap(v),
local: Local {
path: cap.name("path").unwrap().as_str().parse().unwrap(),
line: cap.name("line").unwrap().as_str().parse().unwrap(),
},
}),
))
} else {
match current {
Some(Hunk::FileAdd {
ref mut contents,
ref mut add_name,
..
}) => {
if h.starts_with('+') {
if contents.is_none() {
let mut v = default_newvertex();
let inode = Position {
change: None,
pos: ChangePosition(contents_.len() as u64 - 1),
};
v.up_context.push(inode);
v.inode = inode;
v.flag = EdgeFlags::BLOCK;
v.start = ChangePosition(contents_.len() as u64);
*contents = Some(Atom::NewVertex(v));
}
if let Some(Atom::NewVertex(ref mut contents)) = contents {
if h.starts_with('+') {
text_changes::parse_line_add(h, contents, contents_)
}
}
} else if h.starts_with('\\') {
if let Some(Atom::NewVertex(ref mut contents)) = contents {
if contents.end > contents.start
&& contents_[contents.end.0 as usize - 1] == b'\n'
{
assert_eq!(contents.end.0 as usize, contents_.len());
contents_.pop();
contents.end.0 -= 1;
}
}
} else if let Some(cap) = CONTEXT.captures(h) {
if let Atom::NewVertex(ref mut name) = add_name {
name.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
if let (Some(new_start), Some(new_end)) = (cap.get(4), cap.get(5)) {
offsets.insert(new_start.as_str().parse().unwrap(), name.start);
offsets.insert(new_end.as_str().parse().unwrap(), name.end);
offsets.insert(
new_end.as_str().parse::<u64>().unwrap() + 1,
name.end + 1,
);
}
}
}
Ok(None)
}
Some(Hunk::FileDel {
ref mut del,
ref mut contents,
..
}) => {
if let Some(edges) = parse_edges(changes, h)? {
if let Atom::EdgeMap(ref mut e) = del {
if edges[0].flag.contains(EdgeFlags::FOLDER) {
*e = EdgeMap {
inode: e.inode,
edges,
}
} else {
*contents = Some(Atom::EdgeMap(EdgeMap {
inode: e.inode,
edges,
}))
}
}
}
Ok(None)
}
Some(Hunk::FileUndel {
ref mut undel,
ref mut contents,
..
}) => {
if let Some(edges) = parse_edges(changes, h)? {
if let Atom::EdgeMap(ref mut e) = undel {
if edges[0].flag.contains(EdgeFlags::FOLDER) {
*e = EdgeMap {
inode: e.inode,
edges,
}
} else {
*contents = Some(Atom::EdgeMap(EdgeMap {
inode: e.inode,
edges,
}))
}
}
}
Ok(None)
}
Some(Hunk::FileMove {
ref mut del,
ref mut add,
..
}) => {
if let Some(edges) = parse_edges(changes, h)? {
if edges[0].flag.contains(EdgeFlags::DELETED) {
*del = Atom::EdgeMap(EdgeMap {
inode: del.inode(),
edges,
});
return Ok(None);
} else if let Atom::EdgeMap(ref mut add) = add {
if add.edges.is_empty() {
*add = EdgeMap {
inode: add.inode,
edges,
};
return Ok(None);
}
}
} else if let Some(cap) = CONTEXT.captures(h) {
if let Atom::NewVertex(ref mut c) = add {
debug!("cap = {:?}", cap);
c.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
if let Some(cap) = cap.get(7) {
c.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
}
}
}
Ok(None)
}
Some(Hunk::Edit { ref mut change, .. }) => {
debug!("edit {:?}", h);
if h.starts_with("+ ") {
if let Atom::NewVertex(ref mut change) = change {
if change.start == change.end {
change.start = ChangePosition(contents_.len() as u64);
}
text_changes::parse_line_add(h, change, contents_)
}
} else if h.starts_with('\\') {
if let Atom::NewVertex(ref mut change) = change {
if change.end > change.start
&& contents_[change.end.0 as usize - 1] == b'\n'
{
assert_eq!(change.end.0 as usize, contents_.len());
contents_.pop();
change.end.0 -= 1;
}
}
} else if let Some(cap) = CONTEXT.captures(h) {
if let Atom::NewVertex(ref mut c) = change {
debug!("cap = {:?}", cap);
c.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
if let Some(cap) = cap.get(7) {
c.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
}
}
} else if let Some(edges) = parse_edges(changes, h)? {
*change = Atom::EdgeMap(EdgeMap {
inode: change.inode(),
edges,
});
}
Ok(None)
}
Some(Hunk::Replacement {
ref mut change,
ref mut replacement,
..
}) => {
if h.starts_with("+ ") {
if let Atom::NewVertex(ref mut repl) = replacement {
if repl.start == repl.end {
repl.start = ChangePosition(contents_.len() as u64);
}
text_changes::parse_line_add(h, repl, contents_)
}
} else if h.starts_with('\\') {
if let Atom::NewVertex(ref mut repl) = replacement {
if repl.end > repl.start && contents_[repl.end.0 as usize - 1] == b'\n'
{
assert_eq!(repl.end.0 as usize, contents_.len());
contents_.pop();
repl.end.0 -= 1;
}
}
} else if let Some(cap) = CONTEXT.captures(h) {
debug!("cap = {:?}", cap);
if let Atom::NewVertex(ref mut repl) = replacement {
repl.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
if let Some(cap) = cap.get(7) {
repl.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
}
}
} else if let Some(edges) = parse_edges(changes, h)? {
*change = Atom::EdgeMap(EdgeMap {
inode: change.inode(),
edges,
});
}
Ok(None)
}
Some(Hunk::SolveNameConflict { ref mut name, .. })
| Some(Hunk::UnsolveNameConflict { ref mut name, .. }) => {
if let Some(edges) = parse_edges(changes, h)? {
*name = Atom::EdgeMap(EdgeMap {
edges,
inode: name.inode(),
})
}
Ok(None)
}
Some(Hunk::SolveOrderConflict { ref mut change, .. }) => {
if h.starts_with("+ ") {
if let Atom::NewVertex(ref mut change) = change {
if change.start == change.end {
change.start = ChangePosition(contents_.len() as u64);
}
text_changes::parse_line_add(h, change, contents_)
}
} else if let Some(cap) = CONTEXT.captures(h) {
debug!("cap = {:?}", cap);
if let Atom::NewVertex(ref mut change) = change {
change.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
if let Some(cap) = cap.get(7) {
change.down_context =
parse_pos_vec(changes, offsets, cap.as_str())?;
}
if let (Some(new_start), Some(new_end)) = (cap.get(4), cap.get(5)) {
let new_start = new_start.as_str().parse::<u64>().unwrap();
let new_end = new_end.as_str().parse::<u64>().unwrap();
change.start = ChangePosition(contents_.len() as u64);
change.end =
ChangePosition(contents_.len() as u64 + new_end - new_start);
offsets.insert(new_end, change.end);
}
}
}
Ok(None)
}
Some(Hunk::UnsolveOrderConflict { ref mut change, .. }) => {
if let Some(edges) = parse_edges(changes, h)? {
if let Atom::EdgeMap(ref mut change) = change {
change.edges = edges
}
}
Ok(None)
}
Some(Hunk::ResurrectZombies { ref mut change, .. }) => {
if let Some(edges) = parse_edges(changes, h)? {
if let Atom::EdgeMap(ref mut change) = change {
change.edges = edges
}
}
Ok(None)
}
None => {
debug!("current = {:#?}", current);
debug!("h = {:?}", h);
Ok(None)
}
}
}
}
}
lazy_static! {
    /// Matches one position of the form `<change>.<offset>`, e.g. `2.14`.
    static ref POS: regex::Regex = regex::Regex::new(r#"(\d+)\.(\d+)"#).unwrap();
    /// Matches one serialized edge:
    /// `<prev flags>:<flags> <up change>.<up pos> -> <change>.<start>:<end>/<introduced by>`,
    /// where flags are any combination of the letters `B`, `F`, `D`.
    static ref EDGE: regex::Regex =
        regex::Regex::new(r#"\s*(?P<prev>[BFD]*):(?P<flag>[BFD]*)\s+(?P<up_c>\d+)\.(?P<up_l>\d+)\s*->\s*(?P<c>\d+)\.(?P<l0>\d+):(?P<l1>\d+)/(?P<intro>\d+)\s*"#).unwrap();
}
/// Builds a blank [`NewVertex`]: a zero-length span, no flags, empty
/// up/down contexts, and an inode position on `Hash::None`.
pub fn default_newvertex() -> NewVertex<Option<Hash>> {
    NewVertex {
        up_context: Vec::new(),
        down_context: Vec::new(),
        start: ChangePosition(0),
        end: ChangePosition(0),
        flag: EdgeFlags::empty(),
        inode: Position {
            change: Some(Hash::None),
            pos: ChangePosition(0),
        },
    }
}
/// Builds a blank [`EdgeMap`]: no edges, with its inode position on
/// `Hash::None`.
pub fn default_edgemap() -> EdgeMap<Option<Hash>> {
    EdgeMap {
        inode: Position {
            change: Some(Hash::None),
            pos: ChangePosition(0),
        },
        edges: Vec::new(),
    }
}
/// Returns whether the hunk currently being parsed already carries a
/// `NewVertex` atom.
///
/// `None`, and a `FileAdd` whose contents have not started yet, count
/// as "no new vertices".
pub fn has_newvertices<L>(current: &Option<Hunk<Option<Hash>, L>>) -> bool {
    match current {
        None => false,
        Some(Hunk::FileAdd { contents: None, .. }) => false,
        Some(hunk) => hunk.iter().any(|atom| matches!(atom, Atom::NewVertex(_))),
    }
}
/// Parses every `<change>.<pos>` pair in `s` into a [`Position`].
///
/// A change number of `0` refers to the change currently being parsed:
/// its positions are resolved through `offsets`, and
/// [`TextDeError::MissingPosition`] is returned when the offset is
/// unknown. Other change numbers are resolved via [`change_ref`].
pub fn parse_pos_vec(
    changes: &HashMap<usize, Hash>,
    offsets: &HashMap<u64, ChangePosition>,
    s: &str,
) -> Result<Vec<Position<Option<Hash>>>, TextDeError> {
    let mut v = Vec::new();
    for pos in POS.captures_iter(s) {
        // Both capture groups are digit runs by construction of `POS`,
        // so parsing can only fail on numeric overflow.
        let change: usize = pos[1].parse().unwrap();
        let pos: u64 = pos[2].parse().unwrap();
        let pos = if change == 0 {
            if let Some(&pos) = offsets.get(&pos) {
                pos
            } else {
                debug!("inconsistent change: {:?} {:?}", s, offsets);
                return Err(TextDeError::MissingPosition(pos));
            }
        } else {
            ChangePosition(pos)
        };
        v.push(Position {
            change: change_ref(changes, change)?,
            pos,
        })
    }
    Ok(v)
}
/// Resolves a numeric change reference from the text format.
///
/// `0` means "the change being parsed" (`None`), `1` means
/// `Hash::None`, and any other number is looked up in `changes`,
/// failing with [`TextDeError::MissingChange`] when absent.
fn change_ref(changes: &HashMap<usize, Hash>, change: usize) -> Result<Option<Hash>, TextDeError> {
    debug!("change_ref {:?} {:?}", changes, change);
    match change {
        0 => Ok(None),
        1 => Ok(Some(Hash::None)),
        n => match changes.get(&n) {
            Some(&hash) => Ok(Some(hash)),
            None => Err(TextDeError::MissingChange(n)),
        },
    }
}
/// Parses a single `<change>.<pos>` pair out of `s`.
///
/// Panics if `s` contains no `\d+.\d+` match; the change number is
/// resolved through [`change_ref`].
pub fn parse_pos(
    changes: &HashMap<usize, Hash>,
    s: &str,
) -> Result<Position<Option<Hash>>, TextDeError> {
    let cap = POS.captures(s).unwrap();
    // Digit-only groups: parse fails only on overflow.
    let change: usize = cap[1].parse().unwrap();
    let pos: u64 = cap[2].parse().unwrap();
    Ok(Position {
        change: change_ref(changes, change)?,
        pos: ChangePosition(pos),
    })
}
/// Parses a comma-separated list of serialized edges.
///
/// Returns `Ok(None)` as soon as one segment does not match the `EDGE`
/// regex, letting the caller try other line formats instead.
pub fn parse_edges(
    changes: &HashMap<usize, Hash>,
    s: &str,
) -> Result<Option<Vec<NewEdge<Option<Hash>>>>, TextDeError> {
    debug!("parse_edges {:?}", s);
    let mut edges = Vec::new();
    for item in s.split(',') {
        debug!("parse edge {:?}", item);
        let cap = if let Some(cap) = EDGE.captures(item) {
            cap
        } else {
            debug!("not parsed");
            return Ok(None);
        };
        // Every named group below is guaranteed by the regex to be a
        // flag/digit run, so these unwraps only fail on overflow.
        let previous = read_flag(cap.name("prev").unwrap().as_str());
        let flag = read_flag(cap.name("flag").unwrap().as_str());
        let from_change: usize = cap.name("up_c").unwrap().as_str().parse().unwrap();
        let from_pos: u64 = cap.name("up_l").unwrap().as_str().parse().unwrap();
        let to_change: usize = cap.name("c").unwrap().as_str().parse().unwrap();
        let to_start: u64 = cap.name("l0").unwrap().as_str().parse().unwrap();
        let to_end: u64 = cap.name("l1").unwrap().as_str().parse().unwrap();
        let introduced_by: usize = cap.name("intro").unwrap().as_str().parse().unwrap();
        edges.push(NewEdge {
            previous,
            flag,
            from: Position {
                change: change_ref(changes, from_change)?,
                pos: ChangePosition(from_pos),
            },
            to: Vertex {
                change: change_ref(changes, to_change)?,
                start: ChangePosition(to_start),
                end: ChangePosition(to_end),
            },
            introduced_by: change_ref(changes, introduced_by)?,
        })
    }
    Ok(Some(edges))
}
/// Appends the text of one `+` line to the change contents buffer and
/// moves `change.end` past the appended bytes.
///
/// The first two bytes of `h` (the `"+ "` marker) are skipped; a
/// two-byte line contributes a single newline (presumably an empty
/// added line — confirm against the serializer), and anything shorter
/// contributes nothing but still updates `change.end`.
pub fn parse_line_add(h: &str, change: &mut NewVertex<Option<Hash>>, contents_: &mut Vec<u8>) {
    let h = h.as_bytes();
    debug!("parse_line_add {:?} {:?}", change.end, change.start);
    debug!("parse_line_add {:?}", h);
    if h.len() > 2 {
        // Skip the "+ " marker and keep the rest of the line verbatim.
        contents_.extend(&h[2..]);
    } else if h.len() > 1 {
        contents_.push(b'\n');
    }
    debug!("contents_.len() = {:?}", contents_.len());
    trace!("contents_ = {:?}", contents_);
    change.end = ChangePosition(contents_.len() as u64);
}
/// Writes `contents` line by line, each line prefixed with `pref` and
/// a space.
///
/// Valid UTF-8 is written as text (the final line gets a trailing
/// newline even when the input lacks one); non-UTF-8 contents are
/// written as a single base64 line marked with `b` after the prefix.
pub fn print_contents<W: std::io::Write>(
    w: &mut W,
    pref: &str,
    contents: &[u8],
) -> Result<(), std::io::Error> {
    if let Ok(mut contents) = std::str::from_utf8(contents) {
        // Peel off one '\n'-terminated line at a time; `a` keeps its
        // newline, so `write!` (not `writeln!`) is correct here.
        while let Some(n) = contents.as_bytes().iter().position(|&c| c == b'\n') {
            let (a, b) = contents.split_at(n + 1);
            contents = b;
            write!(w, "{} {}", pref, a)?;
        }
        if !contents.is_empty() {
            writeln!(w, "{} {}", pref, contents)?;
        }
    } else {
        writeln!(w, "{}b{}", pref, data_encoding::BASE64.encode(contents))?
    }
    Ok(())
}
/// Prints the textual representation of an atom's contents.
///
/// New vertices are printed as `+` lines, followed by a lone `\` when
/// the added text does not end with a newline (the parser strips that
/// newline back out). Edge maps whose first edge is DELETED print the
/// deleted contents as `-` lines, skipping consecutive duplicate
/// targets; an empty edge map is an invalid change. Anything else
/// prints nothing.
pub fn print_change_contents<W: std::io::Write, C: ChangeStore>(
    w: &mut W,
    changes: &C,
    change: &Atom<Option<Hash>>,
    change_contents: &[u8],
) -> Result<(), TextSerError<C::Error>> {
    match change {
        Atom::NewVertex(ref n) => {
            let c = &change_contents[n.start.0 as usize..n.end.0 as usize];
            print_contents(w, "+", c)?;
            if !c.ends_with(b"\n") {
                writeln!(w, "\\")?
            }
            Ok(())
        }
        Atom::EdgeMap(ref n) if n.edges.is_empty() => Err(TextSerError::InvalidChange),
        Atom::EdgeMap(ref n) if n.edges[0].flag.contains(EdgeFlags::DELETED) => {
            let mut buf = Vec::new();
            let mut current = None;
            for e in n.edges.iter() {
                // Several edges may target the same vertex; print each
                // target's contents only once.
                if Some(e.to) == current {
                    continue;
                }
                buf.clear();
                changes
                    .get_contents_ext(e.to, &mut buf)
                    .map_err(TextSerError::C)?;
                print_contents(w, "-", &buf[..])?;
                current = Some(e.to)
            }
            Ok(())
        }
        _ => Ok(()),
    }
}
/// Writes the file names deleted by `del`, comma-separated and
/// `{:?}`-quoted, fetching each target vertex's contents from the
/// change store. Non-edge-map atoms write nothing.
pub fn write_deleted_names<W: std::io::Write, C: ChangeStore>(
    w: &mut W,
    changes: &C,
    del: &Atom<Option<Hash>>,
) -> Result<(), TextSerError<C::Error>> {
    let e = match del {
        Atom::EdgeMap(ref e) => e,
        _ => return Ok(()),
    };
    let mut buf = Vec::new();
    let mut sep = "";
    for edge in e.edges.iter() {
        buf.clear();
        changes
            .get_contents_ext(edge.to, &mut buf)
            .map_err(TextSerError::C)?;
        if !buf.is_empty() {
            // The two leading bytes are presumably the encoded inode
            // metadata (cf. the FileAdd serialization); the rest is the
            // UTF-8 name.
            let name = std::str::from_utf8(&buf[2..]).unwrap();
            write!(w, "{}{:?}", sep, name)?;
            sep = ", ";
        }
    }
    Ok(())
}
/// Serializes `flag` in the canonical `B`, `F`, `D` order.
///
/// Panics if the internal-only `PARENT` or `PSEUDO` flags are set:
/// those must never appear in a serialized change.
pub fn write_flag<W: std::io::Write>(mut w: W, flag: EdgeFlags) -> Result<(), std::io::Error> {
    let table = [
        (EdgeFlags::BLOCK, b'B'),
        (EdgeFlags::FOLDER, b'F'),
        (EdgeFlags::DELETED, b'D'),
    ];
    for (bit, letter) in table.iter() {
        if flag.contains(*bit) {
            w.write_all(&[*letter])?;
        }
    }
    assert!(!flag.contains(EdgeFlags::PARENT));
    assert!(!flag.contains(EdgeFlags::PSEUDO));
    Ok(())
}
/// Parses a flag string produced by [`write_flag`] back into
/// [`EdgeFlags`].
///
/// Panics on any character other than `B`, `F` or `D`.
pub fn read_flag(s: &str) -> EdgeFlags {
    s.chars().fold(EdgeFlags::empty(), |flags, c| {
        flags
            | match c {
                'B' => EdgeFlags::BLOCK,
                'F' => EdgeFlags::FOLDER,
                'D' => EdgeFlags::DELETED,
                c => panic!("read_flag: {:?}", c),
            }
    })
}
/// Writes `pos` as `<change>.<offset>`, where the change number is `1`
/// for `Hash::None`, `0` for "the current change" (`None`), and
/// otherwise the hash's index in `hashes`.
///
/// Panics when the hash is not present in `hashes`.
pub fn write_pos<W: std::io::Write>(
    mut w: W,
    hashes: &HashMap<Hash, usize>,
    pos: Position<Option<Hash>>,
) -> Result<(), std::io::Error> {
    let change = match pos.change {
        Some(Hash::None) => 1,
        Some(ref c) => *hashes.get(c).unwrap(),
        None => 0,
    };
    write!(w, "{}.{}", change, pos.pos.0)?;
    Ok(())
}
/// Serializes an atom by dispatching on its variant.
pub fn write_atom<W: std::io::Write>(
    w: &mut W,
    hashes: &HashMap<Hash, usize>,
    atom: &Atom<Option<Hash>>,
) -> Result<(), std::io::Error> {
    match *atom {
        Atom::NewVertex(ref vertex) => write_newvertex(w, hashes, vertex),
        Atom::EdgeMap(ref map) => write_edgemap(w, hashes, map),
    }
}
/// Serializes a new vertex as
/// ` up <positions…>, new <start>:<end>[, down <positions…>]`
/// followed by a newline. The `down` part is omitted when the down
/// context is empty.
pub fn write_newvertex<W: std::io::Write>(
    mut w: W,
    hashes: &HashMap<Hash, usize>,
    n: &NewVertex<Option<Hash>>,
) -> Result<(), std::io::Error> {
    write!(w, " up")?;
    for ctx in &n.up_context {
        write!(w, " ")?;
        write_pos(&mut w, hashes, *ctx)?;
    }
    write!(w, ", new {}:{}", n.start.0, n.end.0)?;
    if !n.down_context.is_empty() {
        write!(w, ", down")?;
        for ctx in &n.down_context {
            write!(w, " ")?;
            write_pos(&mut w, hashes, *ctx)?;
        }
    }
    w.write_all(b"\n")?;
    Ok(())
}
/// Serializes an edge map as a comma-separated list of
/// `<prev>:<flags> <from> -> <to>:<end>/<introduced by>` entries,
/// terminated by a newline.
///
/// Panics when an edge's `introduced_by` hash is `None` or missing
/// from `hashes`.
pub fn write_edgemap<W: std::io::Write>(
    mut w: W,
    hashes: &HashMap<Hash, usize>,
    n: &EdgeMap<Option<Hash>>,
) -> Result<(), std::io::Error> {
    for (i, edge) in n.edges.iter().enumerate() {
        if i > 0 {
            write!(w, ", ")?;
        }
        write_flag(&mut w, edge.previous)?;
        write!(w, ":")?;
        write_flag(&mut w, edge.flag)?;
        write!(w, " ")?;
        write_pos(&mut w, hashes, edge.from)?;
        write!(w, " -> ")?;
        write_pos(&mut w, hashes, edge.to.start_pos())?;
        let idx = match hashes.get(edge.introduced_by.as_ref().unwrap()) {
            Some(idx) => idx,
            None => panic!("introduced_by = {:?}, not found", edge.introduced_by),
        };
        write!(w, ":{}/{}", edge.to.end.0, idx)?;
    }
    writeln!(w)?;
    Ok(())
}
/// The section of a text-format change currently being parsed.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Section {
    /// The header section, carried as raw text.
    Header(String),
    /// The dependency section.
    Deps,
    /// The hunk section, together with its incremental parsing state.
    Changes {
        /// Hunks parsed so far, in order.
        changes: Vec<Hunk<Option<Hash>, Local>>,
        /// The hunk currently being assembled, if any.
        current: Option<Hunk<Option<Hash>, Local>>,
        /// Maps numbers written in the text to positions in the change
        /// contents (used by `parse_pos_vec` to resolve change `0`).
        offsets: HashMap<u64, ChangePosition>,
    },
}
use super::*;
/// An open, seekable change file.
#[cfg(feature = "zstd")]
pub struct ChangeFile<'a> {
    /// Seekable decompressor over the file's contents section; `None`
    /// when the file has no contents past `contents_off`.
    s: Option<zstd_seekable::Seekable<'a, OffFile>>,
    /// The decompressed, deserialized hashed section of the change.
    hashed: Hashed<Local>,
    /// The hash this file was opened under.
    hash: Hash,
    /// The optional unhashed TOML section.
    unhashed: Option<toml::Value>,
}
/// A file wrapper whose `Start` seeks are shifted by `start`, used to
/// expose only the contents section of a change file to
/// `zstd_seekable`.
struct OffFile {
    f: std::fs::File,
    // Byte offset of the contents section within `f`.
    start: u64,
}
// SAFETY: `OffFile` only owns a `std::fs::File` (which is `Send`) and
// a plain `u64`, so moving it across threads is sound. Presumably this
// impl exists because `zstd_seekable::Seekable` requires it — confirm.
unsafe impl Send for OffFile {}
impl std::io::Read for OffFile {
    // Reads pass straight through: the `start` offset is applied only
    // by `seek`, relying on the underlying file position.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
        self.f.read(buf)
    }
}
impl std::io::Seek for OffFile {
    /// Seeks relative to the start of the contents section:
    /// `SeekFrom::Start` positions are shifted by `self.start`, while
    /// `Current`/`End` seeks pass through unchanged.
    ///
    /// NOTE(review): the returned offset is the underlying absolute
    /// file position, not shifted back by `start` — confirm callers
    /// only rely on `Start` seeks.
    fn seek(&mut self, from: std::io::SeekFrom) -> Result<u64, std::io::Error> {
        use std::io::SeekFrom;
        let from = match from {
            SeekFrom::Start(s) => SeekFrom::Start(s + self.start),
            c => c,
        };
        self.f.seek(from)
    }
}
#[cfg(feature = "zstd")]
impl<'a> ChangeFile<'a> {
    /// Open a change file from a path.
    ///
    /// Reads and validates the fixed-size offsets header, then
    /// decompresses and deserializes the hashed section and (when
    /// present) the unhashed TOML section. The contents section is not
    /// read eagerly: it stays behind a seekable decompressor for
    /// [`ChangeFile::read_contents`].
    pub fn open(hash: Hash, path: &str) -> Result<Self, ChangeError> {
        use std::io::Read;
        let mut r = std::fs::File::open(path)?;
        // Fixed-size offsets header at the very start of the file.
        let mut buf = Vec::new();
        buf.resize(Change::OFFSETS_SIZE as usize, 0);
        r.read_exact(&mut buf)?;
        let offsets: Offsets = bincode::deserialize(&buf)?;
        if offsets.version != VERSION {
            return Err(ChangeError::VersionMismatch);
        }
        // Hashed section: compressed bytes between the header and
        // `unhashed_off`, decompressed into `hashed_len` plain bytes.
        buf.clear();
        buf.resize((offsets.unhashed_off - Change::OFFSETS_SIZE) as usize, 0);
        r.read_exact(&mut buf)?;
        let mut buf2 = vec![0u8; offsets.hashed_len as usize];
        let hashed: Hashed<Local> = {
            let mut s = zstd_seekable::Seekable::init_buf(&buf)?;
            s.decompress(&mut buf2, 0)?;
            bincode::deserialize(&buf2)?
        };
        // Unhashed section: optional TOML between `unhashed_off` and
        // `contents_off`; absent when that span is empty.
        buf.resize((offsets.contents_off - offsets.unhashed_off) as usize, 0);
        let unhashed = if buf.is_empty() {
            None
        } else {
            r.read_exact(&mut buf)?;
            let mut s = zstd_seekable::Seekable::init_buf(&buf)?;
            buf2.resize(offsets.unhashed_len as usize, 0);
            s.decompress(&mut buf2, 0)?;
            Some(toml::de::from_slice(&buf2)?)
        };
        // Contents section: everything after `contents_off`, exposed
        // through `OffFile` so seeks are section-relative.
        let m = r.metadata()?;
        let s = if offsets.contents_off >= m.len() {
            None
        } else {
            Some(zstd_seekable::Seekable::init(Box::new(OffFile {
                f: r,
                start: offsets.contents_off,
            }))?)
        };
        Ok(ChangeFile {
            s,
            hashed,
            hash,
            unhashed,
        })
    }
    /// Whether this change file has a contents section.
    pub fn has_contents(&self) -> bool {
        self.s.is_some()
    }
    /// Reads the contents at an offset into `buf`, and returns the
    /// number of bytes read. The bounds of the change's "contents"
    /// section are not checked.
    pub fn read_contents(&mut self, offset: u64, buf: &mut [u8]) -> Result<usize, ChangeError> {
        debug!("read_contents {:?} {:?}", offset, buf.len());
        if let Some(ref mut s) = self.s {
            Ok(s.decompress(buf, offset)?)
        } else {
            Err(ChangeError::MissingContents { hash: self.hash })
        }
    }
    /// The deserialized hashed section of this change.
    pub fn hashed(&self) -> &Hashed<Local> {
        &self.hashed
    }
    /// The unhashed TOML section, if the file has one.
    pub fn unhashed(&self) -> &Option<toml::Value> {
        &self.unhashed
    }
}
//! Apply a change.
use crate::change::{Atom, Change, EdgeMap, NewEdge, NewVertex};
use crate::changestore::ChangeStore;
use crate::missing_context::*;
use crate::pristine::*;
use crate::record::InodeUpdate;
use std::collections::{HashMap, HashSet};
use thiserror::Error;
/// Errors that can occur while applying a change: either the change
/// could not be read from the change store, or the application itself
/// failed.
#[derive(Debug, Error)]
pub enum ApplyError<ChangestoreError: std::error::Error, TxnError: std::error::Error + 'static> {
    /// The change store failed to produce the change.
    #[error("Changestore error: {0}")]
    Changestore(ChangestoreError),
    /// The graph-level application failed.
    #[error("Local change error: {err}")]
    LocalChange {
        #[from]
        err: LocalApplyError<TxnError>,
    },
}
/// Errors from the graph-level part of applying a change.
#[derive(Debug, Error)]
pub enum LocalApplyError<TxnError: std::error::Error + 'static> {
    /// A dependency of the change is not on the channel.
    #[error("Dependency missing: {:?}", hash)]
    DependencyMissing { hash: crate::pristine::Hash },
    /// The change was already applied to this channel.
    #[error("Change already on channel: {:?}", hash)]
    ChangeAlreadyOnChannel { hash: crate::pristine::Hash },
    /// The underlying transaction failed.
    #[error("Transaction error: {0}")]
    Txn(TxnError),
    /// A graph block referenced by the change could not be found.
    #[error("Block error: {:?}", block)]
    Block { block: Position<ChangeId> },
    /// The change itself is malformed or inconsistent.
    #[error("Invalid change")]
    InvalidChange,
}
impl<TxnError: std::error::Error> LocalApplyError<TxnError> {
    /// Maps a missing-context error onto the corresponding local
    /// application error: transaction and block failures keep their
    /// payloads, everything else is an invalid change.
    fn from_missing(err: MissingError<TxnError>) -> Self {
        match err {
            MissingError::Inconsistent(_) => LocalApplyError::InvalidChange,
            MissingError::Block(block_err) => block_err.into(),
            MissingError::Txn(txn_err) => LocalApplyError::Txn(txn_err),
        }
    }
}
impl<T: std::error::Error> From<crate::pristine::InconsistentChange<T>> for LocalApplyError<T> {
    /// Keeps transaction failures as `Txn`; every other kind of
    /// inconsistency collapses into `InvalidChange`.
    fn from(err: crate::pristine::InconsistentChange<T>) -> Self {
        if let InconsistentChange::Txn(e) = err {
            LocalApplyError::Txn(e)
        } else {
            LocalApplyError::InvalidChange
        }
    }
}
impl<T: std::error::Error> From<crate::pristine::TxnErr<T>> for LocalApplyError<T> {
    /// Unwraps the raw transaction error and rewraps it as `Txn`.
    fn from(e: crate::pristine::TxnErr<T>) -> Self {
        let inner = e.0;
        LocalApplyError::Txn(inner)
    }
}
impl<C: std::error::Error, T: std::error::Error> From<crate::pristine::TxnErr<T>>
for ApplyError<C, T>
{
fn from(err: crate::pristine::TxnErr<T>) -> Self {
LocalApplyError::Txn(err.0).into()
}
}
impl<T: std::error::Error> From<crate::pristine::BlockError<T>> for LocalApplyError<T> {
    /// Maps each `BlockError` variant onto its `LocalApplyError`
    /// counterpart.
    fn from(err: crate::pristine::BlockError<T>) -> Self {
        match err {
            BlockError::Block { block } => LocalApplyError::Block { block },
            BlockError::Txn(txn_err) => LocalApplyError::Txn(txn_err),
        }
    }
}
/// Apply a change to a channel. This function does not update the
/// inodes/tree tables, i.e. the correspondence between the pristine
/// and the working copy. Therefore, this function must be used only
/// on remote changes, or on "bare" repositories.
pub fn apply_change_ws<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    workspace: &mut Workspace,
) -> Result<(u64, Merkle), ApplyError<P::Error, T::GraphError>> {
    debug!("apply_change {:?}", hash.to_base32());
    workspace.clear();
    let mut channel = channel.r.borrow_mut();
    let change = changes.get_change(&hash).map_err(ApplyError::Changestore)?;
    // Every dependency (except the trivial `Hash::None`) must already
    // be on this channel, otherwise the change cannot apply.
    for &hash in change.dependencies.iter() {
        if let Hash::None = hash {
            continue;
        }
        if let Some(int) = txn.get_internal(hash)? {
            if txn.get_changeset(T::changes(&channel), int)?.is_some() {
                continue;
            }
        }
        return Err((LocalApplyError::DependencyMissing { hash }).into());
    }
    // Reuse the existing internal id, or mint one and register the
    // change the first time this pristine sees it.
    let internal = if let Some(p) = txn.get_internal(hash)? {
        p
    } else {
        let internal: ChangeId = make_changeid(txn, &hash)?;
        register_change(txn, internal, hash, &change)?;
        internal
    };
    debug!("internal = {:?}", internal);
    Ok(apply_change_to_channel(
        txn,
        &mut channel,
        internal,
        &hash,
        &change,
        workspace,
    )?)
}
/// Applies `hash` together with all of its transitive dependencies
/// that are not yet on the channel. When `deps_only` is true, only the
/// dependencies are applied, not `hash` itself.
pub fn apply_change_rec_ws<T: TxnT + MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    workspace: &mut Workspace,
    deps_only: bool,
) -> Result<(), ApplyError<P::Error, T::GraphError>> {
    debug!("apply_change {:?}", hash.to_base32());
    workspace.clear();
    let mut channel = channel.r.borrow_mut();
    // Iterative depth-first traversal of the dependency graph. Each
    // stack entry is (hash, first_visit, actually_apply): a change is
    // pushed once to expand its dependencies, and a second time to be
    // applied after they are done.
    let mut dep_stack = vec![(hash, true, !deps_only)];
    let mut visited = HashSet::new();
    while let Some((hash, first, actually_apply)) = dep_stack.pop() {
        let change = changes.get_change(&hash).map_err(ApplyError::Changestore)?;
        if first {
            if !visited.insert(hash) {
                continue;
            }
            // Skip changes that are already on the channel.
            if let Some(change_id) = txn.get_internal(hash)? {
                if txn
                    .get_changeset(T::changes(&channel), change_id)?
                    .is_some()
                {
                    continue;
                }
            }
            // Re-push for the apply phase, then expand dependencies.
            dep_stack.push((hash, false, actually_apply));
            for &hash in change.dependencies.iter() {
                if let Hash::None = hash {
                    continue;
                }
                dep_stack.push((hash, true, true))
            }
        } else if actually_apply {
            // Second visit: all dependencies have been handled; apply
            // this change unless it landed on the channel meanwhile.
            let applied = if let Some(int) = txn.get_internal(hash)? {
                txn.get_changeset(T::changes(&channel), int)?.is_some()
            } else {
                false
            };
            if !applied {
                let internal = if let Some(p) = txn.get_internal(hash)? {
                    p
                } else {
                    let internal: ChangeId = make_changeid(txn, &hash)?;
                    register_change(txn, internal, hash, &change)?;
                    internal
                };
                debug!("internal = {:?}", internal);
                workspace.clear();
                apply_change_to_channel(txn, &mut channel, internal, &hash, &change, workspace)?;
            }
        }
    }
    Ok(())
}
/// Same as [apply_change_ws], but allocates its own workspace.
pub fn apply_change<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
) -> Result<(u64, Merkle), ApplyError<P::Error, T::GraphError>> {
    let mut ws = Workspace::new();
    apply_change_ws(changes, txn, channel, hash, &mut ws)
}
/// Same as [apply_change_rec_ws], but allocates its own workspace.
pub fn apply_change_rec<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    deps_only: bool,
) -> Result<(), ApplyError<P::Error, T::GraphError>> {
    apply_change_rec_ws(
        changes,
        txn,
        channel,
        hash,
        &mut Workspace::new(),
        deps_only,
    )
}
/// Applies `change` (already registered under `change_id`) to
/// `channel`, returning the apply counter before this change and the
/// new channel state (Merkle).
fn apply_change_to_channel<T: ChannelMutTxnT>(
    txn: &mut T,
    channel: &mut T::Channel,
    change_id: ChangeId,
    hash: &Hash,
    change: &Change,
    ws: &mut Workspace,
) -> Result<(u64, Merkle), LocalApplyError<T::GraphError>> {
    ws.assert_empty();
    let n = T::apply_counter(channel);
    // Record the change on the channel log; `None` means it was
    // already there.
    let merkle =
        if let Some(m) = txn.put_changes(channel, change_id, T::apply_counter(channel), hash)? {
            m
        } else {
            return Err(LocalApplyError::ChangeAlreadyOnChannel { hash: *hash });
        };
    debug!("apply change to channel");
    let now = std::time::Instant::now();
    // First pass: insert the new vertices and all non-DELETED edges.
    for change_ in change.changes.iter() {
        debug!("Applying {:?} (1)", change_);
        for change_ in change_.iter() {
            match *change_ {
                Atom::NewVertex(ref n) => {
                    put_newvertex(txn, T::graph_mut(channel), change, ws, change_id, n)?
                }
                Atom::EdgeMap(ref n) => {
                    for edge in n.edges.iter() {
                        if !edge.flag.contains(EdgeFlags::DELETED) {
                            put_newedge(
                                txn,
                                T::graph_mut(channel),
                                ws,
                                change_id,
                                n.inode,
                                edge,
                                |_, _, _, _| Ok::<bool, LocalApplyError<T::GraphError>>(true),
                            )?;
                        }
                    }
                }
            }
        }
    }
    // Second pass: apply the DELETED edges, collecting the "zombie"
    // context of deletions made without knowledge of other changes.
    for change_ in change.changes.iter() {
        debug!("Applying {:?} (2)", change_);
        for change_ in change_.iter() {
            if let Atom::EdgeMap(ref n) = *change_ {
                for edge in n.edges.iter() {
                    if edge.flag.contains(EdgeFlags::DELETED) {
                        put_newedge(
                            txn,
                            T::graph_mut(channel),
                            ws,
                            change_id,
                            n.inode,
                            edge,
                            |_, _, _, _| Ok::<bool, LocalApplyError<T::GraphError>>(true),
                        )?;
                        crate::missing_context::collect_zombie_context(
                            txn,
                            T::graph_mut(channel),
                            &mut ws.missing_context,
                            n.inode,
                            edge,
                            change_id,
                            |h| change.knows(&h),
                        )
                        .map_err(LocalApplyError::from_missing)?
                    }
                }
            }
        }
    }
    crate::TIMERS.lock().unwrap().apply += now.elapsed();
    // Post-processing: drop pseudo-edges made obsolete by this change,
    // then repair the contexts and paths it may have broken.
    clean_obsolete_pseudo_edges(txn, T::graph_mut(channel), ws, change_id)?;
    info!("repairing missing contexts");
    repair_missing_contexts(txn, T::graph_mut(channel), ws, change_id, change)?;
    repair_cyclic_paths(txn, T::graph_mut(channel), ws)?;
    info!("done applying change");
    Ok((n, merkle))
}
/// Apply a change created locally: serialize it, compute its hash, and
/// apply it. This function also registers changes in the filesystem
/// introduced by the change (file additions, deletions and moves), to
/// synchronise the pristine and the working copy after the
/// application.
pub fn apply_local_change_ws<
    T: ChannelMutTxnT
        + DepsMutTxnT<DepsError = <T as GraphTxnT>::GraphError>
        + TreeMutTxnT<TreeError = <T as GraphTxnT>::GraphError>,
>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    change: &Change,
    hash: Hash,
    inode_updates: &HashMap<usize, InodeUpdate>,
    workspace: &mut Workspace,
) -> Result<(u64, Merkle), LocalApplyError<T::GraphError>> {
    let mut channel = channel.r.borrow_mut();
    let internal: ChangeId = make_changeid(txn, &hash)?;
    // Every dependency (except the trivial `Hash::None`) must already
    // be on the channel.
    for &hash in change.dependencies.iter() {
        if let Hash::None = hash {
            continue;
        }
        if let Some(int) = txn.get_internal(hash)? {
            if txn.get_changeset(T::changes(&channel), int)?.is_some() {
                continue;
            }
        }
        return Err((LocalApplyError::DependencyMissing { hash }).into());
    }
    register_change(txn, internal, hash, &change)?;
    let n = apply_change_to_channel(txn, &mut channel, internal, &hash, &change, workspace)?;
    // Sync the inodes/tree tables with the filesystem changes recorded
    // alongside this change.
    for (_, update) in inode_updates.iter() {
        info!("updating {:?}", update);
        update_inode(txn, T::graph(&channel), internal, update)?;
    }
    Ok(n)
}
/// Same as [apply_local_change_ws], but allocates its own workspace.
pub fn apply_local_change<
    T: ChannelMutTxnT
        + DepsMutTxnT<DepsError = <T as GraphTxnT>::GraphError>
        + TreeMutTxnT<TreeError = <T as GraphTxnT>::GraphError>,
>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    change: &Change,
    hash: Hash,
    inode_updates: &HashMap<usize, InodeUpdate>,
) -> Result<(u64, Merkle), LocalApplyError<T::GraphError>> {
    let mut ws = Workspace::new();
    apply_local_change_ws(txn, channel, change, hash, inode_updates, &mut ws)
}
/// Updates the inodes/tree tables for a single [`InodeUpdate`]
/// produced while recording the change `internal`.
fn update_inode<T: GraphTxnT + TreeMutTxnT<TreeError = <T as GraphTxnT>::GraphError>>(
    txn: &mut T,
    channel: &T::Graph,
    internal: ChangeId,
    update: &InodeUpdate,
) -> Result<(), LocalApplyError<T::TreeError>> {
    debug!("update_inode {:?}", update);
    match *update {
        InodeUpdate::Add { inode, pos, .. } => {
            let vertex = Position {
                change: internal,
                pos,
            };
            // Only map inode <-> vertex when the vertex actually exists
            // in the graph after application.
            if txn
                .get_graph(&channel, vertex.inode_vertex(), None)?
                .is_some()
            {
                debug!("Adding inodes: {:?} {:?}", inode, vertex);
                put_inodes_with_rev(txn, inode, vertex)?;
            } else {
                debug!("Not adding inodes: {:?} {:?}", inode, vertex);
            }
        }
        InodeUpdate::Deleted { inode } => {
            // Unlink this inode from its parent in the tree table, if
            // it has one.
            if let Some(parent) = txn.get_revtree(inode, None)?.map(|x| x.to_owned()) {
                del_tree_with_rev(txn, parent.as_file_id(), inode)?;
            }
            // Delete the directory, if it's there.
            txn.del_tree(
                (OwnedPathId {
                    parent_inode: inode,
                    basename: crate::small_string::SmallString::new(),
                })
                .as_file_id(),
                Some(inode),
            )?;
            // Drop the inode <-> graph-vertex mapping.
            if let Some(vertex) = txn.get_inodes(inode, None)? {
                del_inodes_with_rev(txn, inode, vertex)?;
            }
        }
    }
    Ok(())
}
/// Inserts the new vertex `n` (internalized as `change:n.start..n.end`)
/// into the graph, wiring it to its up and down contexts.
fn put_newvertex<T: GraphMutTxnT>(
    txn: &mut T,
    graph: &mut T::Graph,
    ch: &Change,
    ws: &mut Workspace,
    change: ChangeId,
    n: &NewVertex<Option<Hash>>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    let vertex = Vertex {
        change,
        start: n.start,
        end: n.end,
    };
    debug!(
        "put_newvertex {:?} {:?} {:?} {:?} {:?}",
        vertex, n.up_context, n.down_context, n.flag, change
    );
    assert!(ws.deleted_by.is_empty());
    // Resolve the up context to existing vertices; `put_up_context`
    // also records, in `ws.deleted_by`, the changes that deleted a
    // context vertex without knowing `ch`. A FOLDER vertex with a
    // non-folder up context is an inconsistent change.
    for up in n.up_context.iter() {
        let up = internal_pos(txn, up, change)?;
        if put_up_context(txn, graph, ch, ws, up)? && n.flag.contains(EdgeFlags::FOLDER) {
            return Err(LocalApplyError::InvalidChange);
        }
    }
    // Same for the down context, which additionally must not point
    // into the change being applied, and must be folder-like iff `n`
    // is not a folder vertex... er: a folder down context on a
    // non-folder vertex is invalid.
    for down in n.down_context.iter() {
        let down = internal_pos(txn, down, change)?;
        if down.change == change {
            return Err(LocalApplyError::InvalidChange);
        }
        if put_down_context(txn, graph, ch, ws, down)? && !n.flag.contains(EdgeFlags::FOLDER) {
            return Err(LocalApplyError::InvalidChange);
        }
    }
    debug!("deleted by: {:?}", ws.deleted_by);
    // Connect each resolved up-context vertex to the new vertex. For
    // non-folder vertices, also re-add DELETED parent edges on behalf
    // of every change in `ws.deleted_by` (deletions that did not know
    // `ch`).
    let up_flag = n.flag | EdgeFlags::BLOCK | EdgeFlags::DELETED;
    for up in ws.up_context.drain(..) {
        assert_ne!(up, vertex);
        if !n.flag.contains(EdgeFlags::FOLDER) {
            for change in ws.deleted_by.iter() {
                put_graph_with_rev(txn, graph, up_flag, up, vertex, *change)?;
            }
        }
        put_graph_with_rev(txn, graph, n.flag | EdgeFlags::BLOCK, up, vertex, change)?;
    }
    debug!("down_context {:?}", ws.down_context);
    // Connect the new vertex to its down context; BLOCK is dropped on
    // these edges for non-folder vertices.
    let mut down_flag = n.flag;
    if !n.flag.is_folder() {
        down_flag -= EdgeFlags::BLOCK
    }
    for down in ws.down_context.drain(..) {
        assert_ne!(down, vertex);
        put_graph_with_rev(txn, graph, down_flag, vertex, down, change)?;
        if n.flag.is_folder() {
            ws.missing_context.files.insert(down);
        }
    }
    ws.deleted_by.clear();
    Ok(())
}
/// Resolves the up-context position `up` to a vertex (splitting an
/// existing block when `up` falls strictly inside it), pushes it onto
/// `ws.up_context`, and records into `ws.deleted_by` the changes that
/// deleted it without being known to `ch`.
///
/// Returns `true` when the resolved vertex has a plain (non-FOLDER)
/// parent edge.
fn put_up_context<T: GraphMutTxnT>(
    txn: &mut T,
    graph: &mut T::Graph,
    ch: &Change,
    ws: &mut Workspace,
    up: Position<ChangeId>,
) -> Result<bool, LocalApplyError<T::GraphError>> {
    let up_vertex = if up.change.is_root() {
        Vertex::ROOT
    } else {
        debug!("put_up_context {:?}", up);
        let k = txn.find_block_end(graph, up)?;
        assert_eq!(k.change, up.change);
        assert!(k.start <= up.pos);
        debug!("k = {:?}", k);
        if k.start < up.pos && k.end > up.pos {
            // The missing context "graphs" are only used at the
            // DELETION stage, check that:
            assert!(ws.missing_context.graphs.0.is_empty());
            txn.split_block(graph, k, up.pos, &mut ws.adjbuf)?
        }
        // The up context is the part of the block that ends at `up.pos`.
        Vertex {
            change: k.change,
            start: k.start,
            end: up.pos,
        }
    };
    debug!("up_vertex {:?}", up_vertex);
    // Scan the parent (incoming) edges of `up_vertex`.
    let flag0 = EdgeFlags::PARENT | EdgeFlags::BLOCK;
    let flag1 = flag0 | EdgeFlags::DELETED | EdgeFlags::FOLDER;
    let mut is_non_folder = false;
    for parent in iter_adjacent(txn, graph, up_vertex, flag0, flag1)? {
        let parent = parent?;
        is_non_folder |= parent.flag & (EdgeFlags::PARENT | EdgeFlags::FOLDER) == EdgeFlags::PARENT;
        if parent
            .flag
            .contains(EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK)
        {
            // `up_vertex` was deleted: remember by which change,
            // unless the change being applied already knows about it.
            let introduced_by = txn.get_external(parent.introduced_by)?.unwrap();
            if !ch.knows(&introduced_by) {
                ws.deleted_by.insert(parent.introduced_by);
            }
        }
    }
    ws.up_context.push(up_vertex);
    Ok(is_non_folder)
}
/// Resolves the down-context position `down` to a vertex (splitting an
/// existing block when `down` falls strictly inside it), pushes it
/// onto `ws.down_context`, and records into `ws.deleted_by` the
/// changes that deleted it (via BLOCK parent edges) without being
/// known to `ch`.
///
/// Returns `true` when the resolved vertex has a FOLDER parent edge.
fn put_down_context<T: GraphMutTxnT>(
    txn: &mut T,
    graph: &mut T::Graph,
    ch: &Change,
    ws: &mut Workspace,
    down: Position<ChangeId>,
) -> Result<bool, LocalApplyError<T::GraphError>> {
    let k = txn.find_block(&graph, down)?;
    assert_eq!(k.change, down.change);
    assert!(k.end >= down.pos);
    if k.start < down.pos && k.end > down.pos {
        // The missing context "graphs" are only used at the
        // DELETION stage, check that:
        assert!(ws.missing_context.graphs.0.is_empty());
        txn.split_block(graph, k, down.pos, &mut ws.adjbuf)?
    }
    // The down context is the part of the block starting at `down.pos`.
    let down_vertex = Vertex {
        change: k.change,
        start: down.pos,
        end: k.end,
    };
    debug!("down_vertex {:?}", down_vertex);
    // Scan the parent (incoming) edges of `down_vertex`.
    let flag0 = EdgeFlags::PARENT;
    let flag1 = flag0 | EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED;
    let mut is_folder = false;
    for parent in iter_adjacent(txn, &graph, down_vertex, flag0, flag1)? {
        let parent = parent?;
        is_folder |= parent.flag.contains(EdgeFlags::PARENT | EdgeFlags::FOLDER);
        if parent.flag.contains(EdgeFlags::PARENT | EdgeFlags::BLOCK) {
            if parent.flag.contains(EdgeFlags::DELETED) {
                // Deleted without this change's knowledge: remember the
                // deleting change.
                let introduced_by = txn.get_external(parent.introduced_by)?.unwrap();
                if !ch.knows(&introduced_by) {
                    ws.deleted_by.insert(parent.introduced_by);
                }
            }
        }
    }
    ws.down_context.push(down_vertex);
    Ok(is_folder)
}
/// Scratch buffers reused across change applications, so repeated
/// applies do not reallocate.
#[derive(Default)]
pub struct Workspace {
    // Vertex sets used while rewiring edges (cleared between atoms).
    parents: HashSet<Vertex<ChangeId>>,
    children: HashSet<Vertex<ChangeId>>,
    // Pseudo-edges collected for later cleanup — presumably consumed
    // by `clean_obsolete_pseudo_edges`; confirm.
    pseudo: Vec<(Vertex<ChangeId>, Edge, Position<Option<Hash>>)>,
    // Changes that deleted a context vertex and are unknown to the
    // change being applied (filled by `put_up_context` /
    // `put_down_context`, drained in `put_newvertex`).
    deleted_by: HashSet<ChangeId>,
    // Up/down context vertices resolved for the vertex being inserted.
    up_context: Vec<Vertex<ChangeId>>,
    down_context: Vec<Vertex<ChangeId>>,
    // Nested workspace for missing-context collection and repair.
    pub(crate) missing_context: crate::missing_context::Workspace,
    // Used by `repair_cyclic_paths` — presumably caches per-vertex
    // rootedness; verify before relying on it.
    rooted: HashMap<Vertex<ChangeId>, bool>,
    // Scratch edge buffer handed to `split_block`.
    adjbuf: Vec<Edge>,
}
impl Workspace {
    /// Creates an empty workspace.
    pub fn new() -> Self {
        Self::default()
    }
    /// Empties every buffer so the workspace can be reused for the
    /// next application without reallocating.
    fn clear(&mut self) {
        self.children.clear();
        self.parents.clear();
        self.pseudo.clear();
        self.deleted_by.clear();
        self.up_context.clear();
        self.down_context.clear();
        self.missing_context.clear();
        self.rooted.clear();
        self.adjbuf.clear();
    }
    /// Asserts that the previous application cleaned up after itself.
    fn assert_empty(&self) {
        assert!(self.children.is_empty());
        assert!(self.parents.is_empty());
        assert!(self.pseudo.is_empty());
        assert!(self.deleted_by.is_empty());
        assert!(self.up_context.is_empty());
        assert!(self.down_context.is_empty());
        self.missing_context.assert_empty();
        assert!(self.rooted.is_empty());
        assert!(self.adjbuf.is_empty());
    }
}
/// Applies one `NewEdge` atom: walks the chain of target blocks from
/// `n.from` to `n.to`, deleting the previous edge and inserting the
/// new one on each segment (subject to `apply_check`).
///
/// For DELETED edges, also collects/reconnects pseudo-edges so the
/// graph stays connected, and preloads the missing-context graphs.
pub(crate) fn put_newedge<T, E, F>(
    txn: &mut T,
    graph: &mut T::Graph,
    ws: &mut Workspace,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    n: &NewEdge<Option<Hash>>,
    apply_check: F,
) -> Result<(), E>
where
    T: GraphMutTxnT,
    E: From<LocalApplyError<T::GraphError>> + From<TxnErr<T::GraphError>>,
    F: Fn(&mut T, &mut T::Graph, Vertex<ChangeId>, Vertex<ChangeId>) -> Result<bool, E>,
{
    debug!("put_newedge {:?} {:?}", n, change);
    if n.flag.contains(EdgeFlags::DELETED) {
        // Deletions need the alive graph of this inode for the
        // missing-context repairs below.
        ws.missing_context
            .load_graph(txn, graph, inode)
            .map_err(|_| LocalApplyError::InvalidChange)?;
    }
    // A change may not drop the BLOCK flag, nor flip the FOLDER flag.
    if (n.previous.is_block() && !n.flag.is_block())
        || (n.previous.is_folder() != n.flag.is_folder())
    {
        return Err(LocalApplyError::InvalidChange.into());
    }
    debug_assert!(ws.children.is_empty());
    debug_assert!(ws.parents.is_empty());
    let n_introduced_by = if let Some(n) = internal(txn, &n.introduced_by, change)? {
        n
    } else {
        return Err(LocalApplyError::InvalidChange.into());
    };
    let mut source = find_source_vertex(txn, graph, &n.from, change, inode, n.flag, ws)?;
    let mut target = find_target_vertex(txn, graph, &n.to, change, inode, n.flag, ws)?;
    if n.flag.contains(EdgeFlags::FOLDER) {
        // Folder targets are checked for rootedness later, in
        // `repair_cyclic_paths`.
        ws.missing_context.files.insert(target);
    }
    loop {
        if target.end > n.to.end {
            // The current block extends past the edge's target: split
            // it so the edge ends exactly at `n.to.end`.
            assert!(!n.flag.contains(EdgeFlags::FOLDER));
            ws.missing_context.graphs.split(inode, target, n.to.end);
            txn.split_block(graph, target, n.to.end, &mut ws.adjbuf)?;
            target.end = n.to.end
        }
        if n.flag.contains(EdgeFlags::DELETED) {
            collect_pseudo_edges(txn, graph, ws, inode, target)?;
            if !n.flag.contains(EdgeFlags::FOLDER) {
                reconnect_pseudo_edges(txn, graph, inode, ws, target)?;
            }
            ws.children.clear();
            ws.parents.clear();
        }
        del_graph_with_rev(txn, graph, n.previous, source, target, n_introduced_by)?;
        if apply_check(txn, graph, source, target)? {
            put_graph_with_rev(txn, graph, n.flag, source, target, change)?;
        }
        if target.end >= n.to.end {
            // Reached (exactly) the end of the edge's target span.
            debug!("{:?} {:?}", target, n.to);
            debug_assert_eq!(target.end, n.to.end);
            break;
        }
        // Advance to the next block in the chain.
        source = target;
        target = txn
            .find_block(graph, target.end_pos())
            .map_err(LocalApplyError::from)?;
        assert_ne!(source, target);
    }
    Ok(())
}
/// Resolves the source position `from` of a new edge to the vertex
/// ending there, splitting the containing block first if `from.pos`
/// falls strictly inside it.
fn find_source_vertex<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    from: &Position<Option<Hash>>,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    ws: &mut Workspace,
) -> Result<Vertex<ChangeId>, LocalApplyError<T::GraphError>> {
    debug!("find_source_vertex");
    let mut source = txn.find_block_end(&channel, internal_pos(txn, &from, change)?)?;
    debug!("source = {:?}", source);
    if source.start < from.pos && source.end > from.pos {
        // Folder edges never split vertices.
        assert!(!flag.contains(EdgeFlags::FOLDER));
        // Keep the preloaded missing-context graph in sync with the
        // split performed on the channel graph.
        ws.missing_context.graphs.split(inode, source, from.pos);
        txn.split_block(channel, source, from.pos, &mut ws.adjbuf)?;
        source.end = from.pos;
    }
    Ok(source)
}
/// Resolves the target span `to` of a new edge to the vertex starting
/// at `to.start`, splitting the containing block first if the block
/// begins before `to.start`.
fn find_target_vertex<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    to: &Vertex<Option<Hash>>,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    ws: &mut Workspace,
) -> Result<Vertex<ChangeId>, LocalApplyError<T::GraphError>> {
    let to_pos = internal_pos(txn, &to.start_pos(), change)?;
    debug!("find_target_vertex, to = {:?}", to);
    let mut target = txn.find_block(channel, to_pos)?;
    debug!("target = {:?}", target);
    if target.start < to.start {
        // Folder edges never split vertices.
        assert!(!flag.contains(EdgeFlags::FOLDER));
        // Keep the preloaded missing-context graph in sync with the
        // split performed on the channel graph.
        ws.missing_context.graphs.split(inode, target, to.start);
        txn.split_block(channel, target, to.start, &mut ws.adjbuf)?;
        target.start = to.start;
    }
    Ok(target)
}
/// Collects, around the soon-to-be-deleted vertex `v`, its alive
/// parents and its relevant children (into `apply.parents` /
/// `apply.children`) so that pseudo-edges can be reconnected, and
/// records any pseudo-edges touching `v` for later cleanup by
/// `clean_obsolete_pseudo_edges`.
///
/// Fix: the child-side aliveness check previously used `.unwrap()`,
/// panicking on a database error; it now propagates the error with
/// `?` like the parent-side check above it.
fn collect_pseudo_edges<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    apply: &mut Workspace,
    inode: Position<Option<Hash>>,
    v: Vertex<ChangeId>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    // All edges around `v` except the DELETED ones.
    for e in iter_adjacent(
        txn,
        &channel,
        v,
        EdgeFlags::empty(),
        EdgeFlags::all() - EdgeFlags::DELETED,
    )? {
        let e = e?;
        debug!("collect_pseudo_edges {:?} {:?}", v, e);
        if !e.flag.contains(EdgeFlags::FOLDER) {
            if e.flag.contains(EdgeFlags::PARENT) {
                // Keep only parents that are still alive.
                let p = txn.find_block_end(channel, e.dest)?;
                if is_alive(txn, channel, p)? {
                    apply.parents.insert(p);
                }
            } else {
                // Keep BLOCK children and empty vertices
                // unconditionally, other children only when alive.
                let p = txn.find_block(channel, e.dest)?;
                if e.flag.contains(EdgeFlags::BLOCK)
                    || p.is_empty()
                    || is_alive(txn, channel, p)?
                {
                    apply.children.insert(p);
                }
            }
        }
        if e.flag.contains(EdgeFlags::PSEUDO) {
            apply.pseudo.push((v, e, inode));
        }
    }
    Ok(())
}
/// After deleting `target`, reconnects its (non-redundant) alive
/// parents to its (non-redundant) children with PSEUDO edges so the
/// graph stays connected.
///
/// Requires the alive graph for `inode` to have been preloaded into
/// `ws.missing_context.graphs` (done by `put_newedge` for DELETED
/// edges); otherwise the change is invalid.
///
/// Fix: dropped the redundant identity `.into()` on the error value
/// (the return type is already `LocalApplyError<T::GraphError>`).
fn reconnect_pseudo_edges<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    inode: Position<Option<Hash>>,
    ws: &mut Workspace,
    target: Vertex<ChangeId>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    // Nothing to reconnect.
    if ws.parents.is_empty() || ws.children.is_empty() {
        return Ok(());
    }
    let (graph, vids) = if let Some(x) = ws.missing_context.graphs.get(inode) {
        x
    } else {
        return Err(LocalApplyError::InvalidChange);
    };
    // Drop parents already reachable through other parents, and
    // remember the (parent, target) pairs we have covered.
    crate::alive::remove_redundant_parents(
        &graph,
        &vids,
        &mut ws.parents,
        &mut ws.missing_context.covered_parents,
        target,
    );
    for &p in ws.parents.iter() {
        ws.missing_context.covered_parents.insert((p, target));
    }
    crate::alive::remove_redundant_children(&graph, &vids, &mut ws.children, target);
    // Connect every remaining parent to every remaining child.
    for &p in ws.parents.iter() {
        debug_assert!(is_alive(txn, channel, p).unwrap());
        for &c in ws.children.iter() {
            if p != c {
                debug_assert!(is_alive(txn, channel, c).unwrap());
                put_graph_with_rev(txn, channel, EdgeFlags::PSEUDO, p, c, ChangeId::ROOT)?;
            }
        }
    }
    Ok(())
}
/// Deletes the pseudo-edges collected in `ws.pseudo` whose endpoints
/// are no longer both alive, then repairs the missing up/down context
/// around the dead endpoint.
pub(crate) fn clean_obsolete_pseudo_edges<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
) -> Result<(), LocalApplyError<T::GraphError>> {
    for (next_vertex, p, inode) in ws.pseudo.drain(..) {
        // Orient the edge as (a = parent, b = child), resolving the
        // other endpoint; skip edges whose endpoint no longer
        // resolves to a block.
        let (a, b) = if p.flag.is_parent() {
            if let Ok(dest) = txn.find_block_end(channel, p.dest) {
                (dest, next_vertex)
            } else {
                continue;
            }
        } else if let Ok(dest) = txn.find_block(channel, p.dest) {
            (next_vertex, dest)
        } else {
            continue;
        };
        let a_is_alive = is_alive(txn, channel, a)?;
        let b_is_alive = is_alive(txn, channel, b)?;
        // Both ends alive: the pseudo-edge is still useful.
        if a_is_alive && b_is_alive {
            continue;
        }
        debug!(
            "Deleting {:?} {:?} {:?} {:?}",
            a, b, p.introduced_by, p.flag
        );
        del_graph_with_rev(
            txn,
            channel,
            p.flag - EdgeFlags::PARENT,
            a,
            b,
            p.introduced_by,
        )?;
        if a_is_alive {
            // Parent alive, child dead: repair the child's context.
            debug!("repair down");
            debug_assert!(!b_is_alive);
            crate::missing_context::repair_missing_down_context(
                txn,
                channel,
                &mut ws.missing_context,
                inode,
                b,
                &[a],
            )
            .map_err(LocalApplyError::from_missing)?
        } else if b_is_alive && !p.flag.is_folder() {
            // Child alive, parent dead (non-folder): repair the
            // parent's context.
            debug!("repair up");
            crate::missing_context::repair_missing_up_context(
                txn,
                channel,
                &mut ws.missing_context,
                change_id,
                inode,
                a,
                &[b],
            )
            .map_err(LocalApplyError::from_missing)?
        }
    }
    Ok(())
}
/// Repairs all missing contexts left by applying `change`: parents of
/// deleted vertices first, then each atom's up/down (for new
/// vertices) or edge contexts, finally deleting now-obsolete
/// pseudo-edges. Timing is accumulated into `TIMERS.repair_context`.
fn repair_missing_contexts<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    change: &Change,
) -> Result<(), LocalApplyError<T::GraphError>> {
    let now = std::time::Instant::now();
    crate::missing_context::repair_parents_of_deleted(txn, channel, &mut ws.missing_context)
        .map_err(LocalApplyError::from_missing)?;
    for atom in change.changes.iter().flat_map(|r| r.iter()) {
        match atom {
            // Folder vertices (second arm) need no context repair.
            Atom::NewVertex(ref n) if !n.flag.is_folder() => {
                let vertex = Vertex {
                    change: change_id,
                    start: n.start,
                    end: n.end,
                };
                repair_new_vertex_context_up(txn, channel, ws, change_id, n, vertex)?;
                repair_new_vertex_context_down(txn, channel, ws, change_id, n, vertex)?;
            }
            Atom::NewVertex(_) => {}
            Atom::EdgeMap(ref n) => {
                repair_edge_context(txn, channel, ws, change_id, change, n)?;
            }
        }
    }
    crate::missing_context::delete_pseudo_edges(txn, channel, &mut ws.missing_context)
        .map_err(LocalApplyError::from_missing)?;
    crate::TIMERS.lock().unwrap().repair_context += now.elapsed();
    Ok(())
}
/// For each up-context position of a new vertex, if the context
/// vertex is no longer alive, repairs the missing up context between
/// it and the new `vertex`.
fn repair_new_vertex_context_up<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    n: &NewVertex<Option<Hash>>,
    vertex: Vertex<ChangeId>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    for up in n.up_context.iter() {
        // Resolve the external up-context position to its vertex.
        let up = txn.find_block_end(channel, internal_pos(txn, &up, change_id)?)?;
        if !is_alive(txn, channel, up)? {
            debug!("repairing missing up context {:?} {:?}", up, vertex);
            repair_missing_up_context(
                txn,
                channel,
                &mut ws.missing_context,
                change_id,
                n.inode,
                up,
                &[vertex],
            )
            .map_err(LocalApplyError::from_missing)?
        }
    }
    Ok(())
}
/// For each down-context vertex of a new (non-folder) vertex, repairs
/// the missing down context between the new `vertex` and that
/// down-context vertex — but only when every live parent edge of the
/// down context was introduced by this very change.
fn repair_new_vertex_context_down<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    n: &NewVertex<Option<Hash>>,
    vertex: Vertex<ChangeId>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    debug!("repairing missing context for {:?}", vertex);
    // Folder vertices are handled elsewhere.
    if n.flag.contains(EdgeFlags::FOLDER) {
        return Ok(());
    }
    'outer: for down in n.down_context.iter() {
        let down = txn.find_block(channel, internal_pos(txn, &down, change_id)?)?;
        // If the down context still has a live parent edge from some
        // other change, its context is intact — skip the repair.
        for e in iter_adjacent(
            txn,
            channel,
            down,
            EdgeFlags::PARENT,
            EdgeFlags::all() - EdgeFlags::DELETED,
        )? {
            let e = e?;
            if e.introduced_by != change_id {
                continue 'outer;
            }
        }
        debug!("repairing missing down context {:?} {:?}", down, vertex);
        repair_missing_down_context(
            txn,
            channel,
            &mut ws.missing_context,
            n.inode,
            down,
            &[vertex],
        )
        .map_err(LocalApplyError::from_missing)?
    }
    Ok(())
}
/// Repairs the context of every edge in an `EdgeMap` atom, dispatching
/// to the deleted- or nondeleted-context repair depending on the
/// edge's DELETED flag. The closure lets the repair functions ask
/// whether `change` knows a given hash.
fn repair_edge_context<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
    change_id: ChangeId,
    change: &Change,
    n: &EdgeMap<Option<Hash>>,
) -> Result<(), LocalApplyError<T::GraphError>> {
    for e in n.edges.iter() {
        // Changes store edges in their forward orientation only.
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        if e.flag.contains(EdgeFlags::DELETED) {
            trace!("repairing context deleted {:?}", e);
            repair_context_deleted(
                txn,
                channel,
                &mut ws.missing_context,
                n.inode,
                change_id,
                |h| change.knows(&h),
                e,
            )
            .map_err(LocalApplyError::from_missing)?
        } else {
            trace!("repairing context nondeleted {:?}", e);
            repair_context_nondeleted(
                txn,
                channel,
                &mut ws.missing_context,
                n.inode,
                change_id,
                |h| change.knows(&h),
                e,
            )
            .map_err(LocalApplyError::from_missing)?
        }
    }
    Ok(())
}
/// Checks every folder vertex recorded in
/// `ws.missing_context.files` for rootedness (reachability of the
/// graph root through folder parents) and repairs the path with
/// pseudo-edges when it is not rooted. Timing is accumulated into
/// `TIMERS.check_cyclic_paths`.
///
/// Fix: uses `std::mem::take` instead of the equivalent
/// `std::mem::replace(…, HashSet::new())` idiom.
pub(crate) fn repair_cyclic_paths<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    ws: &mut Workspace,
) -> Result<(), LocalApplyError<T::GraphError>> {
    let now = std::time::Instant::now();
    // Move `files` out of the workspace so it can be iterated while
    // `ws` is passed mutably to the helpers; it is put back at the
    // end, keeping its allocation.
    let mut files = std::mem::take(&mut ws.missing_context.files);
    for file in files.drain() {
        if file.is_empty() {
            // An empty vertex is checked directly.
            if !is_rooted(txn, channel, file, ws)? {
                repair_edge(txn, channel, file, ws)?
            }
        } else {
            // Otherwise follow the first folder edge to the inode
            // vertex it points to, and check that one.
            let f0 = EdgeFlags::FOLDER;
            let f1 = EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::PSEUDO;
            let mut iter = iter_adjacent(txn, channel, file, f0, f1)?;
            if let Some(ee) = iter.next() {
                let ee = ee?;
                let dest = ee.dest.inode_vertex();
                if !is_rooted(txn, channel, dest, ws)? {
                    repair_edge(txn, channel, dest, ws)?
                }
            }
        }
    }
    ws.missing_context.files = files;
    crate::TIMERS.lock().unwrap().check_cyclic_paths += now.elapsed();
    Ok(())
}
/// Reconnects the unrooted vertex `to0` to the root by walking up its
/// folder ancestors (iteratively, with an explicit stack) and adding
/// `FOLDER | PSEUDO` edges along the found path wherever the original
/// edge was deleted.
///
/// Stack entries are `(vertex, on_path, deleted, ancestor_alive)`;
/// `ws.parents` is borrowed as the "visited" set and cleared before
/// and after.
fn repair_edge<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    to0: Vertex<ChangeId>,
    ws: &mut Workspace,
) -> Result<(), LocalApplyError<T::GraphError>> {
    debug!("repair_edge {:?}", to0);
    let mut stack = vec![(to0, true, true, true)];
    ws.parents.clear();
    while let Some((current, _, al, anc_al)) = stack.pop() {
        if !ws.parents.insert(current) {
            continue;
        }
        debug!("repair_cyclic {:?}", current);
        if current != to0 {
            // Re-push `current` marked as being on the path, so the
            // path-reconstruction loop below can see it.
            stack.push((current, true, al, anc_al));
        }
        // Stop as soon as a rooted (or root) ancestor is reached.
        if current.is_root() {
            debug!("root");
            break;
        }
        if let Some(&true) = ws.rooted.get(&current) {
            debug!("rooted");
            break;
        }
        let f = EdgeFlags::PARENT | EdgeFlags::FOLDER;
        let len = stack.len();
        for parent in iter_adjacent(txn, channel, current, f, EdgeFlags::all())? {
            let parent = parent?;
            if parent.flag.is_parent() {
                let anc = txn.find_block_end(channel, parent.dest)?;
                debug!("is_rooted, parent = {:?}", parent);
                // `al`: does the ancestor itself still have a live
                // (non-deleted) folder parent edge?
                let al = if let Some(e) = iter_adjacent(
                    txn,
                    channel,
                    anc,
                    f,
                    f | EdgeFlags::BLOCK | EdgeFlags::PSEUDO,
                )?
                .next()
                {
                    e?;
                    true
                } else {
                    false
                };
                debug!("al = {:?}, flag = {:?}", al, parent.flag);
                stack.push((anc, false, parent.flag.is_deleted(), al));
            }
        }
        if stack.len() == len {
            // Dead end: no folder parents, backtrack past `current`.
            stack.pop();
        } else {
            // Prefer ancestors that are still alive (sorted last, so
            // popped first).
            (&mut stack[len..]).sort_unstable_by(|a, b| a.3.cmp(&b.3))
        }
    }
    // Walk the found path (entries flagged on_path) and re-add
    // pseudo folder edges where the original edge was deleted.
    let mut current = to0;
    for (next, on_path, del, _) in stack {
        if on_path {
            if del {
                put_graph_with_rev(
                    txn,
                    channel,
                    EdgeFlags::FOLDER | EdgeFlags::PSEUDO,
                    next,
                    current,
                    ChangeId::ROOT,
                )?;
            }
            current = next
        }
    }
    ws.parents.clear();
    Ok(())
}
/// Returns whether the (empty, i.e. inode) vertex `v` is rooted: dead
/// vertices count as rooted, otherwise `v` must reach the graph root
/// through folder parent edges. Results are memoized in `ws.rooted`.
fn is_rooted<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    v: Vertex<ChangeId>,
    ws: &mut Workspace,
) -> Result<bool, LocalApplyError<T::GraphError>> {
    // First decide whether `v` is alive at all: a live non-deleted
    // folder parent edge, or any live non-parent edge, keeps it alive.
    let mut alive = false;
    assert!(v.is_empty());
    for e in iter_adjacent(txn, channel, v, EdgeFlags::empty(), EdgeFlags::all())? {
        let e = e?;
        if e.flag.contains(EdgeFlags::PARENT) {
            if e.flag & (EdgeFlags::FOLDER | EdgeFlags::DELETED) == EdgeFlags::FOLDER {
                alive = true;
                break;
            }
        } else if !e.flag.is_deleted() {
            alive = true;
            break;
        }
    }
    if !alive {
        // Dead vertices are trivially "rooted" (nothing to repair).
        debug!("is_rooted, not alive");
        return Ok(true);
    }
    // Recycling ws.up_context and ws.parents as a stack and a
    // "visited" hashset, respectively.
    let stack = &mut ws.up_context;
    stack.clear();
    stack.push(v);
    let visited = &mut ws.parents;
    visited.clear();
    while let Some(to) = stack.pop() {
        debug!("is_rooted, pop = {:?}", to);
        if to.is_root() {
            // Reached the root: everything visited on the way is
            // rooted too — memoize that.
            stack.clear();
            for v in visited.drain() {
                ws.rooted.insert(v, true);
            }
            return Ok(true);
        }
        if !visited.insert(to) {
            continue;
        }
        // Reuse previously memoized answers.
        if let Some(&rooted) = ws.rooted.get(&to) {
            if rooted {
                for v in visited.drain() {
                    ws.rooted.insert(v, true);
                }
                return Ok(true);
            } else {
                continue;
            }
        }
        let f = EdgeFlags::PARENT | EdgeFlags::FOLDER;
        for parent in iter_adjacent(
            txn,
            channel,
            to,
            f,
            f | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
        )? {
            let parent = parent?;
            debug!("is_rooted, parent = {:?}", parent);
            stack.push(txn.find_block_end(channel, parent.dest)?)
        }
    }
    // Exhausted the ancestors without reaching the root: memoize the
    // negative answer for everything visited.
    for v in visited.drain() {
        ws.rooted.insert(v, false);
    }
    Ok(false)
}
use super::{Flags, Graph, VertexId};
use crate::vector2::*;
use std::cmp::min;
impl Graph {
    /// Computes the strongly connected components of this graph with
    /// an iterative (explicit call-stack) version of Tarjan's
    /// algorithm, starting from vertex 1 (vertex 0 is the dummy).
    ///
    /// Returns the SCCs as a `Vector2`; components come out in
    /// reverse topological order (callers rely on this). Each
    /// vertex's `scc` field is set to its component index.
    pub(crate) fn tarjan(&mut self) -> Vector2<VertexId> {
        if self.lines.len() <= 1 {
            // Trivial graph: a single component containing vertex 0.
            let mut sccs = Vector2::with_capacities(self.lines.len(), self.lines.len());
            sccs.push();
            sccs.push_to_last(VertexId(0));
            return sccs;
        }
        // Call-stack entries are (vertex, child index, first visit?).
        let mut call_stack = vec![(VertexId(1), 0, true)];
        let mut index = 0;
        let mut stack = Vec::new();
        let mut scc = Vector2::new();
        'recursion: while let Some((n_l, i, first_visit)) = call_stack.pop() {
            if first_visit {
                // Equivalent of entering the recursive call: assign
                // the discovery index and push onto the Tarjan stack.
                let l = &mut self[n_l];
                l.index = index;
                l.lowlink = index;
                l.flags = l.flags | Flags::ONSTACK | Flags::VISITED;
                stack.push(n_l);
                index += 1;
            } else {
                // Returning from the recursive call on child `i`:
                // fold its lowlink into ours.
                let &(_, n_child) = self.child(n_l, i);
                self[n_l].lowlink = self[n_l].lowlink.min(self[n_child].lowlink);
            }
            for j in i..self[n_l].n_children {
                let &(_, n_child) = self.child(n_l, j);
                if !self[n_child].flags.contains(Flags::VISITED) {
                    // "Recurse" into the unvisited child.
                    call_stack.push((n_l, j, false));
                    call_stack.push((n_child, 0, true));
                    continue 'recursion;
                } else if self[n_child].flags.contains(Flags::ONSTACK) {
                    self[n_l].lowlink = min(self[n_l].lowlink, self[n_child].index)
                }
            }
            if self[n_l].index == self[n_l].lowlink {
                // `n_l` is the root of an SCC: pop the component off
                // the Tarjan stack.
                let n_scc = scc.len();
                scc.push();
                loop {
                    match stack.pop() {
                        None => break,
                        Some(n_p) => {
                            self[n_p].scc = n_scc;
                            self[n_p].flags ^= Flags::ONSTACK;
                            scc.push_to_last(n_p);
                            if n_p == n_l {
                                break;
                            }
                        }
                    }
                }
            }
        }
        scc
    }
}
use super::{AliveVertex, Flags, Graph, VertexId};
use crate::pristine::*;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
/// Builds the in-memory graph of alive vertices reachable from the
/// inode at `pos0`, following PSEUDO/BLOCK (non-parent) edges.
///
/// Vertex 0 is a dummy "bottom" sentinel; vertex 1 is the inode
/// itself. Every vertex's children list ends with a
/// `(None, DUMMY)` marker entry. Timing is accumulated into
/// `TIMERS.alive_retrieve`.
pub fn retrieve<T: GraphTxnT>(
    txn: &T,
    channel: &T::Graph,
    pos0: Position<ChangeId>,
) -> Result<Graph, TxnErr<T::GraphError>> {
    let now = std::time::Instant::now();
    let mut graph = Graph {
        lines: Vec::new(),
        children: Vec::new(),
        total_bytes: 0,
    };
    // Maps channel positions to indices in `graph.lines`, so each
    // vertex is materialized only once.
    let mut cache: HashMap<Position<ChangeId>, VertexId> = HashMap::new();
    graph.lines.push(AliveVertex::DUMMY);
    cache.insert(Position::BOTTOM, VertexId(0));
    graph.lines.push(AliveVertex {
        vertex: pos0.inode_vertex(),
        flags: Flags::empty(),
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    });
    cache.insert(pos0, VertexId(1));
    let mut stack = vec![VertexId(1)];
    while let Some(vid) = stack.pop() {
        debug!("vid {:?}", vid);
        graph[vid].children = graph.children.len();
        for e in crate::pristine::iter_adjacent(
            txn,
            &channel,
            graph[vid].vertex,
            EdgeFlags::empty(),
            EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
        )? {
            let e = e?;
            let dest_vid = match cache.entry(e.dest) {
                Entry::Vacant(ent) => {
                    // First time we see this position: materialize it
                    // if it is alive, otherwise skip the edge.
                    if let Some(alive) = new_vertex(txn, channel, e.dest)? {
                        let n = VertexId(graph.lines.len());
                        ent.insert(n);
                        graph.total_bytes += alive.vertex.len();
                        graph.lines.push(alive);
                        stack.push(n);
                        n
                    } else {
                        continue;
                    }
                }
                Entry::Occupied(e) => *e.get(),
            };
            // Self-loops at the same position would break traversal.
            assert_ne!(graph[vid].vertex.start_pos(), e.dest);
            trace!("child {:?}", dest_vid);
            graph.children.push((Some(e), dest_vid));
            graph[vid].n_children += 1;
        }
        // Terminator entry for this vertex's child list.
        graph.children.push((None, VertexId::DUMMY));
        graph[vid].n_children += 1;
    }
    crate::TIMERS.lock().unwrap().alive_retrieve += now.elapsed();
    Ok(graph)
}
/// Materializes the channel vertex at `pos` as an `AliveVertex`, or
/// returns `None` if it is not alive. A vertex that additionally has
/// a `PARENT | DELETED | BLOCK` parent edge is flagged as a ZOMBIE
/// (alive and deleted at the same time).
fn new_vertex<T: GraphTxnT>(
    txn: &T,
    graph: &T::Graph,
    pos: Position<ChangeId>,
) -> Result<Option<AliveVertex>, TxnErr<T::GraphError>> {
    let vertex = txn.find_block(graph, pos).unwrap();
    if !is_alive(txn, graph, vertex)? {
        debug!("not alive: {:?}", vertex);
        return Ok(None);
    }
    let mut flags = Flags::empty();
    for e in crate::pristine::iter_adjacent(
        txn,
        graph,
        vertex,
        EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK,
        EdgeFlags::all(),
    )? {
        if e?
            .flag
            .contains(EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK)
        {
            flags = Flags::ZOMBIE;
            break;
        }
    }
    debug!("flags for {:?}: {:?}", vertex, flags);
    Ok(Some(AliveVertex {
        vertex,
        flags,
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    }))
}
/// Detects and deletes "forward" edges (edges made redundant by a
/// longer alive path) in the graph rooted at `pos`.
pub(crate) fn remove_forward_edges<T: GraphMutTxnT>(
    txn: &mut T,
    channel: &mut T::Graph,
    pos: Position<ChangeId>,
) -> Result<(), TxnErr<T::GraphError>> {
    // Load the alive graph and compute its structure.
    let mut graph = retrieve(txn, channel, pos)?;
    let sccs = graph.tarjan(); // SCCs are given here in reverse order.
    let (_, forward_sccs) = graph.dfs(&sccs);
    // Translate forward SCC pairs back into concrete edges, then
    // delete each one (together with its reverse).
    let mut forward_edges = Vec::new();
    graph.collect_forward_edges(txn, channel, &sccs, &forward_sccs, &mut forward_edges)?;
    for (vertex, edge) in forward_edges.iter() {
        let dest = txn.find_block(channel, edge.dest).unwrap();
        debug!(target:"libpijul::forward", "deleting forward edge {:?} {:?} {:?}", vertex, dest, edge);
        del_graph_with_rev(txn, channel, edge.flag, *vertex, dest, edge.introduced_by)?;
    }
    Ok(())
}
use super::dfs::{Path, PathElement};
use super::{Flags, Graph, VertexId};
use crate::changestore::ChangeStore;
use crate::output::FileError;
use crate::pristine::*;
use crate::vector2::Vector2;
use crate::vertex_buffer::VertexBuffer;
/// One frame of the iterative conflict-output traversal in
/// `output_conflict`.
#[derive(Debug)]
struct ConflictStackElt {
    // The sides of the conflict currently being printed.
    conflict: Vec<Path>,
    // Index of the side being printed.
    side: usize,
    // Index of the next path element within that side.
    idx: usize,
}
/// Writes a (possibly nested) conflict tree to `line_buf`,
/// iteratively with an explicit stack: each side's SCCs are printed
/// in order, separated by conflict markers, recursing into nested
/// conflicts as they are encountered. Sides are ordered by the age of
/// their oldest vertex.
fn output_conflict<T: ChannelTxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    channel: &T::Channel,
    line_buf: &mut B,
    graph: &Graph,
    sccs: &Vector2<VertexId>,
    conflict: Path,
) -> Result<(), FileError<P::Error, T::GraphError>> {
    let mut stack = vec![ConflictStackElt {
        conflict: vec![conflict],
        side: 0,
        idx: 0,
    }];
    while let Some(mut elt) = stack.pop() {
        let n_sides = elt.conflict.len();
        // First time this conflict is seen (nothing printed yet):
        // open the conflict and order its sides by oldest vertex.
        if n_sides > 1 && elt.side == 0 && elt.idx == 0 {
            line_buf.begin_conflict()?;
            elt.conflict.sort_by(|a, b| {
                let a_ = a
                    .path
                    .iter()
                    .map(|a| a.oldest_vertex(changes, txn, channel, graph, sccs).unwrap())
                    .min()
                    .unwrap();
                let b_ = b
                    .path
                    .iter()
                    .map(|b| b.oldest_vertex(changes, txn, channel, graph, sccs).unwrap())
                    .min()
                    .unwrap();
                a_.cmp(&b_)
            });
        }
        // `next` holds a nested conflict to descend into, if any.
        let mut next = None;
        'outer: while elt.side < n_sides {
            if elt.side > 0 && elt.idx == 0 {
                line_buf.conflict_next()?;
            }
            while elt.idx < elt.conflict[elt.side].path.len() {
                match elt.conflict[elt.side].path[elt.idx] {
                    PathElement::Scc { scc } => {
                        output_scc(changes, txn, graph, &sccs[scc], line_buf)?;
                        elt.idx += 1;
                    }
                    PathElement::Conflict { ref mut sides } => {
                        // Nested conflict: take its sides out (they
                        // are not needed in place any more) and
                        // "recurse" by pushing a new frame.
                        let sides = std::mem::replace(sides, Vec::new());
                        elt.idx += 1;
                        next = Some(ConflictStackElt {
                            side: 0,
                            idx: 0,
                            conflict: sides,
                        });
                        break 'outer;
                    }
                }
            }
            elt.side += 1;
            elt.idx = 0;
        }
        if elt.side >= n_sides {
            // All sides printed: close the conflict markers.
            if n_sides > 1 {
                line_buf.end_conflict()?;
            }
        } else {
            // Suspended on a nested conflict: resume later.
            stack.push(elt);
            stack.push(next.unwrap())
        }
    }
    Ok(())
}
impl PathElement {
    /// Returns the smallest changeset timestamp among all vertices
    /// covered by this path element (recursing into conflict sides).
    ///
    /// Panics if a vertex is not registered in the channel — that
    /// would be a corruption; a debug dump is written to
    /// `debug_oldest` first when debug logging is enabled.
    fn oldest_vertex<T: ChannelTxnT, C: ChangeStore>(
        &self,
        changes: &C,
        txn: &T,
        channel: &T::Channel,
        graph: &Graph,
        sccs: &Vector2<VertexId>,
    ) -> Result<u64, TxnErr<T::GraphError>> {
        match *self {
            PathElement::Scc { ref scc } => {
                let mut min: Option<u64> = None;
                for x in sccs[*scc].iter() {
                    if let Some(t) =
                        txn.get_changeset(T::changes(&channel), graph[*x].vertex.change)?
                    {
                        if let Some(ref mut m) = min {
                            *m = (*m).min(t)
                        } else {
                            min = Some(t)
                        }
                    } else {
                        if log_enabled!(log::Level::Debug) {
                            let f = std::fs::File::create("debug_oldest").unwrap();
                            graph
                                .debug(changes, txn, T::graph(channel), false, true, f)
                                .unwrap();
                        }
                        panic!("vertex not in channel: {:?}", graph[*x].vertex)
                    }
                }
                // An SCC is never empty, so `min` is always set here.
                Ok(min.unwrap())
            }
            PathElement::Conflict { ref sides } => {
                // Minimum over every element of every side.
                let mut min: Option<u64> = None;
                for x in sides.iter() {
                    for y in x.path.iter() {
                        let t = y.oldest_vertex(changes, txn, channel, graph, sccs)?;
                        if let Some(ref mut m) = min {
                            *m = (*m).min(t)
                        } else {
                            min = Some(t)
                        }
                    }
                }
                Ok(min.unwrap())
            }
        }
    }
}
/// Writes the contents of every vertex of one SCC to `vbuf`,
/// surrounding multi-vertex components with cyclic-conflict markers
/// and zombie vertices with zombie-conflict markers. Time spent is
/// split between `TIMERS.alive_write` and `TIMERS.alive_contents`.
fn output_scc<T: GraphTxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    graph: &Graph,
    scc: &[VertexId],
    vbuf: &mut B,
) -> Result<(), FileError<P::Error, T::GraphError>> {
    // More than one vertex in an SCC means a cycle, i.e. a conflict.
    if scc.len() > 1 {
        vbuf.begin_cyclic_conflict()?;
    }
    for &v in scc.iter() {
        let now = std::time::Instant::now();
        if graph[v].flags.contains(Flags::ZOMBIE) {
            vbuf.begin_zombie_conflict()?;
        }
        crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
        let vertex = graph[v].vertex;
        // Closure handed to the vertex buffer so the contents fetch
        // can be timed separately from the write.
        let get_contents = |buf: &mut Vec<u8>| {
            let now = std::time::Instant::now();
            let result = changes
                .get_contents(|p| txn.get_external(p).unwrap(), vertex, buf)
                .map(|_| ())
                .map_err(FileError::Changestore);
            crate::TIMERS.lock().unwrap().alive_contents += now.elapsed();
            result
        };
        let now = std::time::Instant::now();
        debug!("outputting {:?}", vertex);
        vbuf.output_line(vertex, get_contents)?;
        if graph[v].flags.contains(Flags::ZOMBIE) {
            vbuf.end_conflict()?;
        }
        crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
    }
    let now = std::time::Instant::now();
    if scc.len() > 1 {
        vbuf.end_cyclic_conflict()?;
    }
    crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
    Ok(())
}
/// Renders a retrieved alive graph into `line_buf`: computes SCCs,
/// runs the DFS to build the conflict tree, collects forward edges
/// into `forward` (for the caller to delete), and prints the result
/// via `output_conflict`. Timing goes to `TIMERS.alive_graph` and
/// `TIMERS.alive_output`.
pub fn output_graph<T: ChannelTxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    channel: &T::Channel,
    line_buf: &mut B,
    graph: &mut Graph,
    forward: &mut Vec<(Vertex<ChangeId>, Edge)>,
) -> Result<(), crate::output::FileError<P::Error, T::GraphError>> {
    // Only the dummy vertex: nothing to output.
    if graph.lines.len() <= 1 {
        return Ok(());
    }
    let now0 = std::time::Instant::now();
    let scc = graph.tarjan(); // SCCs are given here in reverse order.
    let (conflict_tree, forward_scc) = graph.dfs(&scc);
    graph.collect_forward_edges(txn, T::graph(channel), &scc, &forward_scc, forward)?;
    crate::TIMERS.lock().unwrap().alive_graph += now0.elapsed();
    let now1 = std::time::Instant::now();
    debug!("conflict_tree = {:?}", conflict_tree);
    output_conflict(changes, txn, channel, line_buf, graph, &scc, conflict_tree)?;
    crate::TIMERS.lock().unwrap().alive_output += now1.elapsed();
    Ok(())
}
use crate::pristine::{ChangeId, Edge, Vertex};
mod debug;
mod dfs;
mod output;
pub mod retrieve;
mod tarjan;
pub(crate) use output::*;
pub(crate) use retrieve::*;
/// A vertex of the in-memory alive graph, annotated with its child
/// range and the bookkeeping fields used by Tarjan's algorithm.
#[derive(Debug, Clone)]
pub(crate) struct AliveVertex {
    pub vertex: Vertex<ChangeId>,
    flags: Flags,
    // Start offset of this vertex's entries in `Graph::children`.
    children: usize,
    // Number of entries (including the terminator) in that range.
    n_children: usize,
    // Tarjan discovery index and lowlink.
    index: usize,
    lowlink: usize,
    // Index of this vertex's strongly connected component.
    pub scc: usize,
}
bitflags! {
    // Per-vertex state bits for graph traversal.
    struct Flags: u8 {
        // Vertex is both alive and deleted (conflicting state).
        const ZOMBIE = 4;
        // Tarjan: vertex has been discovered.
        const VISITED = 2;
        // Tarjan: vertex is currently on the component stack.
        const ONSTACK = 1;
    }
}
/// Index of a vertex in `Graph::lines`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub(crate) struct VertexId(pub(crate) usize);
impl VertexId {
    // Index 0 is the dummy "bottom" sentinel vertex.
    const DUMMY: VertexId = VertexId(0);
}
impl AliveVertex {
    // Sentinel vertex stored at index 0 of every graph.
    const DUMMY: AliveVertex = AliveVertex {
        vertex: Vertex::BOTTOM,
        flags: Flags::empty(),
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    };
}
/// In-memory graph of alive vertices, built by `retrieve`. Children
/// of all vertices are stored contiguously in one vector; each
/// `AliveVertex` addresses its own slice of it.
#[derive(Debug)]
pub struct Graph {
    pub(crate) lines: Vec<AliveVertex>,
    children: Vec<(Option<Edge>, VertexId)>,
    // Sum of the byte lengths of all vertices.
    total_bytes: usize,
}
impl Graph {
    /// Number of vertices, including the dummy sentinel at index 0.
    pub fn len_vertices(&self) -> usize {
        self.lines.len()
    }
    /// Total number of bytes covered by the graph's vertices.
    pub fn len_bytes(&self) -> usize {
        self.total_bytes
    }
}
impl std::ops::Index<VertexId> for Graph {
    type Output = AliveVertex;
    /// Looks up a vertex by id; panics if the id is out of bounds.
    fn index(&self, idx: VertexId) -> &Self::Output {
        &self.lines[idx.0]
    }
}
impl std::ops::IndexMut<VertexId> for Graph {
    /// Mutable lookup by id; panics if the id is out of bounds.
    fn index_mut(&mut self, idx: VertexId) -> &mut Self::Output {
        &mut self.lines[idx.0]
    }
}
impl Graph {
    /// All `(edge, child)` entries of vertex `i` (the last entry is
    /// the `(None, DUMMY)` terminator).
    pub(crate) fn children(&self, i: VertexId) -> &[(Option<Edge>, VertexId)] {
        let v = &self[i];
        let start = v.children;
        &self.children[start..start + v.n_children]
    }
    /// The `j`-th `(edge, child)` entry of vertex `i`.
    fn child(&self, i: VertexId, j: usize) -> &(Option<Edge>, VertexId) {
        let offset = self[i].children;
        &self.children[offset + j]
    }
}
use std::collections::{HashMap, HashSet};
/// Removes from `vertices` (a set of children of a deleted vertex)
/// every child that is already reachable from another child through
/// the alive graph — connecting to it directly would create a
/// redundant (forward) pseudo-edge.
///
/// `vids` maps channel vertices to their ids in `graph`; vertices not
/// present in the graph are left untouched.
pub(crate) fn remove_redundant_children(
    graph: &Graph,
    vids: &HashMap<Vertex<ChangeId>, crate::alive::VertexId>,
    vertices: &mut HashSet<Vertex<ChangeId>>,
    target: Vertex<ChangeId>,
) {
    // `min` is the smallest SCC index among the candidates; the DFS
    // below never needs to descend past it.
    let mut min = std::usize::MAX;
    let mut stack = Vec::new();
    for p in vertices.iter() {
        let vid = if let Some(vid) = vids.get(p) {
            *vid
        } else {
            continue;
        };
        min = min.min(graph[vid].scc);
        stack.push(vid);
    }
    let target_scc = if let Some(&target) = vids.get(&target) {
        graph[target].scc
    } else {
        std::usize::MAX
    };
    let mut visited = HashSet::new();
    while let Some(p) = stack.pop() {
        if !visited.insert(p) {
            continue;
        }
        for (_, child) in graph.children(p) {
            if graph[p].scc < target_scc && graph[p].scc != graph[*child].scc {
                // SCC indices decrease along edges (reverse
                // topological order), so `child` is reachable from a
                // candidate: it is redundant.
                assert!(graph[p].scc > graph[*child].scc);
                vertices.remove(&graph[*child].vertex);
            }
            if graph[*child].scc >= min {
                stack.push(*child);
            }
        }
    }
}
/// Removes from `vertices` (a set of parents of a deleted vertex)
/// every parent that can already reach `target` through another
/// candidate parent, recording the eliminated `(parent, target)`
/// pairs in `covered` so later calls can reuse them.
///
/// `vids` maps channel vertices to their ids in `graph`; vertices not
/// present in the graph are left untouched.
pub(crate) fn remove_redundant_parents(
    graph: &Graph,
    vids: &HashMap<Vertex<ChangeId>, crate::alive::VertexId>,
    vertices: &mut HashSet<Vertex<ChangeId>>,
    covered: &mut HashSet<(Vertex<ChangeId>, Vertex<ChangeId>)>,
    target: Vertex<ChangeId>,
) {
    // `min` is the smallest SCC index among the candidates; the DFS
    // below never needs to descend past it.
    let mut min = std::usize::MAX;
    // Stack entries are (vertex id, "already expanded" marker); the
    // marked re-push keeps the current DFS path visible on the stack.
    let mut stack = Vec::new();
    for p in vertices.iter() {
        let vid = if let Some(vid) = vids.get(p) {
            *vid
        } else {
            continue;
        };
        min = min.min(graph[vid].scc);
        stack.push((vid, false));
    }
    stack.sort_by(|(a, _), (b, _)| graph[*a].scc.cmp(&graph[*b].scc));
    let target_scc = if let Some(&target) = vids.get(&target) {
        graph[target].scc
    } else {
        0
    };
    let mut visited = HashSet::new();
    while let Some((p, _)) = stack.pop() {
        if !visited.insert(p) {
            continue;
        }
        if graph[p].scc > target_scc
            && (vertices.contains(&graph[p].vertex) || covered.contains(&(graph[p].vertex, target)))
        {
            // `p` is itself a candidate (or already covered): every
            // on-path ancestor in a different SCC is redundant.
            for (pp, pp_on_path) in stack.iter() {
                if graph[*pp].scc != graph[p].scc && *pp_on_path {
                    vertices.remove(&graph[*pp].vertex);
                    covered.insert((graph[*pp].vertex, target));
                }
            }
        }
        stack.push((p, true));
        for (_, child) in graph.children(p) {
            if graph[*child].scc >= min {
                stack.push((*child, false));
            }
            if graph[p].scc > target_scc
                && graph[*child].scc != graph[p].scc
                && covered.contains(&(graph[*child].vertex, target))
            {
                // A child already covers `target`, so `p` reaches it
                // transitively: `p` is redundant.
                assert!(graph[*child].scc < graph[p].scc);
                vertices.remove(&graph[p].vertex);
                covered.insert((graph[p].vertex, target));
            }
        }
    }
}
use super::{Graph, VertexId};
use crate::pristine::*;
use crate::vector2::Vector2;
use std::collections::HashSet;
/// One linear branch of the conflict tree produced by `Graph::dfs`.
#[derive(Debug)]
pub(super) struct Path {
    // The sequence of SCCs and nested conflicts along this branch.
    pub path: Vec<PathElement>,
    // All SCC indices covered by this branch (including nested ones).
    pub sccs: HashSet<usize>,
    // SCC index at which this branch rejoins another one.
    pub end: usize,
}
/// A node of the conflict tree: either a single strongly connected
/// component, or a conflict with several alternative sides.
#[derive(Debug)]
pub(super) enum PathElement {
    Scc { scc: usize },
    Conflict { sides: Vec<Path> },
}
impl Path {
    /// An empty path: no elements, no SCCs, end at 0.
    fn new() -> Self {
        Self {
            path: Vec::new(),
            sccs: HashSet::new(),
            end: 0,
        }
    }
}
/// Visit bookkeeping for the DFS over SCCs in `Graph::dfs`.
struct DFS {
    // Per-SCC first/last visit times, indexed by SCC number.
    visits: Vec<Visits>,
    // Monotonic visit clock; starts at 1 so 0 means "not visited".
    counter: usize,
}
/// First and last visit times of one SCC (0 = not yet visited).
#[derive(Clone, Debug)]
struct Visits {
    first: usize,
    last: usize,
}
impl DFS {
    /// DFS state for `n` SCCs: every visit time unset (0), with the
    /// clock starting at 1 (so 0 always means "not visited yet").
    pub fn new(n: usize) -> Self {
        let unvisited = Visits { first: 0, last: 0 };
        DFS {
            visits: vec![unvisited; n],
            counter: 1,
        }
    }
}
/// One frame of the explicit recursion stack used by `Graph::dfs`.
#[derive(Debug)]
struct State {
    // SCC this frame is visiting.
    n_scc: usize,
    // `Some((i, j))` when resuming: descendants row `i`, position `j`.
    descendants: Option<(usize, usize)>,
    // Path accumulated from the root down to this SCC.
    current_path: Path,
    // Length of `current_path.path` when this frame was entered.
    current_path_len: usize,
    // Paths returned by the children visited so far.
    return_values: Vec<Path>,
}
/// Builds the initial DFS stack, starting at SCC `n - 1` (the last
/// component — Tarjan emits SCCs in reverse order).
fn init_stack(n: usize) -> Vec<State> {
    let root = State {
        n_scc: n - 1,
        descendants: None,
        current_path: Path::new(),
        current_path_len: 0,
        return_values: Vec::new(),
    };
    // Reserve room for the deepest possible recursion up front.
    let mut stack = Vec::with_capacity(n);
    stack.push(root);
    stack
}
/// Regression test for change 4165: builds a fixed 13-vertex graph,
/// runs Tarjan + DFS on it, and dumps the graph to `debug4165`.
/// The DFS must terminate without panicking on this shape.
#[test]
fn test4165() {
    env_logger::try_init().unwrap_or(());
    use super::*;
    use crate::pristine::*;
    // 13 identical vertices; the change ids are fixed up below.
    let mut graph = Graph {
        lines: vec![
            AliveVertex {
                vertex: Vertex {
                    change: ChangeId(1),
                    start: ChangePosition(0),
                    end: ChangePosition(1)
                },
                flags: super::Flags::empty(),
                children: 0,
                n_children: 0,
                index: 0,
                lowlink: 0,
                scc: 0,
            };
            13
        ],
        children: Vec::new(),
        total_bytes: 0,
    };
    for i in 0..13 {
        graph.lines[i].vertex.change = ChangeId(i as u64);
    }
    // Adjacency lists: row i holds the children of vertex i.
    for (i, &children) in [
        &[][..],
        &[4, 2, 6, 7, 5, 3][..],
        &[12, 4, 7, 6][..],
        &[5][..],
        &[10, 8, 9][..],
        &[6, 7, 4][..],
        &[4, 7][..],
        &[4][..],
        &[10][..],
        &[10][..],
        &[11][..],
        &[0][..],
        &[4][..],
    ]
    .iter()
    .enumerate()
    {
        graph.lines[i].children = graph.children.len();
        graph.lines[i].n_children = children.len();
        for &chi in children.iter() {
            graph.children.push((
                Some(Edge {
                    dest: graph.lines[chi].vertex.start_pos(),
                    flag: EdgeFlags::empty(),
                    introduced_by: ChangeId(4165),
                }),
                VertexId(chi),
            ))
        }
    }
    let scc = graph.tarjan();
    // Propagate the component indices back onto the vertices, as
    // `dfs` expects.
    for i in 0..scc.len() {
        for &j in scc[i].iter() {
            graph[j].scc = i
        }
    }
    let mut f = std::fs::File::create("debug4165").unwrap();
    graph.debug_raw(&mut f).unwrap();
    println!("{:#?}", graph.dfs(&scc))
}
impl Graph {
    /// Walks the SCC DAG depth-first (iteratively, with an explicit
    /// stack of `State` frames) and builds the conflict tree.
    ///
    /// Returns the root `Path` plus the set of `(parent, child)` SCC
    /// pairs that are forward edges (edges to an SCC already visited
    /// earlier on the same path, unless also seen as regular edges).
    pub(super) fn dfs(&mut self, scc: &Vector2<VertexId>) -> (Path, HashSet<(usize, usize)>) {
        let mut dfs = DFS::new(scc.len());
        let mut stack = init_stack(scc.len());
        let mut forward_scc = HashSet::new();
        let mut regular_scc = HashSet::new();
        // `return_value` carries the Path returned by the frame that
        // just finished, to its parent frame.
        let mut return_value = None;
        let mut descendants = Vector2::with_capacities(scc.len(), scc.len());
        'recursion: while let Some(mut state) = stack.pop() {
            debug!("dfs state = {:?}", state);
            // Fresh frame: compute its descendants; resumed frame:
            // pick up where it left off.
            let (i, mut j) = if let Some(n) = state.descendants {
                n
            } else {
                first_visit(self, &mut dfs, scc, &mut descendants, &mut state)
            };
            debug!("i = {:?}, j = {:?}", i, j);
            let scc_vertices: Vec<_> = scc[state.n_scc].iter().map(|x| &self[*x]).collect();
            debug!("scc_vertices = {:?}", scc_vertices);
            // Children are visited from the back of the row.
            while j > 0 {
                let child = descendants[i][j - 1];
                let scc_child: Vec<_> = scc[child].iter().map(|x| &self[*x]).collect();
                debug!("dfs child = {:?} {:?}", child, scc_child);
                if dfs.visits[state.n_scc].first < dfs.visits[child].first {
                    // This is a forward edge.
                    if child > 0 && !regular_scc.contains(&(state.n_scc, child)) {
                        debug!("forward edge");
                        forward_scc.insert((state.n_scc, child));
                    }
                } else if dfs.visits[child].first == 0 {
                    // Regular edge.
                    regular_scc.insert((state.n_scc, child));
                    debug!("regular edge, return_value {:?}", return_value);
                    if let Some(return_value) = return_value.take() {
                        state.return_values.push(return_value)
                    }
                    // Suspend this frame and descend into `child`.
                    recurse(state, (i, j), child, &mut stack);
                    continue 'recursion;
                } else {
                    // Cross edge.
                    regular_scc.insert((state.n_scc, child));
                    debug!("cross edge");
                }
                j -= 1
            }
            // All children done: fold this frame's result into
            // `return_value` for the parent frame.
            return_value = Some(if let Some(return_value_) = return_value.take() {
                dfs.visits[state.n_scc].last = dfs.counter;
                dfs.counter += 1;
                if state.return_values.is_empty() {
                    return_value_
                } else {
                    // Several children returned: merge them into a
                    // conflict.
                    state.return_values.push(return_value_);
                    make_conflict(&mut state)
                }
            } else {
                state.current_path
            });
            debug!("end of loop, returning {:?}", return_value);
        }
        (return_value.unwrap_or_else(Path::new), forward_scc)
    }
}
/// First visit of SCC `state.n_scc`: stamps its visit time, appends
/// it to the current path, and collects its unvisited child SCCs into
/// a fresh row of `descendants`.
///
/// Returns `(row index, row length)` — the cursor with which the
/// caller iterates that row from the back. Children already visited
/// only contribute to `current_path.end` (the rejoin component).
fn first_visit(
    graph: &Graph,
    dfs: &mut DFS,
    scc: &Vector2<VertexId>,
    descendants: &mut Vector2<usize>,
    state: &mut State,
) -> (usize, usize) {
    assert_eq!(dfs.visits[state.n_scc].first, 0);
    dfs.visits[state.n_scc].first = dfs.counter;
    dfs.counter += 1;
    state
        .current_path
        .path
        .push(PathElement::Scc { scc: state.n_scc });
    state.current_path.sccs.insert(state.n_scc);
    let i = descendants.len();
    descendants.push();
    let mut descendants_end = 0;
    // Gather child components over all vertices of this SCC.
    for cousin in scc[state.n_scc].iter() {
        for &(_, n_child) in graph.children(*cousin) {
            let child_component = graph[n_child].scc;
            if child_component > state.n_scc {
                // Edges must go from higher to lower SCC indices
                // (reverse topological order).
                panic!("{} > {}", child_component, state.n_scc);
            } else if child_component == state.n_scc {
                // Edge inside the same (cyclic) component.
                debug!("cyclic component {:?}", child_component);
                continue;
            }
            if dfs.visits[child_component].first == 0 {
                descendants.push_to_last(child_component)
            } else {
                descendants_end = descendants_end.max(child_component);
            }
        }
    }
    state.current_path.end = descendants_end;
    let d = descendants.last_mut().unwrap();
    d.sort_unstable();
    debug!(
        "first visit, n_scc = {:?}, state.current_path = {:?}, descendants = {:?}",
        state.n_scc, state.current_path, d
    );
    (i, d.len())
}
/// Pushes two frames onto the explicit DFS stack: the suspended parent
/// frame (resuming at descendant index `j - 1` of row `i`), then a fresh
/// frame for visiting component `child`, which inherits the parent's
/// current path.
fn recurse(mut state: State, (i, j): (usize, usize), child: usize, stack: &mut Vec<State>) {
    // Hand the path over to the child frame, leaving the parent with an
    // empty one until the child returns.
    let inherited = std::mem::replace(&mut state.current_path, Path::new());
    let inherited_len = inherited.path.len();
    let first_new = stack.len();
    // Parent frame, suspended: remembers where to resume in its
    // descendants row.
    let parent = State {
        descendants: Some((i, j - 1)),
        ..state
    };
    stack.push(parent);
    // Child frame: a brand-new visit of `child`.
    let child_frame = State {
        n_scc: child,
        descendants: None,
        current_path_len: inherited_len,
        current_path: inherited,
        return_values: Vec::new(),
    };
    stack.push(child_frame);
    debug!("recursing {:?}", &stack[first_new..]);
}
/// Merges the sub-paths accumulated in `state.return_values` into a
/// single path containing an explicit conflict.
///
/// The first return value is split at the fork point
/// (`state.current_path_len + 1`): the shared prefix becomes the start
/// of the result, while each side keeps its divergent suffix. Sides are
/// then either grouped into an existing conflict, nested into the side
/// that contains their end component, or collected as top-level
/// conflict sides.
///
/// Assumes `state.return_values` is non-empty (indexing `[0]` panics
/// otherwise).
fn make_conflict(state: &mut State) -> Path {
    // Split side 0 at the fork: after the swap, `main_path` holds the
    // shared prefix and `return_values[0]` holds only its own suffix.
    let mut main_path = state.return_values[0]
        .path
        .split_off(state.current_path_len + 1);
    std::mem::swap(&mut state.return_values[0].path, &mut main_path);
    debug!(
        "make_conflict {:#?} {:#?}",
        state.return_values[0].path, main_path
    );
    // Sort by end component so sides are popped in decreasing `end`
    // order below.
    state.return_values.sort_by(|a, b| a.end.cmp(&b.end));
    // Union of the components covered by all sides.
    let sccs = state
        .return_values
        .iter()
        .flat_map(|side| side.sccs.iter())
        .copied()
        .collect();
    let mut conflict_sides = Vec::new();
    while let Some(side) = state.return_values.pop() {
        debug!("side = {:#?}", side);
        // Look for a remaining side that contains this side's end
        // component: that side must absorb this one.
        let main_side = if let Some(n) = state
            .return_values
            .iter()
            .position(|side_| side_.sccs.contains(&side.end))
        {
            n
        } else {
            // No remaining side contains it: it is a top-level conflict
            // side. The last popped side always lands here, since
            // `return_values` is empty by then — so `conflict_sides` is
            // never empty after the loop.
            conflict_sides.push(side);
            continue;
        };
        // If the absorbing side already starts with a conflict whose
        // sides end at the same component, just add one more side to it.
        if let PathElement::Conflict { ref mut sides, .. } = state.return_values[main_side].path[0]
        {
            if sides[0].end == side.end {
                sides.push(side);
                continue;
            }
        }
        // Otherwise split the absorbing side around the end component.
        create_nested_conflict(&mut state.return_values[main_side], side);
    }
    if conflict_sides.len() > 1 {
        main_path.push(PathElement::Conflict {
            sides: conflict_sides,
        })
    } else {
        // Exactly one side left (see the note in the loop above): splice
        // it directly onto the shared prefix — no conflict needed.
        main_path.extend(conflict_sides.pop().unwrap().path.into_iter())
    }
    Path {
        path: main_path,
        sccs,
        end: 0,
    }
}
/// Splits `main_side` at the element covering `side.end` and turns the
/// tail into a two-sided nested conflict: the original tail of
/// `main_side` versus `side`.
///
/// Panics (via `unwrap`) if no element of `main_side` covers
/// `side.end` — the caller guarantees one exists (it selected
/// `main_side` because its `sccs` contains `side.end`).
fn create_nested_conflict(main_side: &mut Path, side: Path) {
    // Index of the first element of `main_side` that reaches `side.end`,
    // either directly as an Scc element or inside a nested conflict.
    let end = main_side
        .path
        .iter()
        .position(|v| match v {
            PathElement::Scc { ref scc } => *scc == side.end,
            PathElement::Conflict { ref sides } => {
                sides.iter().any(|side_| side_.sccs.contains(&side.end))
            }
        })
        .unwrap();
    // Build the new tail: a placeholder conflict element followed by
    // everything of `main_side` from `end` onwards; the old head
    // (`side0`) becomes one of the conflict's sides.
    let mut v = vec![PathElement::Conflict { sides: Vec::new() }];
    v.extend(main_side.path.drain(end..));
    let side0 = std::mem::replace(&mut main_side.path, v);
    // Recompute the component set covered by the extracted head.
    let mut sccs0 = HashSet::new();
    for elt in side0.iter() {
        match *elt {
            PathElement::Scc { scc } => {
                sccs0.insert(scc);
            }
            PathElement::Conflict { ref sides } => {
                for side in sides {
                    for &scc in side.sccs.iter() {
                        sccs0.insert(scc);
                    }
                }
            }
        }
    }
    // `main_side` now also covers everything `side` covers.
    main_side.sccs.extend(side.sccs.iter().copied());
    // Fill in the placeholder: the conflict opposes the extracted head
    // and the newly absorbed side.
    main_side.path[0] = PathElement::Conflict {
        sides: vec![
            Path {
                path: side0,
                sccs: sccs0,
                end: side.end,
            },
            side,
        ],
    };
}
impl Graph {
    /// Collects into `forward` the pseudo-edges that realize the forward
    /// SCC pairs listed in `forward_scc`, skipping those whose source
    /// vertex the channel graph marks as deleted.
    pub(super) fn collect_forward_edges<T: GraphTxnT>(
        &self,
        txn: &T,
        channel: &T::Graph,
        scc: &Vector2<VertexId>,
        forward_scc: &HashSet<(usize, usize)>,
        forward: &mut Vec<(Vertex<ChangeId>, Edge)>,
    ) -> Result<(), TxnErr<T::GraphError>> {
        for &(a, b) in forward_scc.iter() {
            for cousin in scc[a].iter() {
                for &(edge, n_child) in self.children(*cousin) {
                    // Only edges landing in component `b` matter here.
                    if self[n_child].scc != b {
                        continue;
                    }
                    // Root/placeholder children carry no edge.
                    let edge = match edge {
                        Some(e) => e,
                        None => continue,
                    };
                    if !edge.flag.contains(EdgeFlags::PSEUDO) {
                        continue;
                    }
                    let source = Position {
                        change: self[*cousin].vertex.change,
                        pos: self[*cousin].vertex.start,
                    };
                    // Keep the edge only if its source is not deleted in
                    // the channel.
                    if !crate::pristine::test_edge(
                        txn,
                        channel,
                        source,
                        edge.dest,
                        EdgeFlags::DELETED,
                        EdgeFlags::DELETED,
                    )? {
                        forward.push((self[*cousin].vertex, edge))
                    }
                }
            }
        }
        Ok(())
    }
}
use super::{Graph, VertexId};
use crate::changestore::*;
use crate::pristine::{Base32, GraphTxnT, Position};
use std::collections::{HashMap, HashSet};
use std::io::Write;
impl Graph {
    /// Write a graph to an `std::io::Write` in GraphViz (dot) format.
    ///
    /// Each line of the graph becomes a node `n_<i>` labelled with its
    /// index, SCC number, vertex coordinates and (escaped, truncated)
    /// contents fetched from `changes`. Edges stored in `self.children`
    /// are always drawn; if `add_others` is true, edges found in the
    /// channel itself (via `iter_adj_all`) are drawn in red as well. If
    /// `introduced_by` is true, edge labels also show the change that
    /// introduced each edge.
    #[allow(dead_code)]
    pub fn debug<W: Write, T: GraphTxnT, P: ChangeStore>(
        &self,
        changes: &P,
        txn: &T,
        channel: &T::Graph,
        add_others: bool,
        introduced_by: bool,
        mut w: W,
    ) -> Result<(), std::io::Error> {
        writeln!(w, "digraph {{")?;
        let mut buf = Vec::new();
        let mut cache = HashMap::new();
        if add_others {
            // Map each vertex position to its line index so that channel
            // edges can be resolved to `n_<i>` node names below.
            for (line, i) in self.lines.iter().zip(0..) {
                cache.insert(
                    Position {
                        change: line.vertex.change,
                        pos: line.vertex.start,
                    },
                    i,
                );
            }
        }
        // Positions already emitted as stand-alone (red) nodes, so each
        // external destination gets a node declaration only once.
        let mut others = HashSet::new();
        for (line, i) in self.lines.iter().zip(0..) {
            changes
                .get_contents(|h| txn.get_external(h).unwrap(), line.vertex, &mut buf)
                .unwrap();
            let contents = &buf;
            // Produce an escaped string.
            let contents = format!(
                "{:?}",
                if let Ok(contents) = std::str::from_utf8(contents) {
                    // Truncate to 100 chars to keep labels readable.
                    contents.chars().take(100).collect()
                } else {
                    "<INVALID UTF8>".to_string()
                }
            );
            // Remove the quotes around the escaped string.
            let contents = contents.split_at(contents.len() - 1).0.split_at(1).1;
            writeln!(
                w,
                "n_{}[label=\"{}({}): {}.[{};{}[: {}\"];",
                i,
                i,
                line.scc,
                line.vertex.change.to_base32(),
                line.vertex.start.0,
                line.vertex.end.0,
                contents
            )?;
            if add_others && !line.vertex.is_root() {
                // Also draw (in red) the adjacencies recorded in the
                // channel itself.
                for v in crate::pristine::iter_adj_all(txn, &channel, line.vertex).unwrap() {
                    let v = v.unwrap();
                    if let Some(dest) = cache.get(&v.dest) {
                        // Destination is one of our lines: link to it.
                        writeln!(
                            w,
                            "n_{} -> n_{}[color=red,label=\"{:?}{}{}\"];",
                            i,
                            dest,
                            v.flag.bits(),
                            if introduced_by { " " } else { "" },
                            if introduced_by {
                                v.introduced_by.to_base32()
                            } else {
                                String::new()
                            }
                        )?;
                    } else {
                        // Destination is outside this graph: declare a
                        // dedicated node for it (once), then the edge.
                        if !others.contains(&v.dest) {
                            others.insert(v.dest);
                            writeln!(
                                w,
                                "n_{}_{}[label=\"{}.{}\",color=red];",
                                v.dest.change.to_base32(),
                                v.dest.pos.0,
                                v.dest.change.to_base32(),
                                v.dest.pos.0
                            )?;
                        }
                        writeln!(
                            w,
                            "n_{} -> n_{}_{}[color=red,label=\"{:?}{}{}\"];",
                            i,
                            v.dest.change.to_base32(),
                            v.dest.pos.0,
                            v.flag.bits(),
                            if introduced_by { " " } else { "" },
                            if introduced_by {
                                v.introduced_by.to_base32()
                            } else {
                                String::new()
                            }
                        )?;
                    }
                }
            }
            // Edges stored in this graph's own adjacency table.
            for &(edge, VertexId(j)) in
                &self.children[line.children..line.children + line.n_children]
            {
                if let Some(ref edge) = edge {
                    writeln!(
                        w,
                        "n_{}->n_{}[label=\"{:?}{}{}\"];",
                        i,
                        j,
                        edge.flag.bits(),
                        if introduced_by { " " } else { "" },
                        if introduced_by {
                            edge.introduced_by.to_base32()
                        } else {
                            String::new()
                        }
                    )?
                } else {
                    // Edge-less child slot: point at the root node.
                    writeln!(w, "n_{}->n_0[label=\"none\"];", i)?
                }
            }
        }
        writeln!(w, "}}")?;
        Ok(())
    }
    /// Like [`Graph::debug`], but without fetching line contents or
    /// channel edges: only node indices, SCC numbers, vertex coordinates
    /// and the graph's own adjacency table are written, so this needs no
    /// change store or transaction.
    #[allow(dead_code)]
    pub fn debug_raw<W: Write>(&self, mut w: W) -> Result<(), std::io::Error> {
        writeln!(w, "digraph {{")?;
        for (line, i) in self.lines.iter().zip(0..) {
            // Remove the quotes around the escaped string.
            writeln!(
                w,
                "n_{}[label=\"{}(scc {}): {}.[{};{}[\"];",
                i,
                i,
                line.scc,
                line.vertex.change.to_base32(),
                line.vertex.start.0,
                line.vertex.end.0,
            )?;
            for &(edge, VertexId(j)) in
                &self.children[line.children..line.children + line.n_children]
            {
                if let Some(ref edge) = edge {
                    writeln!(
                        w,
                        "n_{}->n_{}[label=\"{:?} {}\"];",
                        i,
                        j,
                        edge.flag.bits(),
                        edge.introduced_by.to_base32()
                    )?
                } else {
                    // Edge-less child slot: point at the root node.
                    writeln!(w, "n_{}->n_0[label=\"none\"];", i)?
                }
            }
        }
        writeln!(w, "}}")?;
        Ok(())
    }
}
[package]
name = "libpijul"
description = "Core library of Pijul, a distributed version control system based on a sound theory of collaborative work."
version = "1.0.0-alpha.28"
repository = "https://nest.pijul.com/pijul/libpijul"
documentation = "https://docs.rs/libpijul"
authors = ["Pierre-Étienne Meunier <pe@pijul.org>"]
edition = "2018"
license = "GPL-2.0-or-later"
include = [
"Cargo.toml",
"src",
"src/apply.rs",
"src/missing_context.rs",
"src/vector2.rs",
"src/path.rs",
"src/working_copy",
"src/working_copy/filesystem.rs",
"src/working_copy/mod.rs",
"src/working_copy/memory.rs",
"src/unrecord",
"src/unrecord/mod.rs",
"src/unrecord/working_copy.rs",
"src/record.rs",
"src/change.rs",
"src/change/change_file.rs",
"src/change/text_changes.rs",
"src/alive",
"src/alive/tarjan.rs",
"src/alive/debug.rs",
"src/alive/retrieve.rs",
"src/alive/dfs.rs",
"src/alive/mod.rs",
"src/alive/output.rs",
"src/fs.rs",
"src/vertex_buffer.rs",
"src/changestore",
"src/changestore/filesystem.rs",
"src/changestore/mod.rs",
"src/changestore/memory.rs",
"src/small_string.rs",
"src/pristine",
"src/pristine/path_id.rs",
"src/pristine/block.rs",
"src/pristine/edge.rs",
"src/pristine/merkle.rs",
"src/pristine/channel_dump.rs",
"src/pristine/patch_id.rs",
"src/pristine/inode_metadata.rs",
"src/pristine/inode.rs",
"src/pristine/sanakirja.rs",
"src/pristine/mod.rs",
"src/pristine/vertex.rs",
"src/pristine/hash.rs",
"src/pristine/change_id.rs",
"src/pristine/inode_vertex.rs",
"src/find_alive.rs",
"src/tests",
"src/tests/performance.rs",
"src/tests/file_conflicts.rs",
"src/tests/filesystem.rs",
"src/tests/missing_context.rs",
"src/tests/conflict.rs",
"src/tests/clone.rs",
"src/tests/change.rs",
"src/tests/unrecord.rs",
"src/tests/partial.rs",
"src/tests/rm_file.rs",
"src/tests/mod.rs",
"src/tests/add_file.rs",
"src/tests/patch.rs",
"src/output",
"src/output/mod.rs",
"src/output/archive.rs",
"src/output/output.rs",
"src/diff",
"src/diff/replace.rs",
"src/diff/split.rs",
"src/diff/diff.rs",
"src/diff/mod.rs",
"src/diff/delete.rs",
"src/diff/vertex_buffer.rs",
"src/lib.rs"
]
[features]
ondisk-repos = [ "mmap", "zstd", "ignore", "canonical-path", "lru-cache", "tempfile" ]
mmap = [ "sanakirja/mmap" ]
zstd = [ "zstd-seekable" ]
text-changes = [ "regex" ]
dump = [ "tokio" ]
default = [ "ondisk-repos", "text-changes", "dump" ]
tarball = [ "tar", "flate2" ]
[dependencies]
sanakirja = { version = "0.15.8", features = [ "crc32" ] }
byteorder = "1.3"
log = "0.4"
serde = "1.0"
serde_derive = "1.0"
bitflags = "1.2"
thiserror = "1.0"
rand = "0.8"
blake3 = "0.3"
chrono = { version = "0.4", features = ["serde"] }
pijul-macros = { path = "../pijul-macros", version = "0.3.0" }
bincode = "1.3"
data-encoding = "2.3"
diffs = "0.4"
toml = "0.5"
lazy_static = "1.4"
zstd-seekable = { version = "0.1.7", optional = true }
regex = { version = "1.4", optional = true }
tokio = { version = "1.0", optional = true, features = ["io-util"] }
curve25519-dalek = { version = "3", features = [ "serde" ] }
ignore = { version = "0.4", optional = true }
tar = { version = "0.4", optional = true }
flate2 = { version = "1.0", optional = true }
canonical-path = { version = "2.0", optional = true }
lru-cache = { version = "0.1", optional = true }
tempfile = { version = "3.1", optional = true }
[dev-dependencies]
env_logger = "0.8"
anyhow = "1.0"
{
description = "pijul, the sound distributed version control system";
inputs.nixpkgs.url = "github:nixos/nixpkgs/nixos-20.09";
inputs.mozilla = { url = "github:mozilla/nixpkgs-mozilla"; flake = false; };
outputs =
{ self
, nixpkgs
, mozilla
, ...
} @ inputs:
let
nameValuePair = name: value: { inherit name value; };
genAttrs = names: f: builtins.listToAttrs (map (n: nameValuePair n (f n)) names);
allSystems = [ "x86_64-linux" "aarch64-linux" "i686-linux" "x86_64-darwin" ];
rustOverlay = final: prev:
let
rustChannel = prev.rustChannelOf {
channel = "1.49.0";
sha256 = "sha256-KCh2UBGtdlBJ/4UOqZlxUtcyefv7MH1neoVNV4z0nWs=";
};
in
{
inherit rustChannel;
rustc = rustChannel.rust;
cargo = rustChannel.rust;
};
forAllSystems = f: genAttrs allSystems (system: f {
inherit system;
pkgs = import nixpkgs {
inherit system;
overlays = [
(import "${mozilla}/rust-overlay.nix")
rustOverlay
];
};
});
in
{
devShell = forAllSystems ({ system, pkgs, ... }:
pkgs.mkShell {
name = "pijul";
inputsFrom = [ self.packages.${system}.pijul-git ];
# Eventually crate2nix will provide a devShell that includes transitive dependencies for us.
# https://github.com/kolloch/crate2nix/issues/111
buildInputs = with pkgs; [
pkg-config
clang
openssl
# rustChannel.rust provides tools like clippy, rustfmt, cargo,
# rust-analyzer, rustc, and more.
(rustChannel.rust.override { extensions = [ "rust-src" ]; })
crate2nix
];
LIBCLANG_PATH = "${pkgs.llvmPackages.libclang}/lib";
});
packages = forAllSystems
({ system, pkgs, ... }:
let
pijul =
let
cargoNix = import ./Cargo.nix {
inherit pkgs;
defaultCrateOverrides = pkgs.defaultCrateOverrides // {
zstd-seekable = { ... }: {
nativeBuildInputs = [ pkgs.clang ]
++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcbuild ];
LIBCLANG_PATH = "${pkgs.llvmPackages.libclang}/lib";
};
blake3 = attr: {
nativeBuildInputs = pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcbuild ];
};
pijul = { ... }: {
buildInputs = with pkgs; [
xxHash
zstd
libsodium
] ++ lib.optionals stdenv.isDarwin (
[ openssl ]
++ (with darwin.apple_sdk.frameworks; [
CoreServices
Security
SystemConfiguration
]));
};
};
};
in
cargoNix.workspaceMembers.pijul.build;
in
{
inherit pijul;
pijul-git = pijul.override { features = [ "git" ]; };
});
defaultPackage = forAllSystems ({ system, ... }: self.packages.${system}.pijul);
};
}
{
"nodes": {
"mozilla": {
"flake": false,
"locked": {
"lastModified": 1603906276,
"narHash": "sha256-RsNPnEKd7BcogwkqhaV5kI/HuNC4flH/OQCC/4W5y/8=",
"owner": "mozilla",
"repo": "nixpkgs-mozilla",
"rev": "8c007b60731c07dd7a052cce508de3bb1ae849b4",
"type": "github"
},
"original": {
"owner": "mozilla",
"repo": "nixpkgs-mozilla",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1606344557,
"narHash": "sha256-IVIMVKWNNt6VODiLfINaW6sfrWPw5ZT91GQMgLUl7ZA=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "ffb3aab257e8851b558cdc6079241a7eb0c7239e",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-20.09",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"mozilla": "mozilla",
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}
(import
(fetchTarball {
url = "https://github.com/edolstra/flake-compat/archive/c71d063a2fce96001c7ef99b301dfbd0c29ebde1.tar.gz";
sha256 = "0vnbhqf0lc4mf2zmzqbfv6kqj9raijxz8xfaimxihz3c6s5gma2x";
})
{ src = ./.; }).defaultNix
# Pijul
This is the repository of Pijul, a sound and fast distributed version control system based on a mathematical theory of asynchronous work.
## License
The license is GPL-2.0.
## Documentation
While we are working on documentation, here are a few useful commands:
### Create a repository
~~~
$ pijul init
~~~
### Add files
If you want to track all the files in a folder, and record that change, do:
~~~
$ pijul rec that_folder
~~~
If you want to add files to track:
~~~
$ pijul add these_files
~~~
### Make a change
Pijul is based on changes, so perhaps the most important command is the one that creates them:
~~~
$ pijul rec
~~~
You will be presented with a change draft, which you can approve or edit by deleting sections, where sections are introduced by a number (of the form `1.`) followed by the name of an operation (example: `File addition: "my_file" in "/" 420`).
You can of course try other edits, but they are not guaranteed to work.
### Collaborate
A *remote* is the reference to another repository, for example `pijul@nest.pijul.com:manual` for the manual repository, or `me@nest.pijul.com:pijul/manual`, `https://nest.pijul.com/pijul/manual`, or a local path `/path/to/my/repository`.
The `remote` command allows one to view the saved remotes and possibly delete them.
The `push` and `pull` commands exchange changes with remotes.
Cloning a repository needs a target directory at the moment, or else takes the current directory as the target:
~~~
$ pijul clone https://nest.pijul.com/pijul/pijul
~~~
Hint: clones over SSH are almost always faster.
### Going back in time
If you want to reset your files to the last recorded version, just do:
~~~
$ pijul reset
~~~
If you want to remove some changes from the history:
~~~
$ pijul unrecord PREFIX_OF_CHANGE_HASH
~~~
Where `PREFIX_OF_CHANGE_HASH` is an unambiguous prefix of a change hash, which can be found by doing `pijul log`.
### Import a Git repository
If you have compiled Pijul with `--features git`, the `git` command allows one to import changes from a Git repository. This works by replaying the repository history, and is not particularly optimised, hence it may take a long time on large repositories.
One feature currently missing from that command is symbolic links, which are treated as regular files (i.e. the same file might get imported multiple times).
### About channels
Channels are a way to maintain two related versions of a repository in the same place (a bit like branches in Git).
Formally, a channel is a pointer to a set of changes (the *state* of a channel is a set of changes).
However, channels are different from Git branches, and do not serve the same purpose. In Pijul, **independent changes commute**, which means that in many cases where branches are used in Git, there is no need to create a channel in Pijul.
The main differences with Git branches are:
- The identity of a change doesn't depend on the branch it is on, or in other words, rebase and merge are the same operation in Pijul.
- This implies that conflicts do not mysteriously come back after you solve them (which is what `git rerere` is for).
- Also, conflicts are between changes, so the resolution of a conflict on one channel solves the same conflict in all other channels.
## Contributing
We welcome all contributions. Moreover, as this project aims at making it easier to collaborate with others (we're getting there), we obviously value mutual respect and inclusiveness above anything else.
Moreover, since this is a Rust project, we ask contributors to run `cargo fmt` on their code before recording changes. This can be done automatically by adding the following lines to the repository's `.pijul/config`:
```
[hooks]
record = [ "cargo fmt" ]
```
[workspace]
members = [ "pijul-macros", "pijul", "libpijul" ]
# This file was @generated by crate2nix 0.8.0 with the command:
# "generate"
# See https://github.com/kolloch/crate2nix for more info.
{ nixpkgs ? <nixpkgs>
, pkgs ? import nixpkgs { config = {}; }
, lib ? pkgs.lib
, stdenv ? pkgs.stdenv
, buildRustCrate ? pkgs.buildRustCrate
# This is used as the `crateOverrides` argument for `buildRustCrate`.
, defaultCrateOverrides ? pkgs.defaultCrateOverrides
# The features to enable for the root_crate or the workspace_members.
, rootFeatures ? [ "default" ]
# If true, throw errors instead of issuing deprecation warnings.
, strictDeprecation ? false
# Whether to perform release builds: longer compile times, faster binaries.
, release ? true
}:
rec {
#
# "public" attributes that we attempt to keep stable with new versions of crate2nix.
#
# Refer your crate build derivation by name here.
# You can override the features with
# workspaceMembers."${crateName}".build.override { features = [ "default" "feature1" ... ]; }.
workspaceMembers = {
"libpijul" = rec {
packageId = "libpijul";
build = internal.buildRustCrateWithFeatures {
packageId = "libpijul";
};
# Debug support which might change between releases.
# File a bug if you depend on any for non-debug work!
debug = internal.debugCrate { inherit packageId; };
};
"pijul" = rec {
packageId = "pijul";
build = internal.buildRustCrateWithFeatures {
packageId = "pijul";
};
# Debug support which might change between releases.
# File a bug if you depend on any for non-debug work!
debug = internal.debugCrate { inherit packageId; };
};
"pijul-macros" = rec {
packageId = "pijul-macros";
build = internal.buildRustCrateWithFeatures {
packageId = "pijul-macros";
};
# Debug support which might change between releases.
# File a bug if you depend on any for non-debug work!
debug = internal.debugCrate { inherit packageId; };
};
};
workspace_members =
internal.deprecationWarning
"workspace_members is deprecated in crate2nix 0.4. Please use workspaceMembers instead."
lib.mapAttrs (n: v: v.build) workspaceMembers;
#
# "internal" ("private") attributes that may change in every new version of crate2nix.
#
internal = rec {
# Build and dependency information for crates.
# Many of the fields are passed one-to-one to buildRustCrate.
#
# Noteworthy:
# * `dependencies`/`buildDependencies`: similar to the corresponding fields for buildRustCrate.
# but with additional information which is used during dependency/feature resolution.
# * `resolvedDependencies`: the selected default features reported by cargo - only included for debugging.
# * `devDependencies` as of now not used by `buildRustCrate` but used to
# inject test dependencies into the build
crates = {
"addr2line" = rec {
crateName = "addr2line";
version = "0.14.1";
edition = "2015";
sha256 = "1xs5bsi40zpyxbbylyaysya5h36ykcbg91i82415sxw5wk7q4px5";
authors = [
"Nick Fitzgerald <fitzgen@gmail.com>"
"Philip Craig <philipjcraig@gmail.com>"
"Jon Gjengset <jon@thesquareplanet.com>"
"Noah Bergbauer <noah.bergbauer@tum.de>"
];
dependencies = [
{
name = "gimli";
packageId = "gimli";
usesDefaultFeatures = false;
features = [ "read" ];
}
];
features = {
"default" = [ "rustc-demangle" "cpp_demangle" "std-object" "fallible-iterator" "smallvec" ];
"rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins" "gimli/rustc-dep-of-std" ];
"std" = [ "gimli/std" ];
"std-object" = [ "std" "object" "object/std" "object/compression" "gimli/endian-reader" ];
};
};
"adler" = rec {
crateName = "adler";
version = "0.2.3";
edition = "2015";
sha256 = "0zpdsrfq5bd34941gmrlamnzjfbsx0x586afb7b0jqhr8g1lwapf";
authors = [
"Jonas Schievink <jonasschievink@gmail.com>"
];
features = {
"default" = [ "std" ];
"rustc-dep-of-std" = [ "core" "compiler_builtins" ];
};
};
"aho-corasick" = rec {
crateName = "aho-corasick";
version = "0.7.15";
edition = "2015";
sha256 = "1rb8gzhljl8r87dpf2n5pnqnkl694casgns4ma0sqzd4zazzw13l";
libName = "aho_corasick";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "memchr";
packageId = "memchr";
usesDefaultFeatures = false;
}
];
features = {
"default" = [ "std" ];
"std" = [ "memchr/use_std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"anyhow" = rec {
crateName = "anyhow";
version = "1.0.37";
edition = "2018";
sha256 = "11kaqp25lchr2ckyc46zm6blzndnw0w2w8qv0sp8z4xcxqgw2rzf";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"arrayref" = rec {
crateName = "arrayref";
version = "0.3.6";
edition = "2015";
sha256 = "0i6m1l3f73i0lf0cjdf5rh3xpvxydyhfbakq7xx7bkrp5qajgid4";
authors = [
"David Roundy <roundyd@physics.oregonstate.edu>"
];
};
"arrayvec" = rec {
crateName = "arrayvec";
version = "0.5.2";
edition = "2018";
sha256 = "12q6hn01x5435bprwlb7w9m7817dyfq55yrl4psygr78bp32zdi3";
authors = [
"bluss"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "array-sizes-33-128" ];
};
"atty" = rec {
crateName = "atty";
version = "0.2.14";
edition = "2015";
sha256 = "1s7yslcs6a28c5vz7jwj63lkfgyx8mx99fdirlhi9lbhhzhrpcyr";
authors = [
"softprops <d.tangren@gmail.com>"
];
dependencies = [
{
name = "hermit-abi";
packageId = "hermit-abi";
target = { target, features }: (target."os" == "hermit");
}
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "consoleapi" "processenv" "minwinbase" "minwindef" "winbase" ];
}
];
};
"autocfg" = rec {
crateName = "autocfg";
version = "1.0.1";
edition = "2015";
sha256 = "0jj6i9zn4gjl03kjvziqdji6rwx8ykz8zk2ngpc331z2g3fk3c6d";
authors = [
"Josh Stone <cuviper@gmail.com>"
];
};
"backtrace" = rec {
crateName = "backtrace";
version = "0.3.55";
edition = "2018";
sha256 = "161mc9ii12n7pha3m4ggrj3k4jxam2474jsdpfdizc459hs40lgg";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "addr2line";
packageId = "addr2line";
optional = true;
usesDefaultFeatures = false;
}
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
}
{
name = "miniz_oxide";
packageId = "miniz_oxide";
optional = true;
usesDefaultFeatures = false;
}
{
name = "object";
packageId = "object";
optional = true;
usesDefaultFeatures = false;
features = [ "read_core" "elf" "macho" "pe" "unaligned" "archive" ];
}
{
name = "rustc-demangle";
packageId = "rustc-demangle";
}
];
features = {
"default" = [ "std" "gimli-symbolize" ];
"gimli-symbolize" = [ "addr2line" "miniz_oxide" "object" ];
"libbacktrace" = [ "backtrace-sys/backtrace-sys" ];
"serialize-rustc" = [ "rustc-serialize" ];
"serialize-serde" = [ "serde" ];
"verify-winapi" = [ "winapi/dbghelp" "winapi/handleapi" "winapi/libloaderapi" "winapi/memoryapi" "winapi/minwindef" "winapi/processthreadsapi" "winapi/synchapi" "winapi/tlhelp32" "winapi/winbase" "winapi/winnt" ];
};
resolvedDefaultFeatures = [ "addr2line" "default" "gimli-symbolize" "miniz_oxide" "object" "std" ];
};
"base64" = rec {
crateName = "base64";
version = "0.13.0";
edition = "2018";
sha256 = "1z82g23mbzjgijkpcrilc7nljpxpvpf7zxf6iyiapkgka2ngwkch";
authors = [
"Alice Maz <alice@alicemaz.com>"
"Marshall Pierce <marshall@mpierce.org>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"bincode" = rec {
crateName = "bincode";
version = "1.3.1";
edition = "2015";
sha256 = "0vc9pjh6hfp9vfq752sa88rxwg93ydhm0dvvy58rcvx2p8wkl3gk";
authors = [
"Ty Overby <ty@pre-alpha.com>"
"Francesco Mazzoli <f@mazzo.li>"
"David Tolnay <dtolnay@gmail.com>"
"Zoey Riordan <zoey@dos.cafe>"
];
dependencies = [
{
name = "byteorder";
packageId = "byteorder";
}
{
name = "serde";
packageId = "serde";
}
];
features = {
};
};
"bit-vec" = rec {
crateName = "bit-vec";
version = "0.6.3";
edition = "2015";
sha256 = "1ywqjnv60cdh1slhz67psnp422md6jdliji6alq0gmly2xm9p7rl";
authors = [
"Alexis Beingessner <a.beingessner@gmail.com>"
];
features = {
"default" = [ "std" ];
"serde_no_std" = [ "serde/alloc" ];
"serde_std" = [ "std" "serde/std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"bitflags" = rec {
crateName = "bitflags";
version = "1.2.1";
edition = "2015";
sha256 = "14qnd5nq8p2almk79m4m8ydqhd413yaxsyjp5xd19g3mikzf47fg";
authors = [
"The Rust Project Developers"
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"blake2b_simd" = rec {
crateName = "blake2b_simd";
version = "0.5.11";
edition = "2018";
sha256 = "11y5nm06lpypz65dbxgncs12ckx24i5i4a777ckfhfxd93ili9xg";
authors = [
"Jack O'Connor"
];
dependencies = [
{
name = "arrayref";
packageId = "arrayref";
}
{
name = "arrayvec";
packageId = "arrayvec";
usesDefaultFeatures = false;
}
{
name = "constant_time_eq";
packageId = "constant_time_eq";
}
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"blake3" = rec {
crateName = "blake3";
version = "0.3.7";
edition = "2018";
sha256 = "0gwxy0vbjf8nwjc32n5jb0lfyrn74bc6hcxdizdlp4gk06vkbzz9";
authors = [
"Jack O'Connor <oconnor663@gmail.com>"
];
dependencies = [
{
name = "arrayref";
packageId = "arrayref";
}
{
name = "arrayvec";
packageId = "arrayvec";
usesDefaultFeatures = false;
features = [ "array-sizes-33-128" ];
}
{
name = "cfg-if";
packageId = "cfg-if 0.1.10";
}
{
name = "constant_time_eq";
packageId = "constant_time_eq";
}
{
name = "crypto-mac";
packageId = "crypto-mac";
}
{
name = "digest";
packageId = "digest";
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
}
];
features = {
"default" = [ "std" ];
"std" = [ "digest/std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"bstr" = rec {
crateName = "bstr";
version = "0.2.14";
edition = "2015";
sha256 = "1bwzbln4c0ckf7iwh8cfbgcy72d3vi95iywlm9xszy9kharwcgs7";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "memchr";
packageId = "memchr";
usesDefaultFeatures = false;
}
];
features = {
"default" = [ "std" "unicode" ];
"serde1" = [ "std" "serde1-nostd" "serde/std" ];
"serde1-nostd" = [ "serde" ];
"std" = [ "memchr/use_std" ];
"unicode" = [ "lazy_static" "regex-automata" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"bumpalo" = rec {
crateName = "bumpalo";
version = "3.4.0";
edition = "2018";
sha256 = "082qig1vxlklb9qwkg3j4hpfxb4b5blm59ln21njfc2p01zhi31f";
authors = [
"Nick Fitzgerald <fitzgen@gmail.com>"
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"byteorder" = rec {
crateName = "byteorder";
version = "1.3.4";
edition = "2015";
sha256 = "1pkjfhgjnq898g1d38ygcfi0msg3m6756cwv0sgysj1d26p8mi08";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "i128" "std" ];
};
"bytes 0.5.6" = rec {
crateName = "bytes";
version = "0.5.6";
edition = "2018";
sha256 = "0f5s7xq6qzmdh22ygsy8v0sp02m51y0radvq4i4y8cizy1lfqk0f";
authors = [
"Carl Lerche <me@carllerche.com>"
"Sean McArthur <sean@seanmonstar.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"bytes 1.0.0" = rec {
crateName = "bytes";
version = "1.0.0";
edition = "2018";
sha256 = "0wpsy2jwmbrsn7x6vcd00hw9vvz071lv8nrb25wrspvmkna8w7xd";
authors = [
"Carl Lerche <me@carllerche.com>"
"Sean McArthur <sean@seanmonstar.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"canonical-path" = rec {
crateName = "canonical-path";
version = "2.0.2";
edition = "2018";
sha256 = "0vvsjda6ka5nz8zvx6r08zqi0j59sjccgcbjxj96xj764w9y1sg6";
authors = [
"Tony Arcieri <tony@iqlusion.io>"
];
};
"cc" = rec {
crateName = "cc";
version = "1.0.66";
edition = "2018";
crateBin = [];
sha256 = "0j7d7h4n81z5f22l3v8ggjvvw8m64636nlaqax4x1y44da1rc12c";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "jobserver";
packageId = "jobserver";
optional = true;
}
];
features = {
"parallel" = [ "jobserver" ];
};
resolvedDefaultFeatures = [ "jobserver" "parallel" ];
};
"cfg-if 0.1.10" = rec {
crateName = "cfg-if";
version = "0.1.10";
edition = "2018";
sha256 = "08h80ihs74jcyp24cd75wwabygbbdgl05k6p5dmq8akbr78vv1a7";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"rustc-dep-of-std" = [ "core" "compiler_builtins" ];
};
};
"cfg-if 1.0.0" = rec {
crateName = "cfg-if";
version = "1.0.0";
edition = "2018";
sha256 = "1za0vb97n4brpzpv8lsbnzmq5r8f2b0cpqqr0sy8h5bn751xxwds";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"rustc-dep-of-std" = [ "core" "compiler_builtins" ];
};
};
"chrono" = rec {
crateName = "chrono";
version = "0.4.19";
edition = "2015";
sha256 = "0wyfl6c00vhfl562spnfcna3zkw8jqvcp652m9iskhl8j26dc2k7";
authors = [
"Kang Seonghoon <public+rust@mearie.org>"
"Brandon W Maister <quodlibetor@gmail.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
optional = true;
}
{
name = "num-integer";
packageId = "num-integer";
usesDefaultFeatures = false;
}
{
name = "num-traits";
packageId = "num-traits";
usesDefaultFeatures = false;
}
{
name = "serde";
packageId = "serde";
optional = true;
usesDefaultFeatures = false;
}
{
name = "time";
packageId = "time";
optional = true;
}
{
name = "winapi";
packageId = "winapi";
optional = true;
target = { target, features }: target."windows";
features = [ "std" "minwinbase" "minwindef" "timezoneapi" ];
}
];
features = {
"clock" = [ "libc" "std" "winapi" ];
"default" = [ "clock" "std" "oldtime" ];
"oldtime" = [ "time" ];
"unstable-locales" = [ "pure-rust-locales" "alloc" ];
"wasmbind" = [ "wasm-bindgen" "js-sys" ];
};
resolvedDefaultFeatures = [ "clock" "default" "libc" "oldtime" "serde" "std" "time" "winapi" ];
};
"clap" = rec {
crateName = "clap";
version = "3.0.0-beta.2";
edition = "2018";
sha256 = "0hm1kivw6190rxbfqhdr4hqwlrijvwh90i3d9dyyw0d5k0chdlab";
authors = [
"Kevin K. <kbknapp@gmail.com>"
"Clap Maintainers"
];
dependencies = [
{
name = "atty";
packageId = "atty";
optional = true;
}
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "clap_derive";
packageId = "clap_derive";
optional = true;
}
{
name = "indexmap";
packageId = "indexmap";
}
{
name = "lazy_static";
packageId = "lazy_static";
optional = true;
}
{
name = "os_str_bytes";
packageId = "os_str_bytes";
features = [ "raw" ];
}
{
name = "strsim";
packageId = "strsim";
optional = true;
}
{
name = "termcolor";
packageId = "termcolor";
optional = true;
}
{
name = "textwrap";
packageId = "textwrap";
}
{
name = "unicode-width";
packageId = "unicode-width";
}
{
name = "vec_map";
packageId = "vec_map";
}
];
devDependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
}
];
features = {
"cargo" = [ "lazy_static" ];
"color" = [ "atty" "termcolor" ];
"debug" = [ "clap_derive/debug" ];
"default" = [ "suggestions" "color" "derive" "std" "cargo" ];
"derive" = [ "clap_derive" "lazy_static" ];
"doc" = [ "yaml" "regex" ];
"suggestions" = [ "strsim" ];
"unstable" = [ "clap_derive/unstable" ];
"wrap_help" = [ "terminal_size" "textwrap/terminal_size" ];
"yaml" = [ "yaml-rust" ];
};
resolvedDefaultFeatures = [ "atty" "cargo" "clap_derive" "color" "default" "derive" "lazy_static" "std" "strsim" "suggestions" "termcolor" ];
};
"clap_derive" = rec {
crateName = "clap_derive";
version = "3.0.0-beta.2";
edition = "2018";
sha256 = "18cn82jhcha7m0nkpi1a03jx8k7aaq5kxfcxnsqpaa8ih5dp23rp";
procMacro = true;
authors = [
"Guillaume Pinot <texitoi@texitoi.eu>"
"Clap Maintainers"
];
dependencies = [
{
name = "heck";
packageId = "heck";
}
{
name = "proc-macro-error";
packageId = "proc-macro-error";
}
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" ];
}
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"console" = rec {
crateName = "console";
version = "0.14.0";
edition = "2018";
sha256 = "1ajnr0rga4vya0fza12ighf3ffkm86w1rv8p5wf443s8nd30kj3w";
authors = [
"Armin Ronacher <armin.ronacher@active-4.com>"
];
dependencies = [
{
name = "encode_unicode";
packageId = "encode_unicode";
target = { target, features }: target."windows";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "libc";
packageId = "libc";
}
{
name = "regex";
packageId = "regex";
optional = true;
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "terminal_size";
packageId = "terminal_size";
}
{
name = "unicode-width";
packageId = "unicode-width";
optional = true;
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "winbase" "winuser" "consoleapi" "processenv" "wincon" ];
}
];
features = {
"ansi-parsing" = [ "regex" ];
"default" = [ "unicode-width" "ansi-parsing" ];
"windows-console-colors" = [ "ansi-parsing" "winapi-util" ];
};
resolvedDefaultFeatures = [ "ansi-parsing" "default" "regex" "unicode-width" ];
};
"constant_time_eq" = rec {
crateName = "constant_time_eq";
version = "0.1.5";
edition = "2015";
sha256 = "1g3vp04qzmk6cpzrd19yci6a95m7ap6wy7wkwgiy2pjklklrfl14";
authors = [
"Cesar Eduardo Barros <cesarb@cesarb.eti.br>"
];
};
"core-foundation" = rec {
crateName = "core-foundation";
version = "0.9.1";
edition = "2015";
sha256 = "0qhackx0i914nbhcwi6bbxnyyqqldgxc046gviak3a3f8apf528a";
authors = [
"The Servo Project Developers"
];
dependencies = [
{
name = "core-foundation-sys";
packageId = "core-foundation-sys";
}
{
name = "libc";
packageId = "libc";
}
];
features = {
"mac_os_10_7_support" = [ "core-foundation-sys/mac_os_10_7_support" ];
"mac_os_10_8_features" = [ "core-foundation-sys/mac_os_10_8_features" ];
"with-chrono" = [ "chrono" ];
"with-uuid" = [ "uuid" ];
};
};
"core-foundation-sys" = rec {
crateName = "core-foundation-sys";
version = "0.8.2";
edition = "2015";
sha256 = "06wq7yb7mlkc4h2kbc0yjfi0xv44z4snzdpr7c1l0zm4hi91n8pa";
authors = [
"The Servo Project Developers"
];
features = {
};
};
"crc32fast" = rec {
crateName = "crc32fast";
version = "1.2.1";
edition = "2015";
sha256 = "06ivjlkzcxxxk7nyshc44aql4zjpmvirq46vmzrakdjax3n6y5c1";
authors = [
"Sam Rijs <srijs@airpost.net>"
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"crossbeam-utils" = rec {
crateName = "crossbeam-utils";
version = "0.8.1";
edition = "2018";
sha256 = "13fvrqlap7bgvlnpqr5gjcxdhx1jv99pkfg5xdlq5xcy30g6vn82";
authors = [
"The Crossbeam Project Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "lazy_static";
packageId = "lazy_static";
optional = true;
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"default" = [ "std" ];
"std" = [ "lazy_static" ];
};
resolvedDefaultFeatures = [ "default" "lazy_static" "std" ];
};
"crypto-mac" = rec {
crateName = "crypto-mac";
version = "0.8.0";
edition = "2018";
sha256 = "1axfs4zmy74rn9666p92j7nmcv11zdp2d51yrppc2dv26cqa715m";
authors = [
"RustCrypto Developers"
];
dependencies = [
{
name = "generic-array";
packageId = "generic-array";
}
{
name = "subtle";
packageId = "subtle";
usesDefaultFeatures = false;
}
];
features = {
"dev" = [ "blobby" ];
};
};
"cryptovec" = rec {
crateName = "cryptovec";
version = "0.6.0";
edition = "2015";
sha256 = "1mbsxcm91ndkcb2xcvhq34a1wmnf2ggz1ff1b9dc4kgmrf7srgz4";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
{
name = "winapi";
packageId = "winapi";
features = [ "basetsd" "minwindef" "memoryapi" ];
}
];
};
"ctrlc" = rec {
crateName = "ctrlc";
version = "3.1.7";
edition = "2015";
sha256 = "027bzbddk0wg7s2wnbgcwx2iv6bjzvxfpp0s2xdg444yfklr4ymm";
authors = [
"Antti Keränen <detegr@gmail.com>"
];
dependencies = [
{
name = "nix";
packageId = "nix";
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "consoleapi" "handleapi" "synchapi" "winbase" ];
}
];
devDependencies = [
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "fileapi" "processenv" "winnt" ];
}
];
features = {
};
};
"curve25519-dalek" = rec {
crateName = "curve25519-dalek";
version = "3.0.0";
edition = "2015";
sha256 = "01xknhlwagv601k6125372vr0lw2j6xjsvnnl74hprp943j2sjf8";
authors = [
"Isis Lovecruft <isis@patternsinthevoid.net>"
"Henry de Valence <hdevalence@hdevalence.ca>"
];
dependencies = [
{
name = "byteorder";
packageId = "byteorder";
usesDefaultFeatures = false;
features = [ "i128" ];
}
{
name = "digest";
packageId = "digest";
usesDefaultFeatures = false;
}
{
name = "rand_core";
packageId = "rand_core 0.5.1";
usesDefaultFeatures = false;
}
{
name = "serde";
packageId = "serde";
optional = true;
usesDefaultFeatures = false;
features = [ "derive" ];
}
{
name = "subtle";
packageId = "subtle";
usesDefaultFeatures = false;
}
{
name = "zeroize";
packageId = "zeroize";
usesDefaultFeatures = false;
}
];
features = {
"alloc" = [ "zeroize/alloc" ];
"avx2_backend" = [ "simd_backend" ];
"default" = [ "std" "u64_backend" ];
"nightly" = [ "subtle/nightly" ];
"simd_backend" = [ "nightly" "u64_backend" "packed_simd" ];
"std" = [ "alloc" "subtle/std" "rand_core/std" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "serde" "std" "u64_backend" ];
};
"data-encoding" = rec {
crateName = "data-encoding";
version = "2.3.1";
edition = "2018";
sha256 = "027rcrwdschrkdr2n9d24gnh03vl41qmvhjqn9vn6z1njy2n0flr";
authors = [
"Julien Cretin <git@ia0.eu>"
];
features = {
"default" = [ "std" ];
"std" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "std" ];
};
"diffs" = rec {
crateName = "diffs";
version = "0.4.0";
edition = "2015";
sha256 = "18l1w5plhhsj8v8mxckw5ikbhvra1096p4n0kli5wivvya14akmv";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
};
"digest" = rec {
crateName = "digest";
version = "0.9.0";
edition = "2018";
sha256 = "0rmhvk33rgvd6ll71z8sng91a52rw14p0drjn1da0mqa138n1pfk";
authors = [
"RustCrypto Developers"
];
dependencies = [
{
name = "generic-array";
packageId = "generic-array";
}
];
features = {
"dev" = [ "blobby" ];
"std" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" "std" ];
};
"dirs" = rec {
crateName = "dirs";
version = "3.0.1";
edition = "2015";
sha256 = "1zxrb3anxsh80mnp2il7awccv0s5gvy7djn6gis18nbm0bnraa8l";
authors = [
"Simon Ochsenreither <simon@ochsenreither.de>"
];
dependencies = [
{
name = "dirs-sys";
packageId = "dirs-sys";
}
];
};
"dirs-next" = rec {
crateName = "dirs-next";
version = "2.0.0";
edition = "2018";
sha256 = "1q9kr151h9681wwp6is18750ssghz6j9j7qm7qi1ngcwy7mzi35r";
authors = [
"The @xdg-rs members"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "dirs-sys-next";
packageId = "dirs-sys-next";
}
];
};
"dirs-sys" = rec {
crateName = "dirs-sys";
version = "0.3.5";
edition = "2015";
sha256 = "0ym5843xack45b1yjahrh3q2f72shnwf1dd2jncf9qsxf3sxg4wf";
authors = [
"Simon Ochsenreither <simon@ochsenreither.de>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "redox_users";
packageId = "redox_users";
target = { target, features }: (target."os" == "redox");
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "knownfolders" "objbase" "shlobj" "winbase" "winerror" ];
}
];
};
"dirs-sys-next" = rec {
crateName = "dirs-sys-next";
version = "0.1.1";
edition = "2018";
sha256 = "0zgy7is3h2dyf1l4sa7k065w2kvx0l12l40my4rswm2mc1gkdplr";
authors = [
"The @xdg-rs members"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "redox_users";
packageId = "redox_users";
usesDefaultFeatures = false;
target = { target, features }: (target."os" == "redox");
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "knownfolders" "objbase" "shlobj" "winbase" "winerror" ];
}
];
};
"edit" = rec {
crateName = "edit";
version = "0.1.2";
edition = "2018";
crateBin = [];
sha256 = "1z1vvd07i7f042a5ca217v2ic70m2mw6wvdli355lvxsgr234c1j";
authors = [
"Milkey Mouse <milkeymouse@meme.institute>"
];
dependencies = [
{
name = "tempfile";
packageId = "tempfile";
}
{
name = "which";
packageId = "which";
optional = true;
usesDefaultFeatures = false;
}
];
features = {
"better-path" = [ "which" ];
"default" = [ "better-path" ];
};
resolvedDefaultFeatures = [ "better-path" "default" "which" ];
};
"encode_unicode" = rec {
crateName = "encode_unicode";
version = "0.3.6";
edition = "2015";
sha256 = "07w3vzrhxh9lpjgsg2y5bwzfar2aq35mdznvcp3zjl0ssj7d4mx3";
authors = [
"Torbjørn Birch Moltu <t.b.moltu@lyse.net>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"encoding_rs" = rec {
crateName = "encoding_rs";
version = "0.8.26";
edition = "2018";
sha256 = "10xjcafwbxvm2kfsyymxlz8wc9s4bmdj1xzlc809rxyp2yrbl6w0";
authors = [
"Henri Sivonen <hsivonen@hsivonen.fi>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
];
features = {
"fast-legacy-encode" = [ "fast-hangul-encode" "fast-hanja-encode" "fast-kanji-encode" "fast-gb-hanzi-encode" "fast-big5-hanzi-encode" ];
"simd-accel" = [ "packed_simd" "packed_simd/into_bits" ];
};
};
"env_logger" = rec {
crateName = "env_logger";
version = "0.8.2";
edition = "2018";
sha256 = "07k6m6igz02g2b1v7nims7vd8azwxrav43xl14a6rjmxnikcnvpj";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "atty";
packageId = "atty";
optional = true;
}
{
name = "humantime";
packageId = "humantime";
optional = true;
}
{
name = "log";
packageId = "log";
features = [ "std" ];
}
{
name = "regex";
packageId = "regex";
optional = true;
usesDefaultFeatures = false;
features = [ "std" "perf" ];
}
{
name = "termcolor";
packageId = "termcolor";
optional = true;
}
];
features = {
"default" = [ "termcolor" "atty" "humantime" "regex" ];
};
resolvedDefaultFeatures = [ "atty" "default" "humantime" "regex" "termcolor" ];
};
"errno" = rec {
crateName = "errno";
version = "0.2.7";
edition = "2015";
sha256 = "1zj6rra8n7d7gagppvvs5pvrfblad6x4ln5knb4kg7dfkkxz4s7s";
authors = [
"Chris Wong <lambda.fairy@gmail.com>"
];
dependencies = [
{
name = "errno-dragonfly";
packageId = "errno-dragonfly";
target = { target, features }: (target."os" == "dragonfly");
}
{
name = "libc";
packageId = "libc";
target = { target, features }: (target."os" == "hermit");
}
{
name = "libc";
packageId = "libc";
target = { target, features }: (target."os" == "wasi");
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "errhandlingapi" "minwindef" "ntdef" "winbase" ];
}
];
};
"errno-dragonfly" = rec {
crateName = "errno-dragonfly";
version = "0.1.1";
edition = "2015";
sha256 = "0rshlc00nv45f14v2l1w0ma2nf1jg5j7q9pvw7hh018r6r73bjhl";
authors = [
"Michael Neumann <mneumann@ntecs.de>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
];
buildDependencies = [
{
name = "gcc";
packageId = "gcc";
}
];
};
"filetime" = rec {
crateName = "filetime";
version = "0.2.13";
edition = "2018";
sha256 = "1zlyqwl6l1vv95x5mzxrhqp9jy136wyvsvzhbc0lhxm57qwjl4hc";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "redox_syscall";
packageId = "redox_syscall";
target = { target, features }: (target."os" == "redox");
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "fileapi" "minwindef" "winbase" ];
}
];
};
"flate2" = rec {
crateName = "flate2";
version = "1.0.19";
edition = "2018";
sha256 = "0abinkvc6ylbhshm8b3yfsfyl0vi2qplvjv4m8cs95yzalyqc4bl";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Josh Triplett <josh@joshtriplett.org>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "crc32fast";
packageId = "crc32fast";
}
{
name = "libc";
packageId = "libc";
}
{
name = "miniz_oxide";
packageId = "miniz_oxide";
optional = true;
usesDefaultFeatures = false;
}
{
name = "miniz_oxide";
packageId = "miniz_oxide";
usesDefaultFeatures = false;
target = { target, features }: ((target."arch" == "wasm32") && (!(target."os" == "emscripten")));
}
];
features = {
"cloudflare_zlib" = [ "any_zlib" "cloudflare-zlib-sys" ];
"default" = [ "rust_backend" ];
"rust_backend" = [ "miniz_oxide" ];
"tokio" = [ "tokio-io" "futures" ];
"zlib" = [ "any_zlib" "libz-sys" ];
"zlib-ng-compat" = [ "zlib" "libz-sys/zlib-ng" ];
};
resolvedDefaultFeatures = [ "default" "miniz_oxide" "rust_backend" ];
};
"fnv" = rec {
crateName = "fnv";
version = "1.0.7";
edition = "2015";
sha256 = "1hc2mcqha06aibcaza94vbi81j6pr9a1bbxrxjfhc91zin8yr7iz";
libPath = "lib.rs";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"foreign-types" = rec {
crateName = "foreign-types";
version = "0.3.2";
edition = "2015";
sha256 = "1cgk0vyd7r45cj769jym4a6s7vwshvd0z4bqrb92q1fwibmkkwzn";
authors = [
"Steven Fackler <sfackler@gmail.com>"
];
dependencies = [
{
name = "foreign-types-shared";
packageId = "foreign-types-shared";
}
];
};
"foreign-types-shared" = rec {
crateName = "foreign-types-shared";
version = "0.1.1";
edition = "2015";
sha256 = "0jxgzd04ra4imjv8jgkmdq59kj8fsz6w4zxsbmlai34h26225c00";
authors = [
"Steven Fackler <sfackler@gmail.com>"
];
};
"form_urlencoded" = rec {
crateName = "form_urlencoded";
version = "1.0.0";
edition = "2015";
sha256 = "005yi1319k5bz8g5ylbdiakq5jp5jh90yy6k357zm11fr4aqvrpc";
authors = [
"The rust-url developers"
];
dependencies = [
{
name = "matches";
packageId = "matches";
}
{
name = "percent-encoding";
packageId = "percent-encoding";
}
];
};
"fs2" = rec {
crateName = "fs2";
version = "0.4.3";
edition = "2015";
sha256 = "04v2hwk7035c088f19mfl5b1lz84gnvv2hv6m935n0hmirszqr4m";
authors = [
"Dan Burkert <dan@danburkert.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "handleapi" "processthreadsapi" "winerror" "fileapi" "winbase" "std" ];
}
];
};
"futures" = rec {
crateName = "futures";
version = "0.3.8";
edition = "2018";
sha256 = "1l434mh7p5na5c3c7lih575hszqc515r9idk62fm5rhz1820qfwv";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "futures-channel";
packageId = "futures-channel";
usesDefaultFeatures = false;
features = [ "sink" ];
}
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-executor";
packageId = "futures-executor";
optional = true;
usesDefaultFeatures = false;
}
{
name = "futures-io";
packageId = "futures-io";
usesDefaultFeatures = false;
}
{
name = "futures-sink";
packageId = "futures-sink";
usesDefaultFeatures = false;
}
{
name = "futures-task";
packageId = "futures-task";
usesDefaultFeatures = false;
}
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
features = [ "sink" ];
}
];
features = {
"alloc" = [ "futures-core/alloc" "futures-task/alloc" "futures-sink/alloc" "futures-channel/alloc" "futures-util/alloc" ];
"async-await" = [ "futures-util/async-await" "futures-util/async-await-macro" ];
"bilock" = [ "futures-util/bilock" ];
"cfg-target-has-atomic" = [ "futures-core/cfg-target-has-atomic" "futures-task/cfg-target-has-atomic" "futures-channel/cfg-target-has-atomic" "futures-util/cfg-target-has-atomic" ];
"compat" = [ "std" "futures-util/compat" ];
"default" = [ "std" "async-await" "executor" ];
"executor" = [ "std" "futures-executor/std" ];
"io-compat" = [ "compat" "futures-util/io-compat" ];
"read-initializer" = [ "futures-io/read-initializer" "futures-util/read-initializer" ];
"std" = [ "alloc" "futures-core/std" "futures-task/std" "futures-io/std" "futures-sink/std" "futures-util/std" "futures-util/io" "futures-util/channel" ];
"thread-pool" = [ "executor" "futures-executor/thread-pool" ];
"unstable" = [ "futures-core/unstable" "futures-task/unstable" "futures-channel/unstable" "futures-io/unstable" "futures-util/unstable" ];
"write-all-vectored" = [ "futures-util/write-all-vectored" ];
};
resolvedDefaultFeatures = [ "alloc" "async-await" "default" "executor" "futures-executor" "std" ];
};
"futures-channel" = rec {
crateName = "futures-channel";
version = "0.3.8";
edition = "2018";
sha256 = "0r7y228kkhwx9jj3ny5ppmw2gvw0capm6ig8dzppgqd4g9l0jwab";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-sink";
packageId = "futures-sink";
optional = true;
usesDefaultFeatures = false;
}
];
features = {
"alloc" = [ "futures-core/alloc" ];
"cfg-target-has-atomic" = [ "futures-core/cfg-target-has-atomic" ];
"default" = [ "std" ];
"sink" = [ "futures-sink" ];
"std" = [ "alloc" "futures-core/std" ];
"unstable" = [ "futures-core/unstable" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "futures-sink" "sink" "std" ];
};
"futures-core" = rec {
crateName = "futures-core";
version = "0.3.8";
edition = "2018";
sha256 = "0j0pixxv8dmqas1h5cgy92z4r9lpmnlis8ls22v17yrgnwqy2z44";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"default" = [ "std" ];
"std" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "std" ];
};
"futures-executor" = rec {
crateName = "futures-executor";
version = "0.3.8";
edition = "2018";
sha256 = "0r8ayj6g08d1i0hj2v6g5zr3hzlkxpqlkpf1awq0105qd0mjpajc";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-task";
packageId = "futures-task";
usesDefaultFeatures = false;
}
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
}
];
features = {
"default" = [ "std" ];
"std" = [ "futures-core/std" "futures-task/std" "futures-util/std" ];
"thread-pool" = [ "std" "num_cpus" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"futures-io" = rec {
crateName = "futures-io";
version = "0.3.8";
edition = "2018";
sha256 = "1frh7d0n96lczy22al3bkgwpq0p1agbgax5kqh9vv8da33738631";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"futures-macro" = rec {
crateName = "futures-macro";
version = "0.3.8";
edition = "2018";
sha256 = "0mjmb46zapb59iilsbljpj7l0hq6w19df0f03p3br5qz5xlqlh3p";
procMacro = true;
authors = [
"Taylor Cramer <cramertj@google.com>"
"Taiki Endo <te316e89@gmail.com>"
];
dependencies = [
{
name = "proc-macro-hack";
packageId = "proc-macro-hack";
}
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" ];
}
];
};
"futures-sink" = rec {
crateName = "futures-sink";
version = "0.3.8";
edition = "2018";
sha256 = "0gfb1z97q861ki6lqsvpgfn3hnm9w3vkrf82dc00xrff95d1jy7q";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"default" = [ "std" ];
"std" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "std" ];
};
"futures-task" = rec {
crateName = "futures-task";
version = "0.3.8";
edition = "2018";
sha256 = "03ad39v8scy353src2f9dkkvcs24n736iavi8xn45cj8pyslwmbw";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "once_cell";
packageId = "once_cell";
optional = true;
usesDefaultFeatures = false;
features = [ "std" ];
}
];
features = {
"default" = [ "std" ];
"std" = [ "alloc" "once_cell" ];
};
resolvedDefaultFeatures = [ "alloc" "once_cell" "std" ];
};
"futures-util" = rec {
crateName = "futures-util";
version = "0.3.8";
edition = "2018";
sha256 = "1lnbhpyrypn9giw6122af0pffxfijfz3zm7phrwzp75rlzscy16k";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "futures-channel";
packageId = "futures-channel";
optional = true;
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-io";
packageId = "futures-io";
optional = true;
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "futures-macro";
packageId = "futures-macro";
optional = true;
usesDefaultFeatures = false;
}
{
name = "futures-sink";
packageId = "futures-sink";
optional = true;
usesDefaultFeatures = false;
}
{
name = "futures-task";
packageId = "futures-task";
usesDefaultFeatures = false;
}
{
name = "memchr";
packageId = "memchr";
optional = true;
}
{
name = "pin-project";
packageId = "pin-project 1.0.3";
}
{
name = "pin-utils";
packageId = "pin-utils";
}
{
name = "proc-macro-hack";
packageId = "proc-macro-hack";
optional = true;
}
{
name = "proc-macro-nested";
packageId = "proc-macro-nested";
optional = true;
}
{
name = "slab";
packageId = "slab";
optional = true;
}
];
features = {
"alloc" = [ "futures-core/alloc" "futures-task/alloc" ];
"async-await-macro" = [ "async-await" "futures-macro" "proc-macro-hack" "proc-macro-nested" ];
"cfg-target-has-atomic" = [ "futures-core/cfg-target-has-atomic" "futures-task/cfg-target-has-atomic" ];
"channel" = [ "std" "futures-channel" ];
"compat" = [ "std" "futures_01" ];
"default" = [ "std" "async-await" "async-await-macro" ];
"io" = [ "std" "futures-io" "memchr" ];
"io-compat" = [ "io" "compat" "tokio-io" ];
"read-initializer" = [ "io" "futures-io/read-initializer" "futures-io/unstable" ];
"sink" = [ "futures-sink" ];
"std" = [ "alloc" "futures-core/std" "futures-task/std" "slab" ];
"unstable" = [ "futures-core/unstable" "futures-task/unstable" ];
"write-all-vectored" = [ "io" ];
};
resolvedDefaultFeatures = [ "alloc" "async-await" "async-await-macro" "channel" "default" "futures-channel" "futures-io" "futures-macro" "futures-sink" "io" "memchr" "proc-macro-hack" "proc-macro-nested" "sink" "slab" "std" ];
};
"gcc" = rec {
crateName = "gcc";
version = "0.3.55";
edition = "2015";
crateBin = [];
sha256 = "1hng1sajn4r67hndvhjysswz8niayjwvcj42zphpxzhbz89kjpwg";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"parallel" = [ "rayon" ];
};
};
"generic-array" = rec {
crateName = "generic-array";
version = "0.14.4";
edition = "2015";
sha256 = "05qqwm9v5asbil9z28wjkmpfvs1c5c99n8n9gwxis3d3r3n6c52h";
libName = "generic_array";
authors = [
"Bartłomiej Kamiński <fizyk20@gmail.com>"
"Aaron Trent <novacrazy@gmail.com>"
];
dependencies = [
{
name = "typenum";
packageId = "typenum";
}
];
buildDependencies = [
{
name = "version_check";
packageId = "version_check";
}
];
features = {
};
};
"getrandom 0.1.16" = rec {
crateName = "getrandom";
version = "0.1.16";
edition = "2018";
sha256 = "1kjzmz60qx9mn615ks1akjbf36n3lkv27zfwbcam0fzmj56wphwg";
authors = [
"The Rand Project Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
target = { target, features }: target."unix";
}
{
name = "wasi";
packageId = "wasi 0.9.0+wasi-snapshot-preview1";
target = { target, features }: (target."os" == "wasi");
}
];
features = {
"rustc-dep-of-std" = [ "compiler_builtins" "core" ];
"test-in-browser" = [ "wasm-bindgen" ];
"wasm-bindgen" = [ "bindgen" "js-sys" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"getrandom 0.2.1" = rec {
crateName = "getrandom";
version = "0.2.1";
edition = "2018";
sha256 = "1rmfh5g58awfcc2fhpal8lvxhqp8l8c2pc0mc9v8x2z7gdjz8q20";
authors = [
"The Rand Project Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
target = { target, features }: target."unix";
}
{
name = "wasi";
packageId = "wasi 0.10.0+wasi-snapshot-preview1";
target = { target, features }: (target."os" == "wasi");
}
];
features = {
"js" = [ "wasm-bindgen" "js-sys" ];
"rustc-dep-of-std" = [ "compiler_builtins" "core" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"gimli" = rec {
crateName = "gimli";
version = "0.23.0";
edition = "2018";
sha256 = "1km657nwcrb0pnv7v0ldhgl9y8s889y2j9jckmws8k2i8bhkyl7n";
authors = [
"Nick Fitzgerald <fitzgen@gmail.com>"
"Philip Craig <philipjcraig@gmail.com>"
];
features = {
"default" = [ "read" "write" "std" "fallible-iterator" "endian-reader" ];
"endian-reader" = [ "stable_deref_trait" ];
"rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins" ];
"std" = [ "fallible-iterator/std" "stable_deref_trait/std" ];
"write" = [ "indexmap" ];
};
resolvedDefaultFeatures = [ "read" ];
};
"git2" = rec {
crateName = "git2";
version = "0.13.15";
edition = "2018";
sha256 = "0na3vsa44nn1sr6pzscn93w69wbmdih277mm2p3f6kcavb4ngwj4";
authors = [
"Josh Triplett <josh@joshtriplett.org>"
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "libc";
packageId = "libc";
}
{
name = "libgit2-sys";
packageId = "libgit2-sys";
}
{
name = "log";
packageId = "log";
}
{
name = "openssl-probe";
packageId = "openssl-probe";
optional = true;
target = { target, features }: (target."unix" && (!(target."os" == "macos")));
}
{
name = "openssl-sys";
packageId = "openssl-sys";
optional = true;
target = { target, features }: (target."unix" && (!(target."os" == "macos")));
}
{
name = "url";
packageId = "url";
}
];
features = {
"default" = [ "ssh" "https" "ssh_key_from_memory" ];
"https" = [ "libgit2-sys/https" "openssl-sys" "openssl-probe" ];
"ssh" = [ "libgit2-sys/ssh" ];
"ssh_key_from_memory" = [ "libgit2-sys/ssh_key_from_memory" ];
"vendored-openssl" = [ "openssl-sys/vendored" ];
"zlib-ng-compat" = [ "libgit2-sys/zlib-ng-compat" ];
};
resolvedDefaultFeatures = [ "default" "https" "openssl-probe" "openssl-sys" "ssh" "ssh_key_from_memory" ];
};
"globset" = rec {
crateName = "globset";
version = "0.4.6";
edition = "2015";
sha256 = "0jhy0qs5k43g8zyx1fys44kcdzjmcrwjyv9n703kj8g4y6g1cln1";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "aho-corasick";
packageId = "aho-corasick";
}
{
name = "bstr";
packageId = "bstr";
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "fnv";
packageId = "fnv";
}
{
name = "log";
packageId = "log";
}
{
name = "regex";
packageId = "regex";
usesDefaultFeatures = false;
features = [ "perf" "std" ];
}
];
features = {
"serde1" = [ "serde" ];
};
};
"h2" = rec {
crateName = "h2";
version = "0.3.0";
edition = "2018";
sha256 = "19g6x6smwj7pyv28zr766xmdia5pr2psnly0ib6zr3hhc9iycrvb";
authors = [
"Carl Lerche <me@carllerche.com>"
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "fnv";
packageId = "fnv";
}
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-sink";
packageId = "futures-sink";
usesDefaultFeatures = false;
}
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
}
{
name = "http";
packageId = "http";
}
{
name = "indexmap";
packageId = "indexmap";
}
{
name = "slab";
packageId = "slab";
}
{
name = "tokio";
packageId = "tokio";
features = [ "io-util" ];
}
{
name = "tokio-util";
packageId = "tokio-util";
features = [ "codec" ];
}
{
name = "tracing";
packageId = "tracing";
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "tracing-futures";
packageId = "tracing-futures";
usesDefaultFeatures = false;
features = [ "std-future" ];
}
];
devDependencies = [
{
name = "tokio";
packageId = "tokio";
features = [ "rt-multi-thread" "macros" "sync" "net" ];
}
];
features = {
};
};
"hashbrown" = rec {
crateName = "hashbrown";
version = "0.9.1";
edition = "2018";
sha256 = "016dsm9s4xmxlkw2jfikm54qlz6vyk0qr280gab7kzp342jf9byp";
authors = [
"Amanieu d'Antras <amanieu@gmail.com>"
];
features = {
"ahash-compile-time-rng" = [ "ahash/compile-time-rng" ];
"default" = [ "ahash" "inline-more" ];
"rustc-dep-of-std" = [ "nightly" "core" "compiler_builtins" "alloc" "rustc-internal-api" ];
};
resolvedDefaultFeatures = [ "raw" ];
};
"heck" = rec {
crateName = "heck";
version = "0.3.2";
edition = "2018";
sha256 = "1b56s2c1ymdd0qmy31bw0ndhm31hcdamnhg3npp7ssrmc1ag9jw7";
authors = [
"Without Boats <woboats@gmail.com>"
];
dependencies = [
{
name = "unicode-segmentation";
packageId = "unicode-segmentation";
}
];
};
"hermit-abi" = rec {
crateName = "hermit-abi";
version = "0.1.17";
edition = "2018";
sha256 = "1s1ss7xypnbpjglmj5k39svzsmni1rqp57a91qi5pyv0yxjmbjjs";
authors = [
"Stefan Lankes"
];
dependencies = [
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
}
];
features = {
"rustc-dep-of-std" = [ "core" "compiler_builtins/rustc-dep-of-std" "libc/rustc-dep-of-std" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"http" = rec {
crateName = "http";
version = "0.2.2";
edition = "2018";
sha256 = "09lfcl4589xfin22hblysab3zbx43j1ypy0gjr3d4mvdi8lrs4l4";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Carl Lerche <me@carllerche.com>"
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 0.5.6";
}
{
name = "fnv";
packageId = "fnv";
}
{
name = "itoa";
packageId = "itoa";
}
];
};
"http-body" = rec {
crateName = "http-body";
version = "0.4.0";
edition = "2018";
sha256 = "150rkibhyrcmsyi0qzwx4hp02c4lga1kkdg8j7l5wkh7xqkvsq98";
authors = [
"Carl Lerche <me@carllerche.com>"
"Lucio Franco <luciofranco14@gmail.com>"
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "http";
packageId = "http";
}
];
};
"httparse" = rec {
crateName = "httparse";
version = "1.3.4";
edition = "2015";
sha256 = "1yf23ldnjwfkkhkca7f4w15mky9961gjz28dlwyybhphc7l9l5yd";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"httpdate" = rec {
crateName = "httpdate";
version = "0.3.2";
edition = "2015";
sha256 = "0izbd3sf0625wm4rrfv85xa4xa8j4n1ldxhwlkgff4cm6rh4sjs9";
authors = [
"Pyfisch <pyfisch@gmail.com>"
];
features = {
};
};
"human-panic" = rec {
crateName = "human-panic";
version = "1.0.3";
edition = "2018";
sha256 = "0djfad84iwl86kabj8rqfhv5nn1qi1fd9hb7z72xgjxb02jmgwrr";
authors = [
"Yoshua Wuyts <yoshuawuyts@gmail.com>"
"Pascal Hertleif <killercup@gmail.com>"
"Katharina Fey <kookie@spacekookie.de>"
];
dependencies = [
{
name = "backtrace";
packageId = "backtrace";
}
{
name = "os_type";
packageId = "os_type";
}
{
name = "serde";
packageId = "serde";
}
{
name = "serde_derive";
packageId = "serde_derive";
}
{
name = "termcolor";
packageId = "termcolor";
}
{
name = "toml";
packageId = "toml";
}
{
name = "uuid";
packageId = "uuid";
usesDefaultFeatures = false;
features = [ "v4" ];
}
];
features = {
};
};
"humantime" = rec {
crateName = "humantime";
version = "2.0.1";
edition = "2018";
sha256 = "0yivhqyi8xik2j6sd3q45ybakjx8jsx5632dx9xjn0birh4dj6iw";
authors = [
"Paul Colomiets <paul@colomiets.name>"
];
};
"hyper" = rec {
crateName = "hyper";
version = "0.14.2";
edition = "2018";
sha256 = "1gnckphz28sb01b036q53cxc408f7i0z8drpl2kb8k2ihk49s88j";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "futures-channel";
packageId = "futures-channel";
}
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
}
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
}
{
name = "h2";
packageId = "h2";
optional = true;
}
{
name = "http";
packageId = "http";
}
{
name = "http-body";
packageId = "http-body";
}
{
name = "httparse";
packageId = "httparse";
}
{
name = "httpdate";
packageId = "httpdate";
}
{
name = "itoa";
packageId = "itoa";
}
{
name = "pin-project";
packageId = "pin-project 1.0.3";
}
{
name = "socket2";
packageId = "socket2";
optional = true;
}
{
name = "tokio";
packageId = "tokio";
features = [ "sync" ];
}
{
name = "tower-service";
packageId = "tower-service";
}
{
name = "tracing";
packageId = "tracing";
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "want";
packageId = "want";
}
];
devDependencies = [
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
features = [ "alloc" ];
}
{
name = "tokio";
packageId = "tokio";
features = [ "fs" "macros" "io-std" "io-util" "rt" "rt-multi-thread" "sync" "time" "test-util" ];
}
];
features = {
"full" = [ "client" "http1" "http2" "server" "stream" "runtime" ];
"http2" = [ "h2" ];
"runtime" = [ "tcp" "tokio/rt" ];
"tcp" = [ "socket2" "tokio/net" "tokio/rt" "tokio/time" ];
};
resolvedDefaultFeatures = [ "client" "h2" "http1" "http2" "socket2" "tcp" ];
};
"hyper-tls" = rec {
crateName = "hyper-tls";
version = "0.5.0";
edition = "2018";
sha256 = "01crgy13102iagakf6q4mb75dprzr7ps1gj0l5hxm1cvm7gks66n";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "hyper";
packageId = "hyper";
usesDefaultFeatures = false;
features = [ "tcp" "client" ];
}
{
name = "native-tls";
packageId = "native-tls";
}
{
name = "tokio";
packageId = "tokio";
}
{
name = "tokio-native-tls";
packageId = "tokio-native-tls";
}
];
devDependencies = [
{
name = "hyper";
packageId = "hyper";
usesDefaultFeatures = false;
features = [ "http1" ];
}
{
name = "tokio";
packageId = "tokio";
features = [ "io-std" "macros" "io-util" ];
}
];
features = {
"vendored" = [ "native-tls/vendored" ];
};
};
"idna" = rec {
crateName = "idna";
version = "0.2.0";
edition = "2015";
sha256 = "1a9066imqpdrm1aavfasdyb1zahqaz8jmdcwdawvb1pf60y6gqh2";
authors = [
"The rust-url developers"
];
dependencies = [
{
name = "matches";
packageId = "matches";
}
{
name = "unicode-bidi";
packageId = "unicode-bidi";
}
{
name = "unicode-normalization";
packageId = "unicode-normalization";
}
];
};
"ignore" = rec {
crateName = "ignore";
version = "0.4.17";
edition = "2015";
sha256 = "1347mxd0cwiidcl0qvixl7za524x5ds0izv8vjh2df0bqr2zp1xj";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "crossbeam-utils";
packageId = "crossbeam-utils";
}
{
name = "globset";
packageId = "globset";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "log";
packageId = "log";
}
{
name = "memchr";
packageId = "memchr";
}
{
name = "regex";
packageId = "regex";
}
{
name = "same-file";
packageId = "same-file";
}
{
name = "thread_local";
packageId = "thread_local";
}
{
name = "walkdir";
packageId = "walkdir";
}
{
name = "winapi-util";
packageId = "winapi-util";
target = { target, features }: target."windows";
}
];
features = {
"simd-accel" = [ "globset/simd-accel" ];
};
};
"indexmap" = rec {
crateName = "indexmap";
version = "1.6.1";
edition = "2018";
sha256 = "0friqyzr4ssyayks7nirqbc36zcsf8fdi67jmvl4vpjh8a9zmcag";
authors = [
"bluss"
"Josh Stone <cuviper@gmail.com>"
];
dependencies = [
{
name = "hashbrown";
packageId = "hashbrown";
usesDefaultFeatures = false;
features = [ "raw" ];
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"serde-1" = [ "serde" ];
};
};
"indicatif" = rec {
crateName = "indicatif";
version = "0.15.0";
edition = "2018";
sha256 = "1r4n50mclyi4c7b9c9mlma1rhchjamw71r3z8vgqcmp24mhvbakv";
authors = [
"Armin Ronacher <armin.ronacher@active-4.com>"
];
dependencies = [
{
name = "console";
packageId = "console";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "number_prefix";
packageId = "number_prefix";
}
{
name = "regex";
packageId = "regex";
usesDefaultFeatures = false;
features = [ "std" ];
}
];
features = {
"improved_unicode" = [ "unicode-segmentation" "unicode-width" "console/unicode-width" ];
"with_rayon" = [ "rayon" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"instant" = rec {
crateName = "instant";
version = "0.1.9";
edition = "2018";
sha256 = "1v659qqm55misvjijfbl1p7azjp4yynjbwldan8836ynpgp4w4k1";
authors = [
"sebcrozet <developer@crozet.re>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
];
features = {
"now" = [ "time" ];
"wasm-bindgen" = [ "js-sys" "wasm-bindgen_rs" "web-sys" ];
};
};
"ipnet" = rec {
crateName = "ipnet";
version = "2.3.0";
edition = "2015";
sha256 = "0db147nh8jnxr23yxa7hwqn7dcjivdqi3aq4mgf2zgkqqqa2zgj7";
authors = [
"Kris Price <kris@krisprice.nz>"
];
};
"itoa" = rec {
crateName = "itoa";
version = "0.4.7";
edition = "2015";
sha256 = "0di7fggbknwfjcw8cgzm1dnm3ik32l2m1f7nmyh8ipmh45h069fx";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"jobserver" = rec {
crateName = "jobserver";
version = "0.1.21";
edition = "2018";
sha256 = "1wm7h76xf7a4d0q6583agbsyl2s46vndrlnr015zffclpcz32waw";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
];
};
"js-sys" = rec {
crateName = "js-sys";
version = "0.3.46";
edition = "2018";
sha256 = "0xc1llkp23q8ac2wdwh46y6gjbc34prrd98g5my9qz4zja1p6gfg";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "wasm-bindgen";
packageId = "wasm-bindgen";
}
];
};
"lazy_static" = rec {
crateName = "lazy_static";
version = "1.4.0";
edition = "2015";
sha256 = "0in6ikhw8mgl33wjv6q6xfrb5b9jr16q8ygjy803fay4zcisvaz2";
authors = [
"Marvin Löbel <loebel.marvin@gmail.com>"
];
features = {
"spin_no_std" = [ "spin" ];
};
};
"libc" = rec {
crateName = "libc";
version = "0.2.81";
edition = "2015";
sha256 = "1jsk82v5snd286ba92lir5snrxl18qm3kjkagz8c97hn0q9q50hl";
authors = [
"The Rust Project Developers"
];
features = {
"default" = [ "std" ];
"rustc-dep-of-std" = [ "align" "rustc-std-workspace-core" ];
"use_std" = [ "std" ];
};
resolvedDefaultFeatures = [ "align" "default" "extra_traits" "std" ];
};
"libgit2-sys" = rec {
crateName = "libgit2-sys";
version = "0.12.17+1.1.0";
edition = "2018";
sha256 = "0hc89v7kp2b3rbc64cxq024shd85m8vqcs14i3gjclblr9jxzszl";
libName = "libgit2_sys";
libPath = "lib.rs";
authors = [
"Josh Triplett <josh@joshtriplett.org>"
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
{
name = "libssh2-sys";
packageId = "libssh2-sys";
optional = true;
}
{
name = "libz-sys";
packageId = "libz-sys";
usesDefaultFeatures = false;
features = [ "libc" ];
}
{
name = "openssl-sys";
packageId = "openssl-sys";
optional = true;
target = { target, features }: target."unix";
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
features = [ "parallel" ];
}
{
name = "pkg-config";
packageId = "pkg-config";
}
];
features = {
"https" = [ "openssl-sys" ];
"ssh" = [ "libssh2-sys" ];
"zlib-ng-compat" = [ "libz-sys/zlib-ng" "libssh2-sys/zlib-ng-compat" ];
};
resolvedDefaultFeatures = [ "https" "libssh2-sys" "openssl-sys" "ssh" "ssh_key_from_memory" ];
};
"libpijul" = rec {
crateName = "libpijul";
version = "1.0.0-alpha.28";
edition = "2018";
src = (builtins.filterSource sourceFilter ./libpijul);
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "bincode";
packageId = "bincode";
}
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "blake3";
packageId = "blake3";
}
{
name = "byteorder";
packageId = "byteorder";
}
{
name = "canonical-path";
packageId = "canonical-path";
optional = true;
}
{
name = "chrono";
packageId = "chrono";
features = [ "serde" ];
}
{
name = "curve25519-dalek";
packageId = "curve25519-dalek";
features = [ "serde" ];
}
{
name = "data-encoding";
packageId = "data-encoding";
}
{
name = "diffs";
packageId = "diffs";
}
{
name = "flate2";
packageId = "flate2";
optional = true;
}
{
name = "ignore";
packageId = "ignore";
optional = true;
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "log";
packageId = "log";
}
{
name = "lru-cache";
packageId = "lru-cache";
optional = true;
}
{
name = "pijul-macros";
packageId = "pijul-macros";
}
{
name = "rand";
packageId = "rand 0.8.1";
}
{
name = "regex";
packageId = "regex";
optional = true;
}
{
name = "sanakirja";
packageId = "sanakirja";
features = [ "crc32" ];
}
{
name = "serde";
packageId = "serde";
}
{
name = "serde_derive";
packageId = "serde_derive";
}
{
name = "tar";
packageId = "tar";
optional = true;
}
{
name = "tempfile";
packageId = "tempfile";
optional = true;
}
{
name = "thiserror";
packageId = "thiserror";
}
{
name = "tokio";
packageId = "tokio";
optional = true;
features = [ "io-util" ];
}
{
name = "toml";
packageId = "toml";
}
{
name = "zstd-seekable";
packageId = "zstd-seekable";
optional = true;
}
];
devDependencies = [
{
name = "anyhow";
packageId = "anyhow";
}
{
name = "env_logger";
packageId = "env_logger";
}
];
features = {
"default" = [ "ondisk-repos" "text-changes" "dump" ];
"dump" = [ "tokio" ];
"mmap" = [ "sanakirja/mmap" ];
"ondisk-repos" = [ "mmap" "zstd" "ignore" "canonical-path" "lru-cache" "tempfile" ];
"tarball" = [ "tar" "flate2" ];
"text-changes" = [ "regex" ];
"zstd" = [ "zstd-seekable" ];
};
resolvedDefaultFeatures = [ "canonical-path" "default" "dump" "flate2" "ignore" "lru-cache" "mmap" "ondisk-repos" "regex" "tar" "tarball" "tempfile" "text-changes" "tokio" "zstd" "zstd-seekable" ];
};
"libsodium-sys" = rec {
crateName = "libsodium-sys";
version = "0.2.6";
edition = "2015";
sha256 = "0n4a5f4vi63lvf4v6xcds20kdiqs8dxpybjz25s90cvvhd7vd1d6";
libName = "libsodium_sys";
authors = [
"dnaq"
];
dependencies = [
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
target = {target, features}: (!(target."env" == "msvc"));
}
{
name = "libc";
packageId = "libc";
usesDefaultFeatures = false;
target = {target, features}: (target."env" == "msvc");
}
{
name = "pkg-config";
packageId = "pkg-config";
}
];
features = {
};
};
"libssh2-sys" = rec {
crateName = "libssh2-sys";
version = "0.2.20";
edition = "2015";
sha256 = "1kn4px7rczgday5lf48qzi2w65iq4x8mldpsvywyj6zawwzv2h6z";
libName = "libssh2_sys";
libPath = "lib.rs";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Wez Furlong <wez@wezfurlong.org>"
"Matteo Bigoi <bigo@crisidev.org>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
{
name = "libz-sys";
packageId = "libz-sys";
usesDefaultFeatures = false;
features = [ "libc" ];
}
{
name = "openssl-sys";
packageId = "openssl-sys";
target = { target, features }: target."unix";
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
}
{
name = "pkg-config";
packageId = "pkg-config";
}
{
name = "vcpkg";
packageId = "vcpkg";
target = {target, features}: (target."env" == "msvc");
}
];
features = {
"vendored-openssl" = [ "openssl-sys/vendored" ];
"zlib-ng-compat" = [ "libz-sys/zlib-ng" ];
};
};
"libz-sys" = rec {
crateName = "libz-sys";
version = "1.1.2";
edition = "2015";
sha256 = "0mcn8991kpmw320923hlcmci834nkv1qakkcg4w8znq85cci68b0";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Josh Triplett <josh@joshtriplett.org>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
optional = true;
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
}
{
name = "pkg-config";
packageId = "pkg-config";
}
{
name = "vcpkg";
packageId = "vcpkg";
target = {target, features}: (target."env" == "msvc");
}
];
features = {
"default" = [ "libc" "stock-zlib" ];
"zlib-ng" = [ "libc" "cmake" ];
};
resolvedDefaultFeatures = [ "libc" ];
};
"linked-hash-map" = rec {
crateName = "linked-hash-map";
version = "0.5.3";
edition = "2015";
sha256 = "0jih3za0p1mywlnwcakc462q1byk6z8vnrzdm36hg6cxk7asdmcd";
authors = [
"Stepan Koltsov <stepan.koltsov@gmail.com>"
"Andrew Paseltiner <apaseltiner@gmail.com>"
];
features = {
"heapsize_impl" = [ "heapsize" ];
"serde_impl" = [ "serde" "serde_test" ];
};
};
"lock_api" = rec {
crateName = "lock_api";
version = "0.4.2";
edition = "2018";
sha256 = "04hkhfq308agxg9wwmzh7ncfiyyyhn0d49n07abppzdj6p8zz5nx";
authors = [
"Amanieu d'Antras <amanieu@gmail.com>"
];
dependencies = [
{
name = "scopeguard";
packageId = "scopeguard";
usesDefaultFeatures = false;
}
];
features = {
};
};
"log" = rec {
crateName = "log";
version = "0.4.11";
edition = "2015";
sha256 = "12xzqaflpiljn5cmxsbnbv9sjaj13ykhwsvll0gysbx4blbyvasg";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 0.1.10";
}
];
features = {
"kv_unstable_sval" = [ "kv_unstable" "sval/fmt" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"lru-cache" = rec {
crateName = "lru-cache";
version = "0.1.2";
edition = "2015";
sha256 = "071viv6g2p3akwqmfb3c8vsycs5n7kr17b70l7la071jv0d4zqii";
authors = [
"Stepan Koltsov <stepan.koltsov@gmail.com>"
];
dependencies = [
{
name = "linked-hash-map";
packageId = "linked-hash-map";
}
];
features = {
"heapsize_impl" = [ "heapsize" "linked-hash-map/heapsize_impl" ];
};
};
"matches" = rec {
crateName = "matches";
version = "0.1.8";
edition = "2015";
sha256 = "020axl4q7rk9vz90phs7f8jas4imxal9y9kxl4z4v7a6719mrz3z";
libPath = "lib.rs";
authors = [
"Simon Sapin <simon.sapin@exyr.org>"
];
};
"memchr" = rec {
crateName = "memchr";
version = "2.3.4";
edition = "2015";
sha256 = "098m9clfs495illlw00hv2gg67mhm7jflld3msyclvi5m9xc9q8f";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
"bluss"
];
features = {
"default" = [ "std" ];
"use_std" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" "use_std" ];
};
"memmap" = rec {
crateName = "memmap";
version = "0.7.0";
edition = "2015";
sha256 = "0ns7kkd1h4pijdkwfvw4qlbbmqmlmzwlq3g2676dcl5vwyazv1b5";
authors = [
"Dan Burkert <dan@danburkert.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "basetsd" "handleapi" "memoryapi" "minwindef" "std" "sysinfoapi" ];
}
];
};
"mime" = rec {
crateName = "mime";
version = "0.3.16";
edition = "2015";
sha256 = "13dcm9lh01hdwfjcg74ppljyjfj1c6w3a3cwkhxf0w8wa37cfq1a";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
};
"miniz_oxide" = rec {
crateName = "miniz_oxide";
version = "0.4.3";
edition = "2018";
sha256 = "17d1xp29v5xgh4vahxld14w1c1hgh38qmxpv7i18wy096gn2cb8g";
authors = [
"Frommi <daniil.liferenko@gmail.com>"
"oyvindln <oyvindln@users.noreply.github.com>"
];
dependencies = [
{
name = "adler";
packageId = "adler";
usesDefaultFeatures = false;
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"rustc-dep-of-std" = [ "core" "alloc" "compiler_builtins" "adler/rustc-dep-of-std" ];
};
};
"mio" = rec {
crateName = "mio";
version = "0.7.7";
edition = "2018";
sha256 = "1mryrcvhcgphmz2xkph7gqavf6aj40f7sm0bvsdwr7qn9pqf62p5";
authors = [
"Carl Lerche <me@carllerche.com>"
"Thomas de Zeeuw <thomasdezeeuw@gmail.com>"
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "log";
packageId = "log";
}
{
name = "miow";
packageId = "miow";
target = { target, features }: target."windows";
}
{
name = "ntapi";
packageId = "ntapi";
target = { target, features }: target."windows";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "winsock2" "mswsock" ];
}
];
features = {
"os-ext" = [ "os-poll" ];
"os-util" = [ "os-ext" ];
"pipe" = [ "os-ext" ];
"tcp" = [ "net" ];
"udp" = [ "net" ];
"uds" = [ "net" ];
};
resolvedDefaultFeatures = [ "default" "net" "os-ext" "os-poll" "os-util" "tcp" "udp" "uds" ];
};
"miow" = rec {
crateName = "miow";
version = "0.3.6";
edition = "2018";
sha256 = "15sqdhh29dqgw5xh59clwv6scbsbvdkbmdc16hbfvyq7b2sw2css";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "socket2";
packageId = "socket2";
}
{
name = "winapi";
packageId = "winapi";
features = [ "std" "fileapi" "handleapi" "ioapiset" "minwindef" "namedpipeapi" "ntdef" "synchapi" "winerror" "winsock2" "ws2def" "ws2ipdef" ];
}
];
};
"native-tls" = rec {
crateName = "native-tls";
version = "0.2.7";
edition = "2015";
sha256 = "1m6v16xl8h4pm32pw6yhrvgwznf60bqhj2qhb1yrb8wd3hp6pndq";
authors = [
"Steven Fackler <sfackler@gmail.com>"
];
dependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
target = { target, features }: ((target."os" == "macos") || (target."os" == "ios"));
}
{
name = "libc";
packageId = "libc";
target = { target, features }: ((target."os" == "macos") || (target."os" == "ios"));
}
{
name = "log";
packageId = "log";
target = { target, features }: (!((target."os" == "windows") || (target."os" == "macos") || (target."os" == "ios")));
}
{
name = "openssl";
packageId = "openssl";
target = { target, features }: (!((target."os" == "windows") || (target."os" == "macos") || (target."os" == "ios")));
}
{
name = "openssl-probe";
packageId = "openssl-probe";
target = { target, features }: (!((target."os" == "windows") || (target."os" == "macos") || (target."os" == "ios")));
}
{
name = "openssl-sys";
packageId = "openssl-sys";
target = { target, features }: (!((target."os" == "windows") || (target."os" == "macos") || (target."os" == "ios")));
}
{
name = "schannel";
packageId = "schannel";
target = { target, features }: (target."os" == "windows");
}
{
name = "security-framework";
packageId = "security-framework";
target = { target, features }: ((target."os" == "macos") || (target."os" == "ios"));
}
{
name = "security-framework-sys";
packageId = "security-framework-sys";
target = { target, features }: ((target."os" == "macos") || (target."os" == "ios"));
}
{
name = "tempfile";
packageId = "tempfile";
target = { target, features }: ((target."os" == "macos") || (target."os" == "ios"));
}
];
devDependencies = [
{
name = "tempfile";
packageId = "tempfile";
}
];
features = {
"alpn" = [ "security-framework/alpn" ];
"vendored" = [ "openssl/vendored" ];
};
};
"nix" = rec {
crateName = "nix";
version = "0.18.0";
edition = "2018";
sha256 = "0m8h9bskjjqx9sk687z8bxqg2kpwhdh78jq6zfaxsb8llvk0yic3";
authors = [
"The nix-rust Project Developers"
];
dependencies = [
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "cfg-if";
packageId = "cfg-if 0.1.10";
}
{
name = "libc";
packageId = "libc";
features = [ "extra_traits" ];
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
target = {target, features}: (target."os" == "dragonfly");
}
];
};
"ntapi" = rec {
crateName = "ntapi";
version = "0.3.6";
edition = "2018";
sha256 = "0i5daj9sr8wyi5jkpwpybln2jqpn59z0mqfc0dpdidipwh1bjsrz";
authors = [
"MSxDOS <melcodos@gmail.com>"
];
dependencies = [
{
name = "winapi";
packageId = "winapi";
features = [ "cfg" "evntrace" "in6addr" "inaddr" "minwinbase" "ntsecapi" "windef" "winioctl" ];
}
];
features = {
"default" = [ "user" ];
"impl-default" = [ "winapi/impl-default" ];
};
resolvedDefaultFeatures = [ "default" "user" ];
};
"num-bigint" = rec {
crateName = "num-bigint";
version = "0.2.6";
edition = "2015";
sha256 = "015k3wixdi4w698sappvy43pf8bvkw0f88xplmdgc3zfk2cpy309";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "num-integer";
packageId = "num-integer";
usesDefaultFeatures = false;
}
{
name = "num-traits";
packageId = "num-traits";
usesDefaultFeatures = false;
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"default" = [ "std" ];
"i128" = [ "num-integer/i128" "num-traits/i128" ];
"std" = [ "num-integer/std" "num-traits/std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"num-integer" = rec {
crateName = "num-integer";
version = "0.1.44";
edition = "2015";
sha256 = "1nq152y3304as1iai95hqz8prqnc94lks1s7q05sfjdmcf56kk6j";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "num-traits";
packageId = "num-traits";
usesDefaultFeatures = false;
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"default" = [ "std" ];
"i128" = [ "num-traits/i128" ];
"std" = [ "num-traits/std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"num-traits" = rec {
crateName = "num-traits";
version = "0.2.14";
edition = "2015";
sha256 = "144j176s2p76azy2ngk2vkdzgwdc0bc8c93jhki8c9fsbknb2r4s";
authors = [
"The Rust Project Developers"
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"num_cpus" = rec {
crateName = "num_cpus";
version = "1.13.0";
edition = "2015";
sha256 = "1cv6yxhz2zbnwn8pn1yn8grg7zsnd523947fby41a737aqvryj85";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "hermit-abi";
packageId = "hermit-abi";
target = { target, features }: (((target."arch" == "x86_64") || (target."arch" == "aarch64")) && (target."os" == "hermit"));
}
{
name = "libc";
packageId = "libc";
}
];
};
"number_prefix" = rec {
crateName = "number_prefix";
version = "0.3.0";
edition = "2015";
sha256 = "0slm4mqmpgs6hvz22ycny9lvyvl9ivs80a1lncslp7lszz02zc0p";
authors = [
"Benjamin Sago <ogham@bsago.me>"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"object" = rec {
crateName = "object";
version = "0.22.0";
edition = "2018";
sha256 = "15r383qxxwq08q3a5rfqhp971wd0nixd9ny22xw37jy31qv66fwd";
authors = [
"Nick Fitzgerald <fitzgen@gmail.com>"
"Philip Craig <philipjcraig@gmail.com>"
];
features = {
"all" = [ "read" "write" "std" "compression" "default" ];
"compression" = [ "flate2" "std" ];
"default" = [ "read" "compression" ];
"pe" = [ "coff" ];
"read" = [ "read_core" "archive" "coff" "elf" "macho" "pe" "wasm" "unaligned" ];
"rustc-dep-of-std" = [ "core" "compiler_builtins" "alloc" ];
"wasm" = [ "wasmparser" ];
"write" = [ "write_core" "coff" "elf" "macho" ];
"write_core" = [ "crc32fast" "indexmap" "std" ];
};
resolvedDefaultFeatures = [ "archive" "coff" "elf" "macho" "pe" "read_core" "unaligned" ];
};
"once_cell" = rec {
crateName = "once_cell";
version = "1.5.2";
edition = "2018";
sha256 = "183zs1dbmsv24mkafjypf9qwjrx46an58vb004a162l113sl3g8k";
authors = [
"Aleksey Kladov <aleksey.kladov@gmail.com>"
];
features = {
"default" = [ "std" ];
"std" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" "std" ];
};
"openssl" = rec {
crateName = "openssl";
version = "0.10.32";
edition = "2015";
sha256 = "0w4s742al5yx77mfkhpc9rwy8mhv0g6xhc06j2lsgp0xbnc47383";
authors = [
"Steven Fackler <sfackler@gmail.com>"
];
dependencies = [
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "foreign-types";
packageId = "foreign-types";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "libc";
packageId = "libc";
}
{
name = "openssl-sys";
packageId = "openssl-sys";
}
];
features = {
"vendored" = [ "openssl-sys/vendored" ];
};
};
"openssl-probe" = rec {
crateName = "openssl-probe";
version = "0.1.2";
edition = "2015";
sha256 = "1pijrdifgsdwd45b08c2g0dsmnhz7c3kmagb70839ngrd7d29bvp";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
};
"openssl-sys" = rec {
crateName = "openssl-sys";
version = "0.9.60";
edition = "2015";
sha256 = "1rpkfl0rmdcvxzyzp3n24g9clplh4avgzpi5c2a3hx96hcccf7wj";
build = "build/main.rs";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Steven Fackler <sfackler@gmail.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
{
name = "cc";
packageId = "cc";
}
{
name = "pkg-config";
packageId = "pkg-config";
}
{
name = "vcpkg";
packageId = "vcpkg";
target = {target, features}: (target."env" == "msvc");
}
];
features = {
"vendored" = [ "openssl-src" ];
};
};
"os_str_bytes" = rec {
crateName = "os_str_bytes";
version = "2.4.0";
edition = "2018";
sha256 = "11agh8n3x2l4sr3sxvx6byc1j3ryb1g6flb1ywn0qhq7xv1y3cmg";
authors = [
"dylni"
];
features = {
};
resolvedDefaultFeatures = [ "raw" ];
};
"os_type" = rec {
crateName = "os_type";
version = "2.2.0";
edition = "2015";
sha256 = "1sz90xi9br82zmp3s0fbp397ajm5f0xsir7pikwbg65fy0d03p3y";
authors = [
"Jan Schulte <hello@unexpected-co.de>"
];
dependencies = [
{
name = "regex";
packageId = "regex";
}
];
};
"pager" = rec {
crateName = "pager";
version = "0.16.0";
edition = "2018";
sha256 = "0s0r95q3jfbh2c3paab2bpl158lyaq35xnzy1x7mrdfhy26d1iq5";
authors = [
"Cyril Plisko <cyril.plisko@mountall.com>"
];
dependencies = [
{
name = "errno";
packageId = "errno";
}
{
name = "libc";
packageId = "libc";
}
];
features = {
};
};
"parking_lot" = rec {
crateName = "parking_lot";
version = "0.11.1";
edition = "2018";
sha256 = "1sqmgaia8zfd5fbnqw2w13ijh7crk3lf9vw4cb52vwlx0an48xvd";
authors = [
"Amanieu d'Antras <amanieu@gmail.com>"
];
dependencies = [
{
name = "instant";
packageId = "instant";
}
{
name = "lock_api";
packageId = "lock_api";
}
{
name = "parking_lot_core";
packageId = "parking_lot_core";
}
];
features = {
"deadlock_detection" = [ "parking_lot_core/deadlock_detection" ];
"nightly" = [ "parking_lot_core/nightly" "lock_api/nightly" ];
"owning_ref" = [ "lock_api/owning_ref" ];
"serde" = [ "lock_api/serde" ];
"stdweb" = [ "instant/stdweb" ];
"wasm-bindgen" = [ "instant/wasm-bindgen" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"parking_lot_core" = rec {
crateName = "parking_lot_core";
version = "0.8.2";
edition = "2018";
sha256 = "0wj2lf5g5fnhrdrg02rdfllpwq0zisnn0b2388a8b12gmn665jww";
authors = [
"Amanieu d'Antras <amanieu@gmail.com>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "instant";
packageId = "instant";
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "redox_syscall";
packageId = "redox_syscall";
target = { target, features }: (target."os" == "redox");
}
{
name = "smallvec";
packageId = "smallvec";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "winnt" "ntstatus" "minwindef" "winerror" "winbase" "errhandlingapi" "handleapi" ];
}
];
features = {
"deadlock_detection" = [ "petgraph" "thread-id" "backtrace" ];
};
};
"percent-encoding" = rec {
crateName = "percent-encoding";
version = "2.1.0";
edition = "2015";
sha256 = "0bp3zrsk3kr47fbpipyczidbbx4g54lzxdm77ni1i3qws10mdzfl";
libPath = "lib.rs";
authors = [
"The rust-url developers"
];
};
"pijul" = rec {
crateName = "pijul";
version = "1.0.0-alpha.31";
edition = "2018";
crateBin = [
{ name = "pijul"; path = "src/main.rs"; }
];
src = (builtins.filterSource sourceFilter ./pijul);
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "anyhow";
packageId = "anyhow";
}
{
name = "byteorder";
packageId = "byteorder";
}
{
name = "canonical-path";
packageId = "canonical-path";
}
{
name = "chrono";
packageId = "chrono";
}
{
name = "clap";
packageId = "clap";
}
{
name = "ctrlc";
packageId = "ctrlc";
}
{
name = "data-encoding";
packageId = "data-encoding";
}
{
name = "dirs-next";
packageId = "dirs-next";
}
{
name = "edit";
packageId = "edit";
}
{
name = "env_logger";
packageId = "env_logger";
}
{
name = "futures";
packageId = "futures";
}
{
name = "futures-util";
packageId = "futures-util";
}
{
name = "git2";
packageId = "git2";
optional = true;
}
{
name = "human-panic";
packageId = "human-panic";
}
{
name = "ignore";
packageId = "ignore";
}
{
name = "indicatif";
packageId = "indicatif";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "libpijul";
packageId = "libpijul";
features = [ "tarball" ];
}
{
name = "log";
packageId = "log";
}
{
name = "num_cpus";
packageId = "num_cpus";
}
{
name = "pager";
packageId = "pager";
target = { target, features }: (target."os" == "linux");
}
{
name = "rand";
packageId = "rand 0.8.1";
}
{
name = "regex";
packageId = "regex";
}
{
name = "reqwest";
packageId = "reqwest";
features = [ "stream" ];
}
{
name = "rpassword";
packageId = "rpassword";
}
{
name = "sanakirja";
packageId = "sanakirja";
features = [ "crc32" ];
}
{
name = "serde";
packageId = "serde";
}
{
name = "serde_derive";
packageId = "serde_derive";
}
{
name = "serde_json";
packageId = "serde_json";
}
{
name = "thrussh";
packageId = "thrussh";
}
{
name = "thrussh-config";
packageId = "thrussh-config";
}
{
name = "thrussh-keys";
packageId = "thrussh-keys";
}
{
name = "tokio";
packageId = "tokio";
features = [ "rt-multi-thread" "macros" "sync" ];
}
{
name = "toml";
packageId = "toml";
}
{
name = "whoami";
packageId = "whoami 0.9.0";
}
];
features = {
"default" = [ "keep-changes" ];
"git" = [ "git2" "sanakirja/git2" ];
};
resolvedDefaultFeatures = [ "default" "git" "git2" "keep-changes" ];
};
"pijul-macros" = rec {
crateName = "pijul-macros";
version = "0.3.0";
edition = "2018";
src = (builtins.filterSource sourceFilter ./pijul-macros);
procMacro = true;
authors = [
"Pierre-Étienne Meunier <pmeunier@mailbox.org>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "regex";
packageId = "regex";
}
{
name = "syn";
packageId = "syn";
}
];
};
"pin-project 0.4.27" = rec {
crateName = "pin-project";
version = "0.4.27";
edition = "2018";
sha256 = "05cfd6sp0ydkdfw7b3c8s1sws0xmhclylam9icnkvsiq9glwiyrg";
authors = [
"Taiki Endo <te316e89@gmail.com>"
];
dependencies = [
{
name = "pin-project-internal";
packageId = "pin-project-internal 0.4.27";
usesDefaultFeatures = false;
}
];
};
"pin-project 1.0.3" = rec {
crateName = "pin-project";
version = "1.0.3";
edition = "2018";
sha256 = "1rvmv48ynnnh526ylxhinyy9ypw5113p8qan6ijvmmma753810ss";
authors = [
"Taiki Endo <te316e89@gmail.com>"
];
dependencies = [
{
name = "pin-project-internal";
packageId = "pin-project-internal 1.0.3";
usesDefaultFeatures = false;
}
];
};
"pin-project-internal 0.4.27" = rec {
crateName = "pin-project-internal";
version = "0.4.27";
edition = "2018";
sha256 = "158q986q00s1pz1akazdcjkvkbdcbdhfw5azw8g3mgkadgjjmbb5";
procMacro = true;
authors = [
"Taiki Endo <te316e89@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" "visit-mut" ];
}
];
};
"pin-project-internal 1.0.3" = rec {
crateName = "pin-project-internal";
version = "1.0.3";
edition = "2018";
sha256 = "0flbvqy34ng3y21s8zqqm4glj62p66xyqpqwphaksi3kixmw9g5p";
procMacro = true;
authors = [
"Taiki Endo <te316e89@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" "visit-mut" ];
}
];
};
"pin-project-lite" = rec {
crateName = "pin-project-lite";
version = "0.2.1";
edition = "2018";
sha256 = "1n22ljlhvy6zvb19jr2gh5604vhh9cnwwlp3q9a9kpycakbl6rz3";
authors = [
"Taiki Endo <te316e89@gmail.com>"
];
};
"pin-utils" = rec {
crateName = "pin-utils";
version = "0.1.0";
edition = "2018";
sha256 = "117ir7vslsl2z1a7qzhws4pd01cg2d3338c47swjyvqv2n60v1wb";
authors = [
"Josef Brandl <mail@josefbrandl.de>"
];
};
"pkg-config" = rec {
crateName = "pkg-config";
version = "0.3.19";
edition = "2015";
sha256 = "0k4860955riciibxr8bhnklp79jydp4xfylwdn5v9kj96hxlac9q";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
};
"ppv-lite86" = rec {
crateName = "ppv-lite86";
version = "0.2.10";
edition = "2018";
sha256 = "0ms8198kclg4h96ggbziixxmsdl847s648kmbx11zlmjsqjccx5c";
authors = [
"The CryptoCorrosion Contributors"
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "simd" "std" ];
};
"proc-macro-error" = rec {
crateName = "proc-macro-error";
version = "1.0.4";
edition = "2018";
sha256 = "1373bhxaf0pagd8zkyd03kkx6bchzf6g0dkwrwzsnal9z47lj9fs";
authors = [
"CreepySkeleton <creepy-skeleton@yandex.ru>"
];
dependencies = [
{
name = "proc-macro-error-attr";
packageId = "proc-macro-error-attr";
}
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
optional = true;
usesDefaultFeatures = false;
}
];
buildDependencies = [
{
name = "version_check";
packageId = "version_check";
}
];
features = {
"default" = [ "syn-error" ];
"syn-error" = [ "syn" ];
};
resolvedDefaultFeatures = [ "default" "syn" "syn-error" ];
};
"proc-macro-error-attr" = rec {
crateName = "proc-macro-error-attr";
version = "1.0.4";
edition = "2018";
sha256 = "0sgq6m5jfmasmwwy8x4mjygx5l7kp8s4j60bv25ckv2j1qc41gm1";
procMacro = true;
authors = [
"CreepySkeleton <creepy-skeleton@yandex.ru>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
];
buildDependencies = [
{
name = "version_check";
packageId = "version_check";
}
];
};
"proc-macro-hack" = rec {
crateName = "proc-macro-hack";
version = "0.5.19";
edition = "2018";
sha256 = "1rg0kzsj7lj00qj602d3h77spwfz48vixn1wbjp7a4yrq65w9w6v";
procMacro = true;
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
};
"proc-macro-nested" = rec {
crateName = "proc-macro-nested";
version = "0.1.6";
edition = "2015";
sha256 = "0nnwm9bvp1fmr8nqjp8ynrkj97yzpsdh3062li8b0f4hzgd818gb";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
};
"proc-macro2" = rec {
crateName = "proc-macro2";
version = "1.0.24";
edition = "2018";
sha256 = "0wcabxzrddcjmryndw8fpyxcq6rw63m701vx86xxf03y3bp081qy";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "unicode-xid";
packageId = "unicode-xid";
}
];
features = {
"default" = [ "proc-macro" ];
};
resolvedDefaultFeatures = [ "default" "proc-macro" ];
};
"quote" = rec {
crateName = "quote";
version = "1.0.8";
edition = "2018";
sha256 = "1pr8dz4pyfbbsqpqw6ygin8m4sz61iir7nl23233cgwsa71k254r";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
usesDefaultFeatures = false;
}
];
features = {
"default" = [ "proc-macro" ];
"proc-macro" = [ "proc-macro2/proc-macro" ];
};
resolvedDefaultFeatures = [ "default" "proc-macro" ];
};
"rand 0.7.3" = rec {
crateName = "rand";
version = "0.7.3";
edition = "2018";
sha256 = "00sdaimkbz491qgi6qxkv582yivl32m2jd401kzbn94vsiwicsva";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
];
dependencies = [
{
name = "getrandom";
packageId = "getrandom 0.1.16";
rename = "getrandom_package";
optional = true;
}
{
name = "libc";
packageId = "libc";
optional = true;
usesDefaultFeatures = false;
target = { target, features }: target."unix";
}
{
name = "rand_chacha";
packageId = "rand_chacha 0.2.2";
usesDefaultFeatures = false;
target = { target, features }: (!(target."os" == "emscripten"));
}
{
name = "rand_core";
packageId = "rand_core 0.5.1";
}
{
name = "rand_hc";
packageId = "rand_hc 0.2.0";
target = { target, features }: (target."os" == "emscripten");
}
];
devDependencies = [
{
name = "rand_hc";
packageId = "rand_hc 0.2.0";
}
];
features = {
"alloc" = [ "rand_core/alloc" ];
"default" = [ "std" ];
"getrandom" = [ "getrandom_package" "rand_core/getrandom" ];
"nightly" = [ "simd_support" ];
"simd_support" = [ "packed_simd" ];
"small_rng" = [ "rand_pcg" ];
"std" = [ "rand_core/std" "rand_chacha/std" "alloc" "getrandom" "libc" ];
"stdweb" = [ "getrandom_package/stdweb" ];
"wasm-bindgen" = [ "getrandom_package/wasm-bindgen" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "getrandom" "getrandom_package" "libc" "std" ];
};
"rand 0.8.1" = rec {
crateName = "rand";
version = "0.8.1";
edition = "2018";
sha256 = "0d7fxnq3j2807qsjciyfbv362ym22x7slr95ff9bb8iz1m2wsky2";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
];
dependencies = [
{
name = "libc";
packageId = "libc";
optional = true;
usesDefaultFeatures = false;
target = { target, features }: target."unix";
}
{
name = "rand_chacha";
packageId = "rand_chacha 0.3.0";
optional = true;
usesDefaultFeatures = false;
target = { target, features }: (!(target."os" == "emscripten"));
}
{
name = "rand_core";
packageId = "rand_core 0.6.1";
}
{
name = "rand_hc";
packageId = "rand_hc 0.3.0";
optional = true;
target = { target, features }: (target."os" == "emscripten");
}
];
devDependencies = [
{
name = "rand_hc";
packageId = "rand_hc 0.3.0";
}
];
features = {
"alloc" = [ "rand_core/alloc" ];
"default" = [ "std" "std_rng" ];
"getrandom" = [ "rand_core/getrandom" ];
"serde1" = [ "serde" ];
"simd_support" = [ "packed_simd" ];
"std" = [ "rand_core/std" "rand_chacha/std" "alloc" "getrandom" "libc" ];
"std_rng" = [ "rand_chacha" "rand_hc" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "getrandom" "libc" "rand_chacha" "rand_hc" "std" "std_rng" ];
};
"rand_chacha 0.2.2" = rec {
crateName = "rand_chacha";
version = "0.2.2";
edition = "2018";
sha256 = "00il36fkdbsmpr99p9ksmmp6dn1md7rmnwmz0rr77jbrca2yvj7l";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
"The CryptoCorrosion Contributors"
];
dependencies = [
{
name = "ppv-lite86";
packageId = "ppv-lite86";
usesDefaultFeatures = false;
features = [ "simd" ];
}
{
name = "rand_core";
packageId = "rand_core 0.5.1";
}
];
features = {
"default" = [ "std" "simd" ];
"std" = [ "ppv-lite86/std" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"rand_chacha 0.3.0" = rec {
crateName = "rand_chacha";
version = "0.3.0";
edition = "2018";
sha256 = "03df2xh5nbdvwr17qm3sviaxa95r8yhm1nil2pr0pqf90p7ka9z1";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
"The CryptoCorrosion Contributors"
];
dependencies = [
{
name = "ppv-lite86";
packageId = "ppv-lite86";
usesDefaultFeatures = false;
features = [ "simd" ];
}
{
name = "rand_core";
packageId = "rand_core 0.6.1";
}
];
features = {
"default" = [ "std" ];
"std" = [ "ppv-lite86/std" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"rand_core 0.5.1" = rec {
crateName = "rand_core";
version = "0.5.1";
edition = "2018";
sha256 = "06bdvx08v3rkz451cm7z59xwwqn1rkfh6v9ay77b14f8dwlybgch";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
];
dependencies = [
{
name = "getrandom";
packageId = "getrandom 0.1.16";
optional = true;
}
];
features = {
"serde1" = [ "serde" ];
"std" = [ "alloc" "getrandom" "getrandom/std" ];
};
resolvedDefaultFeatures = [ "alloc" "getrandom" "std" ];
};
"rand_core 0.6.1" = rec {
crateName = "rand_core";
version = "0.6.1";
edition = "2018";
sha256 = "1rfjrcyaj7blz2nawv2pypm5kqc59p80n6f5pg691399iggxf9n0";
authors = [
"The Rand Project Developers"
"The Rust Project Developers"
];
dependencies = [
{
name = "getrandom";
packageId = "getrandom 0.2.1";
optional = true;
}
];
features = {
"serde1" = [ "serde" ];
"std" = [ "alloc" "getrandom" "getrandom/std" ];
};
resolvedDefaultFeatures = [ "alloc" "getrandom" "std" ];
};
"rand_hc 0.2.0" = rec {
crateName = "rand_hc";
version = "0.2.0";
edition = "2018";
sha256 = "0g31sqwpmsirdlwr0svnacr4dbqyz339im4ssl9738cjgfpjjcfa";
authors = [
"The Rand Project Developers"
];
dependencies = [
{
name = "rand_core";
packageId = "rand_core 0.5.1";
}
];
};
"rand_hc 0.3.0" = rec {
crateName = "rand_hc";
version = "0.3.0";
edition = "2018";
sha256 = "0wra6ar22zdjkry9dsq1mg620m4h3qb9s8rfykkz4im4crqfz41i";
authors = [
"The Rand Project Developers"
];
dependencies = [
{
name = "rand_core";
packageId = "rand_core 0.6.1";
}
];
};
"redox_syscall" = rec {
crateName = "redox_syscall";
version = "0.1.57";
edition = "2015";
sha256 = "1kh59fpwy33w9nwd5iyc283yglq8pf2s41hnhvl48iax9mz0zk21";
libName = "syscall";
authors = [
"Jeremy Soller <jackpot51@gmail.com>"
];
};
"redox_users" = rec {
crateName = "redox_users";
version = "0.3.5";
edition = "2015";
sha256 = "179fxmyqaqzibp533ajgbn4ljah9lrzpqvd3i73h55bs7qrkf1yy";
authors = [
"Jose Narvaez <goyox86@gmail.com>"
"Wesley Hershberger <mggmugginsmc@gmail.com>"
];
dependencies = [
{
name = "getrandom";
packageId = "getrandom 0.1.16";
}
{
name = "redox_syscall";
packageId = "redox_syscall";
}
{
name = "rust-argon2";
packageId = "rust-argon2";
optional = true;
}
];
features = {
"auth" = [ "rust-argon2" ];
"default" = [ "auth" ];
};
resolvedDefaultFeatures = [ "auth" "default" "rust-argon2" ];
};
"regex" = rec {
crateName = "regex";
version = "1.4.2";
edition = "2015";
sha256 = "172bw2yryv65whn3n5vkww4kgk0bq08lx0zbln8xwia7xl9jrkrq";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "aho-corasick";
packageId = "aho-corasick";
optional = true;
}
{
name = "memchr";
packageId = "memchr";
optional = true;
}
{
name = "regex-syntax";
packageId = "regex-syntax";
usesDefaultFeatures = false;
}
{
name = "thread_local";
packageId = "thread_local";
optional = true;
}
];
features = {
"default" = [ "std" "perf" "unicode" "regex-syntax/default" ];
"perf" = [ "perf-cache" "perf-dfa" "perf-inline" "perf-literal" ];
"perf-cache" = [ "thread_local" ];
"perf-literal" = [ "aho-corasick" "memchr" ];
"unicode" = [ "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" "regex-syntax/unicode" ];
"unicode-age" = [ "regex-syntax/unicode-age" ];
"unicode-bool" = [ "regex-syntax/unicode-bool" ];
"unicode-case" = [ "regex-syntax/unicode-case" ];
"unicode-gencat" = [ "regex-syntax/unicode-gencat" ];
"unicode-perl" = [ "regex-syntax/unicode-perl" ];
"unicode-script" = [ "regex-syntax/unicode-script" ];
"unicode-segment" = [ "regex-syntax/unicode-segment" ];
"unstable" = [ "pattern" ];
"use_std" = [ "std" ];
};
resolvedDefaultFeatures = [ "aho-corasick" "default" "memchr" "perf" "perf-cache" "perf-dfa" "perf-inline" "perf-literal" "std" "thread_local" "unicode" "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ];
};
"regex-syntax" = rec {
crateName = "regex-syntax";
version = "0.6.21";
edition = "2015";
sha256 = "12d176jkgw9749g07zjxz0n78nyvb2nqx3j4sp5aqyphvji1n61v";
authors = [
"The Rust Project Developers"
];
features = {
"default" = [ "unicode" ];
"unicode" = [ "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ];
};
resolvedDefaultFeatures = [ "default" "unicode" "unicode-age" "unicode-bool" "unicode-case" "unicode-gencat" "unicode-perl" "unicode-script" "unicode-segment" ];
};
"remove_dir_all" = rec {
crateName = "remove_dir_all";
version = "0.5.3";
edition = "2015";
sha256 = "1rzqbsgkmr053bxxl04vmvsd1njyz0nxvly97aip6aa2cmb15k9s";
authors = [
"Aaronepower <theaaronepower@gmail.com>"
];
dependencies = [
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "std" "errhandlingapi" "winerror" "fileapi" "winbase" ];
}
];
};
"reqwest" = rec {
crateName = "reqwest";
version = "0.11.0";
edition = "2018";
sha256 = "1pp9km1hn1y3wxkafv3mzk4b6nv4hxqx15591awmyrxa6081na7x";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "base64";
packageId = "base64";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "encoding_rs";
packageId = "encoding_rs";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "futures-core";
packageId = "futures-core";
usesDefaultFeatures = false;
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "futures-util";
packageId = "futures-util";
usesDefaultFeatures = false;
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "http";
packageId = "http";
}
{
name = "http-body";
packageId = "http-body";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "hyper";
packageId = "hyper";
usesDefaultFeatures = false;
target = { target, features }: (!(target."arch" == "wasm32"));
features = [ "tcp" "http1" "http2" "client" ];
}
{
name = "hyper-tls";
packageId = "hyper-tls";
optional = true;
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "ipnet";
packageId = "ipnet";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "js-sys";
packageId = "js-sys";
target = { target, features }: (target."arch" == "wasm32");
}
{
name = "lazy_static";
packageId = "lazy_static";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "log";
packageId = "log";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "mime";
packageId = "mime";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "native-tls";
packageId = "native-tls";
rename = "native-tls-crate";
optional = true;
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "percent-encoding";
packageId = "percent-encoding";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "pin-project-lite";
packageId = "pin-project-lite";
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "serde";
packageId = "serde";
}
{
name = "serde_urlencoded";
packageId = "serde_urlencoded";
}
{
name = "tokio";
packageId = "tokio";
usesDefaultFeatures = false;
target = { target, features }: (!(target."arch" == "wasm32"));
features = [ "net" "time" ];
}
{
name = "tokio-native-tls";
packageId = "tokio-native-tls";
optional = true;
target = { target, features }: (!(target."arch" == "wasm32"));
}
{
name = "url";
packageId = "url";
}
{
name = "wasm-bindgen";
packageId = "wasm-bindgen";
target = { target, features }: (target."arch" == "wasm32");
features = [ "serde-serialize" ];
}
{
name = "wasm-bindgen-futures";
packageId = "wasm-bindgen-futures";
target = { target, features }: (target."arch" == "wasm32");
}
{
name = "web-sys";
packageId = "web-sys";
target = { target, features }: (target."arch" == "wasm32");
features = [ "Headers" "Request" "RequestInit" "RequestMode" "Response" "Window" "FormData" "Blob" "BlobPropertyBag" "ServiceWorkerGlobalScope" ];
}
{
name = "winreg";
packageId = "winreg";
target = { target, features }: target."windows";
}
];
devDependencies = [
{
name = "hyper";
packageId = "hyper";
usesDefaultFeatures = false;
target = {target, features}: (!(target."arch" == "wasm32"));
features = [ "tcp" "stream" "http1" "http2" "client" "server" ];
}
{
name = "serde";
packageId = "serde";
target = {target, features}: (!(target."arch" == "wasm32"));
features = [ "derive" ];
}
{
name = "tokio";
packageId = "tokio";
usesDefaultFeatures = false;
target = {target, features}: (!(target."arch" == "wasm32"));
features = [ "macros" "rt-multi-thread" ];
}
];
features = {
"__rustls" = [ "hyper-rustls" "tokio-rustls" "rustls" "__tls" ];
"blocking" = [ "futures-util/io" "tokio/rt-multi-thread" "tokio/sync" ];
"brotli" = [ "async-compression" "async-compression/brotli" "tokio-util" ];
"cookies" = [ "cookie_crate" "cookie_store" "time" ];
"default" = [ "default-tls" ];
"default-tls" = [ "hyper-tls" "native-tls-crate" "__tls" "tokio-native-tls" ];
"gzip" = [ "async-compression" "async-compression/gzip" "tokio-util" ];
"json" = [ "serde_json" ];
"multipart" = [ "mime_guess" ];
"native-tls" = [ "default-tls" ];
"native-tls-vendored" = [ "native-tls" "native-tls-crate/vendored" ];
"rustls-tls" = [ "rustls-tls-webpki-roots" ];
"rustls-tls-manual-roots" = [ "__rustls" ];
"rustls-tls-native-roots" = [ "rustls-native-certs" "__rustls" ];
"rustls-tls-webpki-roots" = [ "webpki-roots" "__rustls" ];
"socks" = [ "tokio-socks" ];
"trust-dns" = [ "trust-dns-resolver" ];
};
resolvedDefaultFeatures = [ "__tls" "default" "default-tls" "hyper-tls" "native-tls-crate" "stream" "tokio-native-tls" ];
};
"rpassword" = rec {
crateName = "rpassword";
version = "5.0.0";
edition = "2015";
sha256 = "1j96nc3dmqhxwb4ql50r5xjs0imwr2x6mrj02mj9i7grq1zj6mfp";
authors = [
"Conrad Kleinespel <conradk@conradk.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "std" "winnt" "fileapi" "processenv" "winbase" "handleapi" "consoleapi" "minwindef" "wincon" ];
}
];
};
"rust-argon2" = rec {
crateName = "rust-argon2";
version = "0.8.3";
edition = "2018";
sha256 = "1yvqkv04fqk3cbvyasibr4bqbxa6mij8jdvibakwlcsbjh6q462b";
libName = "argon2";
authors = [
"Martijn Rijkeboer <mrr@sru-systems.com>"
];
dependencies = [
{
name = "base64";
packageId = "base64";
}
{
name = "blake2b_simd";
packageId = "blake2b_simd";
}
{
name = "constant_time_eq";
packageId = "constant_time_eq";
}
{
name = "crossbeam-utils";
packageId = "crossbeam-utils";
optional = true;
}
];
features = {
"default" = [ "crossbeam-utils" ];
};
resolvedDefaultFeatures = [ "crossbeam-utils" "default" ];
};
"rustc-demangle" = rec {
crateName = "rustc-demangle";
version = "0.1.18";
edition = "2015";
sha256 = "0cn2hdq0glr875hvpi0hvb19xj3y9gfnk0lnsw3wl538wc7asfvf";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
features = {
"rustc-dep-of-std" = [ "core" "compiler_builtins" ];
};
};
"ryu" = rec {
crateName = "ryu";
version = "1.0.5";
edition = "2018";
sha256 = "0vpqv1dj7fksa6hm3zpk5rbsjs0ifbfy7xwzsyyil0rx37a03lvi";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
features = {
};
};
"same-file" = rec {
crateName = "same-file";
version = "1.0.6";
edition = "2018";
sha256 = "00h5j1w87dmhnvbv9l8bic3y7xxsnjmssvifw2ayvgx9mb1ivz4k";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "winapi-util";
packageId = "winapi-util";
target = { target, features }: target."windows";
}
];
};
"sanakirja" = rec {
crateName = "sanakirja";
version = "0.15.8";
edition = "2018";
sha256 = "0zpjgp57k0a8g1xzvnkss91c659v5cyzh53xqkgrxm2g3x4g3hix";
authors = [
"Pierre-Étienne Meunier"
"Florent Becker"
];
dependencies = [
{
name = "crc32fast";
packageId = "crc32fast";
optional = true;
}
{
name = "fs2";
packageId = "fs2";
}
{
name = "git2";
packageId = "git2";
optional = true;
}
{
name = "log";
packageId = "log";
}
{
name = "memmap";
packageId = "memmap";
optional = true;
}
{
name = "parking_lot";
packageId = "parking_lot";
}
{
name = "rand";
packageId = "rand 0.8.1";
}
{
name = "thiserror";
packageId = "thiserror";
}
];
features = {
"crc32" = [ "crc32fast" ];
"mmap" = [ "memmap" "crc32" ];
};
resolvedDefaultFeatures = [ "crc32" "crc32fast" "default" "git2" "memmap" "mmap" ];
};
"schannel" = rec {
crateName = "schannel";
version = "0.1.19";
edition = "2015";
sha256 = "0xdwr3clrylywpv2r5hw7mrxmsf7ljagwiymw2z60ki3kihbl1cg";
authors = [
"Steven Fackler <sfackler@gmail.com>"
"Steffen Butzer <steffen.butzer@outlook.com>"
];
dependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "winapi";
packageId = "winapi";
features = [ "lmcons" "minschannel" "securitybaseapi" "schannel" "sspi" "sysinfoapi" "timezoneapi" "winbase" "wincrypt" "winerror" ];
}
];
};
"scopeguard" = rec {
crateName = "scopeguard";
version = "1.1.0";
edition = "2015";
sha256 = "1kbqm85v43rq92vx7hfiay6pmcga03vrjbbfwqpyj3pwsg3b16nj";
authors = [
"bluss"
];
features = {
"default" = [ "use_std" ];
};
};
"security-framework" = rec {
crateName = "security-framework";
version = "2.0.0";
edition = "2018";
sha256 = "0scc4vj2mw9k6qpxp26zx8gnqnmw79nwayja91x030457hp9qxf1";
authors = [
"Steven Fackler <sfackler@gmail.com>"
"Kornel <kornel@geekhood.net>"
];
dependencies = [
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "core-foundation";
packageId = "core-foundation";
}
{
name = "core-foundation-sys";
packageId = "core-foundation-sys";
}
{
name = "libc";
packageId = "libc";
}
{
name = "security-framework-sys";
packageId = "security-framework-sys";
usesDefaultFeatures = false;
}
];
features = {
"OSX_10_10" = [ "OSX_10_9" "security-framework-sys/OSX_10_10" ];
"OSX_10_11" = [ "OSX_10_10" "security-framework-sys/OSX_10_11" ];
"OSX_10_12" = [ "OSX_10_11" "security-framework-sys/OSX_10_12" ];
"OSX_10_13" = [ "OSX_10_12" "security-framework-sys/OSX_10_13" "alpn" "session-tickets" ];
"OSX_10_9" = [ "security-framework-sys/OSX_10_9" ];
"default" = [ "OSX_10_9" ];
};
resolvedDefaultFeatures = [ "OSX_10_9" "default" ];
};
"security-framework-sys" = rec {
crateName = "security-framework-sys";
version = "2.0.0";
edition = "2018";
sha256 = "12v7wpf7cbc92xza4lf3w12411wzrkkvlbjgrhrid9yj4rg9v6zr";
authors = [
"Steven Fackler <sfackler@gmail.com>"
"Kornel <kornel@geekhood.net>"
];
dependencies = [
{
name = "core-foundation-sys";
packageId = "core-foundation-sys";
}
{
name = "libc";
packageId = "libc";
}
];
features = {
"OSX_10_10" = [ "OSX_10_9" ];
"OSX_10_11" = [ "OSX_10_10" ];
"OSX_10_12" = [ "OSX_10_11" ];
"OSX_10_13" = [ "OSX_10_12" ];
"default" = [ "OSX_10_9" ];
};
resolvedDefaultFeatures = [ "OSX_10_9" "default" ];
};
"serde" = rec {
crateName = "serde";
version = "1.0.118";
edition = "2015";
sha256 = "0028kv3dh3ix5g7jfws22zb9hcqq4cnpwn2lnlpam1wxhmil5ih6";
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>"
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "serde_derive";
packageId = "serde_derive";
optional = true;
}
];
devDependencies = [
{
name = "serde_derive";
packageId = "serde_derive";
}
];
features = {
"default" = [ "std" ];
"derive" = [ "serde_derive" ];
};
resolvedDefaultFeatures = [ "default" "derive" "serde_derive" "std" ];
};
"serde_derive" = rec {
crateName = "serde_derive";
version = "1.0.118";
edition = "2015";
sha256 = "1pvj4v8k107ichsnm7jgm9kxyi2lf971x52bmxhm5mcwd4k3akf8";
procMacro = true;
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>"
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "visit" ];
}
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"serde_json" = rec {
crateName = "serde_json";
version = "1.0.61";
edition = "2018";
sha256 = "0nijvxvgcncvd1wbn73zx7q14bdxah0gf2789qd8kdjpa1cv5kjg";
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>"
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "itoa";
packageId = "itoa";
usesDefaultFeatures = false;
}
{
name = "ryu";
packageId = "ryu";
}
{
name = "serde";
packageId = "serde";
usesDefaultFeatures = false;
}
];
features = {
"alloc" = [ "serde/alloc" ];
"default" = [ "std" ];
"preserve_order" = [ "indexmap" ];
"std" = [ "serde/std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"serde_urlencoded" = rec {
crateName = "serde_urlencoded";
version = "0.7.0";
edition = "2018";
sha256 = "1s9wnjrak5a0igfhcghhz51kvi7n010j5rs9lmhd5hfrz2kmgypd";
authors = [
"Anthony Ramine <n.oxyde@gmail.com>"
];
dependencies = [
{
name = "form_urlencoded";
packageId = "form_urlencoded";
}
{
name = "itoa";
packageId = "itoa";
}
{
name = "ryu";
packageId = "ryu";
}
{
name = "serde";
packageId = "serde";
}
];
};
"slab" = rec {
crateName = "slab";
version = "0.4.2";
edition = "2015";
sha256 = "1y59xsa27jk84sxzswjk60xcjf8b4fm5960jwpznrrcmasyva4f1";
authors = [
"Carl Lerche <me@carllerche.com>"
];
};
"smallvec" = rec {
crateName = "smallvec";
version = "1.6.0";
edition = "2018";
sha256 = "1h0s7dbhf96k1g726h7rp16wm4m83mpnmi7qkfbirr387dgwlm8s";
authors = [
"The Servo Project Developers"
];
features = {
};
};
"socket2" = rec {
crateName = "socket2";
version = "0.3.19";
edition = "2018";
sha256 = "0vldz14mxqxnjqb6an2pj7mgclv7nrk45cpscwq7g3fj2c0mfbhj";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
target = { target, features }: target."unix";
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
features = [ "align" ];
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "handleapi" "ws2def" "ws2ipdef" "ws2tcpip" "minwindef" ];
}
];
features = {
};
};
"strsim" = rec {
crateName = "strsim";
version = "0.10.0";
edition = "2015";
sha256 = "08s69r4rcrahwnickvi0kq49z524ci50capybln83mg6b473qivk";
authors = [
"Danny Guo <danny@dannyguo.com>"
];
};
"subtle" = rec {
crateName = "subtle";
version = "2.4.0";
edition = "2015";
sha256 = "1hipji54nipkya6szyk9kl7nz07qli570b1isklz78dda44dm08y";
authors = [
"Isis Lovecruft <isis@patternsinthevoid.net>"
"Henry de Valence <hdevalence@hdevalence.ca>"
];
features = {
"default" = [ "std" "i128" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"syn" = rec {
crateName = "syn";
version = "1.0.58";
edition = "2018";
sha256 = "1m85bvywsm6cf17d534c5ma73zch0cgwqc6q2bblqnd67vbs6q6c";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
usesDefaultFeatures = false;
}
{
name = "quote";
packageId = "quote";
optional = true;
usesDefaultFeatures = false;
}
{
name = "unicode-xid";
packageId = "unicode-xid";
}
];
features = {
"default" = [ "derive" "parsing" "printing" "clone-impls" "proc-macro" ];
"printing" = [ "quote" ];
"proc-macro" = [ "proc-macro2/proc-macro" "quote/proc-macro" ];
"test" = [ "syn-test-suite/all-features" ];
};
resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "full" "parsing" "printing" "proc-macro" "quote" "visit" "visit-mut" ];
};
"tar" = rec {
crateName = "tar";
version = "0.4.30";
edition = "2018";
sha256 = "1452rf0y7jf4apyngvbfpszpik7a9z7gl9y5jbhl76kyanvrg6a8";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "filetime";
packageId = "filetime";
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "redox_syscall";
packageId = "redox_syscall";
target = { target, features }: (target."os" == "redox");
}
{
name = "xattr";
packageId = "xattr";
optional = true;
target = { target, features }: target."unix";
}
];
features = {
"default" = [ "xattr" ];
};
resolvedDefaultFeatures = [ "default" "xattr" ];
};
"tempfile" = rec {
crateName = "tempfile";
version = "3.1.0";
edition = "2018";
sha256 = "1a9cfdqw70n7bcnkx05aih9xdba8lqazmqlkjpkmn2la6gcj8vks";
authors = [
"Steven Allen <steven@stebalien.com>"
"The Rust Project Developers"
"Ashley Mannix <ashleymannix@live.com.au>"
"Jason White <jasonaw0@gmail.com>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 0.1.10";
}
{
name = "libc";
packageId = "libc";
target = { target, features }: target."unix";
}
{
name = "rand";
packageId = "rand 0.7.3";
}
{
name = "redox_syscall";
packageId = "redox_syscall";
target = { target, features }: (target."os" == "redox");
}
{
name = "remove_dir_all";
packageId = "remove_dir_all";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "fileapi" "handleapi" "winbase" ];
}
];
};
"termcolor" = rec {
crateName = "termcolor";
version = "1.1.2";
edition = "2018";
sha256 = "1x65i1ny4m6z1by62ra6wdcrd557p2ysm866x0pg60zby2cxizid";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "winapi-util";
packageId = "winapi-util";
target = { target, features }: target."windows";
}
];
};
"terminal_size" = rec {
crateName = "terminal_size";
version = "0.1.15";
edition = "2018";
sha256 = "1qfb07d040wh3m4yfnbxknda7i5fr56ypnwdwdgmzb1zpn1x3ljb";
authors = [
"Andrew Chin <achin@eminence32.net>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
target = { target, features }: (!target."windows");
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "handleapi" "processenv" "winbase" "wincon" "winnt" ];
}
];
};
"textwrap" = rec {
crateName = "textwrap";
version = "0.12.1";
edition = "2018";
sha256 = "12978qmkl5gcp94lxndpvp9qxq8mxp7hm9xbrw3422dgikchhc10";
authors = [
"Martin Geisler <martin@geisler.net>"
];
dependencies = [
{
name = "unicode-width";
packageId = "unicode-width";
}
];
};
"thiserror" = rec {
crateName = "thiserror";
version = "1.0.23";
edition = "2018";
sha256 = "0imiv97kdlba0r0ld6mlizfmw5rpmfzhvk7xw8l8k35zd9n63k3n";
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "thiserror-impl";
packageId = "thiserror-impl";
}
];
};
"thiserror-impl" = rec {
crateName = "thiserror-impl";
version = "1.0.23";
edition = "2018";
sha256 = "1law4wqpc36hxs4vcgc1pjjniy4l6xn7kwvf0k886xf2mqn3mrwv";
procMacro = true;
authors = [
"David Tolnay <dtolnay@gmail.com>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
}
];
};
"thread_local" = rec {
crateName = "thread_local";
version = "1.0.1";
edition = "2015";
sha256 = "054vlrr1vsdy1h4b7n99mr24pnj8928ig9qwzg36wnkld4dns36l";
authors = [
"Amanieu d'Antras <amanieu@gmail.com>"
];
dependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
}
];
};
"threadpool" = rec {
crateName = "threadpool";
version = "1.8.1";
edition = "2015";
sha256 = "1amgfyzvynbm8pacniivzq9r0fh3chhs7kijic81j76l6c5ycl6h";
authors = [
"The Rust Project Developers"
"Corey Farwell <coreyf@rwell.org>"
"Stefan Schindler <dns2utf8@estada.ch>"
];
dependencies = [
{
name = "num_cpus";
packageId = "num_cpus";
}
];
};
"thrussh" = rec {
crateName = "thrussh";
version = "0.32.1";
edition = "2018";
sha256 = "0a9qvwlfm0fwh9x77k2h2fc5bwcrmqsjsf6nzq07qbgpnh1hqm0n";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "bitflags";
packageId = "bitflags";
}
{
name = "byteorder";
packageId = "byteorder";
}
{
name = "cryptovec";
packageId = "cryptovec";
}
{
name = "flate2";
packageId = "flate2";
optional = true;
}
{
name = "futures";
packageId = "futures";
}
{
name = "log";
packageId = "log";
}
{
name = "openssl";
packageId = "openssl";
}
{
name = "thiserror";
packageId = "thiserror";
}
{
name = "thrussh-keys";
packageId = "thrussh-keys";
}
{
name = "thrussh-libsodium";
packageId = "thrussh-libsodium";
}
{
name = "tokio";
packageId = "tokio";
features = [ "io-util" "rt-multi-thread" "time" "net" "sync" "macros" ];
}
];
devDependencies = [
{
name = "tokio";
packageId = "tokio";
features = [ "io-util" "rt-multi-thread" "time" "net" "sync" "macros" ];
}
];
features = {
"default" = [ "flate2" ];
};
resolvedDefaultFeatures = [ "default" "flate2" ];
};
"thrussh-config" = rec {
crateName = "thrussh-config";
version = "0.5.0";
edition = "2018";
sha256 = "1ygm5vdy180gslgmbd0v722ap451pw8cwaba0g0dzx5gg3cl9h37";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "dirs-next";
packageId = "dirs-next";
}
{
name = "futures";
packageId = "futures";
}
{
name = "log";
packageId = "log";
}
{
name = "thiserror";
packageId = "thiserror";
}
{
name = "tokio";
packageId = "tokio";
features = [ "io-util" "net" "macros" ];
}
{
name = "whoami";
packageId = "whoami 1.0.3";
}
];
};
"thrussh-keys" = rec {
crateName = "thrussh-keys";
version = "0.20.2";
edition = "2018";
sha256 = "0mny64g0b22dj663lihwkzvyvdsc69c3wcqcpbaj020rmqhsqd9j";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "bit-vec";
packageId = "bit-vec";
}
{
name = "byteorder";
packageId = "byteorder";
}
{
name = "cryptovec";
packageId = "cryptovec";
}
{
name = "data-encoding";
packageId = "data-encoding";
}
{
name = "dirs";
packageId = "dirs";
}
{
name = "futures";
packageId = "futures";
}
{
name = "log";
packageId = "log";
}
{
name = "num-bigint";
packageId = "num-bigint";
}
{
name = "num-integer";
packageId = "num-integer";
}
{
name = "openssl";
packageId = "openssl";
}
{
name = "serde";
packageId = "serde";
}
{
name = "serde_derive";
packageId = "serde_derive";
}
{
name = "thiserror";
packageId = "thiserror";
}
{
name = "thrussh-libsodium";
packageId = "thrussh-libsodium";
}
{
name = "tokio";
packageId = "tokio";
features = [ "io-util" "rt-multi-thread" "time" "net" ];
}
{
name = "yasna";
packageId = "yasna";
features = [ "bit-vec" "num-bigint" ];
}
];
};
"thrussh-libsodium" = rec {
crateName = "thrussh-libsodium";
version = "0.2.1";
edition = "2018";
sha256 = "004mxns64gr3507dzr6s8vgd1478jcsgd2mw2cpbj73vs9q9rs6g";
authors = [
"pe@pijul.org <pe@pijul.org>"
];
dependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "libc";
packageId = "libc";
}
{
name = "libsodium-sys";
packageId = "libsodium-sys";
}
];
buildDependencies = [
{
name = "pkg-config";
packageId = "pkg-config";
}
{
name = "vcpkg";
packageId = "vcpkg";
}
];
};
"time" = rec {
crateName = "time";
version = "0.1.44";
edition = "2015";
sha256 = "0m9jwy2pcmk232r3b9r80fs12mkckfjffjha4qfaxcdq9a8ydfbd";
authors = [
"The Rust Project Developers"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
{
name = "wasi";
packageId = "wasi 0.10.0+wasi-snapshot-preview1";
target = { target, features }: (target."os" == "wasi");
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "std" "minwinbase" "minwindef" "ntdef" "profileapi" "sysinfoapi" "timezoneapi" ];
}
];
devDependencies = [
{
name = "winapi";
packageId = "winapi";
features = [ "std" "processthreadsapi" "winbase" ];
}
];
};
"tinyvec" = rec {
crateName = "tinyvec";
version = "1.1.0";
edition = "2018";
sha256 = "0vva61rhzvgyvc0n6vbjn8a2q3mm5wlfrapyx08blbxlkv0xpy6c";
authors = [
"Lokathor <zefria@gmail.com>"
];
dependencies = [
{
name = "tinyvec_macros";
packageId = "tinyvec_macros";
optional = true;
}
];
features = {
"alloc" = [ "tinyvec_macros" ];
};
resolvedDefaultFeatures = [ "alloc" "default" "tinyvec_macros" ];
};
"tinyvec_macros" = rec {
crateName = "tinyvec_macros";
version = "0.1.0";
edition = "2018";
sha256 = "0p5zvgbas5nh403fbxica819mf3g83n8g2hzpfazfr56w6klv9yd";
authors = [
"Soveu <marx.tomasz@gmail.com>"
];
};
"tokio" = rec {
crateName = "tokio";
version = "1.0.1";
edition = "2018";
sha256 = "1gd6qc9xvm568kicbkch40kjn5w0q2nsn527gcy80v3baqgj4n6j";
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
optional = true;
}
{
name = "libc";
packageId = "libc";
optional = true;
target = { target, features }: target."unix";
}
{
name = "memchr";
packageId = "memchr";
optional = true;
}
{
name = "mio";
packageId = "mio";
optional = true;
}
{
name = "num_cpus";
packageId = "num_cpus";
optional = true;
}
{
name = "pin-project-lite";
packageId = "pin-project-lite";
}
{
name = "tokio-macros";
packageId = "tokio-macros";
optional = true;
}
];
buildDependencies = [
{
name = "autocfg";
packageId = "autocfg";
}
];
devDependencies = [
{
name = "libc";
packageId = "libc";
target = {target, features}: target."unix";
}
];
features = {
"full" = [ "fs" "io-util" "io-std" "macros" "net" "parking_lot" "process" "rt" "rt-multi-thread" "signal" "sync" "time" ];
"io-util" = [ "memchr" "bytes" ];
"macros" = [ "tokio-macros" ];
"net" = [ "libc" "mio/os-poll" "mio/os-util" "mio/tcp" "mio/udp" "mio/uds" ];
"process" = [ "bytes" "once_cell" "libc" "mio/os-poll" "mio/os-util" "mio/uds" "signal-hook-registry" "winapi/threadpoollegacyapiset" ];
"rt-multi-thread" = [ "num_cpus" "rt" ];
"signal" = [ "once_cell" "libc" "mio/os-poll" "mio/uds" "mio/os-util" "signal-hook-registry" "winapi/consoleapi" ];
};
resolvedDefaultFeatures = [ "bytes" "default" "io-util" "libc" "macros" "memchr" "mio" "net" "num_cpus" "rt" "rt-multi-thread" "sync" "time" "tokio-macros" ];
};
"tokio-macros" = rec {
crateName = "tokio-macros";
version = "1.0.0";
edition = "2018";
sha256 = "150l6wfcqw2rcjaf22qk3z6ca794x0s2c68n5ar18cfafllpsla2";
procMacro = true;
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" ];
}
];
};
"tokio-native-tls" = rec {
crateName = "tokio-native-tls";
version = "0.3.0";
edition = "2018";
sha256 = "0yvikgmph2qjq0ni2h2wfaxkzhbnc09c2544av0zidyj1dk9bngp";
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "native-tls";
packageId = "native-tls";
}
{
name = "tokio";
packageId = "tokio";
}
];
devDependencies = [
{
name = "tokio";
packageId = "tokio";
features = [ "macros" "rt" "rt-multi-thread" "io-util" "net" ];
}
];
};
"tokio-stream" = rec {
crateName = "tokio-stream";
version = "0.1.1";
edition = "2018";
sha256 = "0mq48id9y6xngsfs5yngmv66iwz3fnpkhcfpvj5gjqvyadrypkg4";
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "futures-core";
packageId = "futures-core";
}
{
name = "pin-project-lite";
packageId = "pin-project-lite";
}
{
name = "tokio";
packageId = "tokio";
features = [ "sync" ];
}
];
devDependencies = [
{
name = "tokio";
packageId = "tokio";
features = [ "full" "test-util" ];
}
];
features = {
"default" = [ "time" ];
"fs" = [ "tokio/fs" ];
"io-util" = [ "tokio/io-util" ];
"net" = [ "tokio/net" ];
"time" = [ "tokio/time" ];
};
resolvedDefaultFeatures = [ "default" "time" ];
};
"tokio-util" = rec {
crateName = "tokio-util";
version = "0.6.0";
edition = "2018";
sha256 = "0j1igqc7h33i5bvm5zrzix4i7k5lzbvhj4ikp7wga4d9gmz5n4rn";
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "bytes";
packageId = "bytes 1.0.0";
}
{
name = "futures-core";
packageId = "futures-core";
}
{
name = "futures-sink";
packageId = "futures-sink";
}
{
name = "log";
packageId = "log";
}
{
name = "pin-project-lite";
packageId = "pin-project-lite";
}
{
name = "tokio";
packageId = "tokio";
}
{
name = "tokio-stream";
packageId = "tokio-stream";
}
];
devDependencies = [
{
name = "tokio";
packageId = "tokio";
features = [ "full" ];
}
];
features = {
"__docs_rs" = [ "futures-util" ];
"compat" = [ "futures-io" ];
"full" = [ "codec" "compat" "io" "time" "net" "rt" ];
"net" = [ "tokio/net" ];
"rt" = [ "tokio/rt" ];
"time" = [ "tokio/time" "slab" ];
};
resolvedDefaultFeatures = [ "codec" "default" ];
};
"toml" = rec {
crateName = "toml";
version = "0.5.8";
edition = "2018";
sha256 = "1apcmjrrjw429pjw7mqlmdwwd67g8305vwqy4kw3swr612bl44d3";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
];
dependencies = [
{
name = "serde";
packageId = "serde";
}
];
features = {
"preserve_order" = [ "indexmap" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"tower-service" = rec {
crateName = "tower-service";
version = "0.3.0";
edition = "2018";
sha256 = "0q4q53w82w1wd71x7vbspg2l3jicb6al2w1qdwxmnjrz8jzvd1z9";
authors = [
"Tower Maintainers <team@tower-rs.com>"
];
};
"tracing" = rec {
crateName = "tracing";
version = "0.1.22";
edition = "2018";
sha256 = "1qzg7rcfby8f2nn1ln3gk6fjc80q0bg8fw5k95zc1020vin04iwz";
authors = [
"Eliza Weisman <eliza@buoyant.io>"
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "pin-project-lite";
packageId = "pin-project-lite";
}
{
name = "tracing-core";
packageId = "tracing-core";
usesDefaultFeatures = false;
}
];
features = {
"attributes" = [ "tracing-attributes" ];
"default" = [ "std" "attributes" ];
"log-always" = [ "log" ];
"std" = [ "tracing-core/std" ];
};
resolvedDefaultFeatures = [ "std" ];
};
"tracing-core" = rec {
crateName = "tracing-core";
version = "0.1.17";
edition = "2018";
sha256 = "0pvbgv301vw6dq4krc14yqbyyixb42lcs4s57xw05llkgy9f63gm";
authors = [
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "lazy_static";
packageId = "lazy_static";
optional = true;
}
];
features = {
"default" = [ "std" ];
"std" = [ "lazy_static" ];
};
resolvedDefaultFeatures = [ "lazy_static" "std" ];
};
"tracing-futures" = rec {
crateName = "tracing-futures";
version = "0.2.4";
edition = "2018";
sha256 = "0k4vd3jyqz9cx8rbwbp0p93qfp1w6rfk7sc6c1jh1ai18zqvcyxb";
authors = [
"Eliza Weisman <eliza@buoyant.io>"
"Tokio Contributors <team@tokio.rs>"
];
dependencies = [
{
name = "pin-project";
packageId = "pin-project 0.4.27";
optional = true;
}
{
name = "tracing";
packageId = "tracing";
usesDefaultFeatures = false;
}
];
features = {
"default" = [ "std-future" "std" ];
"futures-01" = [ "futures_01" "std" ];
"futures-03" = [ "std-future" "futures" "futures-task" "std" ];
"std" = [ "tracing/std" ];
"std-future" = [ "pin-project" ];
};
resolvedDefaultFeatures = [ "pin-project" "std-future" ];
};
"try-lock" = rec {
crateName = "try-lock";
version = "0.2.3";
edition = "2015";
sha256 = "0hkn1ksmg5hdqgqdw1ahy5qk69f4crh2psf0v61qphyrf777nm2r";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
};
"typenum" = rec {
crateName = "typenum";
version = "1.12.0";
edition = "2015";
sha256 = "0cvbksljz61ian21fnn0h51kphl0pwpzb932bv4s0rwy1wh8lg1p";
build = "build/main.rs";
authors = [
"Paho Lurie-Gregg <paho@paholg.com>"
"Andre Bogus <bogusandre@gmail.com>"
];
features = {
};
};
"unicode-bidi" = rec {
crateName = "unicode-bidi";
version = "0.3.4";
edition = "2015";
sha256 = "1malx8ljgm7v1gbaazkn7iicy5wj0bwcyadj3l727a38ch6bvwj9";
libName = "unicode_bidi";
authors = [
"The Servo Project Developers"
];
dependencies = [
{
name = "matches";
packageId = "matches";
}
];
features = {
"flame_it" = [ "flame" "flamer" ];
"with_serde" = [ "serde" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"unicode-normalization" = rec {
crateName = "unicode-normalization";
version = "0.1.16";
edition = "2018";
sha256 = "01p6mwhrf8c748ad7nd5pf9c6djwsc45874dwpp2mqyvcamn6gm1";
authors = [
"kwantam <kwantam@gmail.com>"
"Manish Goregaokar <manishsmail@gmail.com>"
];
dependencies = [
{
name = "tinyvec";
packageId = "tinyvec";
features = [ "alloc" ];
}
];
features = {
"default" = [ "std" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"unicode-segmentation" = rec {
crateName = "unicode-segmentation";
version = "1.7.1";
edition = "2015";
sha256 = "15n736z0pbj30pj44jb9s9rjavzrmx8v8pzdgsl5yfmfwrxjw3dv";
authors = [
"kwantam <kwantam@gmail.com>"
"Manish Goregaokar <manishsmail@gmail.com>"
];
features = {
};
};
"unicode-width" = rec {
crateName = "unicode-width";
version = "0.1.8";
edition = "2015";
sha256 = "1qxizyi6xbcqyi4z79p523ywvmgsfcgfqb3zv3c8i6x1jcc5jdwk";
authors = [
"kwantam <kwantam@gmail.com>"
"Manish Goregaokar <manishsmail@gmail.com>"
];
features = {
"rustc-dep-of-std" = [ "std" "core" "compiler_builtins" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"unicode-xid" = rec {
crateName = "unicode-xid";
version = "0.2.1";
edition = "2015";
sha256 = "0r6mknipyy9vpz8mwmxvkx65ff2ha1n2pxqjj6f46lcn8yrhpzpp";
authors = [
"erick.tryzelaar <erick.tryzelaar@gmail.com>"
"kwantam <kwantam@gmail.com>"
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"url" = rec {
crateName = "url";
version = "2.2.0";
edition = "2018";
sha256 = "0vlpd0c7y9yv4x5vmb6qlnkxkj63r20wv2rysyg48l3kh6qg42ar";
authors = [
"The rust-url developers"
];
dependencies = [
{
name = "form_urlencoded";
packageId = "form_urlencoded";
}
{
name = "idna";
packageId = "idna";
}
{
name = "matches";
packageId = "matches";
}
{
name = "percent-encoding";
packageId = "percent-encoding";
}
];
};
"uuid" = rec {
crateName = "uuid";
version = "0.8.1";
edition = "2018";
sha256 = "049w16qwk3d3b9cmpgvd7fvcnwgs75l8rlsagh06w7ga9dm2zplz";
authors = [
"Ashley Mannix<ashleymannix@live.com.au>"
"Christopher Armstrong"
"Dylan DPC<dylan.dpc@gmail.com>"
"Hunar Roop Kahlon<hunar.roop@gmail.com>"
];
dependencies = [
{
name = "rand";
packageId = "rand 0.7.3";
optional = true;
}
];
features = {
"default" = [ "std" ];
"guid" = [ "winapi" ];
"stdweb" = [ "rand/stdweb" ];
"v3" = [ "md5" ];
"v4" = [ "rand" ];
"v5" = [ "sha1" ];
"wasm-bindgen" = [ "rand/wasm-bindgen" ];
};
resolvedDefaultFeatures = [ "rand" "v4" ];
};
"vcpkg" = rec {
crateName = "vcpkg";
version = "0.2.11";
edition = "2015";
sha256 = "1yvrd2b97j4hv5bfhcj3al0dpkbzkdsr6dclxqz3zqm50rhwl2xh";
authors = [
"Jim McGrath <jimmc2@gmail.com>"
];
};
"vec_map" = rec {
crateName = "vec_map";
version = "0.8.2";
edition = "2015";
sha256 = "1481w9g1dw9rxp3l6snkdqihzyrd2f8vispzqmwjwsdyhw8xzggi";
authors = [
"Alex Crichton <alex@alexcrichton.com>"
"Jorge Aparicio <japaricious@gmail.com>"
"Alexis Beingessner <a.beingessner@gmail.com>"
"Brian Anderson <>"
"tbu- <>"
"Manish Goregaokar <>"
"Aaron Turon <aturon@mozilla.com>"
"Adolfo Ochagavía <>"
"Niko Matsakis <>"
"Steven Fackler <>"
"Chase Southwood <csouth3@illinois.edu>"
"Eduard Burtescu <>"
"Florian Wilkens <>"
"Félix Raimundo <>"
"Tibor Benke <>"
"Markus Siemens <markus@m-siemens.de>"
"Josh Branchaud <jbranchaud@gmail.com>"
"Huon Wilson <dbau.pp@gmail.com>"
"Corey Farwell <coref@rwell.org>"
"Aaron Liblong <>"
"Nick Cameron <nrc@ncameron.org>"
"Patrick Walton <pcwalton@mimiga.net>"
"Felix S Klock II <>"
"Andrew Paseltiner <apaseltiner@gmail.com>"
"Sean McArthur <sean.monstar@gmail.com>"
"Vadim Petrochenkov <>"
];
features = {
"eders" = [ "serde" ];
};
};
"version_check" = rec {
crateName = "version_check";
version = "0.9.2";
edition = "2015";
sha256 = "1vbaqdf802qinsq8q20w8w0qn2pv0rkq5p73ijcblrwxcvjp5adm";
authors = [
"Sergio Benitez <sb@sergio.bz>"
];
};
"walkdir" = rec {
crateName = "walkdir";
version = "2.3.1";
edition = "2018";
sha256 = "0z9g39f49cycdm9vzjf8hnfh3f1csxgd65kmlphj8r2vffy84wbp";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "same-file";
packageId = "same-file";
}
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "std" "winnt" ];
}
{
name = "winapi-util";
packageId = "winapi-util";
target = { target, features }: target."windows";
}
];
};
"want" = rec {
crateName = "want";
version = "0.3.0";
edition = "2018";
sha256 = "181b2zmwfq389x9n2g1n37cvcvvdand832zz6v8i1l8wrdlaks0w";
authors = [
"Sean McArthur <sean@seanmonstar.com>"
];
dependencies = [
{
name = "log";
packageId = "log";
}
{
name = "try-lock";
packageId = "try-lock";
}
];
};
"wasi 0.10.0+wasi-snapshot-preview1" = rec {
crateName = "wasi";
version = "0.10.0+wasi-snapshot-preview1";
edition = "2018";
sha256 = "07y3l8mzfzzz4cj09c8y90yak4hpsi9g7pllyzpr6xvwrabka50s";
authors = [
"The Cranelift Project Developers"
];
features = {
"default" = [ "std" ];
"rustc-dep-of-std" = [ "compiler_builtins" "core" "rustc-std-workspace-alloc" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"wasi 0.9.0+wasi-snapshot-preview1" = rec {
crateName = "wasi";
version = "0.9.0+wasi-snapshot-preview1";
edition = "2018";
sha256 = "06g5v3vrdapfzvfq662cij7v8a1flwr2my45nnncdv2galrdzkfc";
authors = [
"The Cranelift Project Developers"
];
features = {
"default" = [ "std" ];
"rustc-dep-of-std" = [ "compiler_builtins" "core" "rustc-std-workspace-alloc" ];
};
resolvedDefaultFeatures = [ "default" "std" ];
};
"wasm-bindgen" = rec {
crateName = "wasm-bindgen";
version = "0.2.69";
edition = "2018";
sha256 = "0vkkpz290k6pphmrgkayzdvk1dinxrp6c5zvr9l0zjlm2dsn9lrw";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "serde";
packageId = "serde";
optional = true;
}
{
name = "serde_json";
packageId = "serde_json";
optional = true;
}
{
name = "wasm-bindgen-macro";
packageId = "wasm-bindgen-macro";
}
];
features = {
"default" = [ "spans" "std" ];
"enable-interning" = [ "std" ];
"serde-serialize" = [ "serde" "serde_json" "std" ];
"spans" = [ "wasm-bindgen-macro/spans" ];
"strict-macro" = [ "wasm-bindgen-macro/strict-macro" ];
"xxx_debug_only_print_generated_code" = [ "wasm-bindgen-macro/xxx_debug_only_print_generated_code" ];
};
resolvedDefaultFeatures = [ "default" "serde" "serde-serialize" "serde_json" "spans" "std" ];
};
"wasm-bindgen-backend" = rec {
crateName = "wasm-bindgen-backend";
version = "0.2.69";
edition = "2018";
sha256 = "0qidxjmcn50v2i5hjz7al69sa3mbq0lbi276amdnw47ln6dgh50i";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "bumpalo";
packageId = "bumpalo";
}
{
name = "lazy_static";
packageId = "lazy_static";
}
{
name = "log";
packageId = "log";
}
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "full" ];
}
{
name = "wasm-bindgen-shared";
packageId = "wasm-bindgen-shared";
}
];
features = {
"extra-traits" = [ "syn/extra-traits" ];
};
resolvedDefaultFeatures = [ "spans" ];
};
"wasm-bindgen-futures" = rec {
crateName = "wasm-bindgen-futures";
version = "0.4.19";
edition = "2018";
sha256 = "0d8fg2k4a4xyv28japgld7qzy2zyrnvh582pjkp88id8hmh7bs8z";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "cfg-if";
packageId = "cfg-if 1.0.0";
}
{
name = "js-sys";
packageId = "js-sys";
}
{
name = "wasm-bindgen";
packageId = "wasm-bindgen";
}
{
name = "web-sys";
packageId = "web-sys";
target = { target, features }: (target."feature" == "atomics");
features = [ "MessageEvent" "Worker" ];
}
];
};
"wasm-bindgen-macro" = rec {
crateName = "wasm-bindgen-macro";
version = "0.2.69";
edition = "2018";
sha256 = "113hyzn0dpqasznzcwgmqw03i5yhjkqna7paim50h7xdbscwhsks";
procMacro = true;
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "quote";
packageId = "quote";
}
{
name = "wasm-bindgen-macro-support";
packageId = "wasm-bindgen-macro-support";
}
];
features = {
"spans" = [ "wasm-bindgen-macro-support/spans" ];
"strict-macro" = [ "wasm-bindgen-macro-support/strict-macro" ];
};
resolvedDefaultFeatures = [ "spans" ];
};
"wasm-bindgen-macro-support" = rec {
crateName = "wasm-bindgen-macro-support";
version = "0.2.69";
edition = "2018";
sha256 = "0jbmgj8zxflza1cl15k3r70fqsak4bkkfbn6qxbhbn4ry9r8r95m";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "proc-macro2";
packageId = "proc-macro2";
}
{
name = "quote";
packageId = "quote";
}
{
name = "syn";
packageId = "syn";
features = [ "visit" "full" ];
}
{
name = "wasm-bindgen-backend";
packageId = "wasm-bindgen-backend";
}
{
name = "wasm-bindgen-shared";
packageId = "wasm-bindgen-shared";
}
];
features = {
"extra-traits" = [ "syn/extra-traits" ];
"spans" = [ "wasm-bindgen-backend/spans" ];
};
resolvedDefaultFeatures = [ "spans" ];
};
"wasm-bindgen-shared" = rec {
crateName = "wasm-bindgen-shared";
version = "0.2.69";
edition = "2018";
sha256 = "0n3ir6gq27np22l6m96y342a6fphk1pkbzbfqx6g364kgzfi2y3y";
authors = [
"The wasm-bindgen Developers"
];
};
"web-sys" = rec {
crateName = "web-sys";
version = "0.3.46";
edition = "2018";
sha256 = "1hx3bcqhczkdc952gychkaakc27kh3yw6gdm7wfs54jg6gwiwar2";
authors = [
"The wasm-bindgen Developers"
];
dependencies = [
{
name = "js-sys";
packageId = "js-sys";
}
{
name = "wasm-bindgen";
packageId = "wasm-bindgen";
}
];
features = {
"AbortSignal" = [ "EventTarget" ];
"AnalyserNode" = [ "AudioNode" "EventTarget" ];
"Animation" = [ "EventTarget" ];
"AnimationEvent" = [ "Event" ];
"AnimationPlaybackEvent" = [ "Event" ];
"Attr" = [ "EventTarget" "Node" ];
"AudioBufferSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ];
"AudioContext" = [ "BaseAudioContext" "EventTarget" ];
"AudioDestinationNode" = [ "AudioNode" "EventTarget" ];
"AudioNode" = [ "EventTarget" ];
"AudioProcessingEvent" = [ "Event" ];
"AudioScheduledSourceNode" = [ "AudioNode" "EventTarget" ];
"AudioStreamTrack" = [ "EventTarget" "MediaStreamTrack" ];
"AudioTrackList" = [ "EventTarget" ];
"AudioWorklet" = [ "Worklet" ];
"AudioWorkletGlobalScope" = [ "WorkletGlobalScope" ];
"AudioWorkletNode" = [ "AudioNode" "EventTarget" ];
"AuthenticatorAssertionResponse" = [ "AuthenticatorResponse" ];
"AuthenticatorAttestationResponse" = [ "AuthenticatorResponse" ];
"BaseAudioContext" = [ "EventTarget" ];
"BatteryManager" = [ "EventTarget" ];
"BeforeUnloadEvent" = [ "Event" ];
"BiquadFilterNode" = [ "AudioNode" "EventTarget" ];
"BlobEvent" = [ "Event" ];
"Bluetooth" = [ "EventTarget" ];
"BluetoothAdvertisingEvent" = [ "Event" ];
"BluetoothDevice" = [ "EventTarget" ];
"BluetoothPermissionResult" = [ "EventTarget" "PermissionStatus" ];
"BluetoothRemoteGattCharacteristic" = [ "EventTarget" ];
"BluetoothRemoteGattService" = [ "EventTarget" ];
"BroadcastChannel" = [ "EventTarget" ];
"CanvasCaptureMediaStream" = [ "EventTarget" "MediaStream" ];
"CdataSection" = [ "CharacterData" "EventTarget" "Node" "Text" ];
"ChannelMergerNode" = [ "AudioNode" "EventTarget" ];
"ChannelSplitterNode" = [ "AudioNode" "EventTarget" ];
"CharacterData" = [ "EventTarget" "Node" ];
"ChromeWorker" = [ "EventTarget" "Worker" ];
"Clipboard" = [ "EventTarget" ];
"ClipboardEvent" = [ "Event" ];
"CloseEvent" = [ "Event" ];
"Comment" = [ "CharacterData" "EventTarget" "Node" ];
"CompositionEvent" = [ "Event" "UiEvent" ];
"ConstantSourceNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ];
"ConvolverNode" = [ "AudioNode" "EventTarget" ];
"CssAnimation" = [ "Animation" "EventTarget" ];
"CssConditionRule" = [ "CssGroupingRule" "CssRule" ];
"CssCounterStyleRule" = [ "CssRule" ];
"CssFontFaceRule" = [ "CssRule" ];
"CssFontFeatureValuesRule" = [ "CssRule" ];
"CssGroupingRule" = [ "CssRule" ];
"CssImportRule" = [ "CssRule" ];
"CssKeyframeRule" = [ "CssRule" ];
"CssKeyframesRule" = [ "CssRule" ];
"CssMediaRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ];
"CssNamespaceRule" = [ "CssRule" ];
"CssPageRule" = [ "CssRule" ];
"CssStyleRule" = [ "CssRule" ];
"CssStyleSheet" = [ "StyleSheet" ];
"CssSupportsRule" = [ "CssConditionRule" "CssGroupingRule" "CssRule" ];
"CssTransition" = [ "Animation" "EventTarget" ];
"CustomEvent" = [ "Event" ];
"DedicatedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ];
"DelayNode" = [ "AudioNode" "EventTarget" ];
"DeviceLightEvent" = [ "Event" ];
"DeviceMotionEvent" = [ "Event" ];
"DeviceOrientationEvent" = [ "Event" ];
"DeviceProximityEvent" = [ "Event" ];
"Document" = [ "EventTarget" "Node" ];
"DocumentFragment" = [ "EventTarget" "Node" ];
"DocumentTimeline" = [ "AnimationTimeline" ];
"DocumentType" = [ "EventTarget" "Node" ];
"DomMatrix" = [ "DomMatrixReadOnly" ];
"DomPoint" = [ "DomPointReadOnly" ];
"DomRect" = [ "DomRectReadOnly" ];
"DomRequest" = [ "EventTarget" ];
"DragEvent" = [ "Event" "MouseEvent" "UiEvent" ];
"DynamicsCompressorNode" = [ "AudioNode" "EventTarget" ];
"Element" = [ "EventTarget" "Node" ];
"ErrorEvent" = [ "Event" ];
"EventSource" = [ "EventTarget" ];
"ExtendableEvent" = [ "Event" ];
"ExtendableMessageEvent" = [ "Event" "ExtendableEvent" ];
"FetchEvent" = [ "Event" "ExtendableEvent" ];
"FetchObserver" = [ "EventTarget" ];
"File" = [ "Blob" ];
"FileReader" = [ "EventTarget" ];
"FileSystemDirectoryEntry" = [ "FileSystemEntry" ];
"FileSystemFileEntry" = [ "FileSystemEntry" ];
"FocusEvent" = [ "Event" "UiEvent" ];
"FontFaceSet" = [ "EventTarget" ];
"FontFaceSetLoadEvent" = [ "Event" ];
"GainNode" = [ "AudioNode" "EventTarget" ];
"GamepadAxisMoveEvent" = [ "Event" "GamepadEvent" ];
"GamepadButtonEvent" = [ "Event" "GamepadEvent" ];
"GamepadEvent" = [ "Event" ];
"GpuDevice" = [ "EventTarget" ];
"GpuUncapturedErrorEvent" = [ "Event" ];
"HashChangeEvent" = [ "Event" ];
"HtmlAnchorElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlAudioElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ];
"HtmlBaseElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlBodyElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlBrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlButtonElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlCanvasElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDataElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDataListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDetailsElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDialogElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDirectoryElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDivElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlDocument" = [ "Document" "EventTarget" "Node" ];
"HtmlElement" = [ "Element" "EventTarget" "Node" ];
"HtmlEmbedElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlFieldSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlFontElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlFormControlsCollection" = [ "HtmlCollection" ];
"HtmlFormElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlFrameSetElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlHeadElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlHeadingElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlHrElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlHtmlElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlIFrameElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlImageElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlInputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlLabelElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlLegendElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlLiElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlLinkElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMapElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMediaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMenuElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMenuItemElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMetaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlMeterElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlModElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlOListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlObjectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlOptGroupElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlOptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlOptionsCollection" = [ "HtmlCollection" ];
"HtmlOutputElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlParagraphElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlParamElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlPictureElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlPreElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlProgressElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlQuoteElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlScriptElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlSelectElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlSlotElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlSourceElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlSpanElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlStyleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableCaptionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableCellElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableColElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableRowElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTableSectionElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTemplateElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTextAreaElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTimeElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTitleElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlTrackElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlUListElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlUnknownElement" = [ "Element" "EventTarget" "HtmlElement" "Node" ];
"HtmlVideoElement" = [ "Element" "EventTarget" "HtmlElement" "HtmlMediaElement" "Node" ];
"IdbCursorWithValue" = [ "IdbCursor" ];
"IdbDatabase" = [ "EventTarget" ];
"IdbFileHandle" = [ "EventTarget" ];
"IdbFileRequest" = [ "DomRequest" "EventTarget" ];
"IdbLocaleAwareKeyRange" = [ "IdbKeyRange" ];
"IdbMutableFile" = [ "EventTarget" ];
"IdbOpenDbRequest" = [ "EventTarget" "IdbRequest" ];
"IdbRequest" = [ "EventTarget" ];
"IdbTransaction" = [ "EventTarget" ];
"IdbVersionChangeEvent" = [ "Event" ];
"IirFilterNode" = [ "AudioNode" "EventTarget" ];
"ImageCaptureErrorEvent" = [ "Event" ];
"InputEvent" = [ "Event" "UiEvent" ];
"KeyboardEvent" = [ "Event" "UiEvent" ];
"KeyframeEffect" = [ "AnimationEffect" ];
"LocalMediaStream" = [ "EventTarget" "MediaStream" ];
"MediaDevices" = [ "EventTarget" ];
"MediaElementAudioSourceNode" = [ "AudioNode" "EventTarget" ];
"MediaEncryptedEvent" = [ "Event" ];
"MediaKeyError" = [ "Event" ];
"MediaKeyMessageEvent" = [ "Event" ];
"MediaKeySession" = [ "EventTarget" ];
"MediaQueryList" = [ "EventTarget" ];
"MediaQueryListEvent" = [ "Event" ];
"MediaRecorder" = [ "EventTarget" ];
"MediaRecorderErrorEvent" = [ "Event" ];
"MediaSource" = [ "EventTarget" ];
"MediaStream" = [ "EventTarget" ];
"MediaStreamAudioDestinationNode" = [ "AudioNode" "EventTarget" ];
"MediaStreamAudioSourceNode" = [ "AudioNode" "EventTarget" ];
"MediaStreamEvent" = [ "Event" ];
"MediaStreamTrack" = [ "EventTarget" ];
"MediaStreamTrackEvent" = [ "Event" ];
"MessageEvent" = [ "Event" ];
"MessagePort" = [ "EventTarget" ];
"MidiAccess" = [ "EventTarget" ];
"MidiConnectionEvent" = [ "Event" ];
"MidiInput" = [ "EventTarget" "MidiPort" ];
"MidiMessageEvent" = [ "Event" ];
"MidiOutput" = [ "EventTarget" "MidiPort" ];
"MidiPort" = [ "EventTarget" ];
"MouseEvent" = [ "Event" "UiEvent" ];
"MouseScrollEvent" = [ "Event" "MouseEvent" "UiEvent" ];
"MutationEvent" = [ "Event" ];
"NetworkInformation" = [ "EventTarget" ];
"Node" = [ "EventTarget" ];
"Notification" = [ "EventTarget" ];
"NotificationEvent" = [ "Event" "ExtendableEvent" ];
"OfflineAudioCompletionEvent" = [ "Event" ];
"OfflineAudioContext" = [ "BaseAudioContext" "EventTarget" ];
"OfflineResourceList" = [ "EventTarget" ];
"OffscreenCanvas" = [ "EventTarget" ];
"OscillatorNode" = [ "AudioNode" "AudioScheduledSourceNode" "EventTarget" ];
"PageTransitionEvent" = [ "Event" ];
"PaintWorkletGlobalScope" = [ "WorkletGlobalScope" ];
"PannerNode" = [ "AudioNode" "EventTarget" ];
"PaymentMethodChangeEvent" = [ "Event" "PaymentRequestUpdateEvent" ];
"PaymentRequestUpdateEvent" = [ "Event" ];
"Performance" = [ "EventTarget" ];
"PerformanceMark" = [ "PerformanceEntry" ];
"PerformanceMeasure" = [ "PerformanceEntry" ];
"PerformanceNavigationTiming" = [ "PerformanceEntry" "PerformanceResourceTiming" ];
"PerformanceResourceTiming" = [ "PerformanceEntry" ];
"PermissionStatus" = [ "EventTarget" ];
"PointerEvent" = [ "Event" "MouseEvent" "UiEvent" ];
"PopStateEvent" = [ "Event" ];
"PopupBlockedEvent" = [ "Event" ];
"PresentationAvailability" = [ "EventTarget" ];
"PresentationConnection" = [ "EventTarget" ];
"PresentationConnectionAvailableEvent" = [ "Event" ];
"PresentationConnectionCloseEvent" = [ "Event" ];
"PresentationConnectionList" = [ "EventTarget" ];
"PresentationRequest" = [ "EventTarget" ];
"ProcessingInstruction" = [ "CharacterData" "EventTarget" "Node" ];
"ProgressEvent" = [ "Event" ];
"PromiseRejectionEvent" = [ "Event" ];
"PublicKeyCredential" = [ "Credential" ];
"PushEvent" = [ "Event" "ExtendableEvent" ];
"RadioNodeList" = [ "NodeList" ];
"RtcDataChannel" = [ "EventTarget" ];
"RtcDataChannelEvent" = [ "Event" ];
"RtcPeerConnection" = [ "EventTarget" ];
"RtcPeerConnectionIceEvent" = [ "Event" ];
"RtcTrackEvent" = [ "Event" ];
"RtcdtmfSender" = [ "EventTarget" ];
"RtcdtmfToneChangeEvent" = [ "Event" ];
"Screen" = [ "EventTarget" ];
"ScreenOrientation" = [ "EventTarget" ];
"ScriptProcessorNode" = [ "AudioNode" "EventTarget" ];
"ScrollAreaEvent" = [ "Event" "UiEvent" ];
"SecurityPolicyViolationEvent" = [ "Event" ];
"ServiceWorker" = [ "EventTarget" ];
"ServiceWorkerContainer" = [ "EventTarget" ];
"ServiceWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ];
"ServiceWorkerRegistration" = [ "EventTarget" ];
"ShadowRoot" = [ "DocumentFragment" "EventTarget" "Node" ];
"SharedWorker" = [ "EventTarget" ];
"SharedWorkerGlobalScope" = [ "EventTarget" "WorkerGlobalScope" ];
"SourceBuffer" = [ "EventTarget" ];
"SourceBufferList" = [ "EventTarget" ];
"SpeechRecognition" = [ "EventTarget" ];
"SpeechRecognitionError" = [ "Event" ];
"SpeechRecognitionEvent" = [ "Event" ];
"SpeechSynthesis" = [ "EventTarget" ];
"SpeechSynthesisErrorEvent" = [ "Event" "SpeechSynthesisEvent" ];
"SpeechSynthesisEvent" = [ "Event" ];
"SpeechSynthesisUtterance" = [ "EventTarget" ];
"StereoPannerNode" = [ "AudioNode" "EventTarget" ];
"StorageEvent" = [ "Event" ];
"SvgAnimateElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ];
"SvgAnimateMotionElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ];
"SvgAnimateTransformElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ];
"SvgAnimationElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgCircleElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgClipPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgComponentTransferFunctionElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgDefsElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgDescElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgElement" = [ "Element" "EventTarget" "Node" ];
"SvgEllipseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgFilterElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgForeignObjectElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgGeometryElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgGraphicsElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgLineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgLinearGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ];
"SvgMarkerElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgMaskElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgMetadataElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgPathSegArcAbs" = [ "SvgPathSeg" ];
"SvgPathSegArcRel" = [ "SvgPathSeg" ];
"SvgPathSegClosePath" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoCubicAbs" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoCubicRel" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoCubicSmoothAbs" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoCubicSmoothRel" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoQuadraticAbs" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoQuadraticRel" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoQuadraticSmoothAbs" = [ "SvgPathSeg" ];
"SvgPathSegCurvetoQuadraticSmoothRel" = [ "SvgPathSeg" ];
"SvgPathSegLinetoAbs" = [ "SvgPathSeg" ];
"SvgPathSegLinetoHorizontalAbs" = [ "SvgPathSeg" ];
"SvgPathSegLinetoHorizontalRel" = [ "SvgPathSeg" ];
"SvgPathSegLinetoRel" = [ "SvgPathSeg" ];
"SvgPathSegLinetoVerticalAbs" = [ "SvgPathSeg" ];
"SvgPathSegLinetoVerticalRel" = [ "SvgPathSeg" ];
"SvgPathSegMovetoAbs" = [ "SvgPathSeg" ];
"SvgPathSegMovetoRel" = [ "SvgPathSeg" ];
"SvgPatternElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgPolygonElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgPolylineElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgRadialGradientElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGradientElement" ];
"SvgRectElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGeometryElement" "SvgGraphicsElement" ];
"SvgScriptElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgSetElement" = [ "Element" "EventTarget" "Node" "SvgAnimationElement" "SvgElement" ];
"SvgStopElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgStyleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgSwitchElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgSymbolElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgTextContentElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgTextElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ];
"SvgTextPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ];
"SvgTextPositioningElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" ];
"SvgTitleElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgUseElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgViewElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgaElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgfeBlendElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeColorMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeComponentTransferElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeCompositeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeConvolveMatrixElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeDiffuseLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeDisplacementMapElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeDistantLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeDropShadowElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeFloodElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeFuncAElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ];
"SvgfeFuncBElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ];
"SvgfeFuncGElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ];
"SvgfeFuncRElement" = [ "Element" "EventTarget" "Node" "SvgComponentTransferFunctionElement" "SvgElement" ];
"SvgfeGaussianBlurElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeImageElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeMergeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeMergeNodeElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeMorphologyElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeOffsetElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfePointLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeSpecularLightingElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeSpotLightElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeTileElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgfeTurbulenceElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvggElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgmPathElement" = [ "Element" "EventTarget" "Node" "SvgElement" ];
"SvgsvgElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" ];
"SvgtSpanElement" = [ "Element" "EventTarget" "Node" "SvgElement" "SvgGraphicsElement" "SvgTextContentElement" "SvgTextPositioningElement" ];
"TcpServerSocket" = [ "EventTarget" ];
"TcpServerSocketEvent" = [ "Event" ];
"TcpSocket" = [ "EventTarget" ];
"TcpSocketErrorEvent" = [ "Event" ];
"TcpSocketEvent" = [ "Event" ];
"Text" = [ "CharacterData" "EventTarget" "Node" ];
"TextTrack" = [ "EventTarget" ];
"TextTrackCue" = [ "EventTarget" ];
"TextTrackList" = [ "EventTarget" ];
"TimeEvent" = [ "Event" ];
"TouchEvent" = [ "Event" "UiEvent" ];
"TrackEvent" = [ "Event" ];
"TransitionEvent" = [ "Event" ];
"UiEvent" = [ "Event" ];
"Usb" = [ "EventTarget" ];
"UsbConnectionEvent" = [ "Event" ];
"UsbPermissionResult" = [ "EventTarget" "PermissionStatus" ];
"UserProximityEvent" = [ "Event" ];
"ValueEvent" = [ "Event" ];
"VideoStreamTrack" = [ "EventTarget" "MediaStreamTrack" ];
"VideoTrackList" = [ "EventTarget" ];
"VrDisplay" = [ "EventTarget" ];
"VttCue" = [ "EventTarget" "TextTrackCue" ];
"WaveShaperNode" = [ "AudioNode" "EventTarget" ];
"WebGlContextEvent" = [ "Event" ];
"WebKitCssMatrix" = [ "DomMatrix" "DomMatrixReadOnly" ];
"WebSocket" = [ "EventTarget" ];
"WheelEvent" = [ "Event" "MouseEvent" "UiEvent" ];
"Window" = [ "EventTarget" ];
"WindowClient" = [ "Client" ];
"Worker" = [ "EventTarget" ];
"WorkerDebuggerGlobalScope" = [ "EventTarget" ];
"WorkerGlobalScope" = [ "EventTarget" ];
"XmlDocument" = [ "Document" "EventTarget" "Node" ];
"XmlHttpRequest" = [ "EventTarget" "XmlHttpRequestEventTarget" ];
"XmlHttpRequestEventTarget" = [ "EventTarget" ];
"XmlHttpRequestUpload" = [ "EventTarget" "XmlHttpRequestEventTarget" ];
"Xr" = [ "EventTarget" ];
"XrBoundedReferenceSpace" = [ "EventTarget" "XrReferenceSpace" "XrSpace" ];
"XrInputSourceEvent" = [ "Event" ];
"XrInputSourcesChangeEvent" = [ "Event" ];
"XrReferenceSpace" = [ "EventTarget" "XrSpace" ];
"XrReferenceSpaceEvent" = [ "Event" ];
"XrSession" = [ "EventTarget" ];
"XrSessionEvent" = [ "Event" ];
"XrSpace" = [ "EventTarget" ];
"XrViewerPose" = [ "XrPose" ];
};
resolvedDefaultFeatures = [ "Blob" "BlobPropertyBag" "Document" "Event" "EventTarget" "FormData" "Headers" "HtmlDocument" "MessageEvent" "Navigator" "Node" "Request" "RequestInit" "RequestMode" "Response" "ServiceWorkerGlobalScope" "Window" "Worker" "WorkerGlobalScope" ];
};
"which" = rec {
crateName = "which";
version = "3.1.1";
edition = "2015";
sha256 = "094pw9pi48szshn9ln69z2kg7syq1jp80h5ps1qncbsaw4d0f4fh";
authors = [
"Harry Fei <tiziyuanfang@gmail.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
];
features = {
"default" = [ "failure" ];
};
};
"whoami 0.9.0" = rec {
crateName = "whoami";
version = "0.9.0";
edition = "2018";
sha256 = "012mw2q72gpmf354yw2qc5w105ziac75shpqp1f62x4hnqx7g13q";
authors = [
"Jeron Aldaron Lau <jeronlau@plopgrizzly.com>"
];
features = {
"cala" = [ "cala_core/cala" ];
"stdweb" = [ "cala_core/stdweb" ];
"wasm-bindgen" = [ "cala_core/wasm-bindgen" ];
};
resolvedDefaultFeatures = [ "default" ];
};
"whoami 1.0.3" = rec {
crateName = "whoami";
version = "1.0.3";
edition = "2018";
sha256 = "0dmlxqq6758wysx46zr64b4q9g72d0ax9f4hlpb86qgk8vhv55fm";
authors = [
"Jeron Aldaron Lau <jeronlau@plopgrizzly.com>"
];
dependencies = [
{
name = "wasm-bindgen";
packageId = "wasm-bindgen";
target = { target, features }: (target."arch" == "wasm32");
}
{
name = "web-sys";
packageId = "web-sys";
target = { target, features }: (target."arch" == "wasm32");
features = [ "Navigator" "HtmlDocument" "Document" "Window" ];
}
];
features = {
};
resolvedDefaultFeatures = [ "default" ];
};
"winapi" = rec {
crateName = "winapi";
version = "0.3.9";
edition = "2015";
sha256 = "06gl025x418lchw1wxj64ycr7gha83m44cjr5sarhynd9xkrm0sw";
authors = [
"Peter Atashian <retep998@gmail.com>"
];
dependencies = [
{
name = "winapi-i686-pc-windows-gnu";
packageId = "winapi-i686-pc-windows-gnu";
target = { target, features }: (stdenv.hostPlatform.config == "i686-pc-windows-gnu");
}
{
name = "winapi-x86_64-pc-windows-gnu";
packageId = "winapi-x86_64-pc-windows-gnu";
target = { target, features }: (stdenv.hostPlatform.config == "x86_64-pc-windows-gnu");
}
];
features = {
"debug" = [ "impl-debug" ];
};
resolvedDefaultFeatures = [ "basetsd" "cfg" "consoleapi" "errhandlingapi" "evntrace" "fileapi" "handleapi" "impl-debug" "impl-default" "in6addr" "inaddr" "ioapiset" "knownfolders" "lmcons" "memoryapi" "minschannel" "minwinbase" "minwindef" "mswsock" "namedpipeapi" "ntdef" "ntsecapi" "ntstatus" "objbase" "processenv" "processthreadsapi" "profileapi" "schannel" "securitybaseapi" "shlobj" "sspi" "std" "synchapi" "sysinfoapi" "timezoneapi" "winbase" "wincon" "wincrypt" "windef" "winerror" "winioctl" "winnt" "winreg" "winsock2" "winuser" "ws2def" "ws2ipdef" "ws2tcpip" ];
};
"winapi-i686-pc-windows-gnu" = rec {
crateName = "winapi-i686-pc-windows-gnu";
version = "0.4.0";
edition = "2015";
sha256 = "1dmpa6mvcvzz16zg6d5vrfy4bxgg541wxrcip7cnshi06v38ffxc";
authors = [
"Peter Atashian <retep998@gmail.com>"
];
};
"winapi-util" = rec {
crateName = "winapi-util";
version = "0.1.5";
edition = "2018";
sha256 = "0y71bp7f6d536czj40dhqk0d55wfbbwqfp2ymqf1an5ibgl6rv3h";
authors = [
"Andrew Gallant <jamslam@gmail.com>"
];
dependencies = [
{
name = "winapi";
packageId = "winapi";
target = { target, features }: target."windows";
features = [ "std" "consoleapi" "errhandlingapi" "fileapi" "minwindef" "processenv" "winbase" "wincon" "winerror" "winnt" ];
}
];
};
"winapi-x86_64-pc-windows-gnu" = rec {
crateName = "winapi-x86_64-pc-windows-gnu";
version = "0.4.0";
edition = "2015";
sha256 = "0gqq64czqb64kskjryj8isp62m2sgvx25yyj3kpc2myh85w24bki";
authors = [
"Peter Atashian <retep998@gmail.com>"
];
};
"winreg" = rec {
crateName = "winreg";
version = "0.7.0";
edition = "2015";
sha256 = "0sdxcyvda4v1v6a0k1j2v1400z3ng323k9a56gxvkq51x21dn801";
authors = [
"Igor Shaula <gentoo90@gmail.com>"
];
dependencies = [
{
name = "winapi";
packageId = "winapi";
features = [ "impl-default" "impl-debug" "minwindef" "minwinbase" "timezoneapi" "winerror" "winnt" "winreg" "handleapi" ];
}
];
features = {
"serialization-serde" = [ "transactions" "serde" ];
"transactions" = [ "winapi/ktmw32" ];
};
};
"xattr" = rec {
crateName = "xattr";
version = "0.2.2";
edition = "2015";
sha256 = "0k556fb6f5jc907975j9c8iynl2fqz3rf0w6fiig83i4yi0kfk14";
authors = [
"Steven Allen <steven@stebalien.com>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
];
features = {
"default" = [ "unsupported" ];
};
resolvedDefaultFeatures = [ "default" "unsupported" ];
};
"yasna" = rec {
crateName = "yasna";
version = "0.3.2";
edition = "2015";
sha256 = "1nsdd1di06yvh6n2mv54wgvkl5fz155lbn7nhmna1wmlfbwvzrqd";
authors = [
"Masaki Hara <ackie.h.gmai@gmail.com>"
];
dependencies = [
{
name = "bit-vec";
packageId = "bit-vec";
optional = true;
usesDefaultFeatures = false;
features = [ "std" ];
}
{
name = "num-bigint";
packageId = "num-bigint";
optional = true;
}
];
features = {
};
resolvedDefaultFeatures = [ "bit-vec" "default" "num-bigint" ];
};
"zeroize" = rec {
crateName = "zeroize";
version = "1.2.0";
edition = "2018";
sha256 = "0dhwwh63cnishw5jvvf9li2lscin6jq7srs19p50szrmvny79ac1";
authors = [
"Tony Arcieri <tony@iqlusion.io>"
];
features = {
"default" = [ "alloc" ];
};
resolvedDefaultFeatures = [ "alloc" ];
};
"zstd-seekable" = rec {
crateName = "zstd-seekable";
version = "0.1.7";
edition = "2018";
sha256 = "0bxzd71myj094hp03jpqbv1r80x0hk1ahsjwfv50j0wgx379jfry";
authors = [
"Pierre-Étienne Meunier <pe@pijul.org>"
];
dependencies = [
{
name = "libc";
packageId = "libc";
}
{
name = "thiserror";
packageId = "thiserror";
}
{
name = "threadpool";
packageId = "threadpool";
}
];
buildDependencies = [
{
name = "cc";
packageId = "cc";
}
{
name = "pkg-config";
packageId = "pkg-config";
}
];
};
};
#
# crate2nix/default.nix (excerpt start)
#
/* Target (platform) data for conditional dependencies.
This corresponds roughly to what buildRustCrate is setting.
*/
# NOTE(review): `unix`/`windows`/`fuchsia`/`family`/`env` are hard-coded rather
# than derived from stdenv.hostPlatform, so `target.*` predicates evaluated for
# a Windows or non-gnu host would be wrong — confirm before relying on cross
# builds. `fuchsia = true` alongside `unix = true` looks like an upstream
# crate2nix quirk; verify against the crate2nix version this was generated by.
defaultTarget = {
unix = true;
windows = false;
fuchsia = true;
test = false;
# This doesn't appear to be officially documented anywhere yet.
# See https://github.com/rust-lang-nursery/rust-forge/issues/101.
os = if stdenv.hostPlatform.isDarwin
then "macos"
else stdenv.hostPlatform.parsed.kernel.name;
arch = stdenv.hostPlatform.parsed.cpu.name;
family = "unix";
env = "gnu";
# Mirrors Rust's cfg(target_endian) / cfg(target_pointer_width), derived
# from the host platform description.
endian =
if stdenv.hostPlatform.parsed.cpu.significantByte.name == "littleEndian"
then "little" else "big";
pointer_width = toString stdenv.hostPlatform.parsed.cpu.bits;
vendor = stdenv.hostPlatform.parsed.vendor.name;
debug_assertions = false;
};
/* Filters common temp files and build files. */
# TODO(pkolloch): Substitute with gitignore filter
# Predicate for builtins.filterSource / cleanSourceWith: returns true for
# paths to KEEP. Everything VCS-, build-, IDE- or editor-related is dropped
# so that stray files do not invalidate the source hash.
sourceFilter = name: type:
let
baseName = builtins.baseNameOf (builtins.toString name);
in
! (
# Filter out git
baseName == ".gitignore"
|| (type == "directory" && baseName == ".git")
# Filter out build results
|| (
type == "directory" && (
baseName == "target"
|| baseName == "_site"
|| baseName == ".sass-cache"
|| baseName == ".jekyll-metadata"
|| baseName == "build-artifacts"
)
)
# Filter out nix-build result symlinks
|| (
type == "symlink" && lib.hasPrefix "result" baseName
)
# Filter out IDE config
|| (
type == "directory" && (
baseName == ".idea" || baseName == ".vscode"
)
) || lib.hasSuffix ".iml" baseName
# Filter out nix build files
|| baseName == "Cargo.nix"
# Filter out editor backup / swap files.
|| lib.hasSuffix "~" baseName
# NOTE(review): `$$` here is two literal `$` characters in the regex;
# builtins.match anchors to the whole string anyway, so the `^`/`$` anchors
# are redundant and the doubled `$` looks like a template-escaping artifact
# — confirm against upstream crate2nix.
|| builtins.match "^\\.sw[a-z]$$" baseName != null
|| builtins.match "^\\..*\\.sw[a-z]$$" baseName != null
|| lib.hasSuffix ".tmp" baseName
|| lib.hasSuffix ".bak" baseName
|| baseName == "tests.nix"
);
/* Returns a crate which depends on successful test execution
of crate given as the second argument.
testCrateFlags: list of flags to pass to the test exectuable
testInputs: list of packages that should be available during test execution
*/
crateWithTest = { crate, testCrate, testCrateFlags, testInputs }:
assert builtins.typeOf testCrateFlags == "list";
assert builtins.typeOf testInputs == "list";
let
# override the `crate` so that it will build and execute tests instead of
# building the actual lib and bin targets We just have to pass `--test`
# to rustc and it will do the right thing. We execute the tests and copy
# their log and the test executables to $out for later inspection.
test = let
drv = testCrate.override (
_: {
buildTests = true;
}
);
in
# Runs every built test binary from the test crate's tests/ output dir,
# appending combined stdout/stderr to $out; `set -e` makes any failing
# test binary fail this derivation.
pkgs.runCommand "run-tests-${testCrate.name}" {
inherit testCrateFlags;
buildInputs = testInputs;
} ''
set -ex
cd ${crate.src}
for file in ${drv}/tests/*; do
$file $testCrateFlags 2>&1 | tee -a $out
done
'';
in
# The returned crate is the untested build, but its checkPhase references
# the test derivation, forcing the tests to build & pass first. The test
# drv is also exposed via passthru for inspection.
crate.overrideAttrs (
old: {
checkPhase = ''
test -e ${test}
'';
passthru = (old.passthru or {}) // {
inherit test;
};
}
);
/* A restricted overridable version of builtRustCratesWithFeatures. */
# Public entry point: builds the crate identified by packageId. The result is
# lib.makeOverridable, so callers can `.override { runTests = true; … }` etc.
# Both a normal and a test variant of the whole crate graph are instantiated
# (lazily — only the one actually selected is evaluated by Nix).
buildRustCrateWithFeatures =
{ packageId
, features ? rootFeatures
, crateOverrides ? defaultCrateOverrides
# Falls back to plain buildRustCrate when no custom overrides are given,
# avoiding an unnecessary .override call.
, buildRustCrateFunc ? (
if crateOverrides == pkgs.defaultCrateOverrides
then buildRustCrate
else buildRustCrate.override {
defaultCrateOverrides = crateOverrides;
}
)
, runTests ? false
, testCrateFlags ? []
, testInputs ? []
}:
lib.makeOverridable
(
{ features, crateOverrides, runTests, testCrateFlags, testInputs }:
let
builtRustCrates = builtRustCratesWithFeatures {
inherit packageId features buildRustCrateFunc;
runTests = false;
};
builtTestRustCrates = builtRustCratesWithFeatures {
inherit packageId features buildRustCrateFunc;
runTests = true;
};
drv = builtRustCrates.${packageId};
testDrv = builtTestRustCrates.${packageId};
in
if runTests then
crateWithTest {
crate = drv;
testCrate = testDrv;
inherit testCrateFlags testInputs;
}
else drv
)
{ inherit features crateOverrides runTests testCrateFlags testInputs; };
/* Returns an attr set with packageId mapped to the result of buildRustCrateFunc
for the corresponding crate.
*/
builtRustCratesWithFeatures =
{ packageId
, features
, crateConfigs ? crates
, buildRustCrateFunc
, runTests
, target ? defaultTarget
} @ args:
assert (builtins.isAttrs crateConfigs);
assert (builtins.isString packageId);
assert (builtins.isList features);
assert (builtins.isAttrs target);
assert (builtins.isBool runTests);
let
rootPackageId = packageId;
# Resolve the final feature set per package up front; `test = runTests`
# makes cfg(test)-gated dependencies visible when building tests.
mergedFeatures = mergePackageFeatures (
args // {
inherit rootPackageId;
target = target // { test = runTests; };
}
);
buildByPackageId = packageId: buildByPackageIdImpl packageId;
# Memoize built packages so that reappearing packages are only built once.
# (Memoization works via Nix laziness: every config gets an attrset entry,
# but only entries that are actually referenced are evaluated.)
builtByPackageId =
lib.mapAttrs (packageId: value: buildByPackageId packageId) crateConfigs;
buildByPackageIdImpl = packageId:
let
features = mergedFeatures."${packageId}" or [];
crateConfig' = crateConfigs."${packageId}";
# Strip attrs that buildRustCrate doesn't understand before passing the
# config through.
crateConfig =
builtins.removeAttrs crateConfig' [ "resolvedDefaultFeatures" "devDependencies" ];
# devDependencies only apply to the root crate, and only in test mode.
devDependencies =
lib.optionals
(runTests && packageId == rootPackageId)
(crateConfig'.devDependencies or []);
dependencies =
dependencyDerivations {
inherit builtByPackageId features target;
dependencies =
(crateConfig.dependencies or [])
++ devDependencies;
};
buildDependencies =
dependencyDerivations {
inherit builtByPackageId features target;
dependencies = crateConfig.buildDependencies or [];
};
filterEnabledDependenciesForThis = dependencies: filterEnabledDependencies {
inherit dependencies features target;
};
# Map dependency name -> rename so `extern crate` aliases resolve.
dependenciesWithRenames =
lib.filter (d: d ? "rename") (
filterEnabledDependenciesForThis
(
(crateConfig.buildDependencies or [])
++ (crateConfig.dependencies or [])
++ devDependencies
)
);
crateRenames =
builtins.listToAttrs
(map (d: { name = d.name; value = d.rename; }) dependenciesWithRenames);
in
buildRustCrateFunc (
crateConfig // {
# Fall back to fetching the crate tarball from crates.io when no
# local src is given (i.e. for registry dependencies).
src = crateConfig.src or (
pkgs.fetchurl {
name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
url = "https://crates.io/api/v1/crates/${crateConfig.crateName}/${crateConfig.version}/download";
sha256 = crateConfig.sha256;
}
);
# `release` comes from the surrounding scope (file-level argument).
inherit features dependencies buildDependencies crateRenames release;
}
);
in
builtByPackageId;
/* Returns the actual derivations for the given dependencies. */
dependencyDerivations =
  { builtByPackageId
  , features
  , dependencies
  , target
  }:
  assert (builtins.isAttrs builtByPackageId);
  assert (builtins.isList features);
  assert (builtins.isList dependencies);
  assert (builtins.isAttrs target);
  # Keep only the dependencies that are active for this feature/target
  # combination, then look each one up in the memoized build results.
  map
    (dep: builtByPackageId.${dep.packageId})
    (filterEnabledDependencies { inherit dependencies features target; });
/* Returns a sanitized version of val with all values substituted that cannot
be serialized as JSON.
*/
sanitizeForJson = val:
  # Recurse structurally; functions have no JSON representation and are
  # replaced by the literal string "function". All other scalars pass through.
  if builtins.isFunction val
  then "function"
  else if builtins.isList val
  then map sanitizeForJson val
  else if builtins.isAttrs val
  then builtins.mapAttrs (_: v: sanitizeForJson v) val
  else val;
/* Returns various tools to debug a crate. */
# Diagnostic helper: exposes (under `.internal`) JSON-sanitized views of what
# would be passed to buildRustCrate, plus the resolved feature sets, without
# actually building anything.
debugCrate = { packageId, target ? defaultTarget }:
assert (builtins.isString packageId);
let
debug = rec {
# The built tree as passed to buildRustCrate.
# (buildRustCrateFunc = lib.id returns the config attrset unchanged.)
buildTree = buildRustCrateWithFeatures {
buildRustCrateFunc = lib.id;
inherit packageId;
};
sanitizedBuildTree = sanitizeForJson buildTree;
# A reduced, JSON-safe projection of each crate config.
dependencyTree = sanitizeForJson (
buildRustCrateWithFeatures {
buildRustCrateFunc = crate: {
"01_crateName" = crate.crateName or false;
"02_features" = crate.features or [];
"03_dependencies" = crate.dependencies or [];
};
inherit packageId;
}
);
mergedPackageFeatures = mergePackageFeatures {
features = rootFeatures;
inherit packageId target;
};
diffedDefaultPackageFeatures = diffDefaultPackageFeatures {
inherit packageId target;
};
};
in
{ internal = debug; };
/* Returns differences between cargo default features and crate2nix default
features.
This is useful for verifying the feature resolution in crate2nix.
Result is a JSON string with three keys:
onlyInCargo / onlyInCrate2Nix / differentFeatures.
*/
diffDefaultPackageFeatures =
  { crateConfigs ? crates
  , packageId
  , target
  }:
  assert (builtins.isAttrs crateConfigs);
  let
    # Wrap each attr value v as { "${prefix}" = v; } so the crate2nix and
    # cargo views of the same crate can be merged side by side per crate.
    prefixValues = prefix: lib.mapAttrs (n: v: { "${prefix}" = v; });
    mergedFeatures =
      prefixValues
        "crate2nix"
        (mergePackageFeatures { inherit crateConfigs packageId target; features = [ "default" ]; });
    configs = prefixValues "cargo" crateConfigs;
    combined = lib.foldAttrs (a: b: a // b) {} [ mergedFeatures configs ];
    onlyInCargo =
      builtins.attrNames
        (lib.filterAttrs (n: v: !(v ? "crate2nix") && (v ? "cargo")) combined);
    onlyInCrate2Nix =
      builtins.attrNames
        (lib.filterAttrs (n: v: (v ? "crate2nix") && !(v ? "cargo")) combined);
    differentFeatures = lib.filterAttrs
      (
        n: v:
          (v ? "crate2nix")
          && (v ? "cargo")
          # BUG FIX: the crate configs in this file spell the attribute
          # "resolvedDefaultFeatures" (camelCase); the previous snake_case
          # "resolved_default_features" never matched, so the cargo side
          # always compared as [] and every crate was reported different.
          && (v.crate2nix.features or []) != (v."cargo".resolvedDefaultFeatures or [])
      )
      combined;
  in
  builtins.toJSON {
    inherit onlyInCargo onlyInCrate2Nix differentFeatures;
  };
/* Returns an attrset mapping packageId to the list of enabled features.
If multiple paths to a dependency enable different features, the
corresponding feature sets are merged. Features in rust are additive.
*/
# Fixed-point computation: starting from this package, expanded features are
# pushed into each enabled dependency; whenever a package's cached feature set
# grows, its dependencies are re-resolved with the merged set. Feature sets
# only ever grow and are bounded, so the recursion terminates once a cache
# entry already equals the combined set.
mergePackageFeatures =
{ crateConfigs ? crates
, packageId
, rootPackageId ? packageId
, features ? rootFeatures
# NOTE(review): dependencyPath only receives its default here and is not
# consulted below — presumably kept for error reporting in other versions.
, dependencyPath ? [ crates.${packageId}.crateName ]
, featuresByPackageId ? {}
, target
# Adds devDependencies to the crate with rootPackageId.
, runTests ? false
, ...
} @ args:
assert (builtins.isAttrs crateConfigs);
assert (builtins.isString packageId);
assert (builtins.isString rootPackageId);
assert (builtins.isList features);
assert (builtins.isList dependencyPath);
assert (builtins.isAttrs featuresByPackageId);
assert (builtins.isAttrs target);
assert (builtins.isBool runTests);
let
crateConfig = crateConfigs."${packageId}" or (builtins.throw "Package not found: ${packageId}");
# Apply this crate's feature map to the requested features (transitive closure).
expandedFeatures = expandFeatures (crateConfig.features or {}) features;
# For one dependency: which features does it receive from this crate?
depWithResolvedFeatures = dependency:
let
packageId = dependency.packageId;
features = dependencyFeatures expandedFeatures dependency;
in
{ inherit packageId features; };
# Folds the feature cache over a dependency list, recursing into
# mergePackageFeatures only when a dependency's feature set actually grew.
# NOTE(review): the `path` parameter ("dep"/"build") is unused in the body.
resolveDependencies = cache: path: dependencies:
assert (builtins.isAttrs cache);
assert (builtins.isList dependencies);
let
enabledDependencies = filterEnabledDependencies {
inherit dependencies target;
features = expandedFeatures;
};
directDependencies = map depWithResolvedFeatures enabledDependencies;
foldOverCache = op: lib.foldl op cache directDependencies;
in
foldOverCache
(
cache: { packageId, features }:
let
cacheFeatures = cache.${packageId} or [];
combinedFeatures = sortedUnique (cacheFeatures ++ features);
in
if cache ? ${packageId} && cache.${packageId} == combinedFeatures
then cache
else mergePackageFeatures {
features = combinedFeatures;
featuresByPackageId = cache;
inherit crateConfigs packageId target runTests rootPackageId;
}
);
# Record this package's own (merged) feature set first …
cacheWithSelf =
let
cacheFeatures = featuresByPackageId.${packageId} or [];
combinedFeatures = sortedUnique (cacheFeatures ++ expandedFeatures);
in
featuresByPackageId // {
"${packageId}" = combinedFeatures;
};
# … then propagate into normal deps (plus devDeps for the root under test) …
cacheWithDependencies =
resolveDependencies cacheWithSelf "dep" (
crateConfig.dependencies or []
++ lib.optionals
(runTests && packageId == rootPackageId)
(crateConfig.devDependencies or [])
);
# … and finally into build dependencies.
cacheWithAll =
resolveDependencies
cacheWithDependencies "build"
(crateConfig.buildDependencies or []);
in
cacheWithAll;
/* Returns the enabled dependencies given the enabled features. */
filterEnabledDependencies = { dependencies, features, target }:
  assert (builtins.isList dependencies);
  assert (builtins.isList features);
  assert (builtins.isAttrs target);
  let
    # A dependency is active when its target predicate accepts this platform
    # and, if it is optional, at least one enabled feature switches it on.
    isEnabled = dep:
      let
        matchesTarget = (dep.target or (_: true)) { inherit features target; };
        requiredByFeatures =
          !(dep.optional or false)
          || builtins.any (doesFeatureEnableDependency dep) features;
      in
      matchesTarget && requiredByFeatures;
  in
  builtins.filter isEnabled dependencies;
/* Returns whether the given feature should enable the given dependency. */
doesFeatureEnableDependency = { name, rename ? null, ... }: feature:
  let
    # The feature that toggles an optional dependency is its rename if one
    # exists, otherwise its plain name; "name/sub" features also count.
    effectiveName = if rename == null then name else rename;
    prefix = "${name}/";
    prefixLen = builtins.stringLength prefix;
  in
  feature == effectiveName
  || builtins.substring 0 prefixLen feature == prefix;
/* Returns the expanded features for the given inputFeatures by applying the
rules in featureMap.
featureMap is an attribute set which maps feature names to lists of further
feature names to enable in case this feature is selected.
*/
expandFeatures = featureMap: inputFeatures:
  assert (builtins.isAttrs featureMap);
  assert (builtins.isList inputFeatures);
  # Each feature contributes itself plus, recursively, everything its
  # featureMap entry implies; duplicates are collapsed at the end.
  sortedUnique (
    builtins.concatMap
      (feature:
        assert (builtins.isString feature);
        [ feature ] ++ expandFeatures featureMap (featureMap.${feature} or []))
      inputFeatures
  );
/*
Returns the actual features for the given dependency.
features: The features of the crate that refers this dependency.
*/
dependencyFeatures = features: dependency:
  assert (builtins.isList features);
  assert (builtins.isAttrs dependency);
  let
    # "default" is implied unless the dependency explicitly opts out.
    implicitDefault =
      if dependency.usesDefaultFeatures or true
      then [ "default" ]
      else [];
    # Features of the form "<depName>/<feature>" on the referring crate
    # forward "<feature>" to this dependency (rename wins over name).
    forwardPrefix = (dependency.rename or dependency.name) + "/";
    prefixLen = builtins.stringLength forwardPrefix;
    forwarded =
      builtins.map
        (f: builtins.substring prefixLen (builtins.stringLength f) f)
        (builtins.filter
          (f: builtins.substring 0 prefixLen f == forwardPrefix)
          features);
  in
  implicitDefault ++ (dependency.features or []) ++ forwarded;
/* Sorts and removes duplicates from a list of strings. */
sortedUnique = features:
  assert (builtins.isList features);
  assert (builtins.all builtins.isString features);
  # Deduplicate by round-tripping through an attrset (attr names are unique),
  # then sort lexicographically.
  builtins.sort (a: b: a < b) (
    builtins.attrNames (
      builtins.listToAttrs (map (f: { name = f; value = true; }) features)
    )
  );
# Emits `message` as an eval-time trace and returns `value`; when the
# file-level `strictDeprecation` flag is set, aborts evaluation instead.
deprecationWarning = message: value:
  if !strictDeprecation
  then builtins.trace message value
  else builtins.throw "strictDeprecation enabled, aborting: ${message}";
#
# crate2nix/default.nix (excerpt end)
#
};
}
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
# Each [[package]] entry pins an exact version and, for registry packages, a
# content checksum, giving reproducible dependency resolution.
[[package]]
name = "addr2line"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
[[package]]
name = "aho-corasick"
version = "0.7.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
dependencies = [
"memchr",
]
[[package]]
name = "anyhow"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1"
[[package]]
name = "arrayref"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "backtrace"
version = "0.3.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef5140344c85b01f9bbb4d4b7288a8aa4b3287ccef913a14bcc78a1063623598"
dependencies = [
"addr2line",
"cfg-if 1.0.0",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "bincode"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d"
dependencies = [
"byteorder",
"serde",
]
[[package]]
name = "bit-vec"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "blake2b_simd"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "blake3"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9ff35b701f3914bdb8fad3368d822c766ef2858b2583198e41639b936f09d3f"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if 0.1.10",
"constant_time_eq",
"crypto-mac",
"digest",
]
[[package]]
name = "bstr"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "473fc6b38233f9af7baa94fb5852dca389e3d95b8e21c8e3719301462c5d9faf"
dependencies = [
"memchr",
]
[[package]]
name = "bumpalo"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
[[package]]
name = "byteorder"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b"
[[package]]
name = "bytes"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad1f8e949d755f9d79112b5bb46938e0ef9d3804a0b16dfab13aafcaa5f0fa72"
[[package]]
name = "canonical-path"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6e9e01327e6c86e92ec72b1c798d4a94810f147209bbe3ffab6a86954937a6f"
[[package]]
name = "cc"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
dependencies = [
"jobserver",
]
# Two cfg-if major versions coexist; dependents disambiguate with the
# "cfg-if 0.1.10" / "cfg-if 1.0.0" spelling seen above.
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"serde",
"time",
"winapi",
]
[[package]]
name = "clap"
version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd1061998a501ee7d4b6d449020df3266ca3124b941ec56cf2005c3779ca142"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"indexmap",
"lazy_static",
"os_str_bytes",
"strsim",
"termcolor",
"textwrap",
"unicode-width",
"vec_map",
]
[[package]]
name = "clap_derive"
version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "370f715b81112975b1b69db93e0b56ea4cd4e5002ac43b2da8474106a54096a1"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "console"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cc80946b3480f421c2f17ed1cb841753a371c7c5104f51d507e13f532c856aa"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"regex",
"terminal_size",
"unicode-width",
"winapi",
]
[[package]]
name = "constant_time_eq"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
[[package]]
name = "core-foundation"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "core-foundation-sys"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b"
[[package]]
name = "crc32fast"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
dependencies = [
"autocfg",
"cfg-if 1.0.0",
"lazy_static",
]
[[package]]
name = "crypto-mac"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab"
dependencies = [
"generic-array",
"subtle",
]
[[package]]
name = "cryptovec"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4bfac8fcbf54dc25a5ac1b9f0df13ce561e1419186ed6c562b3d9902aeb7ad5"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "ctrlc"
version = "3.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b57a92e9749e10f25a171adcebfafe72991d45e7ec2dcb853e8f83d9dafaeb08"
dependencies = [
"nix",
"winapi",
]
[[package]]
name = "curve25519-dalek"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f627126b946c25a4638eec0ea634fc52506dea98db118aae985118ce7c3d723f"
dependencies = [
"byteorder",
"digest",
"rand_core 0.5.1",
"serde",
"subtle",
"zeroize",
]
[[package]]
name = "data-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993a608597367c6377b258c25d7120740f00ed23a2252b729b1932dd7866f908"
[[package]]
name = "diffs"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb4e4582f27b475e229dc0926b12082a6fb8662c7cb25ed1465243486fe181a2"
[[package]]
name = "digest"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
]
[[package]]
name = "dirs"
version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "142995ed02755914747cc6ca76fc7e4583cd18578746716d0508ea6ed558b9ff"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if 1.0.0",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a"
dependencies = [
"libc",
"redox_users 0.3.5",
"winapi",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users 0.4.0",
"winapi",
]
[[package]]
name = "edit"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "323032447eba6f5aca88b46d6e7815151c16c53e4128569420c09d7840db3bfc"
dependencies = [
"tempfile",
"which",
]
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "encoding_rs"
version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "801bbab217d7f79c0062f4f7205b5d4427c6d1a7bd7aafdd1475f7c59d62b283"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "env_logger"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26ecb66b4bdca6c1409b40fb255eefc2bd4f6d135dab3c3124f80ffa2a9661e"
dependencies = [
"atty",
"humantime",
"log",
"regex",
"termcolor",
]
[[package]]
name = "errno"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa68f2fb9cae9d37c9b2b3584aba698a2e97f72d7aef7b9f7aa71d8b54ce46fe"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14ca354e36190500e1e1fb267c647932382b54053c50b14970856c0b00a35067"
dependencies = [
"gcc",
"libc",
]
[[package]]
name = "filetime"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c122a393ea57648015bf06fbd3d372378992e86b9ff5a7a497b076a28c79efe"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.1.57",
"winapi",
]
[[package]]
name = "flate2"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
dependencies = [
"cfg-if 1.0.0",
"crc32fast",
"libc",
"miniz_oxide",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "form_urlencoded"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ece68d15c92e84fa4f19d3780f1294e5ca82a78a6d515f1efaabcc144688be00"
dependencies = [
"matches",
"percent-encoding",
]
[[package]]
name = "fs2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "futures"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c70be434c505aee38639abccb918163b63158a4b4bb791b45b7023044bdc3c9c"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f01c61843314e95f96cc9245702248733a3a3d744e43e2e755e3c7af8348a0a9"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
name = "futures-core"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db8d3b0917ff63a2a96173133c02818fac4a746b0a57569d3baca9ec0e945e08"
[[package]]
name = "futures-executor"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ee9ca2f7eb4475772cf39dd1cd06208dce2670ad38f4d9c7262b3e15f127068"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e37c1a51b037b80922864b8eed90692c5cd8abd4c71ce49b77146caa47f3253b"
[[package]]
name = "futures-macro"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f8719ca0e1f3c5e34f3efe4570ef2c0610ca6da85ae7990d472e9cbfba13664"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "futures-sink"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6adabac1290109cfa089f79192fb6244ad2c3f1cc2281f3e1dd987592b71feb"
[[package]]
name = "futures-task"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92a0843a2ff66823a8f7c77bffe9a09be2b64e533562c412d63075643ec0038"
dependencies = [
"once_cell",
]
[[package]]
name = "futures-util"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "036a2107cdeb57f6d7322f1b6c363dad67cd63ca3b7d1b925bdf75bd5d96cda9"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project-lite",
"pin-utils",
"proc-macro-hack",
"proc-macro-nested",
"slab",
]
[[package]]
name = "gcc"
version = "0.3.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2"
[[package]]
name = "generic-array"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4060f4657be78b8e766215b02b18a2e862d83745545de804638e2b545e81aee6"
dependencies = [
"cfg-if 1.0.0",
"libc",
"wasi 0.10.1+wasi-snapshot-preview1",
]
[[package]]
name = "gimli"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce"
[[package]]
name = "git2"
version = "0.13.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44f267c9da8a4de3c615b59e23606c75f164f84896e97f4dd6c15a4294de4359"
dependencies = [
"bitflags",
"libc",
"libgit2-sys",
"log",
"openssl-probe",
"openssl-sys",
"url",
]
[[package]]
name = "globset"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a"
dependencies = [
"aho-corasick",
"bstr",
"fnv",
"log",
"regex",
]
[[package]]
name = "h2"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b67e66362108efccd8ac053abafc8b7a8d86a37e6e48fc4f6f7485eb5e9e6a5"
dependencies = [
"bytes",
"fnv",
"futures-core",
"futures-sink",
"futures-util",
"http",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
"tracing-futures",
]
[[package]]
name = "hashbrown"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
[[package]]
name = "heck"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
dependencies = [
"libc",
]
[[package]]
name = "http"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2861bd27ee074e5ee891e8b539837a9430012e249d7f0ca2d795650f579c1994"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "httparse"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9"
[[package]]
name = "httpdate"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47"
[[package]]
name = "human-panic"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39f357a500abcbd7c5f967c1d45c8838585b36743823b9d43488f24850534e36"
dependencies = [
"backtrace",
"os_type",
"serde",
"serde_derive",
"termcolor",
"toml",
"uuid",
]
[[package]]
name = "humantime"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c1ad908cc71012b7bea4d0c53ba96a8cba9962f048fa68d143376143d863b7a"
[[package]]
name = "hyper"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12219dc884514cb4a6a03737f4413c0e01c23a1b059b0156004b23f1e19dccbe"
dependencies = [
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"h2",
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
"pin-project 1.0.4",
"socket2",
"tokio",
"tower-service",
"tracing",
"want",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
"bytes",
"hyper",
"native-tls",
"tokio",
"tokio-native-tls",
]
[[package]]
name = "idna"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
dependencies = [
"matches",
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "ignore"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b287fb45c60bb826a0dc68ff08742b9d88a2fea13d6e0c286b3172065aaf878c"
dependencies = [
"crossbeam-utils",
"globset",
"lazy_static",
"log",
"memchr",
"regex",
"same-file",
"thread_local",
"walkdir",
"winapi-util",
]
[[package]]
name = "indexmap"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb1fa934250de4de8aef298d81c729a7d33d8c239daa3a7575e6b92bfc7313b"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "indicatif"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7baab56125e25686df467fe470785512329883aab42696d661247aca2a2896e4"
dependencies = [
"console",
"lazy_static",
"number_prefix",
"regex",
]
[[package]]
name = "instant"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "ipnet"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135"
[[package]]
name = "itoa"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
[[package]]
name = "jobserver"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
dependencies = [
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf3d7383929f7c9c7c2d0fa596f325832df98c3704f2c60553080f7127a58175"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89203f3fba0a3795506acaad8ebce3c80c0af93f994d5a1d7a0b1eeb23271929"
[[package]]
name = "libgit2-sys"
version = "0.12.17+1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4ebdf65ca745126df8824688637aa0535a88900b83362d8ca63893bcf4e8841"
dependencies = [
"cc",
"libc",
"libssh2-sys",
"libz-sys",
"openssl-sys",
"pkg-config",
]
[[package]]
name = "libpijul"
version = "1.0.0-alpha.28"
dependencies = [
"anyhow",
"bincode",
"bitflags",
"blake3",
"byteorder",
"canonical-path",
"chrono",
"curve25519-dalek",
"data-encoding",
"diffs",
"env_logger",
"flate2",
"ignore",
"lazy_static",
"log",
"lru-cache",
"pijul-macros",
"rand 0.8.1",
"regex",
"sanakirja",
"serde",
"serde_derive",
"tar",
"tempfile",
"thiserror",
"tokio",
"toml",
"zstd-seekable",
]
[[package]]
name = "libsodium-sys"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a685b64f837b339074115f2e7f7b431ac73681d08d75b389db7498b8892b8a58"
dependencies = [
"cc",
"libc",
"pkg-config",
]
[[package]]
name = "libssh2-sys"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df40b13fe7ea1be9b9dffa365a51273816c345fc1811478b57ed7d964fbfc4ce"
dependencies = [
"cc",
"libc",
"libz-sys",
"openssl-sys",
"pkg-config",
"vcpkg",
]
[[package]]
name = "libz-sys"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
[[package]]
name = "lock_api"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
dependencies = [
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcf3805d4480bb5b86070dcfeb9e2cb2ebc148adb753c5cca5f884d1d65a42b2"
dependencies = [
"cfg-if 0.1.10",
]
[[package]]
name = "lru-cache"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "matches"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
[[package]]
name = "memchr"
version = "2.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
[[package]]
name = "memmap"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "mime"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
[[package]]
name = "miniz_oxide"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "mio"
version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e50ae3f04d169fcc9bde0b547d1c205219b7157e07ded9c5aff03e0637cb3ed7"
dependencies = [
"libc",
"log",
"miow",
"ntapi",
"winapi",
]
[[package]]
name = "miow"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897"
dependencies = [
"socket2",
"winapi",
]
[[package]]
name = "native-tls"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8d96b2e1c8da3957d58100b09f102c6d9cfdfced01b7ec5a8974044bb09dbd4"
dependencies = [
"lazy_static",
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]]
name = "nix"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83450fe6a6142ddd95fb064b746083fc4ef1705fe81f64a64e1d4b39f54a1055"
dependencies = [
"bitflags",
"cc",
"cfg-if 0.1.10",
"libc",
]
[[package]]
name = "ntapi"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44"
dependencies = [
"winapi",
]
[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "number_prefix"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17b02fc0ff9a9e4b35b3342880f48e896ebf69f2967921fe8646bf5b7125956a"
[[package]]
name = "object"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397"
[[package]]
name = "once_cell"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0"
[[package]]
name = "openssl"
version = "0.10.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "038d43985d1ddca7a9900630d8cd031b56e4794eecc2e9ea39dd17aa04399a70"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
"foreign-types",
"lazy_static",
"libc",
"openssl-sys",
]
[[package]]
name = "openssl-probe"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
[[package]]
name = "openssl-sys"
version = "0.9.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "921fc71883267538946025deffb622905ecad223c28efbfdef9bb59a0175f3e6"
dependencies = [
"autocfg",
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "os_str_bytes"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb2e1c3ee07430c2cf76151675e583e0f19985fa6efae47d6848a3e2c824f85"
[[package]]
name = "os_type"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edc011af0ae98b7f88cf7e4a83b70a54a75d2b8cb013d6efd02e5956207e9eb"
dependencies = [
"regex",
]
[[package]]
name = "pager"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05c7d08cf0d0b55c4f0ffedb5e06569ea212e85d622975071370393970491968"
dependencies = [
"errno",
"libc",
]
[[package]]
name = "parking_lot"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb"
dependencies = [
"instant",
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
dependencies = [
"cfg-if 1.0.0",
"instant",
"libc",
"redox_syscall 0.1.57",
"smallvec",
"winapi",
]
[[package]]
name = "percent-encoding"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pijul"
version = "1.0.0-alpha.31"
dependencies = [
"anyhow",
"byteorder",
"canonical-path",
"chrono",
"clap",
"ctrlc",
"data-encoding",
"dirs-next",
"edit",
"env_logger",
"futures",
"futures-util",
"git2",
"human-panic",
"ignore",
"indicatif",
"lazy_static",
"libpijul",
"log",
"num_cpus",
"pager",
"rand 0.8.1",
"regex",
"reqwest",
"rpassword",
"sanakirja",
"serde",
"serde_derive",
"serde_json",
"thrussh",
"thrussh-config",
"thrussh-keys",
"tokio",
"toml",
"whoami 0.9.0",
]
[[package]]
name = "pijul-macros"
version = "0.3.0"
dependencies = [
"proc-macro2",
"quote",
"regex",
"syn",
]
[[package]]
name = "pin-project"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15"
dependencies = [
"pin-project-internal 0.4.27",
]
[[package]]
name = "pin-project"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95b70b68509f17aa2857863b6fa00bf21fc93674c7a8893de2f469f6aa7ca2f2"
dependencies = [
"pin-project-internal 1.0.4",
]
[[package]]
name = "pin-project-internal"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-internal"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caa25a6393f22ce819b0f50e0be89287292fda8d425be38ee0ca14c4931d9e71"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-lite"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439697af366c49a6d0a010c56a0d97685bc140ce0d377b13a2ea2aa42d64a827"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkg-config"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
[[package]]
name = "ppv-lite86"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro-nested"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a"
[[package]]
name = "proc-macro2"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
dependencies = [
"unicode-xid",
]
[[package]]
name = "quote"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
"rand_hc 0.2.0",
]
[[package]]
name = "rand"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c24fcd450d3fa2b592732565aa4f17a27a61c65ece4726353e000939b0edee34"
dependencies = [
"libc",
"rand_chacha 0.3.0",
"rand_core 0.6.1",
"rand_hc 0.3.0",
]
[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core 0.5.1",
]
[[package]]
name = "rand_chacha"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d"
dependencies = [
"ppv-lite86",
"rand_core 0.6.1",
]
[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.16",
]
[[package]]
name = "rand_core"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c026d7df8b298d90ccbbc5190bd04d85e159eaf5576caeacf8741da93ccbd2e5"
dependencies = [
"getrandom 0.2.1",
]
[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "rand_hc"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73"
dependencies = [
"rand_core 0.6.1",
]
[[package]]
name = "redox_syscall"
version = "0.1.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
[[package]]
name = "redox_syscall"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3d82a0e6791428b9525c116cc671feac3ce1e73b26c5724a6bf26ae130ebf18"
dependencies = [
"bitflags",
]
[[package]]
name = "redox_users"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d"
dependencies = [
"getrandom 0.1.16",
"redox_syscall 0.1.57",
"rust-argon2",
]
[[package]]
name = "redox_users"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64"
dependencies = [
"getrandom 0.2.1",
"redox_syscall 0.2.2",
]
[[package]]
name = "regex"
version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
"thread_local",
]
[[package]]
name = "regex-syntax"
version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
[[package]]
name = "remove_dir_all"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
dependencies = [
"winapi",
]
[[package]]
name = "reqwest"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd281b1030aa675fb90aa994d07187645bb3c8fc756ca766e7c3070b439de9de"
dependencies = [
"base64",
"bytes",
"encoding_rs",
"futures-core",
"futures-util",
"http",
"http-body",
"hyper",
"hyper-tls",
"ipnet",
"js-sys",
"lazy_static",
"log",
"mime",
"native-tls",
"percent-encoding",
"pin-project-lite",
"serde",
"serde_urlencoded",
"tokio",
"tokio-native-tls",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"winreg",
]
[[package]]
name = "rpassword"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d755237fc0f99d98641540e66abac8bc46a0652f19148ac9e21de2da06b326c9"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "rust-argon2"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb"
dependencies = [
"base64",
"blake2b_simd",
"constant_time_eq",
"crossbeam-utils",
]
[[package]]
name = "rustc-demangle"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232"
[[package]]
name = "ryu"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "sanakirja"
version = "0.15.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dc2f1481f4fd49edfc47d14f83d2b3b15c342d27adafd7b78488179ca7df27e"
dependencies = [
"crc32fast",
"fs2",
"git2",
"log",
"memmap",
"parking_lot",
"rand 0.8.1",
"thiserror",
]
[[package]]
name = "schannel"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75"
dependencies = [
"lazy_static",
"winapi",
]
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "security-framework"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1759c2e3c8580017a484a7ac56d3abc5a6c1feadf88db2f3633f12ae4268c69"
dependencies = [
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f99b9d5e26d2a71633cc4f2ebae7cc9f874044e0c351a27e17892d76dce5678b"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "serde"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9"
dependencies = [
"form_urlencoded",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "slab"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
[[package]]
name = "socket2"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e"
dependencies = [
"cfg-if 1.0.0",
"libc",
"winapi",
]
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "subtle"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2"
[[package]]
name = "syn"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "tar"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "489997b7557e9a43e192c527face4feacc78bfbe6eed67fd55c4c9e381cba290"
dependencies = [
"filetime",
"libc",
"redox_syscall 0.1.57",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
dependencies = [
"cfg-if 0.1.10",
"libc",
"rand 0.7.3",
"redox_syscall 0.1.57",
"remove_dir_all",
"winapi",
]
[[package]]
name = "termcolor"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
dependencies = [
"winapi-util",
]
[[package]]
name = "terminal_size"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd2d183bd3fac5f5fe38ddbeb4dc9aec4a39a9d7d59e7491d900302da01cbe1"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "textwrap"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "203008d98caf094106cfaba70acfed15e18ed3ddb7d94e49baec153a2b462789"
dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb9bc092d0d51e76b2b19d9d85534ffc9ec2db959a2523cdae0697e2972cd447"
dependencies = [
"lazy_static",
]
[[package]]
name = "threadpool"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
[[package]]
name = "thrussh"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "750547dae73312fa95bfd66d295ba9744bd4ccdeee2d02a7f7f8a06afeaf2747"
dependencies = [
"bitflags",
"byteorder",
"cryptovec",
"flate2",
"futures",
"log",
"openssl",
"thiserror",
"thrussh-keys",
"thrussh-libsodium",
"tokio",
]
[[package]]
name = "thrussh-config"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67c044d978aff4dfc0036a29ce10bfa190ab84381bb4551fd50fa0e0db2ef5f9"
dependencies = [
"dirs-next",
"futures",
"log",
"thiserror",
"tokio",
"whoami 1.0.3",
]
[[package]]
name = "thrussh-keys"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3235ac21ae190820d5ba0c333e58324cb7edf79f1c463a8c914d88051e31de56"
dependencies = [
"bit-vec",
"byteorder",
"cryptovec",
"data-encoding",
"dirs",
"futures",
"log",
"num-bigint",
"num-integer",
"openssl",
"serde",
"serde_derive",
"thiserror",
"thrussh-libsodium",
"tokio",
"yasna",
]
[[package]]
name = "thrussh-libsodium"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe89c70d27b1cb92e13bc8af63493e890d0de46dae4df0e28233f62b4ed9500"
dependencies = [
"lazy_static",
"libc",
"libsodium-sys",
"pkg-config",
"vcpkg",
]
[[package]]
name = "time"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "tinyvec"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf8dbc19eb42fba10e8feaaec282fb50e2c14b2726d6301dbfeed0f73306a6f"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d258221f566b6c803c7b4714abadc080172b272090cdc5e244a6d4dd13c3a6bd"
dependencies = [
"autocfg",
"bytes",
"libc",
"memchr",
"mio",
"num_cpus",
"pin-project-lite",
"tokio-macros",
]
[[package]]
name = "tokio-macros"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42517d2975ca3114b22a16192634e8241dc5cc1f130be194645970cc1c371494"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b"
dependencies = [
"native-tls",
"tokio",
]
[[package]]
name = "tokio-stream"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4cdeb73537e63f98adcd73138af75e3f368ccaecffaa29d7eb61b9f5a440457"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-util"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36135b7e7da911f5f8b9331209f7fab4cc13498f3fff52f72a710c78187e3148"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"log",
"pin-project-lite",
"tokio",
"tokio-stream",
]
[[package]]
name = "toml"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa"
dependencies = [
"serde",
]
[[package]]
name = "tower-service"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860"
[[package]]
name = "tracing"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f47026cdc4080c07e49b37087de021820269d996f581aac150ef9e5583eefe3"
dependencies = [
"cfg-if 1.0.0",
"pin-project-lite",
"tracing-core",
]
[[package]]
name = "tracing-core"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f"
dependencies = [
"lazy_static",
]
[[package]]
name = "tracing-futures"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c"
dependencies = [
"pin-project 0.4.27",
"tracing",
]
[[package]]
name = "try-lock"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
[[package]]
name = "typenum"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
[[package]]
name = "unicode-bidi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
dependencies = [
"matches",
]
[[package]]
name = "unicode-normalization"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a13e63ab62dbe32aeee58d1c5408d35c36c392bba5d9d3142287219721afe606"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0d2e7be6ae3a5fa87eed5fb451aff96f2573d2694942e40543ae0bbe19c796"
[[package]]
name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]]
name = "unicode-xid"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
[[package]]
name = "url"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5909f2b0817350449ed73e8bcd81c8c3c8d9a7a5d8acba4b27db277f1868976e"
dependencies = [
"form_urlencoded",
"idna",
"matches",
"percent-encoding",
]
[[package]]
name = "uuid"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
dependencies = [
"getrandom 0.2.1",
]
[[package]]
name = "vcpkg"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
[[package]]
name = "walkdir"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
dependencies = [
"same-file",
"winapi",
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
dependencies = [
"log",
"try-lock",
]
[[package]]
name = "wasi"
version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.10.1+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93c6c3420963c5c64bca373b25e77acb562081b9bb4dd5bb864187742186cea9"
[[package]]
name = "wasm-bindgen"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3cd364751395ca0f68cafb17666eee36b63077fb5ecd972bbcd74c90c4bf736e"
dependencies = [
"cfg-if 1.0.0",
"serde",
"serde_json",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1114f89ab1f4106e5b55e688b828c0ab0ea593a1ea7c094b141b14cbaaec2d62"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fe9756085a84584ee9457a002b7cdfe0bfff169f45d2591d8be1345a6780e35"
dependencies = [
"cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6ac8995ead1f084a8dea1e65f194d0973800c7f571f6edd70adf06ecf77084"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5a48c72f299d80557c7c62e37e7225369ecc0c963964059509fbafe917c7549"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e7811dd7f9398f14cc76efd356f98f03aa30419dea46aa810d71e819fc97158"
[[package]]
name = "web-sys"
version = "0.3.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "222b1ef9334f92a21d3fb53dc3fd80f30836959a90f9274a626d7e06315ba3c3"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "which"
version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724"
dependencies = [
"libc",
]
[[package]]
name = "whoami"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7884773ab69074615cb8f8425d0e53f11710786158704fca70f53e71b0e05504"
[[package]]
name = "whoami"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d595b2e146f36183d6a590b8d41568e2bc84c922267f43baf61c956330eeb436"
dependencies = [
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "winreg"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69"
dependencies = [
"winapi",
]
[[package]]
name = "xattr"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c"
dependencies = [
"libc",
]
[[package]]
name = "yasna"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de7bff972b4f2a06c85f6d8454b09df153af7e3a4ec2aac81db1b105b684ddb"
dependencies = [
"bit-vec",
"num-bigint",
]
[[package]]
name = "zeroize"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81a974bcdd357f0dca4d41677db03436324d45a4c9ed2d0b873a5a360ce41c36"
[[package]]
name = "zstd-seekable"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e3b99cee88f0309ca765c6aa8c284a00394c35ef8ca012e2409485fc369bf2f"
dependencies = [
"cc",
"libc",
"pkg-config",
"thiserror",
"threadpool",
]
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.