SXEYMYF7P4RZMZ46WPL4IZUTSQ2ATBWYZX7QNVMS3SGOYXYOHAGQC
# Development shell for building pijul.
# `import <nixpkgs> {}` evaluates the nixpkgs set; `with` brings its
# attributes (pkgs, stdenv, ...) into scope for the whole expression.
with import <nixpkgs> {};
pkgs.mkShell {
  name = "pijul";
  # Build-time tools that run on the build host.
  nativeBuildInputs = with pkgs; [
    clang
    pkg-config
  ];
  # Libraries (and the Rust toolchain) linked or used at build time.
  buildInputs = with pkgs; [
    rustc
    rustfmt
    llvmPackages.libclang
    libsodium
    openssl
    xxHash
    zstd
    libgit2
  ] ++ (stdenv.lib.optionals stdenv.isDarwin [
    # NOTE(review): on recent nixpkgs these frameworks live under
    # darwin.apple_sdk.frameworks.*, and `stdenv.lib` is deprecated in
    # favor of `lib` — confirm against the nixpkgs pin this project uses.
    CoreServices
    Security
  ]);
  # bindgen-based Rust dependencies locate libclang through this variable.
  LIBCLANG_PATH = "${llvmPackages.libclang}/lib";
}
#![recursion_limit = "256"]
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use proc_macro2::*;
use std::iter::FromIterator;
/// Return `name` with its first character uppercased.
///
/// Unlike the previous `to_uppercase().nth(0)` approach, this keeps every
/// character of a multi-character uppercase expansion (e.g. 'ß' -> "SS")
/// instead of silently truncating it. An empty string yields an empty
/// string. Used to derive CamelCase type names from snake_case table names.
fn name_capital(name: &str) -> String {
    let mut chars = name.chars();
    match chars.next() {
        // `to_uppercase` yields an iterator because some characters expand
        // to several; chain the untouched remainder of the string after it.
        Some(first) => first.to_uppercase().chain(chars).collect(),
        None => String::new(),
    }
}
/// Declares the hidden associated type for one table: `table!(foo)`
/// expands to `type Foo;`.
#[proc_macro]
pub fn table(input: proc_macro::TokenStream) -> TokenStream {
    // The macro takes exactly one identifier: the table name.
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = if let Some(TokenTree::Ident(id)) = tokens.next() {
        id.to_string()
    } else {
        panic!("txn_table: first argument not an identifier")
    };
    assert!(tokens.next().is_none());
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        type #name_capital;
    })
}
/// Generates a sanakirja-backed `get_<name>` method on a concrete
/// transaction type.
///
/// Arguments: table name, key type, value type, optional `(key, value)`
/// preprocessing expression, optional postprocessing accessor.
#[proc_macro]
pub fn sanakirja_table_get(input: proc_macro::TokenStream) -> TokenStream {
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = match tokens.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let name = syn::Ident::new(&name, Span::call_site());
    // The remaining comma-separated argument groups, in declaration order.
    let key = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let value = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let pre_tokens = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let post_tokens = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    // Optional rebinding of (key, value) before the lookup.
    let pre = if pre_tokens.is_empty() {
        quote! {}
    } else {
        quote! { let (key, value) = #pre_tokens; }
    };
    // Optional accessor chained onto the raw lookup result.
    let post = if post_tokens.is_empty() {
        quote! { self.txn.get(&self.#name, key, value) }
    } else {
        quote! { self.txn.get(&self.#name, key, value) . #post_tokens }
    };
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        fn #name_get <'txn> (&'txn self, key: #key, value: Option<#value>) -> Option<#value> {
            use ::sanakirja::Transaction;
            #pre
            #post
        }
    })
}
/// Generates a sanakirja-backed `get_<name>` method taking the table as an
/// explicit `db` argument.
///
/// Arguments: table name, key type, value type.
#[proc_macro]
pub fn sanakirja_get(input: proc_macro::TokenStream) -> TokenStream {
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = if let Some(TokenTree::Ident(id)) = tokens.next() {
        id.to_string()
    } else {
        panic!("txn_table: first argument not an identifier")
    };
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let value = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    assert!(tokens.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        fn #name_get(&self, db: &Self::#name_capital, key: #key, value: Option<#value>) -> Option<#value> {
            use ::sanakirja::Transaction;
            self.txn.get(db, key, value)
        }
    })
}
/// Declares (on a trait) the `get_<name>` method for one table.
///
/// Arguments: table name, key type, value type.
#[proc_macro]
pub fn table_get(input: proc_macro::TokenStream) -> TokenStream {
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = if let Some(TokenTree::Ident(id)) = tokens.next() {
        id.to_string()
    } else {
        panic!("txn_table: first argument not an identifier")
    };
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let value = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    assert!(tokens.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        fn #name_get<'txn>(&'txn self, key: #key, value: Option<#value>) -> Option<#value>;
    })
}
/// Declares (on a trait) the `get_<name>` method taking the table as an
/// explicit `db` argument.
///
/// Arguments: table name, key type, value type.
#[proc_macro]
pub fn get(input: proc_macro::TokenStream) -> TokenStream {
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = if let Some(TokenTree::Ident(id)) = tokens.next() {
        id.to_string()
    } else {
        panic!("txn_table: first argument not an identifier")
    };
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_get = syn::Ident::new(&format!("get_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let value = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    assert!(tokens.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        fn #name_get<'txn>(&'txn self, db: &Self::#name_capital, key: #key, value: Option<#value>) -> Option<#value>;
    })
}
/// Collects tokens from `input_iter` up to (but not including) the next
/// comma separator. A comma in the very first position is treated as a
/// leftover separator from the previous argument and skipped. Returns
/// whatever was gathered when the stream ends.
fn next(input_iter: &mut proc_macro2::token_stream::IntoIter) -> Vec<TokenTree> {
    let mut result = Vec::new();
    let mut leading = true;
    while let Some(token) = input_iter.next() {
        match token {
            TokenTree::Punct(p) if p.as_char() == ',' => {
                // Any comma after the first position terminates the argument.
                if !leading {
                    return result;
                }
            }
            other => result.push(other),
        }
        leading = false;
    }
    result
}
/// Declares a forward cursor constructor, the associated cursor type and
/// the next/prev methods for a table.
#[proc_macro]
pub fn cursor(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, false, false)
}
/// Like `cursor`, but additionally declares a by-reference constructor
/// taking `RT: Deref<Target = Self>` instead of `&self`.
#[proc_macro]
pub fn cursor_ref(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, false, true)
}
/// Like `cursor`, but additionally declares an `iter_<name>` convenience
/// method.
#[proc_macro]
pub fn iter(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, false, true, false)
}
/// Declares a reverse (backwards-iterating) cursor constructor.
#[proc_macro]
pub fn rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    cursor_(input, true, false, false)
}
/// Shared implementation of the `cursor`, `cursor_ref`, `iter` and
/// `rev_cursor` macros: emits trait-level declarations for one table.
///
/// Input: `name, key_type, value_type`. Depending on the flags this
/// declares the cursor type plus next/prev methods (`!rev`), the cursor
/// constructor (always), a by-reference constructor (`borrow`) and an
/// iterator method (`iter`).
fn cursor_(input: proc_macro::TokenStream, rev: bool, iter: bool, borrow: bool) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let capital = name_capital(&name);
    // `FooCursor`: the associated cursor type name for table `foo`.
    let cursor_name = syn::Ident::new(&format!("{}Cursor", capital,), Span::call_site());
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_iter = syn::Ident::new(&format!("iter_{}", name), Span::call_site());
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    // Reverse variants get a `rev_` prefix on the constructor names.
    let name_cursor = syn::Ident::new(
        &format!("{}cursor_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_cursor_ref = syn::Ident::new(
        &format!("{}cursor_{}_ref", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    // Wrapper type returned by the constructor: RevCursor or Cursor.
    let cursor_type = if rev {
        quote! {
            crate::pristine::RevCursor<Self, &'txn Self, Self::#cursor_name, #key, #value>
        }
    } else {
        quote! {
            crate::pristine::Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value>
        }
    };
    // The cursor type and next/prev are only declared by the forward
    // variant; the reverse macro reuses them.
    let def = if rev {
        quote! {}
    } else {
        quote! {
            #[doc(hidden)]
            type #cursor_name;
            #[doc(hidden)]
            fn #name_next <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Option<(#key, #value)>;
            #[doc(hidden)]
            fn #name_prev <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Option<(#key, #value)>;
        }
    };
    // Optional by-reference constructor declaration.
    let borrow = if borrow {
        quote! {
            #[doc(hidden)]
            fn #name_cursor_ref<RT: std::ops::Deref<Target = Self>>(
                txn: RT,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> crate::pristine::Cursor<Self, RT, Self::#cursor_name, #key, #value>;
        }
    } else {
        quote! {}
    };
    // Optional `iter_<name>` declaration.
    let iter = if !iter {
        quote! {}
    } else {
        quote! {
            #[doc(hidden)]
            fn #name_iter <'txn> (
                &'txn self,
                k: #key,
                v: Option<#value>
            ) -> #cursor_type;
        }
    };
    assert!(input_iter.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #def
        #[doc(hidden)]
        fn #name_cursor<'txn>(
            &'txn self,
            db: &Self::#name_capital,
            pos: Option<(#key, Option<#value>)>,
        ) -> #cursor_type;
        #borrow
        #iter
    })
}
/// Implements the forward cursor constructor, cursor type and next/prev
/// methods for a sanakirja-backed transaction type.
#[proc_macro]
pub fn sanakirja_cursor(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, false, false)
}
/// Like `sanakirja_cursor`, but additionally implements the by-reference
/// constructor taking `RT: Deref<Target = Self>`.
#[proc_macro]
pub fn sanakirja_cursor_ref(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, false, true)
}
/// Like `sanakirja_cursor`, but additionally implements `iter_<name>`.
#[proc_macro]
pub fn sanakirja_iter(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, false, true, false)
}
/// Implements the reverse cursor constructor for a sanakirja-backed
/// transaction type.
#[proc_macro]
pub fn sanakirja_rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    sanakirja_cursor_(input, true, false, false)
}
/// Shared implementation of the `sanakirja_cursor*` macros: generates the
/// concrete cursor type, constructors, next/prev and (optionally) the
/// iterator / by-reference variants for one table.
///
/// Input: `name, key_type, value_type, pre, post` — `pre` optionally
/// rebinds `pos` before seeking; `post` is an optional accessor applied to
/// each raw result of `::sanakirja::next`/`prev`.
fn sanakirja_cursor_(
    input: proc_macro::TokenStream,
    rev: bool,
    iter: bool,
    borrow: bool,
) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let cursor_name = syn::Ident::new(
        &format!("{}Cursor", name_capital(&name),),
        Span::call_site(),
    );
    let name_capital = syn::Ident::new(&name_capital(&name), Span::call_site());
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    // Reverse variants get a `rev_` prefix on the generated names.
    let name_cursor = syn::Ident::new(
        &format!("{}cursor_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_cursor_ref = syn::Ident::new(
        &format!("{}cursor_{}_ref", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name_iter = syn::Ident::new(
        &format!("{}iter_{}", if rev { "rev_" } else { "" }, name),
        Span::call_site(),
    );
    let name = syn::Ident::new(&name, Span::call_site());
    // Remaining comma-separated argument groups, consumed in order.
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let post = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    // Optional rebinding of `pos` before the database seek.
    let pre_init = if !pre.is_empty() {
        quote! { let pos = #pre; }
    } else {
        quote! {}
    };
    // Optional accessor chained onto each raw (key, value) result.
    let post = if !post.is_empty() {
        quote! { . #post }
    } else {
        quote! {}
    };
    // `iter_<name>`: convenience wrapper around the cursor constructor.
    let iter = if iter {
        quote! {
            #[doc(hidden)]
            fn #name_iter <'txn> (
                &'txn self,
                k: #key,
                v: Option<#value>
            ) -> super::Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value> {
                self.#name_cursor(&self.#name, Some((k, v)))
            }
        }
    } else {
        quote! {}
    };
    // By-reference constructor: any `Deref<Target = Self>` instead of `&self`.
    let borrow = if borrow {
        quote! {
            #[doc(hidden)]
            fn #name_cursor_ref <RT: std::ops::Deref<Target = Self>> (
                txn: RT,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> super::Cursor<Self, RT, Self::#cursor_name, #key, #value> {
                #pre_init
                let mut cursor = txn.txn.set_cursors(&db, pos).0;
                super::Cursor {
                    cursor,
                    txn,
                    marker: std::marker::PhantomData,
                }
            }
        }
    } else {
        quote! {}
    };
    let result = proc_macro::TokenStream::from(if rev {
        // Reverse cursor: seek to `pos` when given, otherwise to the end.
        quote! {
            #[doc(hidden)]
            fn #name_cursor<'txn>(
                &'txn self,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> super::RevCursor<Self, &'txn Self, Self::#cursor_name, #key, #value> {
                #pre_init
                let mut cursor = if pos.is_some() {
                    self.txn.set_cursors(&db, pos).0
                } else {
                    self.txn.set_cursors_last(&db)
                };
                super::RevCursor {
                    cursor,
                    txn: self,
                    marker: std::marker::PhantomData,
                }
            }
        }
    } else {
        // Forward variant: also defines the concrete cursor type and the
        // next/prev methods delegating to ::sanakirja.
        quote! {
            #[doc(hidden)]
            type #cursor_name = ::sanakirja::Cursor;
            #[doc(hidden)]
            fn #name_cursor<'txn>(
                &'txn self,
                db: &Self::#name_capital,
                pos: Option<(#key, Option<#value>)>,
            ) -> super::Cursor<Self, &'txn Self, Self::#cursor_name, #key, #value> {
                #pre_init
                let mut cursor = self.txn.set_cursors(&db, pos).0;
                super::Cursor {
                    cursor,
                    txn: self,
                    marker: std::marker::PhantomData,
                }
            }
            #borrow
            #[doc(hidden)]
            fn #name_next <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Option<(#key, #value)> {
                (unsafe { ::sanakirja::next(&self.txn, cursor) })
                #post
            }
            #[doc(hidden)]
            fn #name_prev <'txn> (
                &'txn self,
                cursor: &mut Self::#cursor_name,
            ) -> Option<(#key, #value)> {
                (unsafe { ::sanakirja::prev(&self.txn, cursor) })
                #post
            }
            #iter
        }
    });
    result
}
/// Implements `Iterator` (plus an explicit `prev`) for the generic forward
/// cursor wrapper of one table.
#[proc_macro]
pub fn initialized_cursor(input: proc_macro::TokenStream) -> TokenStream {
    initialized_cursor_(input, false)
}
/// Implements `Iterator` for the reverse cursor wrapper of one table.
#[proc_macro]
pub fn initialized_rev_cursor(input: proc_macro::TokenStream) -> TokenStream {
    initialized_cursor_(input, true)
}
/// Shared implementation of `initialized_cursor`/`initialized_rev_cursor`:
/// implements `Iterator` for the cursor wrapper types, delegating to the
/// trait's `cursor_<name>_next`/`_prev` methods.
///
/// Input: `name, key_type, value_type`.
fn initialized_cursor_(input: proc_macro::TokenStream, rev: bool) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let cursor_name = syn::Ident::new(
        &format!("{}Cursor", name_capital(&name),),
        Span::call_site(),
    );
    let name_next = syn::Ident::new(&format!("cursor_{}_next", name), Span::call_site());
    let name_prev = syn::Ident::new(&format!("cursor_{}_prev", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    assert!(input_iter.next().is_none());
    if rev {
        // A reverse cursor iterates by stepping the cursor backwards.
        proc_macro::TokenStream::from(quote! {
            impl<T: TxnT, RT: std::ops::Deref<Target = T>> Iterator for crate::pristine::RevCursor<T, RT, T::#cursor_name, #key, #value>
            {
                type Item = (#key, #value);
                fn next(&mut self) -> Option<(#key, #value)> {
                    self.txn.#name_prev(&mut self.cursor)
                }
            }
        })
    } else {
        // Forward cursors also expose `prev` for bidirectional walks.
        proc_macro::TokenStream::from(quote! {
            impl<T: TxnT, RT: std::ops::Deref<Target = T>>
                crate::pristine::Cursor<T, RT, T::#cursor_name, #key, #value>
            {
                pub fn prev(&mut self) -> Option<(#key, #value)> {
                    self.txn.#name_prev(&mut self.cursor)
                }
            }
            impl<T: TxnT, RT: std::ops::Deref<Target = T>> Iterator for crate::pristine::Cursor<T, RT, T::#cursor_name, #key, #value>
            {
                type Item = (#key, #value);
                fn next(&mut self) -> Option<(#key, #value)> {
                    self.txn.#name_next(&mut self.cursor)
                }
            }
        })
    }
}
/// Declares (on a trait) the `put_<name>` and `del_<name>` methods for one
/// table.
///
/// Arguments: table name, key type, value type.
#[proc_macro]
pub fn put_del(input: proc_macro::TokenStream) -> TokenStream {
    let mut tokens = proc_macro2::TokenStream::from(input).into_iter();
    let name = if let Some(TokenTree::Ident(id)) = tokens.next() {
        id.to_string()
    } else {
        panic!("txn_table: first argument not an identifier")
    };
    let put = syn::Ident::new(&format!("put_{}", name), Span::call_site());
    let del = syn::Ident::new(&format!("del_{}", name), Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    let value = proc_macro2::TokenStream::from_iter(next(&mut tokens));
    assert!(tokens.next().is_none());
    proc_macro::TokenStream::from(quote! {
        #[doc(hidden)]
        fn #put(
            &mut self,
            k: #key,
            e: #value,
        ) -> Result<bool, anyhow::Error>;
        #[doc(hidden)]
        fn #del(
            &mut self,
            k: #key,
            e: Option<#value>,
        ) -> Result<bool, anyhow::Error>;
    })
}
/// Generates sanakirja-backed `put_<name>`/`del_<name>` implementations
/// for one table.
///
/// Input: `name, key_type, value_type, pre_key, pre_value` — the optional
/// trailing expressions convert `k`/`v` before they reach the database.
#[proc_macro]
pub fn sanakirja_put_del(input: proc_macro::TokenStream) -> TokenStream {
    let input = proc_macro2::TokenStream::from(input);
    let mut input_iter = input.into_iter();
    let name = match input_iter.next() {
        Some(TokenTree::Ident(id)) => id.to_string(),
        _ => panic!("txn_table: first argument not an identifier"),
    };
    let put = syn::Ident::new(&format!("put_{}", name), Span::call_site());
    let del = syn::Ident::new(&format!("del_{}", name), Span::call_site());
    let name = syn::Ident::new(&name, Span::call_site());
    let key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre_key = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    let pre_value = proc_macro2::TokenStream::from_iter(next(&mut input_iter).into_iter());
    assert!(input_iter.next().is_none());
    if pre_key.is_empty() {
        // No conversions: pass key and value through unchanged.
        proc_macro::TokenStream::from(quote! {
            #[doc(hidden)]
            fn #put(
                &mut self,
                k: #key,
                v: #value,
            ) -> Result<bool, anyhow::Error> {
                Ok(self.txn.put(&mut self.rng, &mut self.#name, k, v)?)
            }
            #[doc(hidden)]
            fn #del(
                &mut self,
                k: #key,
                v: Option<#value>,
            ) -> Result<bool, anyhow::Error> {
                Ok(self.txn.del(&mut self.rng, &mut self.#name, k, v)?)
            }
        })
    } else {
        // Apply #pre_key / #pre_value before the database call; for `del`
        // the value conversion is mapped over the Option.
        proc_macro::TokenStream::from(quote! {
            #[doc(hidden)]
            fn #put(
                &mut self,
                k: #key,
                v: #value,
            ) -> Result<bool, anyhow::Error> {
                let k = #pre_key;
                let v = #pre_value;
                Ok(self.txn.put(&mut self.rng, &mut self.#name, k, v)?)
            }
            #[doc(hidden)]
            fn #del(
                &mut self,
                k: #key,
                v: Option<#value>,
            ) -> Result<bool, anyhow::Error> {
                let k = #pre_key;
                let v = v.map(|v| #pre_value);
                Ok(self.txn.del(&mut self.rng, &mut self.#name, k, v)?)
            }
        })
    }
}
[package]
name = "pijul-macros"
description = "Macros used to write libpijul."
version = "0.1.0"
authors = ["Pierre-Étienne Meunier <pmeunier@mailbox.org>"]
edition = "2018"
repository = "https://nest.pijul.com/pijul/pijul"
license = "GPL-2.0"
include = [ "Cargo.toml", "src/lib.rs" ]
[lib]
proc-macro = true
[dependencies]
syn = "1.0"
quote = "1.0"
proc-macro2 = "1.0"
regex = "1.4"
use crate::{config, current_dir, Error};
use libpijul::DOT_DIR;
use std::io::Write;
use std::path::PathBuf;
/// An opened on-disk pijul repository: the pristine (database), the change
/// store, the working copy and the parsed configuration.
pub struct Repository {
    // Sanakirja-backed database holding the repository state.
    pub pristine: libpijul::pristine::sanakirja::Pristine,
    // On-disk store of change files.
    pub changes: libpijul::changestore::filesystem::FileSystem,
    // The user's checked-out file tree.
    pub working_copy: libpijul::working_copy::filesystem::FileSystem,
    // Parsed contents of the repository's config file.
    pub config: config::Config,
    // Root of the working copy (the parent of the dot directory).
    pub path: PathBuf,
    // Directory holding the change files.
    pub changes_dir: PathBuf,
}
/// Subdirectory of the dot directory holding the pristine database.
pub const PRISTINE_DIR: &str = "pristine";
/// Subdirectory of the dot directory holding the change files.
pub const CHANGES_DIR: &str = "changes";
/// Name of the repository configuration file in the dot directory.
// `&'static` was redundant here: const string references are implicitly
// 'static (clippy::redundant_static_lifetimes).
pub const CONFIG_FILE: &str = "config";
impl Repository {
    /// Serialize the current configuration back to the config file inside
    /// the dot directory.
    pub fn save_config(&self) -> Result<(), anyhow::Error> {
        let config = toml::to_string(&self.config)?;
        let mut file = std::fs::File::create(&self.path.join(DOT_DIR).join(CONFIG_FILE))?;
        file.write_all(config.as_bytes())?;
        Ok(())
    }
    /// Walk upwards from `cur` (or the current directory) until a
    /// directory containing `dot_dir` is found; returns the path of that
    /// dot directory, or `Error::NoRepoRoot` at the filesystem root.
    fn find_root_(cur: Option<PathBuf>, dot_dir: &str) -> Result<PathBuf, anyhow::Error> {
        let mut cur = if let Some(cur) = cur {
            cur
        } else {
            current_dir()?
        };
        cur.push(dot_dir);
        loop {
            debug!("{:?}", cur);
            if std::fs::metadata(&cur).is_err() {
                // `cur` currently ends in the dot dir: drop it, then step
                // to the parent directory and retry there.
                cur.pop();
                if cur.pop() {
                    // NOTE(review): this pushes the global DOT_DIR rather
                    // than the `dot_dir` parameter, so a custom dot dir is
                    // only honored in the starting directory — confirm
                    // whether that is intended.
                    cur.push(DOT_DIR);
                } else {
                    // Reached the filesystem root without finding a repo.
                    return Err(Error::NoRepoRoot.into());
                }
            } else {
                break;
            }
        }
        Ok(cur)
    }
    /// Find and open the repository containing `cur`, using the default
    /// dot directory name.
    pub fn find_root(cur: Option<PathBuf>) -> Result<Self, anyhow::Error> {
        Self::find_root_with_dot_dir(cur, DOT_DIR)
    }
    /// Find and open the repository containing `cur`: locate the dot
    /// directory, load the config (defaults when absent, error when
    /// unparseable) and open the pristine, change store and working copy.
    pub fn find_root_with_dot_dir(
        cur: Option<PathBuf>,
        dot_dir: &str,
    ) -> Result<Self, anyhow::Error> {
        let cur = Self::find_root_(cur, dot_dir)?;
        let mut pristine_dir = cur.clone();
        pristine_dir.push(PRISTINE_DIR);
        let mut changes_dir = cur.clone();
        changes_dir.push(CHANGES_DIR);
        // The working copy root is the parent of the dot directory.
        let mut working_copy_dir = cur.clone();
        working_copy_dir.pop();
        let config_path = cur.join(CONFIG_FILE);
        let config = if let Ok(config) = std::fs::read(&config_path) {
            if let Ok(toml) = toml::from_slice(&config) {
                toml
            } else {
                // The file exists but is not valid config.
                return Err((crate::Error::CouldNotReadConfig { path: config_path }).into());
            }
        } else {
            // No config file: fall back to defaults.
            config::Config::default()
        };
        Ok(Repository {
            pristine: libpijul::pristine::sanakirja::Pristine::new(&pristine_dir)?,
            working_copy: libpijul::working_copy::filesystem::FileSystem::from_root(
                &working_copy_dir,
            ),
            changes: libpijul::changestore::filesystem::FileSystem::from_root(&working_copy_dir),
            config,
            path: working_copy_dir,
            changes_dir,
        })
    }
    /// Create a new repository at `path` (or the current directory),
    /// failing with `AlreadyInARepo` when a pristine already exists there.
    pub fn init(path: Option<std::path::PathBuf>) -> Result<Self, anyhow::Error> {
        let cur = if let Some(path) = path {
            path
        } else {
            current_dir()?
        };
        let mut pristine_dir = cur.clone();
        pristine_dir.push(DOT_DIR);
        pristine_dir.push(PRISTINE_DIR);
        if std::fs::metadata(&pristine_dir).is_err() {
            std::fs::create_dir_all(&pristine_dir)?;
            let mut changes_dir = cur.clone();
            changes_dir.push(DOT_DIR);
            changes_dir.push(CHANGES_DIR);
            Ok(Repository {
                pristine: libpijul::pristine::sanakirja::Pristine::new(&pristine_dir)?,
                working_copy: libpijul::working_copy::filesystem::FileSystem::from_root(&cur),
                changes: libpijul::changestore::filesystem::FileSystem::from_root(&cur),
                config: config::Config::default(),
                path: cur,
                changes_dir,
            })
        } else {
            Err(Error::AlreadyInARepo.into())
        }
    }
}
use super::{parse_line, RemoteRef};
use crate::repository::Repository;
use crate::Error;
use byteorder::{BigEndian, ReadBytesExt};
use libpijul::pristine::{Base32, ChannelRef, Hash, Merkle, MutTxnT};
use libpijul::MutTxnTExt;
use regex::Regex;
use std::borrow::Cow;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, SystemTime};
use thrussh::client::Session;
/// An authenticated SSH connection to a remote pijul, speaking the pijul
/// protocol over a session channel.
pub struct Ssh {
    // The authenticated SSH client handle.
    pub h: thrussh::client::Handle,
    // The session channel the remote command runs on.
    pub c: thrussh::client::Channel,
    // Name of the remote channel (pijul branch) to talk to.
    pub channel: String,
    // Remote pijul executable (REMOTE_PIJUL env var, or "pijul").
    pub remote_cmd: String,
    // Repository path on the remote host.
    pub path: String,
    // Whether the remote `protocol` command has already been started.
    pub is_running: bool,
    // Display name of this remote.
    pub name: String,
}
lazy_static! {
    // Parses `[ssh://][user@]host[:port]/path` remote addresses; the host
    // may be a bracketed IPv6 literal. Named groups: user, host, port, path.
    static ref ADDRESS: Regex = Regex::new(
        r#"(ssh://)?((?P<user>[^@]+)@)?((?P<host>(\[([^\]]+)\])|([^:/]+)))((:(?P<port>\d+)/)|:|/)(?P<path>.+)"#
    )
    .unwrap();
}
/// A parsed and DNS-resolved ssh remote address.
#[derive(Debug)]
pub struct Remote<'a> {
    // Host part of the address, borrowed from the input string.
    host: &'a str,
    // TCP port, defaulting to 22 when absent.
    port: u16,
    // Explicit user, or the local username when none was given.
    user: Cow<'a, str>,
    // Repository path on the remote host.
    path: &'a str,
    // "host:port", kept for known-hosts lookups and diagnostics.
    addr: String,
    // First socket address `addr` resolved to.
    resolved: std::net::SocketAddr,
}
/// Parse an ssh-like remote address of the form
/// `[ssh://][user@]host[:port]/path` (IPv6 hosts in brackets) and resolve
/// it. Returns `None` when the address does not match, the port does not
/// fit in a `u16`, or DNS resolution yields no address.
pub fn ssh_remote<'a>(addr: &'a str) -> Option<Remote<'a>> {
    let cap = ADDRESS.captures(addr)?;
    debug!("ssh_remote: {:?}", cap);
    // Fall back to the local username when none is given in the address.
    let user = if let Some(u) = cap.name("user") {
        Cow::Borrowed(u.as_str())
    } else {
        Cow::Owned(whoami::username())
    };
    // `host` and `path` are mandatory groups of ADDRESS, so they are
    // always present when the regex matched.
    let host = cap.name("host").unwrap().as_str();
    // The port group only guarantees digits; reject values overflowing a
    // u16 (e.g. "99999") instead of panicking as `parse().unwrap()` did.
    let port: u16 = match cap.name("port") {
        Some(p) => p.as_str().parse().ok()?,
        None => 22,
    };
    let path = cap.name("path").unwrap().as_str();
    let addr = format!("{}:{}", host, port);
    use std::net::ToSocketAddrs;
    // Resolve eagerly; an unresolvable host — or a resolution that
    // returns an empty list, which previously panicked — yields `None`.
    let resolved = addr.to_socket_addrs().ok()?.next()?;
    Some(Remote {
        host,
        port,
        user,
        path,
        addr,
        resolved,
    })
}
impl<'a> Remote<'a> {
    /// Open an SSH session to this remote and authenticate, trying in
    /// order: the SSH agent, on-disk keys (id_ed25519, id_rsa), then an
    /// interactive password prompt.
    pub async fn connect(&self, name: &str, channel: &str) -> Result<Ssh, anyhow::Error> {
        // assumes a home directory exists — TODO confirm this unwrap is safe
        let mut home = dirs_next::home_dir().unwrap();
        home.push(".ssh");
        home.push("known_hosts");
        let client = SshClient {
            addr: self.addr.clone(),
            known_hosts: home,
            last_window_adjustment: SystemTime::now(),
        };
        let config = Arc::new(thrussh::client::Config::default());
        use std::net::ToSocketAddrs;
        debug!("client: {:?}", client.addr);
        let addr = client.addr.to_socket_addrs()?.next().unwrap();
        let mut h = thrussh::client::connect(config, &addr, client).await?;
        let mut key_path = dirs_next::home_dir().unwrap().join(".ssh");
        // First try agent auth
        let authenticated = self.auth_agent(&mut h).await.unwrap_or(false)
            || self.auth_pk(&mut h, &mut key_path).await
            || self.auth_password(&mut h).await?;
        if !authenticated {
            return Err(Error::NotAuthenticated.into());
        }
        let c = h.channel_open_session().await?;
        // REMOTE_PIJUL overrides the executable name run on the remote.
        let remote_cmd = if let Ok(cmd) = std::env::var("REMOTE_PIJUL") {
            cmd
        } else {
            "pijul".to_string()
        };
        Ok(Ssh {
            h,
            c,
            channel: channel.to_string(),
            remote_cmd,
            path: self.path.to_string(),
            is_running: false,
            name: name.to_string(),
        })
    }
    /// Try every identity offered by the local SSH agent, returning
    /// Ok(true) as soon as the server accepts one.
    async fn auth_agent(&self, h: &mut thrussh::client::Handle) -> Result<bool, anyhow::Error> {
        let mut authenticated = false;
        let mut agent = thrussh_keys::agent::client::AgentClient::connect_env().await?;
        let identities = agent.request_identities().await?;
        debug!("identities = {:?}", identities);
        // `authenticate_future` consumes the agent and hands it back on
        // success, hence the Option shuffle below.
        let mut agent = Some(agent);
        for key in identities {
            debug!("Trying key {:?}", key);
            let fingerprint = key.fingerprint();
            if let Some(a) = agent.take() {
                debug!("authenticate future");
                match h.authenticate_future(self.user.as_ref(), key, a).await {
                    Ok((a, auth)) => {
                        if !auth {
                            writeln!(
                                std::io::stderr(),
                                "Key {:?} (with agent) rejected",
                                fingerprint
                            )?
                        }
                        debug!("auth");
                        authenticated = auth;
                        agent = Some(a);
                    }
                    Err(e) => {
                        // The agent is not returned on error, so remaining
                        // identities are skipped after a failure.
                        debug!("not auth {:?}", e);
                        if let Ok(thrussh_keys::Error::AgentFailure) = e.downcast() {
                            writeln!(std::io::stderr(), "Failed to sign with agent")?;
                        }
                    }
                }
            }
            if authenticated {
                return Ok(true);
            }
        }
        Ok(false)
    }
    /// Try the default on-disk private keys under `key_path`, prompting
    /// for a passphrase when a key is encrypted. True on first success.
    async fn auth_pk(&self, h: &mut thrussh::client::Handle, key_path: &mut PathBuf) -> bool {
        let mut authenticated = false;
        for k in &["id_ed25519", "id_rsa"] {
            // `key_path` is extended with the file name, then restored.
            key_path.push(k);
            let k = if let Some(k) = load_secret_key(&key_path, k) {
                k
            } else {
                key_path.pop();
                continue;
            };
            if let Ok(auth) = h
                .authenticate_publickey(self.user.as_ref(), Arc::new(k))
                .await
            {
                authenticated = auth
            }
            key_path.pop();
            if authenticated {
                return true;
            }
        }
        false
    }
    /// Last resort: prompt for a password on the tty and try it.
    async fn auth_password(&self, h: &mut thrussh::client::Handle) -> Result<bool, anyhow::Error> {
        let pass = rpassword::read_password_from_tty(Some(&format!(
            "Password for {}@{}: ",
            self.user, self.host
        )))?;
        h.authenticate_password(self.user.to_string(), &pass).await
    }
}
pub fn load_secret_key(key_path: &Path, k: &str) -> Option<thrussh_keys::key::KeyPair> {
match thrussh_keys::load_secret_key(&key_path, None) {
Ok(k) => Some(k),
Err(e) => {
if let Ok(thrussh_keys::Error::KeyIsEncrypted) = e.downcast() {
let pass = if let Ok(pass) =
rpassword::read_password_from_tty(Some(&format!("Password for key {:?}: ", k)))
{
pass
} else {
return None;
};
if pass.is_empty() {
return None;
}
if let Ok(k) = thrussh_keys::load_secret_key(&key_path, Some(pass.as_bytes())) {
return Some(k);
}
}
None
}
}
}
/// thrussh client handler: verifies server keys against known_hosts and
/// adapts flow-control window growth to the observed traffic rate.
pub struct SshClient {
    // "host:port" of the server, used for known-hosts checks and logs.
    addr: String,
    // Path to the user's known_hosts file.
    known_hosts: PathBuf,
    // Time of the previous window adjustment; drives the growth heuristic.
    last_window_adjustment: SystemTime,
}
impl thrussh::client::Handler for SshClient {
    type FutureBool = futures::future::Ready<Result<(Self, bool), anyhow::Error>>;
    type FutureUnit = futures::future::Ready<Result<(Self, Session), anyhow::Error>>;
    fn finished_bool(self, b: bool) -> Self::FutureBool {
        futures::future::ready(Ok((self, b)))
    }
    fn finished(self, session: Session) -> Self::FutureUnit {
        futures::future::ready(Ok((self, session)))
    }
    /// Accept the server key when it matches known_hosts; otherwise offer
    /// to learn it interactively. A changed key is a hard error.
    fn check_server_key(
        self,
        server_public_key: &thrussh_keys::key::PublicKey,
    ) -> Self::FutureBool {
        // `addr` is "host:port"; split it back apart for the lookup.
        let mut it = self.addr.split(':');
        let addr = it.next().unwrap();
        let port = it.next().unwrap_or("22").parse().unwrap();
        match thrussh_keys::check_known_hosts_path(addr, port, server_public_key, &self.known_hosts)
        {
            Ok(e) => {
                if e {
                    futures::future::ready(Ok((self, true)))
                } else {
                    // Unknown host: ask the user whether to learn the key.
                    match learn(addr, port, server_public_key) {
                        Ok(x) => futures::future::ready(Ok((self, x))),
                        Err(e) => futures::future::ready(Err(e)),
                    }
                }
            }
            Err(e) => {
                // Key mismatch for a known host: refuse to connect.
                error!("Key changed for {:?}", self.addr);
                futures::future::ready(Err(e))
            }
        }
    }
    /// Grow the flow-control window while data arrives fast (doubling),
    /// shrink it when traffic is slow; values >= 10 MB pass unchanged.
    fn adjust_window(&mut self, _channel: thrussh::ChannelId, target: u32) -> u32 {
        let elapsed = self.last_window_adjustment.elapsed().unwrap();
        self.last_window_adjustment = SystemTime::now();
        if target >= 10_000_000 {
            return target;
        }
        if elapsed < Duration::from_secs(2) {
            target * 2
        } else if elapsed > Duration::from_secs(8) {
            target / 2
        } else {
            target
        }
    }
}
/// Interactively ask the user whether to add the server key for
/// `addr:port` to known_hosts; records it and returns true on a "y"/"Y"
/// answer, returns false otherwise.
fn learn(addr: &str, port: u16, pk: &thrussh_keys::key::PublicKey) -> Result<bool, anyhow::Error> {
    let fingerprint = pk.fingerprint();
    // The default port is omitted from the prompt.
    if port == 22 {
        print!(
            "Unknown key for {:?}, fingerprint {:?}. Learn it (y/N)? ",
            addr, fingerprint
        );
    } else {
        print!(
            "Unknown key for {:?}:{}, fingerprint {:?}. Learn it (y/N)? ",
            addr, port, fingerprint
        );
    }
    // `print!` does not flush, and the prompt has no trailing newline.
    std::io::stdout().flush()?;
    let mut answer = String::new();
    std::io::stdin().read_line(&mut answer)?;
    let accepted = matches!(answer.trim(), "y" | "Y");
    if accepted {
        thrussh_keys::learn_known_hosts(addr, port, pk)?;
    }
    Ok(accepted)
}
impl Ssh {
/// Signal end-of-input to the remote command, then drain the remaining
/// channel messages so its exit status is observed; a non-zero status
/// becomes `Error::RemoteExit`.
pub async fn finish(&mut self) -> Result<(), anyhow::Error> {
    self.c.eof().await?;
    while let Some(msg) = self.c.wait().await {
        debug!("msg = {:?}", msg);
        match msg {
            thrussh::ChannelMsg::Data { .. } => {}
            // Remote stderr: forward it to our own stderr when it is UTF-8.
            thrussh::ChannelMsg::ExtendedData { data, ext } => {
                debug!("{:?} {:?}", ext, std::str::from_utf8(&data[..]));
                if let Ok(data) = std::str::from_utf8(&data) {
                    writeln!(std::io::stderr(), "{}", data)?;
                }
            }
            thrussh::ChannelMsg::WindowAdjusted { .. } => {}
            thrussh::ChannelMsg::Eof => {}
            thrussh::ChannelMsg::ExitStatus { exit_status } => {
                if exit_status != 0 {
                    return Err((Error::RemoteExit {
                        status: exit_status,
                    })
                    .into());
                }
            }
            msg => error!("wrong message {:?}", msg),
        }
    }
    Ok(())
}
/// Ask the remote for the state (change number and Merkle hash) of the
/// channel, optionally at position `mid`. Returns `Ok(None)` when the
/// remote's answer cannot be parsed (e.g. the standard "-\n").
pub async fn get_state(
    &mut self,
    mid: Option<u64>,
) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
    self.run_protocol().await?;
    // Command format: "state <channel> [<mid>]\n".
    if let Some(mid) = mid {
        self.c
            .data(format!("state {} {}\n", self.channel, mid).as_bytes())
            .await?;
    } else {
        self.c
            .data(format!("state {}\n", self.channel).as_bytes())
            .await?;
    }
    while let Some(msg) = self.c.wait().await {
        match msg {
            thrussh::ChannelMsg::Data { data } => {
                // If we can't parse `data` (for example if the
                // remote returns the standard "-\n"), this
                // returns None.
                let mut s = std::str::from_utf8(&data)?.split(' ');
                debug!("s = {:?}", s);
                if let (Some(n), Some(m)) = (s.next(), s.next()) {
                    let n = n.parse().unwrap();
                    return Ok(Some((n, Merkle::from_base32(m.trim().as_bytes()).unwrap())));
                } else {
                    break;
                }
            }
            // ext == 1 carries the remote's stderr.
            thrussh::ChannelMsg::ExtendedData { data, ext } => {
                if ext == 1 {
                    debug!("{:?}", std::str::from_utf8(&data))
                }
            }
            thrussh::ChannelMsg::Eof => {}
            thrussh::ChannelMsg::ExitStatus { exit_status } => {
                if exit_status != 0 {
                    return Err((Error::RemoteExit {
                        status: exit_status,
                    })
                    .into());
                }
            }
            msg => panic!("wrong message {:?}", msg),
        }
    }
    Ok(None)
}
/// Ask the remote to archive the channel (optionally at a given state plus
/// extra changes, optionally under a path prefix) and stream the archive
/// into `w`. Returns the conflict count reported by the remote.
///
/// Reply framing (as decoded below): 8 bytes of archive length followed by
/// 8 bytes of conflict count, both accumulated big-endian byte by byte,
/// then exactly `len` bytes of archive data.
pub async fn archive<W: std::io::Write>(
    &mut self,
    prefix: Option<String>,
    state: Option<(Merkle, &[Hash])>,
    mut w: W,
) -> Result<u64, anyhow::Error> {
    self.run_protocol().await?;
    if let Some((ref state, ref extra)) = state {
        // "archive <channel> <state> [<extra>...] [:<prefix>]\n"
        let mut cmd = format!("archive {} {}", self.channel, state.to_base32(),);
        for e in extra.iter() {
            cmd.push_str(&format!(" {}", e.to_base32()));
        }
        if let Some(ref p) = prefix {
            cmd.push_str(" :");
            cmd.push_str(p)
        }
        cmd.push('\n');
        self.c.data(cmd.as_bytes()).await?;
    } else {
        self.c
            .data(
                format!(
                    "archive {}{}{}\n",
                    self.channel,
                    if prefix.is_some() { " :" } else { "" },
                    prefix.unwrap_or(String::new())
                )
                .as_bytes(),
            )
            .await?;
    }
    let mut len = 0;
    let mut conflicts = 0;
    // Number of header bytes consumed so far (16 in total).
    let mut len_n = 0;
    while let Some(msg) = self.c.wait().await {
        match msg {
            thrussh::ChannelMsg::Data { data } => {
                let mut off = 0;
                // Consume the 16-byte header, which may span messages.
                while len_n < 16 && off < data.len() {
                    if len_n < 8 {
                        len = (len << 8) | (data[off] as u64);
                    } else {
                        conflicts = (conflicts << 8) | (data[off] as u64);
                    }
                    len_n += 1;
                    off += 1;
                }
                if len_n >= 16 {
                    w.write_all(&data[off..])?;
                    // NOTE(review): this would underflow if the remote sent
                    // more than the announced `len` bytes — the remote is
                    // trusted to respect its own length header.
                    len -= (data.len() - off) as u64;
                    if len == 0 {
                        break;
                    }
                }
            }
            thrussh::ChannelMsg::ExtendedData { data, ext } => {
                if ext == 1 {
                    debug!("{:?}", std::str::from_utf8(&data))
                }
            }
            thrussh::ChannelMsg::Eof => {}
            thrussh::ChannelMsg::ExitStatus { exit_status } => {
                if exit_status != 0 {
                    return Err((Error::RemoteExit {
                        status: exit_status,
                    })
                    .into());
                }
            }
            msg => panic!("wrong message {:?}", msg),
        }
    }
    Ok(conflicts)
}
/// Start the remote `<pijul> protocol` command on the channel, once
/// (subsequent calls are no-ops); waits until the exec is acknowledged
/// before returning.
pub async fn run_protocol(&mut self) -> Result<(), anyhow::Error> {
    if !self.is_running {
        self.is_running = true;
        debug!("run_protocol");
        self.c
            .exec(
                true,
                format!(
                    "{} protocol --version {} --repository {}",
                    self.remote_cmd,
                    crate::PROTOCOL_VERSION,
                    self.path
                ),
            )
            .await?;
        // Wait for the server's Success reply to the exec request.
        while let Some(msg) = self.c.wait().await {
            debug!("msg = {:?}", msg);
            match msg {
                thrussh::ChannelMsg::Success => break,
                thrussh::ChannelMsg::WindowAdjusted { .. } => {}
                thrussh::ChannelMsg::Eof => {}
                thrussh::ChannelMsg::ExitStatus { exit_status } => {
                    if exit_status != 0 {
                        return Err((Error::RemoteExit {
                            status: exit_status,
                        })
                        .into());
                    }
                }
                _ => {}
            }
        }
        debug!("run_protocol done");
    }
    Ok(())
}
/// Download the remote channel's change list starting at position `from`,
/// restricted to `paths`, inserting each parsed entry into the `remote`
/// table via `put_remote`. The listing is terminated by an empty line.
pub async fn download_changelist<T: MutTxnT>(
    &mut self,
    txn: &mut T,
    remote: &mut RemoteRef<T>,
    from: u64,
    paths: &[String],
) -> Result<(), anyhow::Error> {
    self.run_protocol().await?;
    debug!("download_changelist");
    // Command: "changelist <channel> <from> [<path>...]\n".
    let mut command = Vec::new();
    write!(command, "changelist {} {}", self.channel, from).unwrap();
    for p in paths {
        write!(command, " {}", p).unwrap()
    }
    command.push(b'\n');
    self.c.data(&command[..]).await?;
    debug!("waiting ssh");
    'msg: while let Some(msg) = self.c.wait().await {
        debug!("msg = {:?}", msg);
        match msg {
            thrussh::ChannelMsg::Data { data } => {
                if &data[..] == b"\n" {
                    debug!("log done");
                    break;
                } else if let Ok(data) = std::str::from_utf8(&data) {
                    // Each non-empty line is "<n> <hash> <merkle>"; an
                    // empty line ends the whole listing.
                    for l in data.lines() {
                        if !l.is_empty() {
                            debug!("line = {:?}", l);
                            let (n, h, m) = parse_line(l)?;
                            txn.put_remote(remote, n, (h, m))?;
                        } else {
                            break 'msg;
                        }
                    }
                }
            }
            thrussh::ChannelMsg::ExtendedData { data, ext } => {
                debug!("{:?} {:?}", ext, std::str::from_utf8(&data[..]));
                /*return Err((crate::Error::Remote {
                    msg: std::str::from_utf8(&data[..]).unwrap().to_string()
                }).into())*/
            }
            thrussh::ChannelMsg::WindowAdjusted { .. } => {}
            thrussh::ChannelMsg::Eof => {}
            thrussh::ChannelMsg::ExitStatus { exit_status } => {
                if exit_status != 0 {
                    return Err((Error::RemoteExit {
                        status: exit_status,
                    })
                    .into());
                }
            }
            msg => panic!("wrong message {:?}", msg),
        }
    }
    debug!("no msg");
    Ok(())
}
/// Send `changes` to the remote as `apply` commands, reading each change
/// file from the changestore directory rooted at `local`.
pub async fn upload_changes(
&mut self,
mut local: PathBuf,
to_channel: Option<&str>,
changes: &[Hash],
) -> Result<(), anyhow::Error> {
self.run_protocol().await?;
debug!("upload_changes");
for c in changes {
libpijul::changestore::filesystem::push_filename(&mut local, &c);
let mut change_file = std::fs::File::open(&local)?;
let change_len = change_file.metadata()?.len();
// NOTE(review): CryptoVec presumably chosen for its zeroing
// guarantees — confirm against the cryptovec crate docs.
let mut change = cryptovec::CryptoVec::new_zeroed(change_len as usize);
use std::io::Read;
change_file.read_exact(&mut change[..])?;
let to_channel = if let Some(t) = to_channel {
t
} else {
self.channel.as_str()
};
// Header line `apply <channel> <hash> <len>`, then the raw bytes.
self.c
.data(format!("apply {} {} {}\n", to_channel, c.to_base32(), change_len).as_bytes())
.await?;
self.c.data(&change[..]).await?;
libpijul::changestore::filesystem::pop_filename(&mut local);
}
Ok(())
}
/// Request one change from the remote: the `change` command when `full`
/// is set, the `partial` command otherwise. Only sends the request; the
/// actual bytes arrive later (see `wait_downloads`).
pub async fn start_change_download(
    &mut self,
    c: libpijul::pristine::Hash,
    full: bool,
) -> Result<(), anyhow::Error> {
    self.run_protocol().await?;
    debug!("download_change {:?}", full);
    // The two commands differ only in the verb.
    let verb = if full { "change" } else { "partial" };
    self.c
        .data(format!("{} {}\n", verb, c.to_base32()).as_bytes())
        .await?;
    Ok(())
}
/// Receive the change files previously requested, in `hashes` order.
/// Each file is framed as an 8-byte big-endian length followed by that
/// many bytes; a completed file is renamed to its final `.change` name
/// and its hash is reported on `send`.
pub async fn wait_downloads(
&mut self,
changes_dir: &Path,
hashes: &[libpijul::pristine::Hash],
send: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
) -> Result<(), anyhow::Error> {
debug!("wait_downloads");
if !self.is_running {
return Ok(());
}
// Bytes still expected for the file currently being received.
let mut remaining_len = 0;
// Index into `hashes` of the file currently being received.
let mut current: usize = 0;
let mut path = changes_dir.to_path_buf();
libpijul::changestore::filesystem::push_filename(&mut path, &hashes[current]);
std::fs::create_dir_all(&path.parent().unwrap())?;
// Write into an extension-less temporary name; renamed on completion.
path.set_extension("");
let mut file = std::fs::File::create(&path)?;
'outer: while let Some(msg) = self.c.wait().await {
match msg {
thrussh::ChannelMsg::Data { data } => {
debug!("data = {:?}", &data[..]);
let mut p = 0;
while p < data.len() {
if remaining_len == 0 {
// Start of a new file: read its 8-byte length header.
remaining_len = (&data[p..]).read_u64::<BigEndian>().unwrap() as usize;
p += 8;
debug!("remaining_len = {:?}", remaining_len);
}
if data.len() >= p + remaining_len {
file.write_all(&data[p..p + remaining_len])?;
// We have enough data to write the
// file, write it and move to the next
// file.
p += remaining_len;
remaining_len = 0;
file.flush()?;
let mut final_path = path.clone();
final_path.set_extension("change");
debug!("moving {:?} to {:?}", path, final_path);
std::fs::rename(&path, &final_path)?;
debug!("sending");
send.send(hashes[current].clone()).await.unwrap();
debug!("sent");
current += 1;
if current < hashes.len() {
// If we're still waiting for
// another change.
libpijul::changestore::filesystem::pop_filename(&mut path);
libpijul::changestore::filesystem::push_filename(
&mut path,
&hashes[current],
);
std::fs::create_dir_all(&path.parent().unwrap())?;
path.set_extension("");
file = std::fs::File::create(&path)?;
} else {
// Else, just finish.
break 'outer;
}
} else {
// not enough data, we need more.
file.write_all(&data[p..])?;
remaining_len -= data.len() - p;
break;
}
}
}
thrussh::ChannelMsg::ExitStatus { exit_status } => {
debug!("exit: {:?}", exit_status);
if exit_status != 0 {
error!("Remote command returned {:?}", exit_status)
}
self.is_running = false;
return Ok(());
}
msg => {
debug!("{:?}", msg);
}
}
}
debug!("done waiting for downloads");
Ok(())
}
/// Clone the remote channel via the `channel` dump protocol, then
/// request its changes: only the ones alive in the dump when `lazy` is
/// true, the channel's full log otherwise.
pub async fn clone_channel<T: MutTxnTExt>(
&mut self,
repo: &mut Repository,
txn: &mut T,
channel: &mut ChannelRef<T>,
lazy: bool,
) -> Result<(), anyhow::Error> {
self.run_protocol().await?;
self.c
.data(format!("channel {}\n", self.channel).as_bytes())
.await?;
// Feed the raw dump into the channel until the parser reports it done.
let from_dump_alive = {
let mut from_dump =
libpijul::pristine::channel_dump::ChannelFromDump::new(txn, channel.clone());
while let Some(msg) = self.c.wait().await {
match msg {
thrussh::ChannelMsg::Data { data } => {
debug!("data = {:?}", &data[..]);
if from_dump.read(&data)? {
// Dump complete.
break;
}
}
thrussh::ChannelMsg::ExtendedData { data, ext } => {
debug!("data = {:?}, ext = {:?}", &data[..], ext);
}
thrussh::ChannelMsg::ExitStatus { exit_status } => {
if exit_status != 0 {
error!("Remote command returned {:?}", exit_status)
}
self.is_running = false;
break;
}
msg => {
debug!("msg = {:?}", msg);
}
}
}
from_dump.alive
};
let channel_ = channel.borrow();
debug!("cloned, now downloading changes");
let mut hashes = Vec::new();
if lazy {
// Only request the changes still alive in the cloned channel.
for &ch in from_dump_alive.iter() {
let h = txn.get_external(ch).unwrap();
self.c
.data(format!("change {}\n", h.to_base32()).as_bytes())
.await?;
hashes.push(h);
}
} else {
// Request the channel's entire change log.
for (_, (ch, _)) in txn.changeid_log(&channel_, 0) {
let h = txn.get_external(ch).unwrap();
self.c
.data(format!("change {}\n", h.to_base32()).as_bytes())
.await?;
hashes.push(h);
}
}
// Release the borrow before output_repository_no_pending needs `channel`.
std::mem::drop(channel_);
debug!("hashes = {:?}", hashes);
let (mut send, recv) = tokio::sync::mpsc::channel(100);
self.wait_downloads(&repo.changes_dir, &hashes, &mut send)
.await?;
txn.output_repository_no_pending(&mut repo.working_copy, &repo.changes, channel, "", true)?;
std::mem::drop(recv);
Ok(())
}
}
use crate::repository::*;
use crate::Error;
use libpijul::pristine::{Base32, ChannelRef, Hash, Merkle, MutTxnT, RemoteRef};
use libpijul::DOT_DIR;
use libpijul::{MutTxnTExt, TxnTExt};
use std::io::Write;
use std::path::{Path, PathBuf};
pub mod ssh;
use ssh::*;
pub mod local;
use local::*;
/// A connected remote repository, one variant per supported transport.
pub enum RemoteRepo {
Local(Local),
Ssh(Ssh),
Http(Http),
// Placeholder used to move a RemoteRepo out of `&mut self` with
// `std::mem::replace` (see `pull`, `clone_tag`, `complete_changes`).
None,
}
/// State for an HTTP(S) remote.
pub struct Http {
// Base URL of the remote repository.
pub url: String,
// Remote channel to talk to.
pub channel: String,
pub client: reqwest::Client,
// Name as given by the user (same as `url`; see `unknown_remote`).
pub name: String,
}
impl Repository {
    /// Connect to a remote: if `name` is a configured alias, dereference
    /// it first; otherwise use `name` verbatim.
    pub async fn remote(
        &self,
        name: &str,
        channel: &str,
        no_cert_check: bool,
    ) -> Result<RemoteRepo, anyhow::Error> {
        let target = self
            .config
            .remotes
            .get(name)
            .map(String::as_str)
            .unwrap_or(name);
        unknown_remote(target, channel, no_cert_check).await
    }
}
pub async fn unknown_remote(
name: &str,
channel: &str,
no_cert_check: bool,
) -> Result<RemoteRepo, anyhow::Error> {
if name.starts_with("http://") || name.starts_with("https://") {
debug!("unknown_remote, http = {:?}", name);
Ok(RemoteRepo::Http(Http {
url: name.to_string(),
channel: channel.to_string(),
client: reqwest::ClientBuilder::new()
.danger_accept_invalid_certs(no_cert_check)
.build()?,
name: name.to_string(),
}))
} else if name.starts_with("ssh://") {
if let Some(ssh) = ssh_remote(name) {
debug!("unknown_remote, ssh = {:?}", ssh);
Ok(RemoteRepo::Ssh(ssh.connect(name, channel).await?))
} else {
Err((Error::RemoteNotFound {
remote: name.to_string(),
})
.into())
}
} else {
let mut dot_dir = Path::new(name).join(DOT_DIR);
let changes_dir = dot_dir.join(CHANGES_DIR);
dot_dir.push(PRISTINE_DIR);
debug!("dot_dir = {:?}", dot_dir);
if let Ok(pristine) = libpijul::pristine::sanakirja::Pristine::new(&dot_dir) {
debug!("pristine done");
Ok(RemoteRepo::Local(Local {
channel: channel.to_string(),
changes_dir,
pristine,
root: Path::new(name).to_path_buf(),
name: name.to_string(),
}))
} else if let Some(ssh) = ssh_remote(name) {
debug!("unknown_remote, ssh = {:?}", ssh);
Ok(RemoteRepo::Ssh(ssh.connect(name, channel).await?))
} else {
Err((Error::RemoteNotFound {
remote: name.to_string(),
})
.into())
}
}
}
impl RemoteRepo {
/// The name (URL or path) this remote was created from.
fn name(&self) -> &str {
    match self {
        RemoteRepo::Ssh(s) => &s.name,
        RemoteRepo::Local(l) => &l.name,
        RemoteRepo::Http(h) => &h.name,
        RemoteRepo::None => unreachable!(),
    }
}
/// Best-effort guess of the repository name (used e.g. when cloning).
/// Returns `None` when nothing sensible can be derived.
pub fn repo_name(&self) -> Option<String> {
    match self {
        RemoteRepo::Ssh(s) => {
            // Whatever follows the last ':' or '/', or the whole name.
            let tail = match s.name.rfind(|c| c == ':' || c == '/') {
                Some(sep) => s.name.split_at(sep + 1).1,
                None => s.name.as_str(),
            };
            Some(tail.to_string())
        }
        RemoteRepo::Local(l) => l
            .root
            .file_name()
            .map(|file| file.to_str().unwrap().to_string()),
        RemoteRepo::Http(h) => {
            let url = reqwest::Url::parse(&h.url).unwrap();
            libpijul::path::file_name(url.path())
                .map(|name| name.to_string())
                .or_else(|| url.host().map(|host| host.to_string()))
        }
        RemoteRepo::None => unreachable!(),
    }
}
/// Shut down the remote cleanly; only SSH remotes have anything to do.
pub async fn finish(&mut self) -> Result<(), anyhow::Error> {
    if let RemoteRepo::Ssh(ref mut s) = *self {
        s.finish().await?;
    }
    Ok(())
}
/// Refresh the local cache of the remote's changelist: find the last
/// position where cache and remote still agree, delete the stale tail,
/// and re-download from there.
pub async fn update_changelist<T: MutTxnT>(
&mut self,
txn: &mut T,
path: &[String],
) -> Result<RemoteRef<T>, anyhow::Error> {
debug!("update_changelist");
let name = self.name();
let mut remote = txn.open_or_create_remote(name).unwrap();
// First position at which the cached list may disagree with the remote.
let n = self
.dichotomy_changelist(txn, &remote.borrow().remote)
.await?;
debug!("update changelist {:?}", n);
// Collect stale keys first, then delete (no deletion while iterating).
let v: Vec<_> = txn
.iter_remote(&remote.borrow().remote, n)
.filter_map(|(k, _)| if k >= n { Some(k) } else { None })
.collect();
for k in v {
debug!("deleting {:?}", k);
txn.del_remote(&mut remote, k)?;
}
self.download_changelist(txn, &mut remote, n, path).await?;
Ok(remote)
}
/// Binary-search for the longest prefix of the locally cached changelist
/// whose states still match the remote. Returns the first position that
/// needs re-downloading (0 when nothing matches or the cache is empty).
async fn dichotomy_changelist<T: MutTxnT>(
&mut self,
txn: &T,
remote: &T::Remote,
) -> Result<u64, anyhow::Error> {
let mut a = 0;
let (mut b, (_, state)) = if let Some(last) = txn.last_remote(remote) {
last
} else {
debug!("the local copy of the remote has no changes");
return Ok(0);
};
// Fast path: the last cached state still matches the remote.
if let Some((_, s)) = self.get_state(Some(b)).await? {
if s == state {
// The local list is already up to date.
return Ok(b + 1);
}
}
// Else, find the last state we have in common with the
// remote, it might be older than the last known state (if
// changes were unrecorded on the remote).
while a < b {
let mid = (a + b) / 2;
// `mid` is re-bound to the actual key found at/after the midpoint.
let (mid, (_, state)) = txn.get_remote_state(remote, mid).unwrap();
let remote_state = self.get_state(Some(mid)).await?;
debug!("dichotomy {:?} {:?} {:?}", mid, state, remote_state);
if let Some((_, remote_state)) = remote_state {
if remote_state == state {
if a == mid {
return Ok(a + 1);
} else {
// States match at mid: search the upper half.
a = mid;
continue;
}
}
}
// States differ (or missing) at mid: search the lower half.
if b == mid {
break;
} else {
b = mid
}
}
Ok(a)
}
/// Ask the remote for the state (Merkle) of its channel at position
/// `mid`, or its latest state when `mid` is `None`. Returns `None` when
/// the remote has no such state.
async fn get_state(
&mut self,
mid: Option<u64>,
) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
match *self {
RemoteRepo::Local(ref mut l) => l.get_state(mid),
RemoteRepo::Ssh(ref mut s) => s.get_state(mid).await,
RemoteRepo::Http(ref h) => {
debug!("get_state {:?}", h.url);
let url = format!("{}/{}", h.url, DOT_DIR);
// `state=<n>` asks for a specific position, empty `state=`
// asks for the latest one.
let q = if let Some(mid) = mid {
[
("state", format!("{}", mid)),
("channel", h.channel.clone()),
]
} else {
[("state", String::new()), ("channel", h.channel.clone())]
};
let res = h.client.get(&url).query(&q).send().await?;
if !res.status().is_success() {
return Err((crate::Error::Http {
status: res.status(),
})
.into());
}
// Response body: `<position> <base32 merkle>`.
let resp = res.bytes().await?;
let resp = std::str::from_utf8(&resp)?;
debug!("resp = {:?}", resp);
let mut s = resp.split(' ');
if let (Some(n), Some(m)) = (
s.next().and_then(|s| s.parse().ok()),
s.next().and_then(|m| Merkle::from_base32(m.as_bytes())),
) {
Ok(Some((n, m)))
} else {
Ok(None)
}
}
RemoteRepo::None => unreachable!(),
}
}
/// Write a tarball of the remote channel into `w`, optionally under a
/// `prefix` and at a specific `state` (with extra changes applied).
/// Returns the number of conflicts reported while archiving.
pub async fn archive<W: std::io::Write>(
&mut self,
prefix: Option<String>,
state: Option<(Merkle, &[Hash])>,
mut w: W,
) -> Result<u64, anyhow::Error> {
match *self {
RemoteRepo::Local(ref mut l) => {
use libpijul::pristine::TxnT;
debug!("archiving local repo");
let changes = libpijul::changestore::filesystem::FileSystem::from_root(&l.root);
let mut tarball = libpijul::output::Tarball::new(w, prefix);
let conflicts = if let Some((state, extra)) = state {
// Archiving a specific state needs a mutable transaction
// (archive_with_state takes &mut channel).
let mut txn = l.pristine.mut_txn_begin();
let mut channel = txn.load_channel(&l.channel).unwrap();
txn.archive_with_state(&changes, &mut channel, state, extra, &mut tarball)?
} else {
let txn = l.pristine.txn_begin()?;
let channel = txn.load_channel(&l.channel).unwrap();
txn.archive(&changes, &channel, &mut tarball)?
};
Ok(conflicts.len() as u64)
}
RemoteRepo::Ssh(ref mut s) => s.archive(prefix, state, w).await,
RemoteRepo::Http(ref h) => {
let url = h.url.clone() + "/" + DOT_DIR;
let res = h.client.get(&url).query(&[("channel", &h.channel)]);
let res = if let Some((ref state, ref extra)) = state {
let mut q = vec![("archive".to_string(), state.to_base32())];
if let Some(pre) = prefix {
q.push(("outputPrefix".to_string(), pre));
}
for e in extra.iter() {
q.push(("change".to_string(), e.to_base32()))
}
res.query(&q)
} else {
res
};
let res = res.send().await?;
if !res.status().is_success() {
return Err((crate::Error::Http {
status: res.status(),
})
.into());
}
use futures_util::StreamExt;
let mut stream = res.bytes_stream();
// The first 8 bytes of the body are the conflict count in
// big-endian; everything after is the tarball itself.
let mut conflicts = 0;
let mut n = 0;
while let Some(item) = stream.next().await {
let item = item?;
let mut off = 0;
while n < 8 && off < item.len() {
conflicts = (conflicts << 8) | (item[off] as u64);
off += 1;
n += 1
}
w.write_all(&item[off..])?;
}
Ok(conflicts as u64)
}
RemoteRepo::None => unreachable!(),
}
}
/// Dispatch `download_changelist` to the concrete transport. The HTTP
/// variant fetches `<url>/<DOT_DIR>?changelist=<from>&channel=...&path=...`
/// and parses one changelist entry per line.
async fn download_changelist<T: MutTxnT>(
&mut self,
txn: &mut T,
remote: &mut RemoteRef<T>,
from: u64,
paths: &[String],
) -> Result<(), anyhow::Error> {
match *self {
RemoteRepo::Local(ref mut l) => l.download_changelist(txn, remote, from, paths),
RemoteRepo::Ssh(ref mut s) => s.download_changelist(txn, remote, from, paths).await,
RemoteRepo::Http(ref h) => {
let url = h.url.clone() + "/" + DOT_DIR;
let from_ = from.to_string();
let mut query = vec![("changelist", &from_), ("channel", &h.channel)];
for p in paths.iter() {
query.push(("path", p));
}
let res = h.client.get(&url).query(&query).send().await?;
if !res.status().is_success() {
return Err((crate::Error::Http {
status: res.status(),
})
.into());
}
let resp = res.bytes().await?;
if let Ok(data) = std::str::from_utf8(&resp) {
for l in data.lines() {
if !l.is_empty() {
let (n, h, m) = parse_line(l)?;
txn.put_remote(remote, n, (h, m))?;
} else {
// An empty line terminates the list.
break;
}
}
}
Ok(())
}
RemoteRepo::None => unreachable!(),
}
}
pub async fn upload_changes(
&mut self,
mut local: PathBuf,
to_channel: Option<&str>,
changes: &[Hash],
) -> Result<(), anyhow::Error> {
match self {
RemoteRepo::Local(ref mut l) => l.upload_changes(local, to_channel, changes),
RemoteRepo::Ssh(ref mut s) => s.upload_changes(local, to_channel, changes).await,
RemoteRepo::Http(ref h) => {
for c in changes {
libpijul::changestore::filesystem::push_filename(&mut local, &c);
let url = h.url.clone() + "/" + DOT_DIR;
let change = std::fs::read(&local)?;
let mut to_channel = if let Some(ch) = to_channel {
vec![("to_channel", ch)]
} else {
Vec::new()
};
let c = c.to_base32();
to_channel.push(("apply", &c));
debug!("url {:?} {:?}", url, to_channel);
h.client
.post(&url)
.query(&to_channel)
.body(change)
.send()
.await?;
libpijul::changestore::filesystem::pop_filename(&mut local);
}
Ok(())
}
RemoteRepo::None => unreachable!(),
}
}
/// Start (and possibly complete) the download of a change.
/// Returns `false` when the change is already on disk and `full` is not
/// requested, `true` when a download was started. HTTP downloads complete
/// before returning; SSH only sends the request (see `wait_downloads`).
pub async fn start_change_download(
&mut self,
c: libpijul::pristine::Hash,
path: &mut PathBuf,
full: bool,
) -> Result<bool, anyhow::Error> {
debug!("start_change_download");
libpijul::changestore::filesystem::push_filename(path, &c);
// Skip if we already have the change (unless a full copy is forced).
if std::fs::metadata(&path).is_ok() && !full {
debug!("metadata {:?} ok", path);
libpijul::changestore::filesystem::pop_filename(path);
return Ok(false);
}
std::fs::create_dir_all(&path.parent().unwrap())?;
match *self {
RemoteRepo::Local(ref mut l) => l.start_change_download(c, path).await?,
RemoteRepo::Ssh(ref mut s) => s.start_change_download(c, full).await?,
RemoteRepo::Http(ref h) => {
// HTTP: download synchronously into the final location.
let mut f = std::fs::File::create(&path)?;
let c32 = c.to_base32();
let url = format!("{}/{}", h.url, DOT_DIR);
let mut res = h.client.get(&url).query(&[("change", c32)]).send().await?;
if !res.status().is_success() {
return Err((crate::Error::Http {
status: res.status(),
})
.into());
}
while let Some(chunk) = res.chunk().await? {
f.write_all(&chunk)?;
}
}
RemoteRepo::None => unreachable!(),
}
libpijul::changestore::filesystem::pop_filename(path);
Ok(true)
}
/// Wait for previously started downloads, reporting each completed hash
/// on `send`. Only SSH remotes download asynchronously; for every other
/// transport the files are already on disk, so the hashes are forwarded
/// immediately.
pub async fn wait_downloads(
    &mut self,
    changes_dir: &Path,
    hashes: &[libpijul::pristine::Hash],
    send: &mut tokio::sync::mpsc::Sender<libpijul::pristine::Hash>,
) -> Result<(), anyhow::Error> {
    if hashes.is_empty() {
        return Ok(());
    }
    match *self {
        RemoteRepo::Ssh(ref mut s) => s.wait_downloads(changes_dir, hashes, send).await?,
        _ => {
            for h in hashes {
                send.send(*h).await?
            }
        }
    }
    Ok(())
}
/// Download `to_download` concurrently with applying them: a spawned
/// task fetches the changes while this task waits for each file to
/// appear on disk and (when `do_apply`) applies it to `channel`.
pub async fn pull<T: MutTxnTExt + TxnTExt>(
&mut self,
repo: &mut Repository,
txn: &mut T,
channel: &mut ChannelRef<T>,
to_download: Vec<Hash>,
do_apply: bool,
) -> Result<(), anyhow::Error> {
let (mut send, mut recv) = tokio::sync::mpsc::channel(100);
let mut change_path_ = repo.changes_dir.clone();
let to_download_ = to_download.clone();
// Move `self` into the downloader task; it is handed back when the
// task completes and restored below.
let mut self_ = std::mem::replace(self, RemoteRepo::None);
let t = tokio::spawn(async move {
let mut hashes = Vec::new();
for h in to_download_.iter() {
if self_
.start_change_download(*h, &mut change_path_, false)
.await?
{
hashes.push(*h);
}
}
debug!("hashes = {:?}", hashes);
self_
.wait_downloads(&change_path_, &hashes, &mut send)
.await?;
Ok(self_)
});
let mut ws = libpijul::ApplyWorkspace::new();
let mut change_path = repo.changes_dir.clone();
for h in to_download.iter() {
libpijul::changestore::filesystem::push_filename(&mut change_path, &h);
debug!("change_path = {:?}", change_path);
// Wait until the downloader has produced this change file.
while std::fs::metadata(&change_path).is_err() {
debug!("waiting");
let r = recv.recv().await;
debug!("r = {:?}", r);
if r.is_none() {
// Downloader is gone; stop waiting for this file.
break;
}
}
libpijul::changestore::filesystem::pop_filename(&mut change_path);
if do_apply {
println!("Applying {:?}", h.to_base32());
debug!("applying {:?}", h);
txn.apply_change_ws(&repo.changes, channel, *h, &mut ws)?;
} else {
debug!("not applying {:?}", h)
}
}
std::mem::drop(recv);
debug!("waiting for spawned process");
let r: Result<_, anyhow::Error> = t.await?;
debug!("done");
// Restore the remote moved into the task.
*self = r?;
Ok(())
}
/// Download the changes in `tag` together with their transitive
/// dependencies, then apply them to `channel` in reverse of download
/// order (so dependencies, discovered later, are applied first).
pub async fn clone_tag<T: MutTxnTExt + TxnTExt>(
&mut self,
repo: &mut Repository,
txn: &mut T,
channel: &mut ChannelRef<T>,
tag: &[Hash],
) -> Result<(), anyhow::Error> {
// `send_hash` queues hashes for the downloader task; `recv_signal`
// receives a notification per completed download.
let (mut send_signal, mut recv_signal) = tokio::sync::mpsc::channel(100);
let (mut send_hash, mut recv_hash) = tokio::sync::mpsc::channel(100);
let mut change_path_ = repo.changes_dir.clone();
let mut self_ = std::mem::replace(self, RemoteRepo::None);
let t = tokio::spawn(async move {
let mut hashes = Vec::new();
while let Some(hash) = recv_hash.recv().await {
if self_
.start_change_download(hash, &mut change_path_, false)
.await?
{
hashes.push(hash);
}
}
debug!("hashes = {:?}", hashes);
self_
.wait_downloads(&change_path_, &hashes, &mut send_signal)
.await?;
Ok(self_)
});
// Seed the download queue with the tag's own changes.
for &h in tag.iter() {
send_hash.send(h).await?;
}
let mut change_path = repo.changes_dir.clone();
let mut hashes = Vec::new();
// For each completed download, enqueue its dependencies as well.
while let Some(hash) = recv_signal.recv().await {
libpijul::changestore::filesystem::push_filename(&mut change_path, &hash);
std::fs::create_dir_all(change_path.parent().unwrap())?;
use libpijul::changestore::ChangeStore;
hashes.push(hash);
for dep in repo.changes.get_dependencies(&hash)? {
let dep: libpijul::pristine::Hash = dep;
send_hash.send(dep).await?;
}
libpijul::changestore::filesystem::pop_filename(&mut change_path);
}
std::mem::drop(recv_signal);
// Closing the hash queue lets the downloader task terminate.
std::mem::drop(send_hash);
let mut ws = libpijul::ApplyWorkspace::new();
while let Some(hash) = hashes.pop() {
txn.apply_change_ws(&repo.changes, channel, hash, &mut ws)?;
}
let r: Result<_, anyhow::Error> = t.await?;
*self = r?;
Ok(())
}
/// Clone the remote channel up to a given `state`. SSH remotes clone the
/// whole channel and then unrecord everything newer than `state`; other
/// transports pull changes up to (and including) `state`.
pub async fn clone_state<T: MutTxnTExt + TxnTExt>(
&mut self,
repo: &mut Repository,
txn: &mut T,
channel: &mut ChannelRef<T>,
state: Merkle,
lazy: bool,
) -> Result<(), anyhow::Error> {
self.update_changelist(txn, &[]).await?;
let name = self.name();
let remote = txn.open_or_create_remote(name).unwrap();
if let RemoteRepo::Ssh(ref mut s) = self {
s.clone_channel(repo, txn, channel, lazy).await?;
// Walk the remote log backwards, collecting changes newer than
// `state` so they can be unrecorded.
let mut to_unrecord = Vec::new();
let mut found = false;
for (n, (h, s)) in txn.iter_rev_remote(&remote.borrow().remote, None) {
debug!("{:?} {:?} {:?}", n, h, s);
if s == state {
found = true;
break;
}
to_unrecord.push(h);
}
if !found {
return Err((Error::StateNotFound { state }).into());
}
// Make sure the changes to unrecord are present locally first.
self.pull(repo, txn, channel, to_unrecord.clone(), false)
.await?;
for unrec in to_unrecord.iter() {
txn.unrecord(&repo.changes, channel, unrec)?;
}
return Ok(());
}
// Non-SSH: pull the prefix of the log up to `state`.
let mut to_pull = Vec::new();
let mut found = false;
for (n, (h, s)) in txn.iter_remote(&remote.borrow().remote, 0) {
debug!("{:?} {:?} {:?}", n, h, s);
to_pull.push(h);
if s == state {
found = true;
break;
}
}
if !found {
return Err((Error::StateNotFound { state }).into());
}
self.pull(repo, txn, channel, to_pull, true).await?;
Ok(())
}
/// Ensure the full contents of `changes` are available locally:
/// download the full version of every change whose contents are missing
/// and — unless `full` — that still has at least one alive vertex in
/// `local_channel`.
pub async fn complete_changes<T: MutTxnTExt + TxnTExt>(
&mut self,
repo: &crate::repository::Repository,
txn: &T,
local_channel: &mut ChannelRef<T>,
changes: &[Hash],
full: bool,
) -> Result<(), anyhow::Error> {
use libpijul::changestore::ChangeStore;
let (mut send_hash, mut recv_hash) = tokio::sync::mpsc::channel(100);
let (mut send_sig, mut recv_sig) = tokio::sync::mpsc::channel(100);
let mut self_ = std::mem::replace(self, RemoteRepo::None);
let mut changes_dir = repo.changes_dir.clone();
// Downloader task: fetch the full version of every hash sent to it.
let t = tokio::spawn(async move {
let mut hashes = Vec::new();
while let Some(h) = recv_hash.recv().await {
debug!("downloading full patch: {:?}", h);
if self_
.start_change_download(h, &mut changes_dir, true)
.await?
{
debug!("push");
hashes.push(h);
}
debug!("done");
}
debug!("waiting");
self_
.wait_downloads(&changes_dir, &hashes, &mut send_sig)
.await?;
let result: Result<_, anyhow::Error> = Ok(self_);
result
});
for c in changes {
// Skip changes whose contents are already present.
if repo.changes.has_contents(*c, txn.get_internal(*c)) {
debug!("has contents {:?}", c);
continue;
}
if full {
debug!("sending send_hash");
send_hash.send(*c).await?;
debug!("sent");
continue;
}
let change = if let Some(i) = txn.get_internal(*c) {
i
} else {
continue;
};
// Check if at least one non-empty vertex from c is still alive.
let v = libpijul::pristine::Vertex {
change,
start: libpijul::pristine::ChangePosition(0),
end: libpijul::pristine::ChangePosition(0),
};
let channel = local_channel.borrow();
for (v_, e) in txn.iter_graph(&channel.graph, v, None) {
// Only inspect vertices belonging to this change.
if v_.change < change {
continue;
} else if v_.change > change {
break;
}
if e.flag.contains(libpijul::pristine::EdgeFlags::PARENT)
&& !e.flag.contains(libpijul::pristine::EdgeFlags::DELETED)
{
// Alive!
debug!("sending alive");
send_hash.send(*c).await?;
debug!("sent");
break;
}
}
}
debug!("dropping send_hash");
// Closing the queue lets the downloader task finish.
std::mem::drop(send_hash);
while recv_sig.recv().await.is_some() {}
*self = t.await??;
Ok(())
}
/// Clone a whole channel. SSH remotes with no path filter use the fast
/// channel-dump protocol; every other case downloads the changelist and
/// pulls each change.
pub async fn clone_channel<T: MutTxnTExt + TxnTExt>(
    &mut self,
    repo: &mut Repository,
    txn: &mut T,
    local_channel: &mut ChannelRef<T>,
    lazy: bool,
    path: &[String],
) -> Result<(), anyhow::Error> {
    if path.is_empty() {
        if let RemoteRepo::Ssh(ref mut s) = *self {
            return s.clone_channel(repo, txn, local_channel, lazy).await;
        }
    }
    let remote_changes = self.update_changelist(txn, path).await?;
    let mut pullable = Vec::new();
    for (_, (h, _)) in txn.iter_remote(&remote_changes.borrow().remote, 0) {
        pullable.push(h)
    }
    self.pull(repo, txn, local_channel, pullable, true).await
}
}
fn parse_line(data: &str) -> Result<(u64, Hash, Merkle), anyhow::Error> {
debug!("data = {:?}", data);
let mut it = data.split('.');
let n = if let Some(n) = it.next().and_then(|n| n.parse().ok()) {
n
} else {
return Err((Error::ProtocolError {
line: data.as_bytes().to_vec(),
})
.into());
};
debug!("n = {:?}", n);
let h = if let Some(h) = it.next().and_then(|h| Hash::from_base32(h.as_bytes())) {
h
} else {
return Err((Error::ProtocolError {
line: data.as_bytes().to_vec(),
})
.into());
};
debug!("h = {:?}", h);
let m = if let Some(m) = it.next().and_then(|m| {
debug!("m = {:?}", m);
Merkle::from_base32(m.as_bytes())
}) {
m
} else {
return Err((Error::ProtocolError {
line: data.as_bytes().to_vec(),
})
.into());
};
debug!("m = {:?}", m);
if it.next().is_some() {
return Err((Error::ProtocolError {
line: data.as_bytes().to_vec(),
})
.into());
}
Ok((n, h, m))
}
use super::RemoteRef;
use libpijul::pristine::{Base32, Hash, Merkle, MutTxnT, TxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use std::path::{Path, PathBuf};
/// A "remote" that is another pijul repository on the local filesystem.
pub struct Local {
pub channel: String,
// Root directory of the remote repository.
pub root: std::path::PathBuf,
// The remote's changestore directory (see `unknown_remote`).
pub changes_dir: std::path::PathBuf,
pub pristine: libpijul::pristine::sanakirja::Pristine,
// Name (path) as given by the user.
pub name: String,
}
impl Local {
/// State (Merkle) of the local remote's channel at position `mid`, or
/// its latest state when `mid` is `None`.
pub fn get_state(&mut self, mid: Option<u64>) -> Result<Option<(u64, Merkle)>, anyhow::Error> {
let txn = self.pristine.txn_begin()?;
let channel = txn.load_channel(&self.channel).unwrap();
if let Some(mid) = mid {
Ok(txn.get_changes(&channel, mid).map(|(_, m)| (mid, m)))
} else {
// Latest state = first entry of the reverse log.
Ok(txn
.reverse_log(&channel.borrow(), None)
.next()
.map(|(n, (_, m))| (n, m)))
}
}
/// Copy the local "remote" repository's changelist (entries >= `from`,
/// optionally restricted to changes touching `paths`) into `remote`.
pub fn download_changelist<T: MutTxnT>(
&mut self,
txn: &mut T,
remote: &mut RemoteRef<T>,
from: u64,
paths: &[String],
) -> Result<(), anyhow::Error> {
let store = libpijul::changestore::filesystem::FileSystem::from_root(&self.root);
let remote_txn = self.pristine.txn_begin()?;
let remote_channel = if let Some(channel) = remote_txn.load_channel(&self.channel) {
channel
} else {
debug!("no remote channel named {:?}", self.channel);
return Ok(());
};
// Resolve each path filter to a position in the remote's graph;
// unresolvable paths are silently dropped.
let mut paths_ = Vec::new();
for s in paths {
if let Ok((p, _ambiguous)) = remote_txn.follow_oldest_path(&store, &remote_channel, s) {
paths_.push(p)
}
}
for (n, (h, m)) in remote_txn.log(&remote_channel.borrow(), from) {
if n >= from {
debug!(
"downloading changelist item {:?} {:?} {:?}",
n,
h.to_base32(),
m.to_base32()
);
let h_int = remote_txn.get_internal(h).unwrap();
// Keep the entry if no filter was given, or if this change
// touches one of the requested paths.
if paths_.is_empty()
|| paths_
.iter()
.any(|x| remote_txn.get_touched_files(*x, Some(h_int)).is_some())
{
txn.put_remote(remote, n, (h, m))?;
}
}
}
Ok(())
}
/// Apply `changes` to the local "remote": hard-link each change file
/// into its changestore, apply it to the target channel, then output
/// the resulting working copy and commit.
pub fn upload_changes(
&mut self,
mut local: PathBuf,
to_channel: Option<&str>,
changes: &[Hash],
) -> Result<(), anyhow::Error> {
let store = libpijul::changestore::filesystem::FileSystem::from_root(&self.root);
let mut txn = self.pristine.mut_txn_begin();
let mut channel = txn.open_or_create_channel(to_channel.unwrap_or(&self.channel))?;
let mut ws = libpijul::ApplyWorkspace::new();
for c in changes {
libpijul::changestore::filesystem::push_filename(&mut local, &c);
libpijul::changestore::filesystem::push_filename(&mut self.changes_dir, &c);
std::fs::create_dir_all(&self.changes_dir.parent().unwrap())?;
debug!("hard link {:?} {:?}", local, self.changes_dir);
// NOTE(review): hard_link fails if the destination already exists,
// so re-uploading a change that is already present would error
// here — confirm this is the intended behavior.
std::fs::hard_link(&local, &self.changes_dir)?;
debug!("hard link done");
libpijul::changestore::filesystem::pop_filename(&mut local);
libpijul::changestore::filesystem::pop_filename(&mut self.changes_dir);
txn.apply_change_ws(&store, &mut channel, *c, &mut ws)?;
}
let mut repo = libpijul::working_copy::filesystem::FileSystem::from_root(&self.root);
txn.output_repository_no_pending(&mut repo, &store, &mut channel, "", true)?;
txn.commit()?;
Ok(())
}
/// "Download" a change from the local remote by hard-linking it from
/// the remote's changestore to `path`. Completes synchronously.
pub async fn start_change_download(
&mut self,
c: libpijul::pristine::Hash,
path: &Path,
) -> Result<(), anyhow::Error> {
libpijul::changestore::filesystem::push_filename(&mut self.changes_dir, &c);
debug!("hard link {:?} {:?}", self.changes_dir, path);
std::fs::hard_link(&self.changes_dir, path)?;
debug!("hard link done");
libpijul::changestore::filesystem::pop_filename(&mut self.changes_dir);
debug!("sent");
Ok(())
}
}
#[macro_use]
extern crate clap;
#[macro_use]
extern crate thiserror;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
use clap::Clap;
use human_panic::setup_panic;
mod commands;
mod config;
mod remote;
mod repository;
use commands::*;
use std::io::Write;
// Channel used when neither the CLI nor the configuration names one.
const DEFAULT_CHANNEL: &'static str = "main";
// Version of the exchange protocol spoken by `pijul protocol` (see
// `Ssh::run_protocol`).
const PROTOCOL_VERSION: usize = 3;
// Top-level command-line options. (`//` comments on purpose: clap turns
// `///` doc comments into user-visible help text.)
#[derive(Clap, Debug)]
#[clap(version = crate_version!(), author = crate_authors!())]
pub struct Opts {
#[clap(subcommand)]
pub subcmd: SubCommand,
}
// All pijul subcommands, dispatched in `run`. (`//` comments on purpose:
// clap turns `///` doc comments into user-visible help text.)
#[derive(Clap, Debug)]
pub enum SubCommand {
#[clap(name = "init")]
Init(Init),
#[clap(name = "clone")]
Clone(Clone),
#[clap(name = "record", alias = "rec")]
Record(Record),
#[clap(name = "diff")]
Diff(Diff),
#[clap(name = "log")]
Log(Log),
#[clap(name = "push")]
Push(Push),
#[clap(name = "pull")]
Pull(Pull),
#[clap(name = "change")]
Change(Change),
#[clap(name = "channel")]
Channel(Channel),
// Hidden: the server side invoked on remotes by `Ssh::run_protocol`.
#[clap(name = "protocol", setting = clap::AppSettings::Hidden)]
Protocol(Protocol),
#[cfg(feature = "git")]
#[clap(name = "git")]
Git(Git),
#[clap(name = "mv")]
Mv(Mv),
#[clap(name = "ls")]
Ls(Ls),
#[clap(name = "add")]
Add(Add),
#[clap(name = "remove")]
Remove(Remove),
#[clap(name = "reset")]
Reset(Reset),
// Only available in debug builds.
#[cfg(debug_assertions)]
#[clap(name = "debug")]
Debug(Debug),
#[clap(name = "fork")]
Fork(Fork),
#[clap(name = "unrecord", alias = "unrec", alias = "un")]
Unrecord(Unrecord),
#[clap(name = "apply")]
Apply(Apply),
#[clap(name = "remote")]
Remote(Remote),
#[clap(name = "archive")]
Archive(Archive),
#[clap(name = "credit")]
Credit(Credit),
#[clap(name = "upgrade", setting = clap::AppSettings::Hidden)]
Upgrade(Upgrade),
}
/// Top-level error type of the `pijul` binary; the `#[error]` attributes
/// provide the user-facing messages via `thiserror`.
#[derive(Debug, Error)]
pub enum Error {
#[error("No Pijul repository found")]
NoRepoRoot,
#[error("Cannot access working directory")]
CannotAccessWorkingDirectory,
#[error("Already in a repository")]
AlreadyInARepo,
#[error("No such channel: {}", channel)]
NoSuchChannel { channel: String },
#[error("Protocol error. Is this the correct URL?")]
ProtocolError { line: Vec<u8> },
#[error("Not authenticated")]
NotAuthenticated,
#[error("No change message")]
NoChangeMessage,
#[error("Incorrect remote: {}", name)]
IncorrectRemote { name: String },
#[error("Unknown host key")]
UnknownHostKey,
#[error("Cannot record a binary change interactively. Use -a")]
RecordBinaryChange,
#[error("Remote not found: {:?}", remote)]
RemoteNotFound { remote: String },
#[error("No global config directory")]
NoGlobalConfigDir,
#[error("Could not parse global config")]
CouldNotParseGlobal,
#[error("Cannot dry-reset multiple files")]
CannotDryReset,
#[error("Remote error: {}", msg)]
Remote { msg: String },
#[error("Remote exited with status {}", status)]
RemoteExit { status: u32 },
#[error("Missing remote")]
MissingRemote,
#[error("State not found in remote: {:?}", state)]
StateNotFound { state: libpijul::pristine::Merkle },
#[error("Missing dependencies for change {:?}", h)]
MissingDep { h: libpijul::pristine::Hash },
#[error("Ambiguous path: {:?}", path)]
AmbiguousPath { path: String },
#[error("No prefixes given. Use `.` to record the current directory.")]
NoRecordPrefixes,
#[error("HTTP error: {}", status.as_str())]
Http { status: reqwest::StatusCode },
#[error("Could not parse configuration file at {:?}", path)]
CouldNotReadConfig { path: std::path::PathBuf },
#[error("No current channel")]
NoCurrentChannel,
#[error("Channel not found: {:?}", channel)]
ChannelNotFound { channel: String },
#[error("Cannot reset, because there are unrecorded changes")]
UnrecordedChanges,
#[error("Could not infer repository name")]
CouldNotInferRepositoryName { repo: String },
}
#[tokio::main]
async fn main() {
// Human-friendly panic reports in release builds only.
if !cfg!(debug_assertions) {
setup_panic!();
}
env_logger::init();
let opts: Opts = Opts::parse();
if let Err(e) = run(opts).await {
// Print the error and exit non-zero; ignore a failing stderr write.
writeln!(std::io::stderr(), "Error: {}", e).unwrap_or(());
std::process::exit(1);
}
}
/// Dispatch the parsed subcommand to its implementation. Async because
/// several subcommands (clone, record, push, pull, archive) do network
/// or other async I/O.
async fn run(opts: Opts) -> Result<(), anyhow::Error> {
match opts.subcmd {
SubCommand::Log(l) => l.run(),
SubCommand::Init(init) => init.run(),
SubCommand::Clone(clone) => clone.run().await,
SubCommand::Record(record) => record.run().await,
SubCommand::Diff(diff) => diff.run(),
SubCommand::Push(push) => push.run().await,
SubCommand::Pull(pull) => pull.run().await,
SubCommand::Change(change) => change.run(),
SubCommand::Channel(channel) => channel.run(),
SubCommand::Protocol(protocol) => protocol.run(),
#[cfg(feature = "git")]
SubCommand::Git(git) => git.run(),
SubCommand::Mv(mv) => mv.run(),
SubCommand::Ls(ls) => ls.run(),
SubCommand::Add(add) => add.run(),
SubCommand::Remove(remove) => remove.run(),
SubCommand::Reset(reset) => reset.run(),
#[cfg(debug_assertions)]
SubCommand::Debug(debug) => debug.run(),
SubCommand::Fork(fork) => fork.run(),
SubCommand::Unrecord(unrecord) => unrecord.run(),
SubCommand::Apply(apply) => apply.run(),
SubCommand::Remote(remote) => remote.run(),
SubCommand::Archive(archive) => archive.run().await,
SubCommand::Credit(credit) => credit.run(),
SubCommand::Upgrade(upgrade) => upgrade.run(),
}
}
pub fn current_dir() -> Result<std::path::PathBuf, Error> {
std::env::current_dir().map_err(|_| Error::CannotAccessWorkingDirectory)
}
use std::collections::HashMap;
/// Global (per-user) configuration, loaded from the platform config
/// directory (`<config>/pijul/config.toml`) or `~/.pijulconfig`.
#[derive(Debug, Serialize, Deserialize)]
pub struct Global {
    /// Author used for new changes when none is given on the command line.
    pub author: libpijul::change::Author,
}
/// Name of pijul's directory inside the platform config directory.
/// (`'static` is implied on const references; the explicit lifetime was redundant.)
const CONFIG_DIR: &str = "pijul";
impl Global {
pub fn load() -> Result<Global, anyhow::Error> {
if let Some(mut dir) = dirs_next::config_dir() {
dir.push(CONFIG_DIR);
dir.push("config.toml");
let s = std::fs::read(&dir);
let s = match s {
Ok(s) => s,
Err(e) => {
if let Some(mut dir) = dirs_next::home_dir() {
dir.push(".pijulconfig");
std::fs::read(&dir)?
} else {
return Err(e.into());
}
}
};
debug!("s = {:?}", s);
if let Ok(c) = toml::from_slice(&s) {
Ok(c)
} else {
Err((crate::Error::CouldNotReadConfig { path: dir }).into())
}
} else {
Err(crate::Error::NoGlobalConfigDir.into())
}
}
}
/// Per-repository configuration.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Config {
    /// Channel currently checked out in the working copy.
    pub current_channel: Option<String>,
    /// Remote used by push/pull when none is given explicitly.
    pub default_remote: Option<String>,
    /// Named remotes (name -> address).
    pub remotes: HashMap<String, String>,
    /// Optional hook commands (see [`Hooks`]).
    pub hooks: Option<Hooks>,
}
/// Hook commands configured for the repository.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Hooks {
    /// Shell commands (run via `bash -c`) executed before recording;
    /// a non-zero exit aborts the record.
    pub record: Vec<String>,
}
impl Config {
pub fn get_current_channel<'a>(&'a self, alt: Option<&'a String>) -> &'a str {
if let Some(channel) = alt {
channel.as_ref()
} else if let Some(ref channel) = self.current_channel {
channel.as_str()
} else {
crate::DEFAULT_CHANNEL
}
}
}
/// Raw serialized form of a remote; at most one field is expected to
/// be set (see the `Deserialize`/`Serialize` impls for [`Remote`]).
#[derive(Debug, Serialize, Deserialize)]
struct Remote_ {
    ssh: Option<SshRemote>,
    local: Option<String>,
    url: Option<String>,
}
/// A configured remote: over SSH, a local path, an HTTP URL, or unset.
#[derive(Debug)]
pub enum Remote {
    Ssh(SshRemote),
    Local { local: String },
    Http { url: String },
    None,
}
/// An SSH remote, identified by its address string.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SshRemote {
    pub addr: String,
}
impl<'de> serde::Deserialize<'de> for Remote {
fn deserialize<D>(deserializer: D) -> Result<Remote, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let r = Remote_::deserialize(deserializer)?;
if let Some(ssh) = r.ssh {
Ok(Remote::Ssh(ssh))
} else if let Some(local) = r.local {
Ok(Remote::Local { local })
} else if let Some(url) = r.url {
Ok(Remote::Http { url })
} else {
Ok(Remote::None)
}
}
}
impl serde::Serialize for Remote {
    /// Serialize through the raw [`Remote_`] mirror, filling exactly
    /// one field (or none, for `Remote::None`).
    fn serialize<D>(&self, serializer: D) -> Result<D::Ok, D::Error>
    where
        D: serde::ser::Serializer,
    {
        let mut raw = Remote_ {
            ssh: None,
            local: None,
            url: None,
        };
        match *self {
            Remote::Ssh(ref ssh) => raw.ssh = Some(ssh.clone()),
            Remote::Local { ref local } => raw.local = Some(local.to_string()),
            Remote::Http { ref url } => raw.url = Some(url.to_string()),
            Remote::None => {}
        }
        raw.serialize(serializer)
    }
}
use crate::repository::Repository;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::{Hash, MutTxnT, TxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
// CLI options for `pijul upgrade`: migrate a repository's changes to
// the current on-disk format.
#[derive(Clap, Debug)]
pub struct Upgrade {
    // Path to the repository (found by walking upwards when absent).
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
}
impl Upgrade {
    /// Migrate a repository from the v3 change format to v4.
    ///
    /// Collects every change of every channel from the old pristine,
    /// moves `.pijul` aside to `.pijul.old`, re-initialises a fresh
    /// repository, then re-saves and re-applies the changes in order,
    /// translating old hashes to new ones as it goes.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut channels = HashMap::new();
        {
            let repo = Repository::find_root(self.repo_path.clone())?;
            let txn = repo.pristine.txn_begin()?;
            let mut hashes = HashSet::new();
            for channel in txn.iter_channels("") {
                let channel = channel.borrow();
                let name = channel.name();
                let e = channels.entry(name.to_string()).or_insert(Vec::new());
                hashes.clear();
                for (_, (h, _)) in txn.reverse_log(&channel, None) {
                    // `insert` returns false for duplicates; each hash
                    // is collected only once per channel.
                    if !hashes.insert(h) {
                        continue;
                    }
                    let path = repo.changes.filename(&h);
                    // Old-format (v3) change files are read here.
                    let change = libpijul::change::v3::LocalChange3::deserialize(
                        path.to_str().unwrap(),
                        Some(&h),
                    )
                    .unwrap();
                    e.push((h, change))
                }
            }
            // Keep the old pristine around as a backup.
            std::fs::rename(repo.path.join(".pijul"), repo.path.join(".pijul.old"))?;
        }
        let repo2 = Repository::init(self.repo_path)?;
        let mut txn2 = repo2.pristine.mut_txn_begin();
        // Maps old hashes to new ones as changes are converted.
        let mut translations = HashMap::new();
        translations.insert(None, None);
        translations.insert(Some(Hash::None), Some(Hash::None));
        for (channel_name, mut changes) in channels {
            let mut channel = txn2.open_or_create_channel(&channel_name)?;
            // `changes` was filled newest-first (reverse_log), so
            // popping applies the oldest change first.
            while let Some((old_h, c)) = changes.pop() {
                let h = repo2.changes.save_change(&c.to_v4(&translations))?;
                translations.insert(Some(old_h), Some(h));
                txn2.apply_change(&repo2.changes, &mut channel, h)?;
            }
        }
        txn2.commit()?;
        Ok(())
    }
}
use crate::repository::Repository;
use libpijul::pristine::{Base32, MutTxnT, TxnT};
use libpijul::MutTxnTExt;
use std::io::Write;
use std::path::PathBuf;
// CLI options for `pijul unrecord`: remove changes from a channel.
#[derive(Clap, Debug)]
pub struct Unrecord {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    #[clap(long = "channel")]
    channel: Option<String>,
    // Hash prefixes of the changes to unrecord.
    #[clap(
        about = "identifier of the change (unambiguous prefixes are accepted)",
        multiple = true
    )]
    change_id: Vec<String>,
}
impl Unrecord {
    /// Remove the given changes from the channel, refusing (per
    /// change) when other changes still on the channel depend on it;
    /// the blocking dependents are listed on stderr.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let mut txn = repo.pristine.mut_txn_begin();
        let mut stderr = std::io::stderr();
        for c in self.change_id.iter() {
            // Resolve the (possibly abbreviated) hash prefix.
            let (hash, change_id) = txn.hash_from_prefix(c)?;
            if let Some(mut channel) = txn.load_channel(channel_name) {
                let mut can_unrecord = true;
                let channel_ = channel.borrow();
                // Scan the reverse-dependency table for entries keyed
                // on this change; p < / > guards skip other keys.
                for (p, d) in txn.iter_revdep(change_id) {
                    if p < change_id {
                        continue;
                    } else if p > change_id {
                        break;
                    }
                    // `d` depends on the change being unrecorded; if
                    // it is still on the channel we must refuse.
                    if txn.get_changeset(&channel_.changes, d, None).is_some() {
                        if can_unrecord {
                            writeln!(stderr, "Cannot unrecord change {}, because the following changes depend on it:", c)?
                        }
                        can_unrecord = false;
                        writeln!(stderr, "  {}", txn.get_external(d).unwrap().to_base32())?
                    }
                }
                // Release the borrow before mutating the channel.
                std::mem::drop(channel_);
                if can_unrecord {
                    txn.unrecord(&repo.changes, &mut channel, &hash)?;
                }
            }
        }
        txn.commit()?;
        Ok(())
    }
}
use crate::repository::Repository;
use crate::Error;
use libpijul::pristine::MutTxnT;
use libpijul::{MutTxnTExt, TxnTExt};
use std::path::PathBuf;
// CLI options for `pijul reset`: restore the working copy from the
// pristine.
#[derive(Clap, Debug)]
pub struct Reset {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    #[clap(long = "channel")]
    channel: Option<String>,
    // Print the reconstructed file to stdout instead of touching the
    // working copy; expects exactly one file argument.
    #[clap(long = "dry-run")]
    dry_run: bool,
    // Change prefix to apply before resetting.
    #[clap(long = "change")]
    change: Option<String>,
    // Paths to reset; empty resets the whole repository.
    files: Vec<PathBuf>,
}
impl Reset {
    /// Reset the working copy to the pristine state of a channel.
    ///
    /// With `--dry-run`, prints the reconstructed contents of exactly
    /// one file to stdout instead. When switching channels, refuses to
    /// proceed if the current channel has unrecorded changes.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let has_repo_path = self.repo_path.is_some();
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        use libpijul::pristine::TxnT;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let mut channel = if let Some(channel) = txn.load_channel(&channel_name) {
            channel
        } else if self.change.is_some() {
            txn.open_or_create_channel(&channel_name)?
        } else {
            return Err((Error::NoSuchChannel {
                channel: channel_name.to_string(),
            })
            .into());
        };
        if self.dry_run {
            // A dry run outputs exactly one file. The previous check
            // (`len() > 1`) let an empty list through and panicked on
            // `self.files[0]` below; require exactly one path.
            if self.files.len() != 1 {
                return Err(Error::CannotDryReset.into());
            }
            let (pos, _ambiguous) = if has_repo_path {
                let root = std::fs::canonicalize(repo.path.join(&self.files[0]))?;
                let path = root.strip_prefix(&repo.path)?.to_str().unwrap();
                txn.follow_oldest_path(&repo.changes, &channel, &path)?
            } else {
                let path = self.files[0].to_str().unwrap();
                txn.follow_oldest_path(&repo.changes, &channel, &path)?
            };
            txn.output_file(
                &repo.changes,
                &channel,
                pos,
                &mut libpijul::vertex_buffer::Writer::new(std::io::stdout()),
            )?;
        } else {
            let current_channel = repo.config.get_current_channel(None);
            if self.channel.is_some()
                && self.channel.as_ref().map(|x| x.as_str()) != Some(current_channel)
            {
                // Switching channels: refuse if the working copy has
                // unrecorded changes on the current channel.
                if let Some(mut channel) = txn.load_channel(current_channel) {
                    let mut state = libpijul::RecordBuilder::new();
                    txn.record(
                        &mut state,
                        libpijul::Algorithm::default(),
                        &mut channel,
                        &mut repo.working_copy,
                        &repo.changes,
                        "",
                    )?;
                    let rec = state.finish();
                    debug!("actions = {:?}", rec.actions);
                    if !rec.actions.is_empty() {
                        return Err(Error::UnrecordedChanges.into());
                    }
                }
            }
            if self.channel.is_some() {
                repo.config.current_channel = self.channel;
                repo.save_config()?;
            }
            if let Some(ch) = self.change {
                let (hash, _) = txn.hash_from_prefix(&ch)?;
                txn.apply_change_rec(&repo.changes, &mut channel, hash)?
            }
            if self.files.is_empty() {
                // Reset the entire working copy.
                txn.output_repository_no_pending(
                    &mut repo.working_copy,
                    &repo.changes,
                    &mut channel,
                    "",
                    true,
                )?;
            } else {
                // Reset only the requested paths.
                for root in self.files.iter() {
                    let root = std::fs::canonicalize(&root)?;
                    let path = root.strip_prefix(&repo.path)?.to_str().unwrap();
                    txn.output_repository_no_pending(
                        &mut repo.working_copy,
                        &repo.changes,
                        &mut channel,
                        &path,
                        true,
                    )?;
                }
            }
            txn.commit()?
        }
        Ok(())
    }
}
use crate::repository::*;
use crate::Error;
use chrono::Utc;
use libpijul::change::*;
use libpijul::changestore::*;
use libpijul::pristine::{Base32, ChannelRef, MutTxnT, TxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use std::collections::HashMap;
use std::io::Write;
use std::path::{Path, PathBuf};
use thrussh_keys::PublicKeyBase64;
// CLI options for `pijul record`.
#[derive(Clap, Debug)]
pub struct Record {
    // Record everything without opening an editor for review.
    #[clap(short = 'a', long = "all")]
    pub all: bool,
    // Change message (header).
    #[clap(short = 'm', long = "message")]
    pub message: Option<String>,
    // Author override; falls back to the global config.
    #[clap(long = "author")]
    pub author: Option<String>,
    #[clap(long = "channel")]
    pub channel: Option<String>,
    #[clap(long = "repository")]
    pub repo_path: Option<PathBuf>,
    // Unix timestamp to use instead of "now".
    #[clap(long = "timestamp")]
    pub timestamp: Option<i64>,
    // Sign the change with an SSH key.
    #[clap(short = 'S')]
    pub sign: bool,
    #[clap(long = "stdin")]
    pub stdin: bool,
    // Record a tag: the change depends on the full channel state.
    #[clap(long = "tag")]
    pub tag: bool,
    // Hash prefix of a change to amend (it is unrecorded first and
    // its header reused).
    #[clap(long = "amend")]
    pub amend: Option<String>,
    // Restrict recording to these paths.
    pub prefixes: Vec<PathBuf>,
}
impl Record {
    /// Record the working-copy changes as a new change on the channel:
    /// run the configured hooks, optionally amend and/or sign, then
    /// apply the recorded change and print its hash.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path.clone())?;
        let mut stdout = std::io::stdout();
        let mut stderr = std::io::stderr();
        // Run record hooks first; a failing hook aborts the record and
        // the process exits with the hook's status code.
        if let Some(ref hooks) = repo.config.hooks {
            for h in hooks.record.iter() {
                let mut proc = std::process::Command::new("bash")
                    .current_dir(&repo.path)
                    .args(&["-c", &h])
                    .spawn()?;
                let status = proc.wait()?;
                if !status.success() {
                    writeln!(stderr, "Hook {:?} exited with code {:?}", h, status)?;
                    std::process::exit(status.code().unwrap_or(1))
                }
            }
        }
        let mut txn = repo.pristine.mut_txn_begin();
        let mut channel =
            txn.open_or_create_channel(repo.config.get_current_channel(self.channel.as_ref()))?;
        let sign = self.sign;
        // Amending: reuse the amended change's header after
        // unrecording it, so it is re-recorded together with the new
        // working-copy state.
        let header = if let Some(ref amend) = self.amend {
            let (h, _) = txn.hash_from_prefix(amend)?;
            let header = repo.changes.get_header(&h)?;
            txn.unrecord(&repo.changes, &mut channel, &h)?;
            header
        } else {
            self.header()
        };
        let result = self.record(
            &mut txn,
            &mut channel,
            &mut repo.working_copy,
            &repo.changes,
            &repo.path,
            header,
        )?;
        if let Some((mut change, updates, hash)) = result {
            let hash = hash.unwrap();
            if sign {
                // Sign the hash with an SSH key and store the
                // signature in the change's unhashed section; the
                // hash must be unaffected (asserted below).
                let mut key_path = dirs_next::home_dir().unwrap().join(".ssh");
                if let Some((pk, signature)) = sign_hash(&mut key_path, hash).await? {
                    let sig = toml::Value::try_from(vec![Signature {
                        public_key: pk,
                        timestamp: change.header.timestamp,
                        signature: signature,
                    }])?;
                    let mut toml = toml::map::Map::new();
                    toml.insert("signatures".to_string(), sig);
                    change.unhashed = Some(toml.into());
                    let hash2 = repo.changes.save_change(&change).unwrap();
                    assert_eq!(hash2, hash);
                }
            }
            txn.apply_local_change(&mut channel, &change, hash, &updates)?;
            writeln!(stdout, "Hash: {}", hash.to_base32())?;
            txn.commit()?;
        } else {
            writeln!(stderr, "Nothing to record")?;
        }
        Ok(())
    }
    /// Build a change header from the CLI flags, falling back to the
    /// global config for the author and to "now" for the timestamp.
    fn header(&self) -> ChangeHeader {
        let authors = if let Some(ref a) = self.author {
            vec![libpijul::change::Author {
                name: a.clone(),
                full_name: None,
                email: None,
            }]
        } else if let Ok(global) = crate::config::Global::load() {
            vec![global.author]
        } else {
            Vec::new()
        };
        ChangeHeader {
            message: self.message.clone().unwrap_or(String::new()),
            authors,
            description: None,
            timestamp: if let Some(t) = self.timestamp {
                chrono::DateTime::from_utc(chrono::NaiveDateTime::from_timestamp(t, 0), chrono::Utc)
            } else {
                Utc::now()
            },
        }
    }
    /// Make every relative prefix absolute, relative to the current
    /// working directory.
    fn fill_relative_prefixes(&mut self) -> Result<(), anyhow::Error> {
        let cwd = std::env::current_dir()?;
        for p in self.prefixes.iter_mut() {
            if p.is_relative() {
                *p = cwd.join(&p);
            }
        }
        Ok(())
    }
    /// Diff the working copy against the channel and build the change.
    ///
    /// Returns `None` when there is nothing to record or the user
    /// aborted in the editor; otherwise the change, the inode updates
    /// to apply with it, and its hash.
    fn record<T: TxnT + TxnTExt + MutTxnTExt, C: ChangeStore>(
        mut self,
        txn: &mut T,
        channel: &mut ChannelRef<T>,
        working_copy: &mut libpijul::working_copy::FileSystem,
        changes: &C,
        repo_path: &Path,
        header: ChangeHeader,
    ) -> Result<
        Option<(
            Change,
            HashMap<usize, libpijul::InodeUpdate>,
            Option<libpijul::pristine::Hash>,
        )>,
        anyhow::Error,
    > {
        let mut state = libpijul::RecordBuilder::new();
        if self.prefixes.is_empty() {
            txn.record(
                &mut state,
                libpijul::Algorithm::default(),
                channel,
                working_copy,
                changes,
                "",
            )?
        } else {
            self.fill_relative_prefixes()?;
            working_copy.record_prefixes(
                txn,
                channel,
                changes,
                &mut state,
                repo_path,
                &self.prefixes,
            )?;
        }
        let mut rec = state.finish();
        if rec.actions.is_empty() {
            return Ok(None);
        }
        let actions = rec
            .actions
            .into_iter()
            .map(|rec| rec.globalize(txn))
            .collect();
        let change =
            LocalChange::make_change(txn, channel, actions, rec.contents, header, Vec::new());
        let file_name = |local: &Local, _| -> String { format!("{}:{}", local.path, local.line) };
        debug!("has_binary = {:?}", rec.has_binary_files);
        let mut change = if self.all {
            change
        } else if rec.has_binary_files {
            // Binary changes cannot be reviewed in a text editor.
            return Err(Error::RecordBinaryChange.into());
        } else {
            let mut o = Vec::new();
            change.write(changes, None, file_name, true, &mut o)?;
            let mut with_errors: Option<Vec<u8>> = None;
            // Re-open the editor until the change parses, or the user
            // empties the file to abort. Note the shadowing: the inner
            // `o` in the first branch is the error-annotated buffer,
            // not the pristine rendering.
            let change = loop {
                let mut bytes = if let Some(ref o) = with_errors {
                    edit::edit_bytes(&o[..])?
                } else {
                    edit::edit_bytes(&o[..])?
                };
                if bytes.iter().all(|c| (*c as char).is_whitespace()) {
                    return Ok(None);
                }
                let mut change = std::io::BufReader::new(std::io::Cursor::new(&bytes));
                if let Ok(change) =
                    Change::read_and_deps(&mut change, &mut rec.updatables, txn, channel)
                {
                    break change;
                }
                // Prepend the syntax-error banner and let the user try
                // again with their edits preserved.
                let mut err = SYNTAX_ERROR.as_bytes().to_vec();
                err.append(&mut bytes);
                with_errors = Some(err)
            };
            if change.changes.is_empty() {
                return Ok(None);
            }
            change
        };
        if change.header.message.trim().is_empty() {
            return Err(Error::NoChangeMessage.into());
        }
        // A tag depends on the full channel state; a normal change
        // only on what it touches.
        let (dependencies, extra_known) = if self.tag {
            full_dependencies(txn, channel)
        } else {
            dependencies(txn, channel, change.changes.iter())
        };
        change.dependencies = dependencies;
        change.extra_known = extra_known;
        debug!("saving change");
        let hash = changes.save_change(&change).unwrap();
        debug!("saved");
        Ok(Some((change, rec.updatables, Some(hash))))
    }
}
/// A detached signature over a change hash, stored in the change's
/// unhashed section (so it does not affect the hash itself).
#[derive(Debug, Serialize, Deserialize)]
struct Signature {
    public_key: String,
    timestamp: chrono::DateTime<chrono::Utc>,
    signature: String,
}
/// Sign `hash` with one of the user's SSH keys.
///
/// Tries the SSH agent first (using the public keys found under
/// `key_path`), then falls back to loading the secret keys directly.
/// Returns `(public_key_base64, signature_base64)` on success, `None`
/// when no usable key was found. `key_path` is used as scratch space:
/// key file names are pushed and popped between attempts.
async fn sign_hash(
    key_path: &mut PathBuf,
    hash: libpijul::pristine::Hash,
) -> Result<Option<(String, String)>, anyhow::Error> {
    let to_sign = hash.to_bytes();
    match thrussh_keys::agent::client::AgentClient::connect_env().await {
        Ok(agent) => {
            // The agent client is consumed by a signing request, so it
            // is kept in an Option and taken on first use.
            let mut agent = Some(agent);
            for k in &["id_ed25519.pub", "id_rsa.pub"] {
                key_path.push(k);
                if let Ok(key) = thrussh_keys::load_public_key(&key_path) {
                    debug!("key");
                    if let Some(a) = agent.take() {
                        debug!("authenticate future");
                        if let (_, Ok(sig)) = a.sign_request_base64(&key, &to_sign).await {
                            key_path.pop();
                            let key = key.public_key_base64();
                            return Ok(Some((key, sig)));
                        }
                    }
                }
                key_path.pop();
            }
        }
        Err(e) => {
            error!("{:?}", e);
        }
    }
    // No agent (or it refused every key): try the on-disk secret keys.
    for k in &["id_ed25519", "id_rsa"] {
        key_path.push(k);
        if let Some(k) = crate::remote::ssh::load_secret_key(&key_path, k) {
            key_path.pop();
            let pk = k.public_key_base64();
            return Ok(Some((pk, k.sign_detached(&to_sign)?.to_base64())));
        } else {
            key_path.pop();
        }
    }
    Ok(None)
}
/// Banner prepended to a change file that failed to parse, shown to
/// the user on the next editor round. (Also fixes the misplaced
/// closing parenthesis in the message, and drops the redundant
/// explicit `'static`.)
const SYNTAX_ERROR: &str = "# Syntax errors, please try again.
# Alternatively, you may delete the entire file (including this
# comment) to abort.
";
use crate::repository::Repository;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::{MutTxnT, TxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use regex::Regex;
use std::collections::HashSet;
use std::io::Write;
use std::path::PathBuf;
// CLI options for `pijul remote`: manage the saved remotes.
#[derive(Clap, Debug)]
pub struct Remote {
    // Subcommand; defaults to listing when absent.
    #[clap(subcommand)]
    subcmd: Option<SubRemote>,
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
}
// Subcommands of `pijul remote`.
#[derive(Clap, Debug)]
pub enum SubRemote {
    // Delete a saved remote by name.
    #[clap(name = "delete")]
    Delete { remote: String },
    // List the saved remotes.
    #[clap(name = "list")]
    List,
}
impl Remote {
    /// List the saved remotes, or delete one, depending on the
    /// subcommand (listing is the default).
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let mut stdout = std::io::stdout();
        match self.subcmd {
            Some(SubRemote::Delete { remote }) => {
                let mut txn = repo.pristine.mut_txn_begin();
                if txn.drop_named_remote(&remote)? {
                    // Only persist the transaction when something was
                    // actually removed.
                    txn.commit()?;
                } else {
                    writeln!(std::io::stderr(), "Remote not found: {:?}", remote)?
                }
            }
            None | Some(SubRemote::List) => {
                let txn = repo.pristine.txn_begin()?;
                for r in txn.iter_remotes("") {
                    writeln!(stdout, "  {}", r.name())?;
                }
            }
        }
        Ok(())
    }
}
// CLI options for `pijul push`.
#[derive(Clap, Debug)]
pub struct Push {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Local channel to push from.
    #[clap(long = "channel")]
    channel: Option<String>,
    // Push everything without interactive selection.
    #[clap(short = 'a')]
    all: bool,
    #[clap(short = 'k', about = "Do not check certificates")]
    no_cert_check: bool,
    // Explicit changes given after `--`.
    #[clap(last = true)]
    changes: Vec<String>,
    // Restrict the push to changes touching this path.
    #[clap(long = "path")]
    path: Option<String>,
    // Remote to push to; falls back to the configured default.
    to: Option<String>,
    // Remote channel, optionally "channel:push_channel".
    #[clap(long = "to-channel")]
    to_channel: Option<String>,
}
// CLI options for `pijul pull`.
#[derive(Clap, Debug)]
pub struct Pull {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Local channel to pull into.
    #[clap(long = "channel")]
    channel: Option<String>,
    // Apply everything without interactive selection.
    #[clap(long = "all", short = 'a')]
    all: bool,
    #[clap(short = 'k', about = "Do not check certificates")]
    no_cert_check: bool,
    #[clap(
        long = "full",
        about = "Download full changes, even when not necessary"
    )]
    full: bool, // This can't be symmetric with push
    #[clap(
        last = true,
        about = "Pull changes from the local repository, not necessarily from a channel"
    )]
    changes: Vec<String>, // For local changes only, can't be symmetric.
    // Restrict the pull to changes touching this path.
    #[clap(long = "path")]
    path: Option<String>,
    // Remote to pull from; falls back to the configured default.
    from: Option<String>,
    #[clap(long = "from-channel")]
    from_channel: Option<String>,
}
lazy_static! {
    // Splits a "--to-channel" argument "local[:push]" into the local
    // part (group 1) and the optional push part (group 3).
    static ref CHANNEL: Regex = Regex::new(r#"([^:]*)(:(.*))?"#).unwrap();
}
impl Push {
    /// Push local changes to a remote channel.
    ///
    /// Computes the changes the remote is missing (optionally
    /// restricted to a path), lets the user edit the list unless `-a`
    /// was given, and uploads them oldest-first.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let remote_name = if let Some(ref rem) = self.to {
            rem
        } else if let Some(ref def) = repo.config.default_remote {
            def
        } else {
            return Err(crate::Error::MissingRemote.into());
        };
        // `--to-channel` may be "remote_channel:push_channel".
        let mut push_channel = None;
        let remote_channel = if let Some(ref c) = self.to_channel {
            let c = CHANNEL.captures(c).unwrap();
            push_channel = c.get(3).map(|x| x.as_str());
            let c = c.get(1).unwrap().as_str();
            if c.is_empty() {
                channel_name
            } else {
                c
            }
        } else {
            channel_name
        };
        debug!("remote_channel = {:?} {:?}", remote_channel, push_channel);
        let mut remote = repo
            .remote(&remote_name, remote_channel, self.no_cert_check)
            .await?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut paths = if let Some(p) = self.path {
            vec![p.to_string()]
        } else {
            vec![]
        };
        let remote_changes = remote.update_changelist(&mut txn, &paths).await?;
        let channel = txn.open_or_create_channel(channel_name)?;
        // Resolve the optional path restriction to a position.
        let path = if let Some(path) = paths.pop() {
            let (p, ambiguous) = txn.follow_oldest_path(&repo.changes, &channel, &path)?;
            if ambiguous {
                return Err((crate::Error::AmbiguousPath { path: path.clone() }).into());
            }
            Some(p)
        } else {
            None
        };
        let mut to_upload = Vec::new();
        // Walk the channel log newest-first until a state the remote
        // already has is reached.
        for (_, (h, m)) in txn.reverse_log(&channel.borrow(), None) {
            if txn.remote_has_state(&remote_changes, m) {
                break;
            }
            let h_int = txn.get_internal(h).unwrap();
            if !txn.remote_has_change(&remote_changes, h) {
                if let Some(ref p) = path {
                    // Restricted push: only changes touching the path.
                    if txn.get_touched_files(*p, Some(h_int)).is_some() {
                        to_upload.push(h)
                    }
                } else {
                    to_upload.push(h)
                }
            }
        }
        if to_upload.is_empty() {
            return Ok(());
        }
        // Upload oldest-first.
        to_upload.reverse();
        debug!("to_upload = {:?}", to_upload);
        let to_upload = if !self.all {
            // Interactive selection, then re-check that no kept change
            // lost one of its dependencies.
            let o = make_changelist(&repo.changes, &to_upload)?;
            let u = parse_changelist(&edit::edit_bytes(&o[..])?);
            check_deps(&repo.changes, &to_upload, &u)?;
            u
        } else {
            to_upload
        };
        debug!("to_upload = {:?}", to_upload);
        remote
            .upload_changes(repo.changes_dir.clone(), push_channel, &to_upload)
            .await?;
        txn.commit()?;
        remote.finish().await?;
        Ok(())
    }
}
impl Pull {
    /// Pull changes from a remote channel, optionally letting the user
    /// select which to apply, then output the working copy.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let mut channel = txn.open_or_create_channel(channel_name)?;
        debug!("{:?}", repo.config);
        let remote_name = if let Some(ref rem) = self.from {
            rem
        } else if let Some(ref def) = repo.config.default_remote {
            def
        } else {
            return Err(crate::Error::MissingRemote.into());
        };
        let from_channel = if let Some(ref c) = self.from_channel {
            c
        } else {
            crate::DEFAULT_CHANNEL
        };
        let mut remote = repo
            .remote(&remote_name, from_channel, self.no_cert_check)
            .await?;
        debug!("downloading");
        // Either compute everything the channel is missing, or resolve
        // just the hashes given on the command line.
        let to_download = if self.changes.is_empty() {
            let paths = if let Some(p) = self.path {
                vec![p.to_string()]
            } else {
                vec![]
            };
            let remote_changes = remote.update_changelist(&mut txn, &paths).await?;
            debug!("changelist done");
            let mut to_download = Vec::new();
            for (_, (h, m)) in txn.iter_remote(&remote_changes.borrow().remote, 0) {
                if txn.channel_has_state(&channel, m) {
                    break;
                } else if txn.get_revchanges(&channel, h).is_none() {
                    to_download.push(h)
                }
            }
            to_download.reverse();
            to_download
        } else {
            let r: Result<Vec<libpijul::pristine::Hash>, anyhow::Error> = self
                .changes
                .iter()
                .map(|h| Ok(txn.hash_from_prefix(h)?.0))
                .collect();
            r?
        };
        if to_download.is_empty() {
            return Ok(());
        }
        debug!("recording");
        // Record any unrecorded working-copy edits into a temporary
        // change first — presumably to protect them while the pull
        // rewrites the working copy (it is unrecorded and deleted
        // again at the end); confirm against libpijul's output logic.
        let recorded = txn.record_all(
            libpijul::Algorithm::default(),
            &mut channel,
            &mut repo.working_copy,
            &repo.changes,
            "",
        )?;
        let hash = if recorded.actions.is_empty() {
            None
        } else {
            Some(txn.apply_recorded(&mut channel, recorded, &repo.changes)?)
        };
        remote
            .pull(
                &mut repo,
                &mut txn,
                &mut channel,
                to_download.clone(),
                self.all,
            )
            .await?;
        if !self.all {
            // Interactive selection of the downloaded changes, with a
            // dependency-closure check before applying.
            let o = make_changelist(&repo.changes, &to_download)?;
            let d = parse_changelist(&edit::edit_bytes(&o[..])?);
            check_deps(&repo.changes, &to_download, &d)?;
            let mut ws = libpijul::ApplyWorkspace::new();
            debug!("to_download = {:?}", to_download);
            for h in d.iter() {
                txn.apply_change_rec_ws(&repo.changes, &mut channel, *h, &mut ws)?;
            }
        }
        debug!("completing changes");
        remote
            .complete_changes(&repo, &txn, &mut channel, &to_download, self.full)
            .await?;
        remote.finish().await?;
        txn.output_repository_no_pending(
            &mut repo.working_copy,
            &repo.changes,
            &mut channel,
            "",
            true,
        )?;
        // Drop the temporary change recorded above.
        if let Some(h) = hash {
            txn.unrecord(&repo.changes, &mut channel, &h)?;
            repo.changes.del_change(&h)?;
        }
        txn.commit()?;
        Ok(())
    }
}
/// Make the "changelist", i.e. the list of patches, editable in a
/// text editor.
///
/// Each change is rendered as its base32 hash on a line of its own,
/// followed by indented author/date/message metadata; only the bare
/// hash lines survive [`parse_changelist`].
fn make_changelist<S: ChangeStore>(
    changes: &S,
    pullable: &[libpijul::pristine::Hash],
) -> Result<Vec<u8>, anyhow::Error> {
    use libpijul::pristine::Base32;
    let mut v = Vec::new();
    // Writing into a Vec<u8> is infallible, hence the unwraps below.
    writeln!(
        v,
        "# Please select the changes to pull. The lines that contain just a
# valid hash, and no other character (except possibly a newline), will
# be pulled.\n"
    )
    .unwrap();
    let mut first_p = true;
    for p in pullable {
        // Blank separator line between entries.
        if !first_p {
            writeln!(v).unwrap();
        }
        first_p = false;
        writeln!(v, "{}\n", p.to_base32()).unwrap();
        let change = changes.get_header(&p)?;
        write!(v, "  Author: [").unwrap();
        let mut first = true;
        for a in change.authors.iter() {
            if !first {
                write!(v, ", ").unwrap();
            }
            first = false;
            write!(v, "{}", a).unwrap();
        }
        writeln!(v, "]").unwrap();
        writeln!(v, "  Date: {}\n", change.timestamp).unwrap();
        for l in change.message.lines() {
            writeln!(v, "    {}", l).unwrap();
        }
    }
    Ok(v)
}
fn parse_changelist(o: &[u8]) -> Vec<libpijul::pristine::Hash> {
use libpijul::pristine::Base32;
if let Ok(o) = std::str::from_utf8(o) {
o.lines()
.filter_map(|l| libpijul::pristine::Hash::from_base32(l.as_bytes()))
.collect()
} else {
Vec::new()
}
}
/// Check that the user's edited selection `now` is still closed under
/// dependencies relative to `original`: a kept change may not depend
/// on a change that was deselected.
///
/// (Uses `HashSet::contains` instead of the roundabout
/// `get(..).is_some()` of the original.)
fn check_deps<C: ChangeStore>(
    c: &C,
    original: &[libpijul::pristine::Hash],
    now: &[libpijul::pristine::Hash],
) -> Result<(), anyhow::Error> {
    let original_: HashSet<_> = original.iter().collect();
    let now_: HashSet<_> = now.iter().collect();
    for n in now {
        // Every dependency must either still be selected, or never
        // have been part of the original list (i.e. already present).
        for d in c.get_dependencies(n)? {
            if original_.contains(&d) && !now_.contains(&d) {
                return Err((crate::Error::MissingDep { h: *n }).into());
            }
        }
    }
    Ok(())
}
use crate::repository::Repository;
use crate::Error;
use byteorder::{BigEndian, WriteBytesExt};
use libpijul::pristine::{Base32, ChannelRef, Hash, MutTxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use regex::Regex;
use std::collections::HashMap;
use std::io::BufWriter;
use std::io::{BufRead, Read, Write};
use std::path::PathBuf;
// CLI options for `pijul protocol`: serve the exchange protocol over
// stdin/stdout (used by the SSH remote on the server side).
#[derive(Clap, Debug)]
pub struct Protocol {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Protocol version requested by the client.
    #[clap(long = "version")]
    version: usize,
}
lazy_static! {
    // "state <channel> [n]": report a state merkle.
    static ref STATE: Regex = Regex::new(r#"state\s+(\S+)(\s+([0-9]+)?)\s+"#).unwrap();
    // "changelist <channel> <from> [paths…]".
    static ref CHANGELIST: Regex = Regex::new(r#"changelist\s+(\S+)\s+([0-9]+)(.*)\s+"#).unwrap();
    // Quoted (possibly escape-containing) paths in a changelist line.
    static ref CHANGELIST_PATHS: Regex = Regex::new(r#""((\\")|[^"])+""#).unwrap();
    // "change <hash>" or "partial <hash>".
    static ref CHANGE: Regex = Regex::new(r#"((change)|(partial))\s+([^ ]*)\s+"#).unwrap();
    // "apply <channel> <hash> <size>". The hash group was previously
    // written `(S+)` — a literal run of `S` characters, which can
    // never match a base32 hash; `\S+` is what the handler expects.
    static ref APPLY: Regex = Regex::new(r#"apply\s+(\S+)\s+(\S+)\s+([0-9]+)\s+"#).unwrap();
    // "channel <name>": dump a whole channel.
    static ref CHANNEL: Regex = Regex::new(r#"channel\s+(\S+)\s+"#).unwrap();
    // "archive <channel> [state extra…] [ :prefix]".
    static ref ARCHIVE: Regex =
        Regex::new(r#"archive\s+(\S+)\s*(( ([^:]+))*)( :(.*))?\n"#).unwrap();
}
fn load_channel<T: MutTxnT>(txn: &T, name: &str) -> Result<ChannelRef<T>, Error> {
if let Some(c) = txn.load_channel(name) {
Ok(c)
} else {
Err((Error::NoSuchChannel {
channel: name.to_string(),
})
.into())
}
}
/// Threshold (1 MiB): on a `partial` request, changes larger than this
/// are sent without their contents.
const PARTIAL_CHANGE_SIZE: u64 = 1 << 20;
impl Protocol {
    /// Serve the pijul exchange protocol on stdin/stdout, one command
    /// per line, until EOF. Handled commands: `state`, `changelist`,
    /// `change`/`partial`, `apply`, `channel` and `archive`; unknown
    /// lines are logged and skipped. Channels that received applies
    /// are output and the transaction committed at the end.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut ws = libpijul::ApplyWorkspace::new();
        use libpijul::pristine::TxnT;
        let mut buf = String::new();
        let mut buf2 = vec![0; 4096 * 10];
        let s = std::io::stdin();
        let mut s = s.lock();
        let o = std::io::stdout();
        let o = o.lock();
        // Channels modified by `apply`, to be output at the end.
        let mut applied = HashMap::new();
        let mut o = BufWriter::new(o);
        debug!("reading");
        while s.read_line(&mut buf)? > 0 {
            debug!("{:?}", buf);
            if let Some(cap) = STATE.captures(&buf) {
                // "state <channel> [n]": report the merkle state at
                // log position n, or the head state when n is absent.
                let channel = load_channel(&txn, &cap[1])?;
                let init = if let Some(u) = cap.get(3) {
                    u.as_str().parse().ok()
                } else {
                    None
                };
                if let Some(pos) = init {
                    for (n, (_, m)) in txn.log(&channel.borrow(), pos) {
                        if n < pos {
                            continue;
                        } else if n > pos {
                            // No entry exactly at `pos`.
                            writeln!(o, "-")?;
                            break;
                        } else {
                            writeln!(o, "{} {}", n, m.to_base32())?;
                            break;
                        }
                    }
                } else {
                    if let Some((n, (_, m))) = txn.reverse_log(&channel.borrow(), None).next() {
                        writeln!(o, "{} {}", n, m.to_base32())?
                    } else {
                        writeln!(o, "-")?;
                    }
                }
                o.flush()?;
            } else if let Some(cap) = CHANGELIST.captures(&buf) {
                // "changelist <channel> <from> [paths]": stream the log
                // from `from`, optionally filtered by touched paths.
                let channel = load_channel(&txn, &cap[1])?;
                let from: u64 = cap[2].parse().unwrap();
                let mut paths = Vec::new();
                for r in CHANGELIST_PATHS.captures_iter(&cap[3]) {
                    let s: String = r[0].parse().unwrap();
                    if let Ok((p, ambiguous)) = txn.follow_oldest_path(&repo.changes, &channel, &s)
                    {
                        if ambiguous {
                            return Err((Error::ProtocolError {
                                line: buf.as_bytes().to_vec(),
                            })
                            .into());
                        }
                        paths.push(p)
                    } else {
                        return Err((Error::ProtocolError {
                            line: buf.as_bytes().to_vec(),
                        })
                        .into());
                    }
                }
                for (n, (h, m)) in txn.log(&channel.borrow(), from) {
                    let h_int = txn.get_internal(h).unwrap();
                    if paths.is_empty()
                        || paths
                            .iter()
                            .any(|x| txn.get_touched_files(*x, Some(h_int)).is_some())
                    {
                        writeln!(o, "{}.{}.{}", n, h.to_base32(), m.to_base32())?
                    }
                }
                // An empty line terminates the changelist.
                writeln!(o, "")?;
                o.flush()?;
            } else if let Some(cap) = CHANGE.captures(&buf) {
                // "change <hash>" / "partial <hash>": stream the change
                // file, length-prefixed; `partial` may omit contents
                // for changes over PARTIAL_CHANGE_SIZE.
                let h_ = &cap[4];
                let h = if let Some(h) = Hash::from_base32(h_.as_bytes()) {
                    h
                } else {
                    return Err((Error::ProtocolError {
                        line: buf.as_bytes().to_vec(),
                    })
                    .into());
                };
                libpijul::changestore::filesystem::push_filename(&mut repo.changes_dir, &h);
                debug!("repo = {:?}", repo.changes_dir);
                let mut f = std::fs::File::open(&repo.changes_dir)?;
                let size = std::fs::metadata(&repo.changes_dir)?.len();
                let size = if &cap[1] == "change" || size <= PARTIAL_CHANGE_SIZE {
                    size
                } else {
                    libpijul::change::Change::size_no_contents(&mut f)?
                };
                o.write_u64::<BigEndian>(size)?;
                // Copy exactly `size` bytes from the file to stdout.
                let mut size = size as usize;
                while size > 0 {
                    if size < buf2.len() {
                        buf2.truncate(size as usize);
                    }
                    let n = f.read(&mut buf2[..])?;
                    if n == 0 {
                        break;
                    }
                    size -= n;
                    o.write_all(&buf2[..n])?;
                }
                o.flush()?;
                libpijul::changestore::filesystem::pop_filename(&mut repo.changes_dir);
            } else if let Some(cap) = APPLY.captures(&buf) {
                // "apply <channel> <hash> <size>": read `size` bytes of
                // change data from stdin, store, validate and apply.
                let h = if let Some(h) = Hash::from_base32(cap[2].as_bytes()) {
                    h
                } else {
                    return Err((Error::ProtocolError {
                        line: buf.as_bytes().to_vec(),
                    })
                    .into());
                };
                let mut path = repo.changes_dir.clone();
                libpijul::changestore::filesystem::push_filename(&mut path, &h);
                let size: usize = cap[3].parse().unwrap();
                buf2.resize(size, 0);
                s.read_exact(&mut buf2)?;
                std::fs::write(&path, &buf2)?;
                // Validate the change before applying it.
                libpijul::change::Change::deserialize(&path.to_string_lossy(), Some(&h))?;
                let mut channel = load_channel(&txn, &cap[1])?;
                txn.apply_change_ws(&repo.changes, &mut channel, h, &mut ws)?;
                // NOTE(review): the APPLY regex has only 3 capture
                // groups, so `cap[4]` looks out of range and would
                // panic here — confirm the intended group (probably
                // the channel name, cap[1]).
                applied.insert(cap[4].to_string(), channel);
            } else if let Some(cap) = CHANNEL.captures(&buf) {
                // "channel <name>": dump the whole channel.
                let channel = load_channel(&txn, &cap[1])?;
                let channel = channel.borrow();
                for d in libpijul::pristine::channel_dump::dump_channel(&txn, channel) {
                    o.write_all(&d)?;
                }
                o.flush()?;
            } else if let Some(cap) = ARCHIVE.captures(&buf) {
                // "archive <channel> [state extra…] [ :prefix]": send
                // a tarball of the channel, optionally at a given
                // state, length- and conflict-count-prefixed.
                let mut w = Vec::new();
                let mut tarball = libpijul::output::Tarball::new(
                    &mut w,
                    cap.get(6).map(|x| x.as_str().to_string()),
                );
                let channel = load_channel(&txn, &cap[1])?;
                let conflicts = if let Some(caps) = cap.get(2) {
                    debug!("caps = {:?}", caps.as_str());
                    let mut hashes = caps.as_str().split(' ').filter(|x| !x.is_empty());
                    let state: libpijul::pristine::Merkle = hashes.next().unwrap().parse().unwrap();
                    let extra: Vec<libpijul::pristine::Hash> =
                        hashes.map(|x| x.parse().unwrap()).collect();
                    debug!("state = {:?}, extra = {:?}", state, extra);
                    if txn.current_state(&channel.borrow()) == Some(state) && extra.is_empty() {
                        txn.archive(&repo.changes, &channel, &mut tarball)?
                    } else {
                        // Archiving another state: build it on a
                        // throwaway fork, then drop the fork.
                        use rand::Rng;
                        let fork_name: String = rand::thread_rng()
                            .sample_iter(&rand::distributions::Alphanumeric)
                            .take(30)
                            .collect();
                        let mut fork = txn.fork(&channel, &fork_name)?;
                        let conflicts = txn.archive_with_state(
                            &repo.changes,
                            &mut fork,
                            state,
                            &extra,
                            &mut tarball,
                        )?;
                        txn.drop_channel(&fork_name)?;
                        conflicts
                    }
                } else {
                    txn.archive(&repo.changes, &channel, &mut tarball)?
                };
                std::mem::drop(tarball);
                let mut o = std::io::stdout();
                o.write_u64::<BigEndian>(w.len() as u64)?;
                o.write_u64::<BigEndian>(conflicts.len() as u64)?;
                o.write_all(&w)?;
                o.flush()?;
            } else {
                error!("unmatched")
            }
            buf.clear();
        }
        let applied_nonempty = !applied.is_empty();
        // Output every channel that received applies, then commit.
        for (_, mut channel) in applied {
            txn.output_repository_no_pending(
                &mut repo.working_copy,
                &repo.changes,
                &mut channel,
                "",
                true,
            )?;
        }
        if applied_nonempty {
            txn.commit()?;
        }
        Ok(())
    }
}
mod init;
pub use init::Init;
mod clone;
pub use clone::Clone;
mod pushpull;
pub use pushpull::*;
mod log;
pub use self::log::Log;
mod record;
pub use record::Record;
mod diff;
pub use diff::Diff;
mod change;
pub use change::Change;
mod protocol;
pub use protocol::Protocol;
#[cfg(feature = "git")]
mod git;
#[cfg(feature = "git")]
pub use git::Git;
mod channel;
pub use channel::*;
mod reset;
pub use reset::*;
mod fork;
pub use fork::*;
mod unrecord;
pub use unrecord::*;
mod file_operations;
pub use file_operations::*;
mod apply;
pub use apply::*;
mod archive;
pub use archive::*;
mod credit;
pub use credit::*;
#[cfg(debug_assertions)]
mod debug;
#[cfg(debug_assertions)]
pub use debug::*;
mod upgrade;
pub use upgrade::*;
use crate::repository::Repository;
use crate::Error;
use libpijul::changestore::*;
use libpijul::pristine::Base32;
use libpijul::TxnTExt;
use std::io::Write;
use std::path::PathBuf;
// CLI options for `pijul log`.
#[derive(Clap, Debug)]
pub struct Log {
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    #[clap(long = "channel")]
    channel: Option<String>,
    // Print only hashes (the channel's full dependency set).
    #[clap(long = "hash-only")]
    hash_only: bool,
    // Also print the state merkle of each entry.
    #[clap(long = "state")]
    states: bool,
    // Also print change descriptions.
    #[clap(long = "description")]
    descriptions: bool,
}
impl Log {
    /// Print the change log of the selected channel, newest first;
    /// with `--hash-only`, print the channel's full dependency set
    /// instead, one hash per line.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        use libpijul::pristine::TxnT;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = if let Some(channel) = txn.load_channel(channel_name) {
            channel
        } else {
            return Err((Error::NoSuchChannel {
                channel: channel_name.to_string(),
            })
            .into());
        };
        let changes = repo.changes;
        let mut stdout = std::io::stdout();
        if self.hash_only {
            for h in libpijul::change::full_dependencies(&txn, &channel).0 {
                writeln!(stdout, "{}", h.to_base32())?
            }
        } else {
            let states = self.states;
            for (_, (h, mrk)) in txn.reverse_log(&channel.borrow(), None) {
                let change = changes.get_change(&h)?;
                writeln!(stdout, "Change {}", h.to_base32())?;
                writeln!(stdout, "Author: {:?}", change.header.authors)?;
                writeln!(stdout, "Date: {}", change.header.timestamp)?;
                if states {
                    writeln!(stdout, "State: {}", mrk.to_base32())?;
                }
                writeln!(stdout, "\n    {}\n", change.header.message)?;
                if self.descriptions {
                    if let Some(ref descr) = change.header.description {
                        writeln!(stdout, "\n    {}\n", descr)?;
                    }
                }
            }
        }
        Ok(())
    }
}
use crate::repository::*;
use libpijul::pristine::MutTxnT;
use std::path::PathBuf;
/// Command-line arguments for `pijul init`: create a new repository.
#[derive(Clap, Debug)]
pub struct Init {
    /// Name of the initial channel; defaults to the crate-wide default.
    #[clap(long = "channel")]
    channel: Option<String>,
    /// Where to create the repository; defaults to the current directory.
    path: Option<PathBuf>,
}
impl Init {
    /// Create a new repository at `self.path`, open (or create) the
    /// initial channel, record it as the current channel in the config,
    /// and commit.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::init(self.path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        // `unwrap_or_else` avoids allocating the default channel name
        // when one was given on the command line (clippy: or_fun_call).
        let channel_name = self
            .channel
            .unwrap_or_else(|| crate::DEFAULT_CHANNEL.to_string());
        txn.open_or_create_channel(&channel_name)?;
        repo.config.current_channel = Some(channel_name);
        repo.save_config()?;
        txn.commit()?;
        Ok(())
    }
}
use crate::repository::*;
use libpijul::pristine::*;
use libpijul::*;
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::io::Write;
use std::path::{Path, PathBuf};
use std::rc::Rc;
/// Command-line arguments for `pijul git`: import a Git repository.
#[derive(Clap, Debug)]
pub struct Git {
    /// Write per-commit import statistics (CSV) to this file.
    #[clap(long = "stats")]
    stats: Option<PathBuf>,
    /// Path to the repository; defaults to the current directory.
    pub repo_path: Option<PathBuf>,
    /// Run expensive consistency checks every `check` commits
    /// (0 disables the checks).
    #[clap(default_value = "0")]
    check: usize,
}
/// Errors specific to the Git import.
#[derive(Debug, Error)]
pub enum Error {
    /// The Pijul channel for an already-imported commit no longer ends
    /// at the state recorded during the previous import run.
    #[error("Pijul channel changed since last import. Please unrecord channel {} to state {}", channel, state.to_base32())]
    MerkleChanged {
        channel: String,
        state: libpijul::pristine::Merkle,
    },
}
/// A Pijul repository opened for a Git import, plus import bookkeeping.
struct OpenRepo {
    repo: Repository,
    /// Optional CSV statistics output (from `--stats`).
    stats: Option<std::fs::File>,
    /// Number of commits imported so far.
    n: usize,
    /// Run consistency checks every `check` commits (0 = never).
    check: usize,
    /// The Git commit currently checked out in the working copy.
    current_commit: Option<git2::Oid>,
}
impl Git {
    /// Import a Git repository into Pijul: open or create the Pijul
    /// repository, load the Git commit DAG from HEAD, then import it.
    pub fn run(self) -> Result<(), anyhow::Error> {
        // Open an existing Pijul repository, or create one in place.
        let repo = if let Ok(repo) = Repository::find_root(self.repo_path.clone()) {
            repo
        } else {
            Repository::init(self.repo_path.clone())?
        };
        let git = git2::Repository::open(&repo.path)?;
        let head = git.head()?;
        info!("Loading history…");
        let oid = head.target().unwrap();
        // Auxiliary Sanakirja environment (in .pijul/git) remembering
        // which commits were already imported by previous runs.
        let mut path_git = repo.path.join(libpijul::DOT_DIR);
        path_git.push("git");
        std::fs::create_dir_all(&path_git)?;
        let mut env_git = ::sanakirja::Env::new(&path_git, 1 << 15)?;
        let dag = Dag::dfs(&git, oid, &mut env_git)?;
        trace!(target: "dag", "{:?}", dag);
        info!("Done");
        let mut pristine = repo.path.join(DOT_DIR);
        pristine.push(PRISTINE_DIR);
        std::fs::create_dir_all(&pristine)?;
        let mut repo = OpenRepo {
            repo,
            // Best effort: stats are silently disabled when the file
            // cannot be created.
            stats: self.stats.and_then(|f| std::fs::File::create(f).ok()),
            n: 0,
            check: self.check,
            current_commit: None,
        };
        import(&git, &mut env_git, &mut repo, &dag)?;
        Ok(())
    }
}
/// The Git commit graph, as parent/child adjacency maps plus the roots
/// from which the import starts.
#[derive(Debug)]
struct Dag {
    children: BTreeMap<git2::Oid, Vec<git2::Oid>>,
    parents: BTreeMap<git2::Oid, Vec<git2::Oid>>,
    /// Starting commits; the state is `Some` when the commit was
    /// already imported by a previous run.
    root: Vec<(git2::Oid, Option<libpijul::pristine::Merkle>)>,
}
impl Dag {
    /// Load a Git repository in memory. The main reason this is
    /// needed is to compute the *backward* relations from a commit to
    /// its parents.
    ///
    /// Commits whose import state was saved in `env_git` by a previous
    /// run become roots carrying their recorded Merkle state, so the
    /// import can resume incrementally.
    fn dfs(
        git: &git2::Repository,
        oid: git2::Oid,
        env_git: &mut ::sanakirja::Env,
    ) -> Result<Self, anyhow::Error> {
        use ::sanakirja::Transaction;
        let mut stack = vec![git.find_commit(oid)?];
        let mut oids_set = BTreeSet::new();
        let mut dag = Dag {
            children: BTreeMap::new(),
            parents: BTreeMap::new(),
            root: Vec::new(),
        };
        // `git2::Oid` is `Copy`; no need to clone.
        oids_set.insert(oid);
        let mut txn_git = ::sanakirja::Env::mut_txn_begin(env_git)?;
        // Table 0 maps already-imported commits to the Pijul state
        // (Merkle) they produced; created on first use.
        let db: ::sanakirja::Db<git2::Oid, libpijul::pristine::Merkle> =
            if let Some(db) = txn_git.root(0) {
                db
            } else {
                txn_git.create_db()?
            };
        let mut state = HashMap::new();
        for (commit, merk) in txn_git.iter(&db, None) {
            state.insert(commit, merk);
        }
        debug!("state = {:?}", state);
        while let Some(commit) = stack.pop() {
            // Already imported: record as a root with its known state
            // and stop descending.
            if let Some(state) = state.get(&commit.id()) {
                dag.root.push((commit.id(), Some(*state)));
                continue;
            }
            let mut has_parents = false;
            for p in commit.parents() {
                trace!("parent {:?}", p);
                dag.children
                    .entry(p.id())
                    .or_default()
                    .push(commit.id());
                dag.parents
                    .entry(commit.id())
                    .or_default()
                    .push(p.id());
                if oids_set.insert(p.id()) {
                    stack.push(p);
                }
                has_parents = true
            }
            // Parentless commits are roots with no recorded state.
            if !has_parents {
                dag.root.push((commit.id(), None))
            }
        }
        txn_git.set_root(0, db);
        ::sanakirja::Commit::commit(txn_git)?;
        Ok(dag)
    }

    /// Decrement the reference count of each parent of `oid`, dropping
    /// the temporary Pijul channel of any parent no longer referenced.
    fn collect_dead_parents<T: MutTxnTExt>(
        &self,
        oid: &git2::Oid,
        todo: &mut Todo,
        txn: &mut T,
    ) -> Result<(), anyhow::Error> {
        if let Some(parents) = self.parents.get(oid) {
            debug!("parents {:?}", parents);
            for p in parents {
                let rc = todo.refs.get_mut(p).unwrap();
                *rc -= 1;
                if *rc == 0 {
                    let p_name = format!("{}", p);
                    debug!("dropping channel {:?}", p_name);
                    txn.drop_channel(&p_name)?;
                }
            }
        }
        Ok(())
    }

    /// Schedule all children of `oid` for the next round, bumping the
    /// reference count of `oid` once per child.
    fn insert_children_in_todo(&self, oid: &git2::Oid, todo: &mut Todo) {
        if let Some(c) = self.children.get(oid) {
            for child in c {
                // Bug fix: previously logged the whole vector `c` on
                // every iteration instead of the current `child`.
                debug!("child = {:?}", child);
                if todo.next_todo_set.insert(*child) {
                    todo.next_todo.push(*child);
                }
                *todo.refs.entry(*oid).or_insert(0) += 1;
            }
        } else {
            debug!("no children")
        }
    }
}
/// Work queue for the commit import: the current round (`todo`), the
/// next round (`next_todo`), and per-commit reference counts.
#[derive(Debug)]
struct Todo {
    todo: Vec<git2::Oid>,
    // Membership set mirroring `todo`, to avoid duplicate scheduling.
    todo_set: HashSet<git2::Oid>,
    next_todo: Vec<git2::Oid>,
    next_todo_set: HashSet<git2::Oid>,
    // For each key k, number of items in the union of todo and
    // next_todo that have k as a parent. Moreover, all commits that
    // were imported are in this map.
    refs: HashMap<git2::Oid, usize>,
}
impl Todo {
    /// An empty work queue.
    fn new() -> Self {
        Self {
            todo: Vec::new(),
            todo_set: HashSet::new(),
            next_todo: Vec::new(),
            next_todo_set: HashSet::new(),
            refs: HashMap::new(),
        }
    }
    /// Promote the "next" round to the current one. The vector passed
    /// in (typically the drained previous queue) becomes the new
    /// `next_todo`, recycling its allocation; the membership sets are
    /// rotated the same way, with the new next-set emptied.
    fn swap_next(&mut self, todo: Vec<git2::Oid>) {
        self.todo = std::mem::replace(&mut self.next_todo, todo);
        std::mem::swap(&mut self.todo_set, &mut self.next_todo_set);
        self.next_todo_set.clear();
    }
    /// Enqueue `oid` for the next round, unless already scheduled.
    fn insert_next(&mut self, oid: git2::Oid) {
        if self.next_todo_set.insert(oid) {
            self.next_todo.push(oid)
        }
    }
    /// True when the current round has no work left.
    fn is_empty(&self) -> bool {
        self.todo.is_empty()
    }
    /// True when every commit in `parents` has already been imported
    /// (i.e. appears as a key of `refs`).
    fn all_processed(&self, parents: &[git2::Oid]) -> bool {
        parents.iter().all(|p| self.refs.contains_key(p))
    }
}
/// Import the entire Git DAG into Pijul.
///
/// Processes commits in rounds: a commit is imported only once all of
/// its parents have been, each commit getting its own Pijul channel
/// (named after its oid) forked from its first parent's channel.
fn import(
    git: &git2::Repository,
    env_git: &mut ::sanakirja::Env,
    repo: &mut OpenRepo,
    dag: &Dag,
) -> Result<(), anyhow::Error> {
    let mut ws = libpijul::ApplyWorkspace::new();
    let mut todo = Todo::new();
    // Seed the todo list from the DAG roots.
    let txn = repo.repo.pristine.mut_txn_begin();
    for &(oid, merkle) in dag.root.iter() {
        if let Some(merkle) = merkle {
            // Already imported in a previous run: verify the Pijul
            // channel still ends at the recorded state.
            let oid_ = format!("{}", oid);
            let channel = txn.load_channel(&oid_).unwrap();
            let (_, (_, merkle_)) = txn
                .changeid_rev_log(&channel.borrow(), None)
                .next()
                .unwrap();
            if merkle != merkle_ {
                return Err((Error::MerkleChanged {
                    channel: oid_,
                    state: merkle,
                })
                .into());
            }
            // Only its children need importing.
            if let Some(children) = dag.children.get(&oid) {
                *todo.refs.entry(oid).or_insert(0) += children.len();
                for c in children.iter() {
                    todo.insert_next(*c);
                }
            }
        } else {
            // Fresh root: schedule it and pre-count references from
            // its parents, if any.
            todo.insert_next(oid);
            if let Some(parents) = dag.parents.get(&oid) {
                for p in parents.iter() {
                    *todo.refs.entry(*p).or_insert(0) += 1;
                }
            }
        }
    }
    std::mem::drop(txn);
    todo.swap_next(Vec::new());
    while !todo.is_empty() {
        info!("TODO: {:?}", todo);
        let mut todo_ = std::mem::replace(&mut todo.todo, Vec::new());
        {
            // One Pijul transaction per round.
            let mut txn = repo.repo.pristine.mut_txn_begin();
            let mut draining = todo_.drain(..);
            while let Some(oid) = draining.next() {
                let mut channel = if let Some(parents) = dag.parents.get(&oid) {
                    // If we don't have all the parents, continue.
                    if !todo.all_processed(&parents) {
                        todo.insert_next(oid);
                        continue;
                    }
                    // Fork the first parent's channel under this
                    // commit's name.
                    let first_parent = parents.iter().next().unwrap();
                    let parent_name = format!("{}", first_parent);
                    let parent_channel = txn.load_channel(&parent_name).unwrap();
                    let name = format!("{}", oid);
                    let channel = txn.fork(&parent_channel, &name)?;
                    channel
                } else {
                    // Create a new channel for this commit.
                    let name = format!("{}", oid);
                    let channel = txn.open_or_create_channel(&name)?;
                    channel
                };
                let mut stats = Stats::new(oid);
                // Bring the channel up to date with all parents, then
                // import the commit itself and persist its state.
                import_commit_parents(
                    repo,
                    dag,
                    &mut txn,
                    &mut channel,
                    &oid,
                    &mut ws,
                    &mut stats,
                )?;
                let state = import_commit(git, repo, &mut txn, &mut channel, &oid, &mut stats)?;
                save_state(env_git, &oid, state)?;
                dag.collect_dead_parents(&oid, &mut todo, &mut txn)?;
                dag.insert_children_in_todo(&oid, &mut todo);
                if let Some(ref mut f) = repo.stats {
                    stats.write(repo.n, &repo.repo.path, f)?
                }
                // Just add the remaining commits to the todo list,
                // because we prefer to move each channel as far as
                // possible before switching channels.
                while let Some(oid) = draining.next() {
                    todo.insert_next(oid)
                }
            }
            txn.commit()?;
        }
        todo.swap_next(todo_)
    }
    Ok(())
}
/// Persist the mapping `oid -> state` in the auxiliary Sanakirja
/// database, so a later run can resume the import from this commit.
fn save_state(
    git: &mut ::sanakirja::Env,
    oid: &git2::Oid,
    state: libpijul::pristine::Merkle,
) -> Result<(), anyhow::Error> {
    use ::sanakirja::{Commit, Transaction};
    let mut txn = ::sanakirja::Env::mut_txn_begin(git)?;
    // Open table 0, creating it on first use.
    let mut db: ::sanakirja::Db<git2::Oid, libpijul::pristine::Merkle> =
        if let Some(db) = txn.root(0) {
            db
        } else {
            txn.create_db()?
        };
    txn.put(&mut rand::thread_rng(), &mut db, *oid, state)?;
    txn.set_root(0, db);
    txn.commit()?;
    Ok(())
}
/// Compute the plan for bringing `channel` up to date before importing
/// `oid`: the changes from the parents' channels that are missing from
/// `channel` (in application order), plus a flag saying whether the
/// whole working copy must be output afterwards.
fn make_apply_plan<T: TxnTExt>(
    repo: &OpenRepo,
    txn: &T,
    channel: &ChannelRef<T>,
    dag: &Dag,
    oid: &git2::Oid,
) -> (bool, Vec<(libpijul::pristine::Hash, u64)>) {
    let mut to_apply = Vec::new();
    let mut to_apply_set = BTreeSet::new();
    let mut needs_output = false;
    if let Some(parents) = dag.parents.get(&oid) {
        for p in parents {
            // If one of the parents is not the repo's current commit,
            // then we're doing either a merge or a checkout of
            // another branch. If that is the case, we need to output
            // the entire repository to update the
            // tree/revtree/inodes/revinodes tables.
            if let Some(current_commit) = repo.current_commit {
                if current_commit != *p {
                    needs_output = true
                }
            }
            let p_name = format!("{}", p);
            let p_channel = txn.load_channel(&p_name).unwrap();
            // Collect every change of the parent's channel that is not
            // yet on `channel`, de-duplicated across parents.
            for (n, (h, _)) in txn.log(&p_channel.borrow(), 0) {
                if txn.has_change(&channel, h).is_none() {
                    if to_apply_set.insert(h) {
                        to_apply.push((h, n));
                    }
                }
            }
        }
    } else {
        // Root commit: the channel starts empty, output everything.
        needs_output = true
    }
    // Since we're pulling from multiple channels, the change numbers
    // are not necessarily in order (especially since we've
    // de-duplicated using `to_apply_set`).
    to_apply.sort_by_key(|&(_, n)| n);
    (needs_output, to_apply)
}
/// Apply the changes corresponding to a commit's parents to `channel`.
fn import_commit_parents<T: TxnTExt + MutTxnTExt>(
    repo: &mut OpenRepo,
    dag: &Dag,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    oid: &git2::Oid,
    ws: &mut libpijul::ApplyWorkspace,
    stats: &mut Stats,
) -> Result<(), anyhow::Error> {
    // Apply all the parent's logs to `channel`
    let (needs_output, to_apply) = make_apply_plan(repo, txn, channel, dag, oid);
    let parent_application_time = std::time::Instant::now();
    for h in to_apply.iter() {
        debug!("to_apply {:?}", h)
    }
    for (h, _) in to_apply.iter() {
        info!("applying {:?} to {:?}", h, channel.borrow().name());
        txn.apply_change_ws(&repo.repo.changes, channel, *h, ws)?;
        // Optional expensive consistency check (--check).
        if repo.check > 0 && repo.n % repo.check == 0 {
            check_alive(&repo.repo.changes, txn, channel, line!())?;
        }
    }
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_alive(&repo.repo.changes, txn, channel, line!())?;
    }
    stats.parent_application_time = if to_apply.is_empty() {
        std::time::Duration::from_secs(0)
    } else {
        parent_application_time.elapsed()
    };
    if repo.check > 0 && repo.n % repo.check == 0 && !to_apply.is_empty() {
        txn.check_channel_log(&channel);
    }
    debug!(
        "last_recorded {:?}, name {:?}",
        repo.repo.config.current_channel,
        channel.borrow().name()
    );
    // Refresh the working copy when changes were applied, or when the
    // plan requires a full output (merge / branch switch).
    stats.output_time = if !to_apply.is_empty() || needs_output {
        debug!("outputting");
        let output_time = std::time::Instant::now();
        txn.output_repository_no_pending(
            &mut repo.repo.working_copy,
            &repo.repo.changes,
            channel,
            "",
            false,
        )?;
        let t = output_time.elapsed();
        if repo.check > 0 && repo.n % repo.check == 0 {
            check_alive(&repo.repo.changes, txn, channel, line!())?;
        }
        t
    } else {
        std::time::Duration::from_secs(0)
    };
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_tree_inodes(txn, &channel.borrow());
    }
    Ok(())
}
/// Reset to the Git commit specified by `child`, telling Pijul which
/// files were moved in the reset.
///
/// Returns the commit object and the set of repository-relative paths
/// that need to be re-recorded on the Pijul side.
fn git_reset<'a, T: TxnTExt + MutTxnTExt>(
    git: &'a git2::Repository,
    repo: &mut OpenRepo,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    child: &git2::Oid,
    stats: &mut Stats,
) -> Result<(git2::Object<'a>, BTreeSet<PathBuf>), anyhow::Error> {
    // Reset the Git branch.
    debug!("resetting the git branch to {:?}", child);
    let reset_time = std::time::Instant::now();
    let object = git.find_object(*child, None)?;
    // Set to true by the progress callback when the checkout touches a
    // non-symlink file.
    let reset_was_useful = Rc::new(RefCell::new(false));
    let mut builder = git2::build::CheckoutBuilder::new();
    let repo_path = repo.repo.path.clone();
    let reset_was_useful_ = reset_was_useful.clone();
    builder
        .force()
        .remove_untracked(true)
        .remove_ignored(true)
        .progress(move |file, a, b| {
            debug!("Git progress: {:?} {:?} {:?}", file, a, b);
            if let Some(file) = file {
                let file = repo_path.join(file);
                if let Ok(meta) = std::fs::metadata(&file) {
                    if !meta.file_type().is_symlink() {
                        *reset_was_useful_.borrow_mut() = true
                    }
                }
            }
        });
    builder.notify(|notif, file, _, _, _| {
        info!("Git reset: {:?} {:?}", notif, file);
        true
    });
    git.reset(&object, git2::ResetType::Hard, Some(&mut builder))?;
    repo.current_commit = Some(*child);
    stats.reset_time = reset_time.elapsed();
    debug!("reset done");
    let mut prefixes = BTreeSet::new();
    {
        let commit = object.as_commit().unwrap();
        let new_tree = commit.tree().unwrap();
        debug!("inspecting commit");
        let git_diff_time = std::time::Instant::now();
        // Diff against every parent to find renames, deletions and
        // edits, with rename detection enabled.
        for parent in commit.parents() {
            let old_tree = parent.tree().unwrap();
            let mut diff = git
                .diff_tree_to_tree(Some(&old_tree), Some(&new_tree), None)
                .unwrap();
            diff.find_similar(None).unwrap();
            let mut moves = Vec::new();
            for delta in diff.deltas() {
                let old_path = delta.old_file().path().unwrap();
                let new_path = delta.new_file().path().unwrap();
                match delta.status() {
                    git2::Delta::Renamed => {
                        info!(
                            "mv {:?} {:?}",
                            old_path.to_string_lossy(),
                            new_path.to_string_lossy()
                        );
                        if let Ok((vertex, _)) = txn.follow_oldest_path(
                            &repo.repo.changes,
                            &channel,
                            &old_path.to_string_lossy(),
                        ) {
                            if let Some(inode) = txn.get_revinodes(vertex, None) {
                                if let Some(old_path) = libpijul::fs::inode_filename(txn, inode) {
                                    debug!(
                                        "moving {:?} ({:?}) from {:?} to {:?}",
                                        inode, vertex, old_path, new_path
                                    );
                                    // Two-step move via a random
                                    // temporary name — presumably so
                                    // swapped/overlapping renames don't
                                    // collide; TODO confirm.
                                    let mut tmp_path = new_path.to_path_buf();
                                    tmp_path.pop();
                                    use rand::Rng;
                                    let s: String = rand::thread_rng()
                                        .sample_iter(&rand::distributions::Alphanumeric)
                                        .take(30)
                                        .collect();
                                    tmp_path.push(&s);
                                    if let Err(e) =
                                        txn.move_file(&old_path, &tmp_path.to_string_lossy())
                                    {
                                        error!("{}", e);
                                    } else {
                                        moves.push((tmp_path, new_path));
                                    }
                                }
                            }
                        }
                        let new_path = new_path.to_path_buf();
                        prefixes.insert(new_path);
                    }
                    git2::Delta::Deleted => {
                        let old_path = old_path.to_path_buf();
                        prefixes.insert(old_path);
                    }
                    _ => {
                        // NOTE(review): symlink deltas appear to be
                        // skipped here — confirm this is intended.
                        if delta.new_file().mode() != git2::FileMode::Link {
                            debug!("delta old = {:?} new = {:?}", old_path, new_path);
                            let old_path = old_path.to_path_buf();
                            let new_path = new_path.to_path_buf();
                            prefixes.insert(old_path);
                            prefixes.insert(new_path);
                        }
                    }
                }
            }
            debug!("moves = {:?}", moves);
            // Second step of each rename: temporary name -> final name.
            for (a, b) in moves.drain(..) {
                if let Err(e) = txn.move_file(&a.to_string_lossy(), &b.to_string_lossy()) {
                    error!("{}", e);
                }
            }
        }
        stats.git_diff_time = git_diff_time.elapsed();
        debug!("done inspecting commit");
        // An empty prefix means "record everything"; more specific
        // prefixes are redundant in that case.
        if prefixes.contains(std::path::Path::new("")) {
            prefixes.clear();
        }
        info!("record prefixes {:?}", prefixes);
    }
    Ok((object, prefixes))
}
/// Reset to the Git commit specified as `child`, and record the
/// corresponding change in Pijul.
///
/// Returns the channel state (Merkle) after the recorded change.
fn import_commit<T: TxnTExt + MutTxnTExt>(
    git: &git2::Repository,
    repo: &mut OpenRepo,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    child: &git2::Oid,
    stats: &mut Stats,
) -> Result<libpijul::pristine::Merkle, anyhow::Error> {
    let (object, prefixes) = git_reset(git, repo, txn, channel, child, stats)?;
    // Track every touched path.
    // NOTE(review): `p` is repository-relative but `metadata` resolves
    // against the process cwd — confirm the importer always runs from
    // the repository root.
    for p in prefixes.iter() {
        if let Ok(m) = std::fs::metadata(&p) {
            if m.is_dir() {
                txn.add_dir(p.to_str().unwrap()).unwrap_or(());
            } else {
                txn.add_file(p.to_str().unwrap()).unwrap_or(());
            }
        }
    }
    let commit = object.as_commit().unwrap();
    let signature = commit.author();
    // Record+Apply
    info!("recording on channel {:?}", channel.borrow().name());
    let record_time = std::time::Instant::now();
    let prefix_vec: Vec<_> = prefixes.into_iter().collect();
    let rec = record_apply(
        txn,
        channel,
        &mut repo.repo.working_copy,
        &repo.repo.changes,
        &repo.repo.path,
        &prefix_vec,
        // Build the Pijul change header from the Git commit metadata.
        libpijul::change::ChangeHeader {
            message: commit.message().unwrap().to_string(),
            authors: vec![libpijul::change::Author {
                name: signature.name().unwrap().to_string(),
                email: signature.email().map(|e| e.to_string()),
                full_name: None,
            }],
            description: None,
            timestamp: chrono::DateTime::from_utc(
                chrono::NaiveDateTime::from_timestamp(signature.when().seconds(), 0),
                chrono::Utc,
            ),
        },
    );
    let (n_actions, hash, state) = match rec {
        Ok(x) => x,
        Err(e) => match e.downcast() {
            // Not fatal: the commit's content was already recorded on
            // this channel.
            Ok(libpijul::Error::ChangeAlreadyOnChannel { hash }) => {
                error!("change already on channel: {:?}", hash);
                return Ok(txn.current_state(&channel.borrow()).unwrap());
            }
            Ok(e) => return Err(e.into()),
            Err(e) => return Err(e),
        },
    };
    stats.record_time = record_time.elapsed();
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_alive(&repo.repo.changes, txn, channel, line!())?;
    }
    stats.n_actions = n_actions;
    stats.hash = hash;
    // Remember this channel as the current one in the config.
    if let Some(ref mut cur) = repo.repo.config.current_channel {
        cur.clear();
        cur.push_str(channel.borrow().name());
    } else {
        repo.repo.config.current_channel = Some(channel.borrow().name().to_string())
    }
    repo.repo.save_config()?;
    if repo.check > 0 && repo.n % repo.check == 0 {
        check_tree_inodes(txn, &channel.borrow());
    }
    repo.n += 1;
    if let Some(state) = state {
        Ok(state)
    } else {
        Ok(txn.current_state(&channel.borrow()).unwrap())
    }
}
/// Record the working-copy state under `prefixes` as a new change with
/// the given `header`, and apply it to `channel`.
///
/// Returns the number of recorded actions, the hash of the new change,
/// and the resulting channel state; `(0, None, current_state)` when
/// nothing changed.
fn record_apply<T: TxnT + TxnTExt + MutTxnTExt, C: libpijul::changestore::ChangeStore>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    working_copy: &mut libpijul::working_copy::FileSystem,
    changes: &C,
    repo_path: &Path,
    prefixes: &[PathBuf],
    header: libpijul::change::ChangeHeader,
) -> Result<
    (
        usize,
        Option<libpijul::pristine::Hash>,
        Option<libpijul::pristine::Merkle>,
    ),
    anyhow::Error,
> {
    let mut state = libpijul::RecordBuilder::new();
    working_copy.record_prefixes(txn, channel, changes, &mut state, repo_path, prefixes)?;
    let rec = state.finish();
    if rec.actions.is_empty() {
        // Nothing to record.
        return Ok((0, None, txn.current_state(&channel.borrow())));
    }
    let actions: Vec<_> = rec
        .actions
        .into_iter()
        .map(|rec| rec.globalize(txn))
        .collect();
    let n = actions.len();
    let (dependencies, extra_known) = libpijul::change::dependencies(txn, channel, actions.iter());
    let mut change = libpijul::change::LocalChange::make_change(
        txn,
        channel,
        actions,
        rec.contents,
        header,
        Vec::new(),
    );
    change.dependencies = dependencies;
    change.extra_known = extra_known;
    debug!("saving change");
    let hash = changes.save_change(&change).unwrap();
    debug!("saved");
    let (_, m) = txn.apply_local_change(channel, &change, hash, &rec.updatables)?;
    Ok((n, Some(hash), Some(m)))
}
/// Per-commit import statistics, written as one CSV row by `write`.
struct Stats {
    /// Git commit being imported.
    child: git2::Oid,
    /// Number of change files in the changestore.
    n_changes: usize,
    parent_application_time: std::time::Duration,
    output_time: std::time::Duration,
    reset_time: std::time::Duration,
    git_diff_time: std::time::Duration,
    record_time: std::time::Duration,
    /// Number of actions in the recorded change.
    n_actions: usize,
    /// Files and directories counted in the working copy.
    n_files: usize,
    n_dirs: usize,
    /// Total size of working-copy files, in bytes.
    total_size: u64,
    /// On-disk sizes of the changestore and pristine, in bytes.
    changes_size: u64,
    pristine_size: u64,
    /// Hash of the recorded change, if any.
    hash: Option<libpijul::pristine::Hash>,
}
impl Stats {
    /// Zeroed statistics for the import of `child`.
    fn new(child: git2::Oid) -> Self {
        let z = std::time::Duration::from_secs(0);
        Stats {
            child,
            n_changes: 0,
            parent_application_time: z,
            output_time: z,
            reset_time: z,
            git_diff_time: z,
            record_time: z,
            n_actions: 0,
            n_files: 0,
            n_dirs: 0,
            total_size: 0,
            changes_size: 0,
            pristine_size: 0,
            hash: None,
        }
    }
    /// Measure working-copy/pristine/changestore sizes and append one
    /// CSV row to `f`; `n` is the index of this commit in the import.
    /// Also resets libpijul's global timers afterwards.
    fn write(
        &mut self,
        n: usize,
        repo_path: &Path,
        f: &mut std::fs::File,
    ) -> Result<(), anyhow::Error> {
        // Count files.
        let mut walk = ignore::WalkBuilder::new(&repo_path);
        walk.add_ignore(DOT_DIR).unwrap();
        for f in walk.build() {
            let meta = f?.metadata()?;
            if meta.is_dir() {
                self.n_dirs += 1
            } else {
                self.n_files += 1;
                self.total_size += meta.len();
            }
        }
        // Measure the pristine and the changestore on disk.
        let dot_dir = repo_path.join(DOT_DIR);
        let pristine_dir = dot_dir.join(PRISTINE_DIR);
        let changes_dir = dot_dir.join(CHANGES_DIR);
        if let Ok(walk) = std::fs::read_dir(&pristine_dir) {
            for f in walk {
                let meta = f?.metadata()?;
                self.pristine_size += meta.len();
            }
        }
        if let Ok(walk) = std::fs::read_dir(&changes_dir) {
            for f in walk {
                let meta = f?.metadata()?;
                self.changes_size += meta.len();
                self.n_changes += 1
            }
        }
        let timers = libpijul::get_timers();
        // One CSV row; the column order must stay stable.
        writeln!(
            f, "{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}",
            self.child,
            n,
            self.parent_application_time.as_secs_f64(),
            timers.alive_output.as_secs_f64(),
            timers.alive_retrieve.as_secs_f64(),
            timers.alive_graph.as_secs_f64(),
            timers.alive_contents.as_secs_f64(),
            timers.alive_write.as_secs_f64(),
            timers.apply.as_secs_f64(),
            timers.record.as_secs_f64(),
            timers.repair_context.as_secs_f64(),
            timers.check_cyclic_paths.as_secs_f64(),
            timers.find_alive.as_secs_f64(),
            self.output_time.as_secs_f64(),
            self.reset_time.as_secs_f64(),
            self.git_diff_time.as_secs_f64(),
            self.record_time.as_secs_f64(),
            self.n_actions,
            self.n_files,
            self.n_dirs,
            self.total_size,
            self.changes_size,
            self.pristine_size,
            if let Some(ref h) = self.hash { h.to_base32() } else { String::new() },
        )?;
        libpijul::reset_timers();
        Ok(())
    }
}
/// Check that each alive vertex in the graph is reachable, and vice-versa.
///
/// On failure, dumps per-file debug graphs to `debug_*` files in the
/// current directory and panics, reporting `line` (the call site).
fn check_alive<T: TxnT, C: libpijul::changestore::ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &ChannelRef<T>,
    line: u32,
) -> Result<(), anyhow::Error> {
    let (alive, reachable) = txn.check_alive(&channel);
    // Files with at least one inconsistent vertex.
    let mut h = BTreeSet::new();
    if !alive.is_empty() {
        for (k, file) in alive.iter() {
            debug!("alive = {:?}, file = {:?}", k, file);
            h.insert(file);
        }
    }
    if !reachable.is_empty() {
        for (k, file) in reachable.iter() {
            debug!("reachable = {:?}, file = {:?}", k, file);
            h.insert(file);
        }
    }
    // Dump one graph per offending file, plus the full root graph.
    for file in h.iter() {
        let file_ = file.unwrap().start_pos();
        let mut f = std::fs::File::create(&format!("debug_{:?}", file_))?;
        let graph = libpijul::alive::retrieve::retrieve(txn, &channel.borrow(), file_);
        graph.debug(changes, txn, &channel.borrow(), false, false, &mut f)?;
        let mut f = std::fs::File::create(&format!("debug_all_{:?}", file_))?;
        txn.debug_root(channel, file.unwrap(), &mut f)?;
    }
    if !h.is_empty() {
        panic!("alive call line {}", line);
    }
    Ok(())
}
/// Check that each inode in the inodes table maps to an alive vertex,
/// and that each inode in the tree table is reachable by only one
/// path.
fn check_tree_inodes<T: TxnT>(txn: &T, channel: &Channel<T>) {
    // Sanity check
    for (inode, vertex) in txn.iter_inodes() {
        // Walk revtree up to the root; panic if a parent link is
        // missing (orphaned inode).
        let mut inode_ = inode;
        while !inode_.is_root() {
            if let Some(next) = txn.get_revtree(inode_, None) {
                inode_ = next.parent_inode;
            } else {
                panic!("inode = {:?}, inode_ = {:?}", inode, inode_);
            }
        }
        if !txn.is_alive(&channel, vertex.inode_vertex()) {
            // Log the dead vertex's edges before panicking.
            for e in txn.iter_adjacent(
                &channel,
                vertex.inode_vertex(),
                EdgeFlags::empty(),
                EdgeFlags::all(),
            ) {
                error!("{:?} {:?} {:?}", inode, vertex, e)
            }
            panic!(
                "inode {:?}, vertex {:?}, is not alive, {:?}",
                inode,
                vertex,
                txn.tree_path(vertex)
            )
        }
    }
    // Each tree path id must map to exactly one inode.
    let mut h = HashMap::new();
    let id0 = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: libpijul::small_string::SmallString::new(),
    };
    for (id, inode) in txn.iter_tree(id0, None) {
        if let Some(inode_) = h.insert(id.clone(), inode) {
            panic!("id {:?} maps to two inodes: {:?} {:?}", id, inode, inode_);
        }
    }
}
use crate::repository::Repository;
use libpijul::pristine::{MutTxnT, TxnT};
use libpijul::MutTxnTExt;
use std::path::PathBuf;
/// Command-line arguments for `pijul fork`: create a new channel.
#[derive(Clap, Debug)]
pub struct Fork {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Channel to fork from (mutually exclusive with --change).
    #[clap(long = "channel", conflicts_with = "change")]
    channel: Option<String>,
    /// Change (hash prefix) to start the new channel from.
    #[clap(long = "change", conflicts_with = "channel")]
    change: Option<String>,
    /// Name of the new channel.
    to: String,
}
impl Fork {
    /// Create channel `self.to`, either by applying a change (and its
    /// dependencies) to a fresh channel, or by forking an existing
    /// channel.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let mut txn = repo.pristine.mut_txn_begin();
        if let Some(ref ch) = self.change {
            // --change: resolve the (possibly abbreviated) hash and
            // apply it recursively on the new channel.
            let (hash, _) = txn.hash_from_prefix(ch)?;
            let mut channel = txn.open_or_create_channel(&self.to)?;
            txn.apply_change_rec(&repo.changes, &mut channel, hash)?
        } else {
            // Fork the current (or named) channel.
            // NOTE(review): a missing source channel is silently
            // ignored here — confirm this is intended.
            let channel_name = repo.config.get_current_channel(self.channel.as_ref());
            if let Some(channel) = txn.load_channel(channel_name) {
                txn.fork(&channel, &self.to)?;
            }
        }
        txn.commit()?;
        Ok(())
    }
}
use crate::repository::Repository;
use libpijul::pristine::MutTxnT;
use libpijul::{MutTxnTExt, TxnTExt};
use std::io::Write;
use std::path::PathBuf;
/// Command-line arguments for `pijul mv`: move/rename tracked paths.
#[derive(Clap, Debug)]
pub struct Mv {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Source paths followed by the destination (last element).
    paths: Vec<PathBuf>,
}
impl Mv {
    /// Move the given paths (the last argument is the destination),
    /// both on disk and in the pristine.
    pub fn run(mut self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        // The last path is the destination.
        let to = if let Some(to) = self.paths.pop() {
            to
        } else {
            return Ok(());
        };
        let to = path(&self.repo_path, to);
        let is_dir = if let Ok(m) = std::fs::metadata(&to) {
            m.is_dir()
        } else {
            false
        };
        // Moving several sources requires a directory target.
        // NOTE(review): this returns silently rather than reporting an
        // error — confirm this is intended.
        if !is_dir && self.paths.len() > 1 {
            return Ok(());
        }
        let mut txn = repo.pristine.mut_txn_begin();
        for p in self.paths {
            debug!("p = {:?}", p);
            let source = std::fs::canonicalize(&path(&self.repo_path, p.clone()))?;
            let target = if is_dir { to.join(p) } else { to.clone() };
            debug!("target = {:?}", target);
            // Move on disk first, then record the move in the pristine
            // using repository-relative paths.
            std::fs::rename(&source, &target)?;
            let target = std::fs::canonicalize(&target)?;
            let source = source.strip_prefix(&repo.path)?;
            let target = target.strip_prefix(&repo.path)?;
            debug!("moving {:?} -> {:?}", source, target);
            txn.move_file(&source.to_string_lossy(), &target.to_string_lossy())?
        }
        txn.commit()?;
        Ok(())
    }
}
/// Resolve `path` against the optional repository root: when a root is
/// given, the path is interpreted relative to it; otherwise it is
/// returned unchanged.
fn path(root: &Option<PathBuf>, path: PathBuf) -> PathBuf {
    match root {
        Some(base) => base.join(path),
        None => path,
    }
}
/// Command-line arguments for `pijul ls`: list tracked paths.
#[derive(Clap, Debug)]
pub struct Ls {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
}
impl Ls {
    /// Print every path tracked in the working copy, one per line.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let txn = repo.pristine.txn_begin()?;
        // Lock stdout once instead of per write.
        let stdout = std::io::stdout();
        let mut out = stdout.lock();
        for (_, file_path) in txn.iter_working_copy() {
            writeln!(out, "{}", file_path)?;
        }
        Ok(())
    }
}
/// Command-line arguments for `pijul add`: start tracking paths.
#[derive(Clap, Debug)]
pub struct Add {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Paths to add.
    paths: Vec<PathBuf>,
}
impl Add {
    /// Start tracking the given paths (skipping editor backup files),
    /// then commit the transaction.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut stderr = std::io::stderr();
        for path in self.paths.iter() {
            debug!("{:?}", path);
            // Skip editor backup/autosave files ("foo~", "#foo#").
            // Char patterns are the idiomatic (and cheaper) form for
            // single-character matches.
            if let Some(p) = path.file_name() {
                if let Some(p) = p.to_str() {
                    if p.ends_with('~') || (p.starts_with('#') && p.ends_with('#')) {
                        continue;
                    }
                }
            }
            let path = path.canonicalize()?;
            let meta = std::fs::metadata(&path)?;
            // Only paths inside the repository can be tracked.
            let path = if let Ok(path) = path.strip_prefix(&repo.path) {
                path
            } else {
                continue;
            };
            let path_str = path.to_str().unwrap();
            if !txn.is_tracked(&path_str) {
                writeln!(stderr, "Adding {:?}", path)?;
                info!("Adding {:?}", path);
                txn.add(&path_str, meta.is_dir())?
            }
        }
        txn.commit()?;
        Ok(())
    }
}
/// Command-line arguments for `pijul remove`: stop tracking paths.
#[derive(Clap, Debug)]
pub struct Remove {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Paths to remove from tracking.
    paths: Vec<PathBuf>,
}
impl Remove {
    /// Stop tracking the given paths (skipping editor backup files),
    /// then commit the transaction.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        for path in self.paths.iter() {
            debug!("{:?}", path);
            // Skip editor backup/autosave files ("foo~", "#foo#"),
            // matching the filter used by `Add`. Char patterns are the
            // idiomatic form for single characters.
            if let Some(p) = path.file_name() {
                if let Some(p) = p.to_str() {
                    if p.ends_with('~') || (p.starts_with('#') && p.ends_with('#')) {
                        continue;
                    }
                }
            }
            let path = path.canonicalize()?;
            // Only paths inside the repository can be untracked.
            let path = if let Ok(path) = path.strip_prefix(&repo.path) {
                path
            } else {
                continue;
            };
            let path_str = path.to_str().unwrap();
            if txn.is_tracked(&path_str) {
                txn.remove_file(&path_str)?;
            }
        }
        txn.commit()?;
        Ok(())
    }
}
use crate::repository::*;
use libpijul::change::*;
use libpijul::pristine::MutTxnT;
use libpijul::MutTxnTExt;
use std::collections::BTreeMap;
use std::io::Write;
use std::path::PathBuf;
/// Command-line arguments for `pijul diff`: show uncommitted changes.
#[derive(Clap, Debug)]
pub struct Diff {
    /// Path to the repository.
    #[clap(long = "repository")]
    pub repo_path: Option<PathBuf>,
    /// Print the diff as JSON instead of text.
    #[clap(long = "json")]
    pub json: bool,
    /// Channel to diff against; defaults to the current channel.
    #[clap(long = "channel")]
    pub channel: Option<String>,
    /// Compute full dependencies, as a tag would.
    #[clap(long = "tag")]
    pub tag: bool,
    /// Restrict the diff to these paths.
    pub prefixes: Vec<PathBuf>,
}
impl Diff {
    /// Record the working copy against the current (or given) channel
    /// and print the resulting change, as text or as JSON.
    pub fn run(mut self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path.clone())?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut stdout = std::io::stdout();
        let mut channel =
            txn.open_or_create_channel(repo.config.get_current_channel(self.channel.as_ref()))?;
        let mut state = libpijul::RecordBuilder::new();
        if self.prefixes.is_empty() {
            // Diff the whole working copy.
            txn.record(
                &mut state,
                libpijul::Algorithm::default(),
                &mut channel,
                &mut repo.working_copy,
                &repo.changes,
                "",
            )?
        } else {
            // Diff only the given paths, made absolute first.
            self.fill_relative_prefixes()?;
            repo.working_copy.record_prefixes(
                &mut txn,
                &mut channel,
                &repo.changes,
                &mut state,
                &repo.path,
                &self.prefixes,
            )?;
        }
        let rec = state.finish();
        if rec.actions.is_empty() {
            // Nothing changed: print nothing.
            return Ok(());
        }
        let actions = rec
            .actions
            .into_iter()
            .map(|rec| rec.globalize(&txn))
            .collect();
        let mut change = LocalChange::make_change(
            &txn,
            &channel,
            actions,
            rec.contents,
            ChangeHeader::default(),
            Vec::new(),
        );
        let (dependencies, extra_known) = if self.tag {
            // A tag depends on everything on the channel.
            full_dependencies(&txn, &channel)
        } else {
            dependencies(&txn, &channel, change.changes.iter())
        };
        change.dependencies = dependencies;
        change.extra_known = extra_known;
        if self.json {
            // Group the hunks by file and print a JSON summary.
            let mut changes = BTreeMap::new();
            for ch in change.changes.iter() {
                changes
                    .entry(ch.path())
                    // `or_insert_with` avoids building a Vec when the
                    // entry already exists.
                    .or_insert_with(Vec::new)
                    .push(Status {
                        operation: match ch {
                            Record::FileMove { .. } => "file move",
                            Record::FileDel { .. } => "file del",
                            Record::FileUndel { .. } => "file undel",
                            Record::SolveNameConflict { .. } => "solve name conflict",
                            Record::UnsolveNameConflict { .. } => "unsolve name conflict",
                            Record::FileAdd { .. } => "file add",
                            Record::Edit { .. } => "edit",
                            Record::Replacement { .. } => "replacement",
                            Record::SolveOrderConflict { .. } => "solve order conflict",
                            Record::UnsolveOrderConflict { .. } => "unsolve order conflict",
                            Record::ResurrectZombies { .. } => "resurrect zombies",
                        },
                        line: ch.line(),
                    });
            }
            serde_json::to_writer_pretty(&mut std::io::stdout(), &changes)?;
            // Trailing newline; `writeln!(stdout)` is the idiomatic
            // form of `writeln!(stdout, "")`.
            writeln!(stdout)?;
        } else {
            change.write(
                &repo.changes,
                None,
                |local: &libpijul::change::Local, _| -> String {
                    format!("{}:{}", local.path, local.line)
                },
                true,
                &mut std::io::stdout(),
            )?
        }
        Ok(())
    }
    /// Make each relative prefix absolute by joining it to the current
    /// working directory.
    fn fill_relative_prefixes(&mut self) -> Result<(), anyhow::Error> {
        let cwd = std::env::current_dir()?;
        for p in self.prefixes.iter_mut() {
            if p.is_relative() {
                *p = cwd.join(&p);
            }
        }
        Ok(())
    }
}
/// One entry of the JSON diff output: the kind of operation and the
/// line it applies to, if any.
#[derive(Debug, Serialize)]
struct Status {
    operation: &'static str,
    line: Option<usize>,
}
use crate::repository::Repository;
use crate::Error;
use std::path::PathBuf;
/// Command-line arguments for `pijul debug`: dump a channel's graph
/// (only available in debug builds).
#[derive(Clap, Debug)]
pub struct Debug {
    /// Path to the repository.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    /// Channel to dump; defaults to the current channel.
    #[clap(long = "channel")]
    channel: Option<String>,
}
impl Debug {
    /// Dump the selected channel's internal graph to stdout.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        use libpijul::pristine::TxnT;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = if let Some(channel) = txn.load_channel(&channel_name) {
            channel
        } else {
            return Err((Error::NoSuchChannel {
                channel: channel_name.to_string(),
            })
            .into());
        };
        let channel = channel.borrow();
        txn.debug(&channel, std::io::stdout())?;
        Ok(())
    }
}
use crate::repository::Repository;
use crate::Error;
use libpijul::pristine::{ChangeId, Channel, EdgeFlags, TxnT, Vertex};
use libpijul::vertex_buffer::VertexBuffer;
use libpijul::TxnTExt;
use std::collections::HashSet;
use std::path::PathBuf;
// Arguments of `pijul credit`: annotate a file with the changes that
// introduced each of its lines.
// (`//` comments rather than `///` on fields: doc comments on
// clap-derived fields become part of the generated --help output.)
#[derive(Clap, Debug)]
pub struct Credit {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Channel to annotate from; defaults to the current channel.
    #[clap(long = "channel")]
    channel: Option<String>,
    // File to annotate.
    file: PathBuf,
}
impl Credit {
    /// Print the given file annotated: before each group of lines, write
    /// the base32 ids of the changes that introduced it.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let has_repo_path = self.repo_path.is_some();
        let mut repo = Repository::find_root(self.repo_path)?;
        let txn = repo.pristine.txn_begin()?;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = if let Some(channel) = txn.load_channel(&channel_name) {
            channel
        } else {
            return Err((Error::NoSuchChannel {
                channel: channel_name.to_string(),
            })
            .into());
        };
        // Passing `--channel` also makes that channel the current one.
        if self.channel.is_some() {
            repo.config.current_channel = self.channel;
            repo.save_config()?;
        }
        // NOTE(review): the file argument is canonicalized only when
        // `--repository` was given explicitly; otherwise it is used as
        // typed — confirm this asymmetry is intended.
        let (pos, _ambiguous) = if has_repo_path {
            let root = std::fs::canonicalize(repo.path.join(&self.file))?;
            let path = root.strip_prefix(&repo.path)?.to_str().unwrap();
            txn.follow_oldest_path(&repo.changes, &channel, &path)?
        } else {
            let path = self.file.to_str().unwrap();
            txn.follow_oldest_path(&repo.changes, &channel, &path)?
        };
        let channel_ = channel.borrow();
        txn.output_file(
            &repo.changes,
            &channel,
            pos,
            &mut Creditor::new(std::io::stdout(), &txn, &channel_),
        )?;
        Ok(())
    }
}
/// A `VertexBuffer` that writes a file's contents annotated with the
/// changes that introduced each group of lines (used by `pijul credit`).
pub struct Creditor<'a, W: std::io::Write, T: TxnT> {
    // Destination of the annotated output.
    w: W,
    // Scratch buffer for the current vertex's contents.
    buf: Vec<u8>,
    // Whether the last write ended at the beginning of a line.
    new_line: bool,
    // Distinct changes introducing the current vertex (reused between vertices).
    changes: HashSet<ChangeId>,
    txn: &'a T,
    channel: &'a Channel<T>,
}
impl<'a, W: std::io::Write, T: TxnT> Creditor<'a, W, T> {
    /// Build a `Creditor` writing to `w`, resolving each vertex's
    /// introducing changes through `txn` and `channel`.
    pub fn new(w: W, txn: &'a T, channel: &'a Channel<T>) -> Self {
        Creditor {
            w,
            new_line: true,
            buf: Vec::new(),
            txn,
            channel,
            changes: HashSet::new(),
        }
    }
}
impl<'a, W: std::io::Write, T: TxnT> VertexBuffer for Creditor<'a, W, T> {
    /// Write one vertex's contents, preceded by the base32 ids of the
    /// changes that introduced it, each content line prefixed with "> ".
    fn output_line<C: FnOnce(&mut Vec<u8>) -> Result<(), anyhow::Error>>(
        &mut self,
        v: Vertex<ChangeId>,
        c: C,
    ) -> Result<(), anyhow::Error> {
        debug!("outputting vertex {:?}", v);
        self.buf.clear();
        c(&mut self.buf)?;
        use libpijul::pristine::Base32;
        if !v.change.is_root() {
            // Collect the distinct changes introducing parent edges of
            // this vertex; `changes` is reused to avoid reallocating.
            self.changes.clear();
            for e in self
                .txn
                .iter_adjacent(self.channel, v, EdgeFlags::PARENT, EdgeFlags::all())
            {
                self.changes.insert(e.introduced_by);
            }
            if !self.new_line {
                write!(self.w, "\n")?;
            }
            // Comma-separated list of introducers, then a newline.
            let mut is_first = true;
            for c in self.changes.drain() {
                write!(
                    self.w,
                    "{}{}",
                    if is_first { "" } else { ", " },
                    c.to_base32()
                )?;
                is_first = false;
            }
            writeln!(self.w, "")?;
        }
        let ends_with_newline = self.buf.ends_with(b"\n");
        if let Ok(s) = std::str::from_utf8(&self.buf[..]) {
            for l in s.lines() {
                self.w.write_all(b"> ")?;
                self.w.write_all(l.as_bytes())?;
                self.w.write_all(b"\n")?;
            }
        }
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    /// Write a conflict marker; the marker constants start with `'\n'`,
    /// which is skipped when we are already at the beginning of a line.
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), anyhow::Error> {
        // `write_all` instead of `write`: a bare `write` may perform a
        // partial write and silently drop the rest of the marker.
        if !self.new_line {
            self.w.write_all(s.as_bytes())?;
        } else {
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        // NOTE(review): unlike `ConflictsWriter`, `self.new_line` is not
        // updated here — confirm whether that is intentional.
        Ok(())
    }
}
use crate::repository::*;
use libpijul::pristine::MutTxnT;
use libpijul::MutTxnTExt;
use std::path::PathBuf;
use tempfile::TempDir;
// Arguments of `pijul clone`. Help text lives in the `about` attributes;
// these `//` comments are for maintainers only.
#[derive(Clap, Debug)]
pub struct Clone {
    #[clap(long = "lazy", about = "only download changes with alive contents")]
    lazy: bool,
    #[clap(long = "channel", about = "set the remote channel", default_value = crate::DEFAULT_CHANNEL)]
    channel: String,
    #[clap(
        long = "change",
        about = "clone this change and its dependencies",
        conflicts_with = "state"
    )]
    change: Option<String>,
    #[clap(long = "state", about = "clone this state", conflicts_with = "change")]
    state: Option<String>,
    #[clap(long = "path", about = "clone this path", multiple(true))]
    partial_paths: Vec<String>,
    #[clap(short = 'k', about = "Do not check certificates")]
    no_cert_check: bool,
    // Remote URL or path to clone from.
    remote: String,
    // Destination directory; inferred from the remote name when absent.
    path: Option<PathBuf>,
}
impl Clone {
    /// Clone a remote repository: download into a temporary directory
    /// next to the target, output the working copy, then rename the
    /// temporary directory into place.
    pub async fn run(self) -> Result<(), anyhow::Error> {
        let mut remote =
            crate::remote::unknown_remote(&self.remote, &self.channel, self.no_cert_check).await?;
        // Target directory: the explicit path (made absolute), or a
        // directory named after the remote repository.
        let path = if let Some(path) = self.path {
            if path.is_relative() {
                let mut p = std::env::current_dir()?;
                p.push(path);
                p
            } else {
                path
            }
        } else if let Some(path) = remote.repo_name() {
            let mut p = std::env::current_dir()?;
            p.push(path);
            p
        } else {
            return Err((crate::Error::CouldNotInferRepositoryName { repo: self.remote }).into());
        };
        debug!("path = {:?}", path);
        // Clone into a temporary directory inside the same parent, so
        // the final rename stays on one filesystem.
        let parent = std::fs::canonicalize(path.parent().unwrap())?;
        let temp = TempDir::new_in(&parent)?;
        debug!("temp = {:?}", temp.path());
        let mut repo = Repository::init(Some(temp.path().to_path_buf()))?;
        let mut txn = repo.pristine.mut_txn_begin();
        let mut channel = txn.open_or_create_channel(&self.channel)?;
        if let Some(ref change) = self.change {
            // Clone a single change and its dependencies.
            let h = change.parse()?;
            remote
                .clone_tag(&mut repo, &mut txn, &mut channel, &[h])
                .await?
        } else if let Some(ref state) = self.state {
            // Clone up to the given state.
            let h = state.parse()?;
            remote
                .clone_state(&mut repo, &mut txn, &mut channel, h, self.lazy)
                .await?
        } else {
            // Clone the whole channel, possibly restricted to paths.
            remote
                .clone_channel(
                    &mut repo,
                    &mut txn,
                    &mut channel,
                    self.lazy,
                    &self.partial_paths,
                )
                .await?;
        }
        txn.output_repository_no_pending(
            &mut repo.working_copy,
            &repo.changes,
            &mut channel,
            "",
            true,
        )?;
        txn.commit()?;
        repo.config.current_channel = Some(self.channel);
        repo.save_config()?;
        // Move the fully-cloned repository to its final location.
        std::fs::rename(&temp.into_path(), &path)?;
        Ok(())
    }
}
use crate::repository::Repository;
use libpijul::pristine::{MutTxnT, TxnT};
use libpijul::MutTxnTExt;
use std::path::PathBuf;
// Arguments of `pijul checkout`: switch the working copy to a channel.
// (`//` comments rather than `///` on fields: doc comments on
// clap-derived fields become part of the generated --help output.)
#[derive(Clap, Debug)]
pub struct Checkout {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Channel to check out.
    channel: String,
}
impl Checkout {
    /// Reset the working copy to the contents of `self.channel` and
    /// make that channel the repository's current one.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        debug!("{:?}", repo.config);
        let mut txn = repo.pristine.mut_txn_begin();
        // Only touch the working copy when the channel actually exists;
        // the configuration update below happens either way.
        if let Some(mut ch) = txn.load_channel(&self.channel) {
            txn.output_repository_no_pending(
                &mut repo.working_copy,
                &repo.changes,
                &mut ch,
                "",
                true,
            )?;
        }
        txn.commit()?;
        repo.config.current_channel = Some(self.channel);
        repo.save_config()?;
        Ok(())
    }
}
use crate::repository::Repository;
use libpijul::pristine::{MutTxnT, TxnT};
use std::io::Write;
use std::path::PathBuf;
// Arguments of `pijul channel`: list, delete or rename channels.
// (`//` comments rather than `///` on fields: doc comments on
// clap-derived fields become part of the generated --help output.)
#[derive(Clap, Debug)]
pub struct Channel {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // When absent, the channels are listed.
    #[clap(subcommand)]
    subcmd: Option<SubCommand>,
}
// Subcommands of `pijul channel`.
#[derive(Clap, Debug)]
pub enum SubCommand {
    // Delete the named channel.
    #[clap(name = "delete")]
    Delete { delete: String },
    // Rename `from` to `to`; with a single argument, rename the
    // current channel to `from`.
    #[clap(name = "rename")]
    Rename { from: String, to: Option<String> },
}
impl Channel {
    /// List channels (no subcommand), or delete/rename one.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path)?;
        let mut stdout = std::io::stdout();
        // `as_deref` borrows the configured current channel name, if
        // any (replaces a manual `if let`/`as_str` expansion).
        let current = repo.config.current_channel.as_deref();
        match self.subcmd {
            None => {
                // No subcommand: list all channels, marking the current
                // one with `*`.
                let txn = repo.pristine.txn_begin()?;
                for channel in txn.iter_channels("") {
                    let channel = channel.borrow();
                    let name = channel.name();
                    if current == Some(name) {
                        writeln!(stdout, "* {}", name)?;
                    } else {
                        writeln!(stdout, "  {}", name)?;
                    }
                }
            }
            Some(SubCommand::Delete { ref delete }) => {
                let mut txn = repo.pristine.mut_txn_begin();
                txn.drop_channel(delete)?;
                txn.commit()?;
            }
            Some(SubCommand::Rename { ref from, ref to }) => {
                let mut txn = repo.pristine.mut_txn_begin();
                // With two arguments, rename `from` to `to`; with one,
                // rename the current channel to `from`.
                let (from, to) = if let Some(to) = to {
                    (from.as_str(), to.as_str())
                } else if let Some(current) = current {
                    (current, from.as_str())
                } else {
                    return Err(crate::Error::NoCurrentChannel.into());
                };
                let mut channel = if let Some(channel) = txn.load_channel(from) {
                    channel
                } else {
                    return Err((crate::Error::ChannelNotFound {
                        channel: from.to_string(),
                    })
                    .into());
                };
                txn.rename_channel(&mut channel, to)?;
                txn.commit()?;
            }
        }
        Ok(())
    }
}
use crate::repository::*;
use libpijul::change::Local;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::*;
use libpijul::*;
use std::path::PathBuf;
// Arguments of `pijul change`: print a change in text format.
// (`//` comments rather than `///` on fields: doc comments on
// clap-derived fields become part of the generated --help output.)
#[derive(Clap, Debug)]
pub struct Change {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Channel to read from; defaults to the current channel.
    #[clap(long = "channel")]
    channel: Option<String>,
    // Hash (or unambiguous prefix) of the change to print; defaults to
    // the channel's latest change.
    hash: Option<String>,
}
impl Change {
    /// Print a change in text format. With no hash argument, print the
    /// most recent change on the channel.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let repo = Repository::find_root(self.repo_path.clone())?;
        let txn = repo.pristine.txn_begin()?;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        let channel = txn.load_channel(channel_name).unwrap();
        let changes = repo.changes;
        // Resolve the hash: from the (possibly abbreviated) argument,
        // or from the head of the channel's log; an empty channel
        // prints nothing.
        let hash = if let Some(hash) = self.hash {
            txn.hash_from_prefix(&hash)?.0
        } else if let Some((_, (h, _))) = txn.reverse_log(&channel.borrow(), None).next() {
            h
        } else {
            return Ok(());
        };
        let change = changes.get_change(&hash).unwrap();
        // Render file positions: when the change is on the channel, show
        // the file's current ("youngest") path; otherwise fall back to
        // the path recorded in the change itself.
        let file_name = |l: &Local, inode: Position<Option<Hash>>| -> String {
            if txn.get_revchanges(&channel, hash).is_some() {
                let inode = Position {
                    change: inode.change.unwrap_or(hash),
                    pos: inode.pos,
                };
                format!(
                    "{}:{}",
                    txn.find_youngest_path(&changes, &channel, inode).unwrap().0,
                    l.line
                )
            } else {
                format!("{}:{}", l.path, l.line)
            }
        };
        let o = std::io::stdout();
        let mut o = o.lock();
        change.write(&changes, Some(hash), file_name, true, &mut o)?;
        Ok(())
    }
}
use crate::repository::Repository;
use libpijul::pristine::{Hash, Merkle, TxnT};
use libpijul::{MutTxnTExt, TxnTExt};
use std::io::Write;
use std::path::PathBuf;
// Arguments of `pijul archive`: produce a .tar.gz snapshot of a channel,
// from a remote or from the local repository.
#[derive(Clap, Debug)]
pub struct Archive {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Channel to archive; defaults to the current channel.
    #[clap(long = "channel")]
    channel: Option<String>,
    // Archive from this remote instead of the local repository.
    #[clap(long = "remote")]
    remote: Option<String>,
    #[clap(short = 'k', about = "Do not check certificates")]
    no_cert_check: bool,
    // Archive the channel at this state (Merkle), if given.
    #[clap(long = "state")]
    state: Option<String>,
    // Extra changes to apply on top of the state before archiving.
    #[clap(long = "change", multiple = true)]
    change: Vec<String>,
    // Only archive files under this prefix.
    #[clap(long = "prefix")]
    prefix: Option<String>,
    // Output file name; ".tar.gz" is appended if missing.
    #[clap(short = 'o')]
    name: String,
}
impl Archive {
pub async fn run(self) -> Result<(), anyhow::Error> {
let state: Option<Merkle> = if let Some(ref state) = self.state {
Some(state.parse()?)
} else {
None
};
let mut extra: Vec<Hash> = Vec::new();
for h in self.change.iter() {
extra.push(h.parse()?);
}
if let Some(ref rem) = self.remote {
debug!("unknown");
let mut remote = crate::remote::unknown_remote(
rem,
if let Some(ref channel) = self.channel {
channel
} else {
crate::DEFAULT_CHANNEL
},
self.no_cert_check,
)
.await?;
let mut p = std::path::Path::new(&self.name).to_path_buf();
if !self.name.ends_with(".tar.gz") {
p.set_extension("tar.gz");
}
let mut f = std::fs::File::create(&p)?;
remote
.archive(self.prefix, state.map(|x| (x, &extra[..])), &mut f)
.await?;
} else if let Ok(repo) = Repository::find_root(self.repo_path.clone()) {
let channel_name = repo.config.get_current_channel(self.channel.as_ref());
let mut p = std::path::Path::new(&self.name).to_path_buf();
if !self.name.ends_with(".tar.gz") {
p.set_extension("tar.gz");
}
let mut f = std::fs::File::create(&p)?;
let mut tarball = libpijul::output::Tarball::new(&mut f, self.prefix);
let conflicts = if let Some(state) = state {
let mut txn = repo.pristine.mut_txn_begin();
let mut channel = txn.load_channel(&channel_name).unwrap();
txn.archive_with_state(
&repo.changes,
&mut channel,
state,
&extra[..],
&mut tarball,
)?
} else {
let txn = repo.pristine.txn_begin()?;
let channel = txn.load_channel(&channel_name).unwrap();
txn.archive(&repo.changes, &channel, &mut tarball)?
};
if !conflicts.is_empty() {
writeln!(
std::io::stderr(),
"There were {} conflicts",
conflicts.len()
)?
}
}
Ok(())
}
}
use crate::repository::Repository;
use crate::Error;
use libpijul::changestore::ChangeStore;
use libpijul::pristine::MutTxnT;
use libpijul::MutTxnTExt;
use std::collections::HashMap;
use std::path::PathBuf;
// Arguments of `pijul apply`: apply a change to a channel.
// (`//` comments rather than `///` on fields: doc comments on
// clap-derived fields become part of the generated --help output.)
#[derive(Clap, Debug)]
pub struct Apply {
    // Repository location; when absent, search upwards from the cwd.
    #[clap(long = "repository")]
    repo_path: Option<PathBuf>,
    // Channel to apply to; defaults to the current channel.
    #[clap(long = "channel")]
    channel: Option<String>,
    // Apply only the change's dependencies, not the change itself.
    #[clap(long = "deps-only")]
    deps_only: bool,
    // Hash prefix or file path of the change; stdin when absent.
    change: Option<String>,
}
impl Apply {
    /// Apply a change to a channel. The change may be given as a hash
    /// prefix already in the change store, as a serialized change file,
    /// or, with no argument, read in text format from stdin.
    pub fn run(self) -> Result<(), anyhow::Error> {
        let mut repo = Repository::find_root(self.repo_path)?;
        let mut txn = repo.pristine.mut_txn_begin();
        use libpijul::pristine::TxnT;
        let channel_name = repo.config.get_current_channel(self.channel.as_ref());
        // A missing channel is only created when a change argument was
        // given; otherwise it is an error.
        let mut channel = if let Some(channel) = txn.load_channel(&channel_name) {
            channel
        } else if self.change.is_some() {
            txn.open_or_create_channel(&channel_name)?
        } else {
            return Err((Error::NoSuchChannel {
                channel: channel_name.to_string(),
            })
            .into());
        };
        let hash = if let Some(ch) = self.change {
            if let Ok(h) = txn.hash_from_prefix(&ch) {
                h.0
            } else {
                // Not a known hash prefix: treat the argument as a file
                // containing a serialized change, and store it.
                let change = libpijul::change::Change::deserialize(&ch, None)?;
                repo.changes.save_change(&change)?
            }
        } else {
            // No argument: read a change in text format from stdin.
            let mut change = std::io::BufReader::new(std::io::stdin());
            let change = libpijul::change::Change::read(&mut change, &mut HashMap::new())?;
            repo.changes.save_change(&change)?
        };
        if self.deps_only {
            txn.apply_deps_rec(&repo.changes, &mut channel, hash)?;
        } else {
            txn.apply_change_rec(&repo.changes, &mut channel, hash)?;
        }
        // Reflect the new channel state in the working copy, then commit.
        txn.output_repository_no_pending(
            &mut repo.working_copy,
            &repo.changes,
            &mut channel,
            "",
            true,
        )?;
        txn.commit()?;
        Ok(())
    }
}
# Cargo manifest for the pijul command-line client.
[package]
name = "pijul"
description = "The sound distributed version control system."
version = "1.0.0-alpha.1"
authors = ["Pierre-Étienne Meunier <pe@pijul.org>"]
edition = "2018"
repository = "https://nest.pijul.com/pijul/pijul"
license = "GPL-2.0"
# Files shipped in the published crate.
include = [
"Cargo.toml",
"src",
"src/commands",
"src/commands/log.rs",
"src/commands/protocol.rs",
"src/commands/apply.rs",
"src/commands/debug.rs",
"src/commands/checkout.rs",
"src/commands/file_operations.rs",
"src/commands/clone.rs",
"src/commands/git.rs",
"src/commands/record.rs",
"src/commands/change.rs",
"src/commands/diff.rs",
"src/commands/unrecord.rs",
"src/commands/channel.rs",
"src/commands/init.rs",
"src/commands/mod.rs",
"src/commands/archive.rs",
"src/commands/reset.rs",
"src/commands/fork.rs",
"src/commands/pushpull.rs",
"src/config.rs",
"src/repository.rs",
"src/main.rs",
"src/remote",
"src/remote/local.rs",
"src/remote/ssh.rs",
"src/remote/mod.rs",
]
# The "git" feature enables importing history from git repositories.
[features]
git = [ "git2", "sanakirja/git2" ]
default = [ ]
[dependencies]
human-panic = "1.0"
clap = "3.0.0-beta.2"
anyhow = "1.0"
thiserror = "1.0"
libpijul = { version = "1.0.0-alpha.1", features = [ "tarball" ] }
chrono = { version = "0.4" }
ignore = "0.4"
env_logger = "0.8"
log = "0.4"
serde = "1.0"
serde_json = "1.0"
serde_derive = "1.0"
toml = "0.5"
tokio = { version = "0.2", features = [ "rt-threaded", "macros", "sync" ] }
thrussh = "0.29"
thrussh-keys = "0.18.2"
cryptovec = "0.5"
reqwest = { version = "0.10", features = [ "stream" ] }
byteorder = "1.3"
sanakirja = "0.13.1"
futures = "0.3"
dirs-next = "2.0"
lazy_static = "1.4"
regex = "1.4"
whoami = "0.9"
rpassword = "5.0"
git2 = { version = "0.13", optional = true }
rand = "0.7"
edit = "0.1"
data-encoding = "2.3"
futures-util = "0.3"
tempfile = "3.1"
// org id jgSEtEI/xIjz/bF+vtGtYbEA9bNIeFWLqnZT+M51S64=
use crate::pristine::InodeMetadata;
#[cfg(feature = "ondisk-repos")]
pub mod filesystem;
#[cfg(feature = "ondisk-repos")]
pub use filesystem::FileSystem;
pub mod memory;
pub use memory::Memory;
/// Abstraction over a working copy: the file tree pijul reads from when
/// recording and writes to when outputting a channel. Implemented by
/// the on-disk `FileSystem` and the in-memory `Memory` working copies.
pub trait WorkingCopy {
    /// Create a directory and all of its missing parents.
    fn create_dir_all(&mut self, path: &str) -> Result<(), anyhow::Error>;
    /// Permission bits and file/directory flag for `file`.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, anyhow::Error>;
    /// Append the contents of `file` to `buffer`.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), anyhow::Error>;
    /// Last modification time of `file`.
    fn modified_time(&self, file: &str) -> Result<std::time::SystemTime, anyhow::Error>;
    /// Remove a file or directory subtree.
    fn remove_path(&mut self, name: &str) -> Result<(), anyhow::Error>;
    /// Move `former` to `new`.
    fn rename(&mut self, former: &str, new: &str) -> Result<(), anyhow::Error>;
    /// Set the (Unix-style) permission bits of `name`.
    fn set_permissions(&mut self, name: &str, permissions: u16) -> Result<(), anyhow::Error>;
    /// Create or truncate `file`, hand a writer to the `writer`
    /// callback, and return the callback's result.
    fn write_file<A, F: FnOnce(&mut dyn std::io::Write) -> Result<A, anyhow::Error>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, anyhow::Error>;
}
// org id 0KxFIPg0ga5vhSwltRkYUH0GqkExY80aPF4KPWp42YI=
use super::*;
use crate::pristine::InodeMetadata;
use std::collections::HashMap;
use std::time::SystemTime;
/// An in-memory working copy: a tree of files and directories held in
/// nested hash maps.
#[derive(Debug)]
pub struct Memory {
    // Root of the file tree.
    pub files: FileTree,
    // Single global "last modified" timestamp, updated on every mutation.
    pub last_modified: SystemTime,
}
/// One directory level of the in-memory working copy: child name → inode.
#[derive(Debug)]
pub struct FileTree {
    children: HashMap<String, Inode>,
}
/// A node of the in-memory file tree: either a regular file with its
/// contents, or a directory with its children.
#[derive(Debug)]
enum Inode {
    File {
        // Permission bits and "is directory" flag.
        meta: InodeMetadata,
        last_modified: SystemTime,
        contents: Vec<u8>,
    },
    Directory {
        meta: InodeMetadata,
        last_modified: SystemTime,
        children: FileTree,
    },
}
impl Memory {
    /// Create an empty in-memory working copy.
    pub fn new() -> Self {
        Memory {
            files: FileTree {
                children: HashMap::new(),
            },
            last_modified: SystemTime::now(),
        }
    }
    /// Return every file and directory path, breadth-first, as
    /// `/`-separated repository paths.
    pub fn list_files(&self) -> Vec<String> {
        let mut result = Vec::new();
        let mut current_files = vec![(String::new(), &self.files)];
        let mut next_files = Vec::new();
        loop {
            if current_files.is_empty() {
                break;
            }
            for (path, tree) in current_files.iter() {
                for (name, inode) in tree.children.iter() {
                    let mut path = path.clone();
                    crate::path::push(&mut path, name);
                    match inode {
                        Inode::File { .. } => {
                            result.push(path);
                        }
                        Inode::Directory { ref children, .. } => {
                            result.push(path.clone());
                            next_files.push((path, children))
                        }
                    }
                }
            }
            std::mem::swap(&mut current_files, &mut next_files);
            next_files.clear();
        }
        result
    }
    /// Insert a regular file (mode 0o644) at `file`, creating any
    /// missing parent directories.
    pub fn add_file(&mut self, file: &str, file_contents: Vec<u8>) {
        let file_meta = InodeMetadata::new(0o644, false);
        let last = SystemTime::now();
        self.add_inode(
            file,
            Inode::File {
                meta: file_meta,
                last_modified: last,
                contents: file_contents,
            },
        )
    }
    /// Insert an empty directory (mode 0o755) at `file`, creating any
    /// missing parent directories.
    pub fn add_dir(&mut self, file: &str) {
        let file_meta = InodeMetadata::new(0o755, true);
        let last = SystemTime::now();
        self.add_inode(
            file,
            Inode::Directory {
                meta: file_meta,
                last_modified: last,
                children: FileTree {
                    children: HashMap::new(),
                },
            },
        )
    }
    /// Insert `inode` at path `file`, creating intermediate directories
    /// on the way down. The last path component receives `inode`
    /// (replacing any existing entry).
    fn add_inode(&mut self, file: &str, inode: Inode) {
        let mut file_tree = &mut self.files;
        let last = SystemTime::now();
        self.last_modified = last;
        let file = file.split('/').filter(|c| !c.is_empty());
        let mut p = file.peekable();
        while let Some(f) = p.next() {
            if p.peek().is_some() {
                // Intermediate component: descend, lazily creating the
                // directory. `or_insert_with` (rather than `or_insert`)
                // avoids allocating the default directory on every step.
                let entry = file_tree
                    .children
                    .entry(f.to_string())
                    .or_insert_with(|| Inode::Directory {
                        meta: InodeMetadata::new(0o755, true),
                        children: FileTree {
                            children: HashMap::new(),
                        },
                        last_modified: last,
                    });
                match *entry {
                    Inode::Directory {
                        ref mut children, ..
                    } => file_tree = children,
                    _ => panic!("Not a directory"),
                }
            } else {
                file_tree.children.insert(f.to_string(), inode);
                break;
            }
        }
    }
    /// Walk down the tree to the inode at `file`, if any.
    fn get_file(&self, file: &str) -> Option<&Inode> {
        debug!("get_file {:?}", file);
        debug!("repo = {:?}", self);
        let mut t = Some(&self.files);
        let mut inode = None;
        let mut it = file.split('/').filter(|c| !c.is_empty());
        while let Some(c) = it.next() {
            debug!("c = {:?}", c);
            inode = t.take().unwrap().children.get(c);
            debug!("inode = {:?}", inode);
            match inode {
                Some(Inode::Directory { ref children, .. }) => t = Some(children),
                _ => break,
            }
        }
        inode
    }
    /// Mutable variant of `get_file`; also bumps `last_modified`, since
    /// callers use it to modify the inode.
    fn get_file_mut<'a>(&'a mut self, file: &str) -> Option<&'a mut Inode> {
        debug!("get_file_mut {:?}", file);
        debug!("repo = {:?}", self);
        let mut t = Some(&mut self.files);
        let mut it = file.split('/').filter(|c| !c.is_empty()).peekable();
        self.last_modified = SystemTime::now();
        while let Some(c) = it.next() {
            debug!("c = {:?}", c);
            let inode_ = t.take().unwrap().children.get_mut(c);
            debug!("inode = {:?}", inode_);
            if it.peek().is_none() {
                return inode_;
            }
            match inode_ {
                Some(Inode::Directory {
                    ref mut children, ..
                }) => t = Some(children),
                _ => return None,
            }
        }
        None
    }
    /// Detach and return the inode at `path` (with its whole subtree,
    /// for directories); `None` if the path does not exist.
    fn remove_path_(&mut self, path: &str) -> Option<Inode> {
        debug!("remove_path {:?}", path);
        debug!("repo = {:?}", self);
        let mut t = Some(&mut self.files);
        let mut it = path.split('/').filter(|c| !c.is_empty());
        let mut c = it.next().unwrap();
        self.last_modified = SystemTime::now();
        loop {
            debug!("c = {:?}", c);
            let next_c = it.next();
            let t_ = t.take().unwrap();
            let next_c = if let Some(next_c) = next_c {
                next_c
            } else {
                // `c` is the last component: remove it from its parent.
                return t_.children.remove(c);
            };
            let inode = t_.children.get_mut(c);
            c = next_c;
            debug!("inode = {:?}", inode);
            match inode {
                Some(Inode::Directory {
                    ref mut children, ..
                }) => t = Some(children),
                _ => return None,
            }
        }
    }
}
impl WorkingCopy for Memory {
    /// Create `file` as a directory (mode 0o755) if it does not already
    /// exist, together with any missing parents.
    fn create_dir_all(&mut self, file: &str) -> Result<(), anyhow::Error> {
        if self.get_file(file).is_none() {
            let last = SystemTime::now();
            self.add_inode(
                file,
                Inode::Directory {
                    meta: InodeMetadata::new(0o755, true),
                    children: FileTree {
                        children: HashMap::new(),
                    },
                    last_modified: last,
                },
            );
        }
        Ok(())
    }
    /// Return the metadata of `file`, or `FileNotFound`.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, anyhow::Error> {
        match self.get_file(file) {
            Some(Inode::Directory { meta, .. }) => Ok(*meta),
            Some(Inode::File { meta, .. }) => Ok(*meta),
            None => Err((crate::Error::FileNotFound {
                path: file.to_string(),
            })
            .into()),
        }
    }
    /// Append the contents of `file` to `buffer`. Panics if `file` is a
    /// directory; errors if it does not exist.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), anyhow::Error> {
        match self.get_file(file) {
            Some(Inode::Directory { .. }) => panic!("Not a file: {:?}", file),
            Some(Inode::File { ref contents, .. }) => {
                buffer.extend(contents);
                Ok(())
            }
            None => {
                return Err((crate::Error::FileNotFound {
                    path: file.to_string(),
                })
                .into())
            }
        }
    }
    /// The in-memory copy keeps a single global timestamp; per-file
    /// modification times are not reported.
    fn modified_time(&self, _file: &str) -> Result<std::time::SystemTime, anyhow::Error> {
        Ok(self.last_modified)
    }
    /// Remove `path` (file or whole directory subtree); missing paths
    /// are silently ignored.
    fn remove_path(&mut self, path: &str) -> Result<(), anyhow::Error> {
        self.remove_path_(path);
        Ok(())
    }
    /// Move the inode at `old` to `new`; does nothing if `old` is missing.
    fn rename(&mut self, old: &str, new: &str) -> Result<(), anyhow::Error> {
        debug!("rename {:?} to {:?}", old, new);
        if let Some(inode) = self.remove_path_(old) {
            self.add_inode(new, inode)
        }
        Ok(())
    }
    /// Replace the permission bits of `file`. Panics if the file is missing.
    fn set_permissions(&mut self, file: &str, permissions: u16) -> Result<(), anyhow::Error> {
        debug!("set_permissions {:?}", file);
        match self.get_file_mut(file) {
            Some(Inode::File { ref mut meta, .. }) => {
                *meta = InodeMetadata::new(permissions as usize, false);
            }
            Some(Inode::Directory { ref mut meta, .. }) => {
                *meta = InodeMetadata::new(permissions as usize, true);
            }
            None => panic!("file not found: {:?}", file),
        }
        Ok(())
    }
    /// Write `file` through the `writer` callback: truncate an existing
    /// file's contents, or create a new one (mode 0o644). Panics if
    /// `file` is a directory.
    fn write_file<A, F: FnOnce(&mut dyn std::io::Write) -> Result<A, anyhow::Error>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, anyhow::Error> {
        match self.get_file_mut(file) {
            Some(Inode::File {
                ref mut contents, ..
            }) => {
                contents.clear();
                writer(contents)
            }
            None => {
                let mut contents = Vec::new();
                let last_modified = SystemTime::now();
                let a = writer(&mut contents)?;
                self.add_inode(
                    file,
                    Inode::File {
                        meta: InodeMetadata::new(0o644, false),
                        contents,
                        last_modified,
                    },
                );
                Ok(a)
            }
            _ => panic!("not a file: {:?}", file),
        }
    }
}
// org id c2D/NoY1VKCjNo0OezNVrmuG67Szl/Bfhi3G2Z7tcLU=
use super::*;
use crate::pristine::InodeMetadata;
use ignore::WalkBuilder;
use std::path::{Path, PathBuf};
/// The on-disk working copy: all paths are resolved relative to `root`.
pub struct FileSystem {
    root: PathBuf,
}
/// Resolve a user-supplied `prefix` path against the repository root
/// (`repo_path`, defaulting to the current directory) and return both
/// its absolute form and its repository-relative, `/`-separated string.
///
/// The relative string is empty when `prefix` is the root itself or
/// lies outside the repository.
pub fn get_prefix(
    repo_path: Option<&Path>,
    prefix: &Path,
) -> Result<(PathBuf, String), anyhow::Error> {
    let repo = if let Some(repo) = repo_path {
        std::fs::canonicalize(repo)?
    } else {
        std::env::current_dir()?
    };
    debug!("get prefix {:?}", repo);
    // Join once (the original computed `repo.join(prefix)` twice):
    // canonicalize if the path exists, otherwise keep the joined path
    // as-is (e.g. for paths that are about to be created).
    let joined = repo.join(prefix);
    let prefix_ = std::fs::canonicalize(&joined).unwrap_or(joined);
    debug!("get prefix {:?}", prefix_);
    let mut p = String::new();
    if let Ok(stripped) = prefix_.strip_prefix(&repo) {
        for c in stripped.components() {
            if !p.is_empty() {
                p.push('/');
            }
            let c: &std::path::Path = c.as_ref();
            p.push_str(&c.to_string_lossy())
        }
    }
    Ok((prefix_, p))
}
impl FileSystem {
    /// Build a working copy rooted at `root`.
    pub fn from_root<P: AsRef<Path>>(root: P) -> Self {
        FileSystem {
            root: root.as_ref().to_path_buf(),
        }
    }
    /// Record each prefix in `prefixes` (or the whole repository when
    /// the slice is empty). Errors on individual prefixes are printed
    /// to stderr, not propagated.
    pub fn record_prefixes<
        T: crate::MutTxnTExt + crate::TxnTExt,
        C: crate::changestore::ChangeStore,
        P: AsRef<Path>,
    >(
        &mut self,
        txn: &mut T,
        channel: &mut crate::pristine::ChannelRef<T>,
        changes: &C,
        state: &mut crate::RecordBuilder,
        repo_path: &Path,
        prefixes: &[P],
    ) -> Result<(), anyhow::Error> {
        for prefix in prefixes.iter() {
            if let Err(e) =
                self.record_prefix(txn, channel, changes, state, repo_path, prefix.as_ref())
            {
                eprintln!("{}", e)
            }
        }
        if prefixes.is_empty() {
            if let Err(e) =
                self.record_prefix(txn, channel, changes, state, repo_path, Path::new(""))
            {
                eprintln!("{}", e)
            }
        }
        Ok(())
    }
    /// Add every untracked file under `prefix` to the tracked set
    /// (honouring the `ignore` crate's standard filters and skipping
    /// editor backup files such as `foo~` and `#foo#`), then record the
    /// resulting changes under that prefix into `state`.
    pub fn record_prefix<
        T: crate::MutTxnTExt + crate::TxnTExt,
        C: crate::changestore::ChangeStore,
    >(
        &mut self,
        txn: &mut T,
        channel: &mut crate::pristine::ChannelRef<T>,
        changes: &C,
        state: &mut crate::RecordBuilder,
        repo_path: &Path,
        prefix: &Path,
    ) -> Result<(), anyhow::Error> {
        debug!("record_prefix {:?}", prefix);
        let repo_path_ = std::fs::canonicalize(repo_path)?;
        if let Ok((full, prefix)) = get_prefix(Some(&repo_path), prefix) {
            debug!("full = {:?}", full);
            let meta = std::fs::metadata(&full);
            debug!("meta = {:?}", meta);
            debug!("{:?}", full.strip_prefix(&repo_path_));
            if let Ok(meta) = meta {
                if meta.is_dir() {
                    // Walk the directory with standard ignore filters
                    // (.gitignore-style rules, hidden files, etc.).
                    let mut walk = WalkBuilder::new(&full);
                    walk.standard_filters(true);
                    let walk = walk.build();
                    for entry in walk {
                        let entry = entry?;
                        let p = entry.path();
                        if let Some(p) = p.file_name() {
                            if let Some(p) = p.to_str() {
                                // Skip editor backup/autosave files.
                                if p.ends_with("~") || (p.starts_with("#") && p.ends_with("#")) {
                                    continue;
                                }
                            }
                        }
                        debug!("entry path = {:?} {:?}", entry.path(), repo_path);
                        if let Ok(path) = entry.path().strip_prefix(&repo_path_) {
                            let path_str = path.to_str().unwrap();
                            if !txn.is_tracked(&path_str) {
                                eprintln!("Adding {:?}", path);
                                info!("Adding {:?}", path);
                                txn.add(path_str, entry.file_type().unwrap().is_dir())?
                            } else {
                                debug!("already tracked {:?}", path_str)
                            }
                        } else {
                            debug!("entry = {:?}", entry.path());
                        }
                    }
                } else if let Ok(path) = full.strip_prefix(&repo_path_) {
                    // A single file: track it if not already tracked.
                    let path_str = path.to_str().unwrap();
                    if !txn.is_tracked(&path_str) {
                        eprintln!("Adding {:?}", path);
                        info!("Adding file {:?}", path);
                        txn.add(path_str, false)?
                    }
                }
            }
            debug!("recording from prefix {:?}", prefix);
            txn.record(
                state,
                crate::Algorithm::default(),
                channel,
                self,
                changes,
                &prefix,
            )?;
            debug!("recorded");
        }
        Ok(())
    }
}
impl WorkingCopy for FileSystem {
    /// Create `file` (and any missing parents) under the repository root.
    fn create_dir_all(&mut self, file: &str) -> Result<(), anyhow::Error> {
        Ok(std::fs::create_dir_all(&self.root.join(file))?)
    }
    /// Read permission bits and the directory flag from the file
    /// system; on platforms without Unix permissions, 0o755 is assumed.
    fn file_metadata(&self, file: &str) -> Result<InodeMetadata, anyhow::Error> {
        let attr = std::fs::metadata(&self.root.join(file))?;
        let permissions = permissions(&attr).unwrap_or(0o755);
        debug!("permissions = {:?}", permissions);
        Ok(InodeMetadata::new(permissions & 0o777, attr.is_dir()))
    }
    /// Append the whole contents of `file` to `buffer`.
    fn read_file(&self, file: &str, buffer: &mut Vec<u8>) -> Result<(), anyhow::Error> {
        use std::io::Read;
        let mut f = std::fs::File::open(&self.root.join(file))?;
        f.read_to_end(buffer)?;
        Ok(())
    }
    /// Return the file system's modification time for `file`.
    fn modified_time(&self, file: &str) -> Result<std::time::SystemTime, anyhow::Error> {
        let attr = std::fs::metadata(&self.root.join(file))?;
        Ok(attr.modified()?)
    }
    /// Best-effort removal of `path` (recursively for directories);
    /// failures are logged, never propagated.
    fn remove_path(&mut self, path: &str) -> Result<(), anyhow::Error> {
        let path = self.root.join(path);
        if let Ok(meta) = std::fs::metadata(&path) {
            if let Err(e) = if meta.is_dir() {
                std::fs::remove_dir_all(&path)
            } else {
                std::fs::remove_file(&path)
            } {
                error!("while deleting {:?}: {:?}", path, e);
            }
        }
        Ok(())
    }
    /// Best-effort rename, creating the destination's parent
    /// directories first; failures are logged, never propagated.
    fn rename(&mut self, former: &str, new: &str) -> Result<(), anyhow::Error> {
        let former = self.root.join(former);
        let new = self.root.join(new);
        if let Some(p) = new.parent() {
            std::fs::create_dir_all(p)?
        }
        if let Err(e) = std::fs::rename(&former, &new) {
            error!("while renaming {:?} to {:?}: {:?}", former, new, e)
        }
        Ok(())
    }
    /// Set the Unix mode bits of `name`, preserving the remaining
    /// permission fields.
    #[cfg(not(windows))]
    fn set_permissions(&mut self, name: &str, permissions: u16) -> Result<(), anyhow::Error> {
        use std::os::unix::fs::PermissionsExt;
        let name = self.root.join(name);
        debug!("set_permissions: {:?}", name);
        let metadata = std::fs::metadata(&name)?;
        let mut current = metadata.permissions();
        debug!(
            "setting mode for {:?} to {:?} (currently {:?})",
            name, permissions, current
        );
        current.set_mode(permissions as u32);
        std::fs::set_permissions(name, current)?;
        Ok(())
    }
    /// Windows has no Unix permission bits; this is a no-op.
    #[cfg(windows)]
    fn set_permissions(&mut self, _name: &str, _permissions: u16) -> Result<(), anyhow::Error> {
        Ok(())
    }
    /// (Re)create `file` and write it through the `writer` callback.
    fn write_file<A, F: FnOnce(&mut dyn std::io::Write) -> Result<A, anyhow::Error>>(
        &mut self,
        file: &str,
        writer: F,
    ) -> Result<A, anyhow::Error> {
        let path = self.root.join(file);
        if let Some(p) = path.parent() {
            std::fs::create_dir_all(p)?
        }
        // Best-effort removal so `File::create` starts from a fresh inode.
        std::fs::remove_file(&path).ok();
        let mut w = std::io::BufWriter::new(std::fs::File::create(&path)?);
        let result = writer(&mut w)?;
        // Flush explicitly: `BufWriter`'s `Drop` impl silently discards
        // any error raised while flushing the remaining bytes, so
        // relying on drop could lose write failures.
        std::io::Write::flush(&mut w)?;
        Ok(result)
    }
}
/// Extract the Unix permission bits of `attr` (always available on
/// non-Windows platforms).
#[cfg(not(windows))]
fn permissions(attr: &std::fs::Metadata) -> Option<usize> {
    use std::os::unix::fs::PermissionsExt;
    let mode = attr.permissions().mode();
    Some(mode as usize)
}
// Windows metadata exposes no Unix permission bits; callers fall back
// to a default mode (see `file_metadata`).
#[cfg(windows)]
fn permissions(_: &std::fs::Metadata) -> Option<usize> {
    None
}
// org id YHNNzZV5am1BYRE9P9yHBV54v3BJ2n5DcyZHps06YG4=
use crate::pristine::*;
// `&str`, not `&'static str`: the `'static` lifetime is implied on
// consts (clippy::redundant_static_lifetimes).
/// Marker written at the start of a conflict region.
pub const START_MARKER: &str = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n";
/// Separator written between the alternatives of a conflict region.
pub const SEPARATOR: &str = "\n================================\n";
/// Marker written at the end of a conflict region.
pub const END_MARKER: &str = "\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n";
/// A trait for outputting keys and their contents. This trait allows
/// to retain more information about conflicts than directly
/// outputting as bytes to a `Write`. The diff algorithm uses that
/// information, for example.
pub trait VertexBuffer {
    /// Output the contents of one graph vertex, produced by the
    /// `contents` callback.
    fn output_line<F: FnOnce(&mut Vec<u8>) -> Result<(), anyhow::Error>>(
        &mut self,
        key: Vertex<ChangeId>,
        contents: F,
    ) -> Result<(), anyhow::Error>;
    /// Output a conflict marker string (one of the marker constants above).
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), anyhow::Error>;
    /// Called at the start of an (order) conflict.
    fn begin_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    /// Called at the start of a zombie conflict; defaults to `begin_conflict`.
    fn begin_zombie_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.begin_conflict()
    }
    /// Called at the start of a cyclic conflict; defaults to `begin_conflict`.
    fn begin_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.begin_conflict()
    }
    /// Called between the alternatives of a conflict.
    fn conflict_next(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(SEPARATOR)
    }
    /// Called at the end of an (order) conflict.
    fn end_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(END_MARKER)
    }
    /// Called at the end of a zombie conflict.
    fn end_zombie_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(END_MARKER)
    }
    /// Called at the end of a cyclic conflict.
    fn end_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(END_MARKER)
    }
}
/// A `VertexBuffer` that writes to `w` while recording the position of
/// every conflict it encounters into `conflicts`.
pub(crate) struct ConflictsWriter<'a, 'b, W: std::io::Write> {
    pub w: W,
    // Current output line, used to report conflict positions.
    pub lines: usize,
    // Whether the last write ended at the beginning of a line.
    pub new_line: bool,
    // Path of the file being output, copied into conflict reports.
    pub path: &'b str,
    pub conflicts: &'a mut Vec<crate::output::Conflict>,
    // Scratch buffer for the current vertex's contents.
    pub buf: Vec<u8>,
}
impl<'a, 'b, W: std::io::Write> ConflictsWriter<'a, 'b, W> {
    /// Build a writer for `path` that appends every conflict it meets
    /// to `conflicts`; line counting starts at 1.
    pub fn new(w: W, path: &'b str, conflicts: &'a mut Vec<crate::output::Conflict>) -> Self {
        ConflictsWriter {
            w,
            new_line: true,
            lines: 1,
            path,
            conflicts,
            buf: Vec::new(),
        }
    }
}
// Deref to the underlying writer for convenience.
impl<'a, 'b, W: std::io::Write> std::ops::Deref for ConflictsWriter<'a, 'b, W> {
    type Target = W;
    fn deref(&self) -> &Self::Target {
        &self.w
    }
}
// Mutable deref to the underlying writer for convenience.
impl<'a, 'b, W: std::io::Write> std::ops::DerefMut for ConflictsWriter<'a, 'b, W> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.w
    }
}
impl<'a, 'b, W: std::io::Write> VertexBuffer for ConflictsWriter<'a, 'b, W> {
    /// Write one vertex's contents, keeping the running line count up
    /// to date so conflicts can be reported with line numbers.
    fn output_line<C: FnOnce(&mut Vec<u8>) -> Result<(), anyhow::Error>>(
        &mut self,
        _: Vertex<ChangeId>,
        c: C,
    ) -> Result<(), anyhow::Error> {
        self.buf.clear();
        c(&mut self.buf)?;
        let ends_with_newline = self.buf.ends_with(b"\n");
        self.lines += self.buf.iter().filter(|c| **c == b'\n').count();
        self.w.write_all(&self.buf)?;
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    /// Write a conflict marker. Markers start with `'\n'`, which is
    /// skipped when already at the beginning of a line; the line
    /// counter advances by the number of newlines actually written.
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), anyhow::Error> {
        debug!("output_conflict_marker {:?}", self.new_line);
        // `write_all` instead of `write`: a bare `write` may perform a
        // partial write and silently drop the rest of the marker.
        if !self.new_line {
            self.lines += 2;
            self.w.write_all(s.as_bytes())?;
        } else {
            self.lines += 1;
            debug!("{:?}", &s.as_bytes()[1..]);
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        self.new_line = true;
        Ok(())
    }
    /// Record an order conflict at the current line, then emit the marker.
    fn begin_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.conflicts.push(crate::output::Conflict::Order {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
    /// Record a zombie conflict at the current line, then emit the marker.
    fn begin_zombie_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.conflicts.push(crate::output::Conflict::Zombie {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
    /// Record a cyclic conflict at the current line, then emit the marker.
    fn begin_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.conflicts.push(crate::output::Conflict::Cyclic {
            path: self.path.to_string(),
            line: self.lines,
        });
        self.output_conflict_marker(START_MARKER)
    }
}
/// The plain `VertexBuffer`: writes vertex contents and conflict
/// markers straight to `w`, without recording conflict positions.
pub struct Writer<W: std::io::Write> {
    w: W,
    // Scratch buffer for the current vertex's contents.
    buf: Vec<u8>,
    // Whether the last write ended at the beginning of a line.
    new_line: bool,
}
impl<W: std::io::Write> Writer<W> {
    /// Wrap `w`; the writer starts at the beginning of a line.
    pub fn new(w: W) -> Self {
        Writer {
            w,
            new_line: true,
            buf: Vec::new(),
        }
    }
    /// Consume the wrapper and return the underlying writer.
    pub fn into_inner(self) -> W {
        self.w
    }
}
// Deref to the underlying writer for convenience.
impl<W: std::io::Write> std::ops::Deref for Writer<W> {
    type Target = W;
    fn deref(&self) -> &Self::Target {
        &self.w
    }
}
/// Mutable access to the underlying writer.
impl<W: std::io::Write> std::ops::DerefMut for Writer<W> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.w
    }
}
/// Like the `ConflictsWriter` implementation, but without conflict
/// recording or line counting.
impl<W: std::io::Write> VertexBuffer for Writer<W> {
    /// Write one vertex's contents (produced by `c`) to the output,
    /// updating the "are we at the start of a line" state.
    fn output_line<C: FnOnce(&mut Vec<u8>) -> Result<(), anyhow::Error>>(
        &mut self,
        _: Vertex<ChangeId>,
        c: C,
    ) -> Result<(), anyhow::Error> {
        self.buf.clear();
        c(&mut self.buf)?;
        let ends_with_newline = self.buf.ends_with(b"\n");
        self.w.write_all(&self.buf[..])?;
        if !self.buf.is_empty() {
            // empty "lines" (such as in the beginning of a file)
            // don't change the status of self.new_line.
            self.new_line = ends_with_newline;
        }
        Ok(())
    }
    /// Write a conflict marker. `s` is expected to start with a
    /// newline: if we are already at the beginning of a line, that
    /// leading newline is skipped.
    fn output_conflict_marker(&mut self, s: &str) -> Result<(), anyhow::Error> {
        debug!("output_conflict_marker {:?}", self.new_line);
        if !self.new_line {
            // `write_all` rather than `write`: `write` may write only
            // part of the buffer, silently truncating the marker.
            self.w.write_all(s.as_bytes())?;
        } else {
            debug!("{:?}", &s.as_bytes()[1..]);
            self.w.write_all(&s.as_bytes()[1..])?;
        }
        // NOTE(review): unlike `ConflictsWriter`, `self.new_line` is
        // not set to `true` here — confirm whether that asymmetry is
        // intentional.
        Ok(())
    }
    fn begin_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_zombie_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(START_MARKER)
    }
    fn begin_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.output_conflict_marker(START_MARKER)
    }
}
// org id zbhEFFLH9dRwVwwwwfF92Y0wFF4fPrXmDJAX4GwqE9A=
/// A "vector of vectors" stored in a single contiguous buffer.
///
/// All elements live in `v`; `bounds[i]..bounds[i + 1]` delimits the
/// `i`-th inner vector. `bounds` always contains at least one entry
/// (the initial 0), so there are `bounds.len() - 1` inner vectors.
pub(crate) struct Vector2<A> {
    v: Vec<A>,
    bounds: Vec<usize>,
}
impl<A> Vector2<A> {
    /// An empty `Vector2`, with no inner vectors.
    pub(crate) fn new() -> Self {
        Vector2 {
            v: Vec::new(),
            bounds: vec![0],
        }
    }
    /// Number of inner vectors.
    pub(crate) fn len(&self) -> usize {
        self.bounds.len() - 1
    }
    /// Like `new`, preallocating room for `total` elements overall and
    /// `n` inner vectors.
    pub(crate) fn with_capacities(total: usize, n: usize) -> Self {
        // `bounds` holds the initial 0 plus one entry per inner
        // vector, so `n` inner vectors need capacity `n + 1`;
        // reserving only `n` would guarantee a reallocation on the
        // `n`-th `push`.
        let mut bounds = Vec::with_capacity(n + 1);
        bounds.push(0);
        Vector2 {
            v: Vec::with_capacity(total),
            bounds,
        }
    }
    /// Append `a` to the last inner vector.
    ///
    /// Panics if `push` has never been called (no inner vector exists).
    pub(crate) fn push_to_last(&mut self, a: A) {
        assert!(self.bounds.len() > 1);
        *self.bounds.last_mut().unwrap() += 1;
        self.v.push(a)
    }
    /// Start a new, empty inner vector.
    pub(crate) fn push(&mut self) {
        self.bounds.push(self.v.len())
    }
    /// Mutable slice of the last inner vector, or `None` if there is
    /// no inner vector yet.
    pub(crate) fn last_mut(&mut self) -> Option<&mut [A]> {
        if self.bounds.len() >= 2 {
            let i = self.bounds.len() - 2;
            Some(&mut self.v[self.bounds[i]..self.bounds[i + 1]])
        } else {
            None
        }
    }
}
/// `v[i]` is the `i`-th inner vector, as a slice. Panics if `i` is out
/// of bounds.
impl<A> std::ops::Index<usize> for Vector2<A> {
    type Output = [A];
    fn index(&self, i: usize) -> &[A] {
        &self.v[self.bounds[i]..self.bounds[i + 1]]
    }
}
impl<A> std::ops::IndexMut<usize> for Vector2<A> {
    fn index_mut(&mut self, i: usize) -> &mut [A] {
        &mut self.v[self.bounds[i]..self.bounds[i + 1]]
    }
}
/// Debug-prints like a `Vec<Vec<A>>`, e.g. `[[0, 1], [2]]`.
impl<A: std::fmt::Debug> std::fmt::Debug for Vector2<A> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "[")?;
        for i in 0..self.bounds.len() - 1 {
            if i > 0 {
                write!(fmt, ", ")?
            }
            write!(fmt, "{:?}", &self[i])?
        }
        write!(fmt, "]")?;
        Ok(())
    }
}
// Basic `Vector2` usage: two inner vectors, read back via `Index`.
#[test]
fn test_v2() {
    let mut v: Vector2<usize> = Vector2::new();
    v.push();
    v.push_to_last(0);
    v.push_to_last(1);
    v.push_to_last(2);
    v.push();
    v.push_to_last(4);
    v.push_to_last(5);
    v.push_to_last(6);
    assert_eq!(&v[0], &[0, 1, 2][..]);
    assert_eq!(&v[1], &[4, 5, 6][..]);
}
// Indexing an empty `Vector2` must panic (out-of-bounds on `bounds`).
#[test]
#[should_panic]
fn test_v2_() {
    let w: Vector2<usize> = Vector2::new();
    println!("{:?}", &w[0]);
}
// org id hPrt74aGC9MoSCkacm35YMV1UsqOTCIc9O8ApjoChHI=
use crate::change::*;
use crate::changestore::*;
use crate::pristine::*;
use crate::small_string::*;
// org id XWsbPEqsD3paeM/LxVDyIr2Q565JHhSez0T72hx9cHs=
/// Undo a file addition: if `new_vertex` is a zero-length vertex
/// (which is what names a file in the graph), remove its
/// inode ↔ position mappings from the pristine.
pub fn undo_file_addition<T: MutTxnT>(
    txn: &mut T,
    change_id: ChangeId,
    new_vertex: &NewVertex<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    // Non-empty vertices are file contents, not names: nothing to do.
    if new_vertex.start != new_vertex.end {
        return Ok(());
    }
    let pos = Position {
        change: change_id,
        pos: new_vertex.start,
    };
    let inode = match txn.get_revinodes(pos, None) {
        Some(inode) => inode.to_owned(),
        None => return Ok(()),
    };
    txn.del_revinodes(pos, None)?;
    txn.del_inodes(inode, None)?;
    Ok(())
}
// org id aM0a1a29Y251MOlVfR8GafvillHOtUZNFlHypSf5e0c=
/// Undo a file deletion: for each edge that re-deletes a FOLDER name
/// vertex, rebuild the corresponding inode mappings via `restore`.
pub fn undo_file_deletion<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    changes: &P,
    channel: &Channel<T>,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    for edge in newedges.edges.iter().rev() {
        assert!(!edge.flag.contains(EdgeFlags::PARENT));
        // Resolve the source block first (this can fail, so it is done
        // unconditionally, as before).
        let source = txn.find_block_end(&channel, txn.internal_pos(&edge.from, change_id)?)?;
        // Zero-length FOLDER targets are the vertices naming files.
        let names_a_file =
            edge.flag.contains(EdgeFlags::FOLDER) && edge.to.start_pos() == edge.to.end_pos();
        if names_a_file {
            let dest = txn.internal_pos(&edge.to.start_pos(), change_id)?;
            restore(txn, changes, channel, source, dest)?
        }
    }
    Ok(())
}
// org id jpnOhTUKi2/wrb/uh1iJyGyO0F8zmYxoJlnFRItxZo0=
/// Rebuild the inode mappings for the file whose name vertex is
/// `source`, with contents at `dest`, walking up the folder graph and
/// restoring missing ancestors first.
///
/// The recursion is emulated with an explicit stack: `return_value`
/// carries the inode produced by the most recent `restore_inode` call,
/// which becomes the parent inode of the next entry popped.
fn restore<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    changes: &P,
    channel: &Channel<T>,
    source: Vertex<ChangeId>,
    dest: Position<ChangeId>,
) -> Result<(), anyhow::Error> {
    let mut stack = vec![(source, dest)];
    let mut return_value = None;
    while let Some((source, dest)) = stack.pop() {
        if let Some(parent_inode) = return_value {
            // An ancestor was just restored: use its inode directly.
            return_value = Some(restore_inode(txn, changes, source, dest, parent_inode)?);
            continue;
        }
        // First FOLDER|PARENT edge out of `source` leads to the parent
        // name vertex. NOTE(review): `unwrap` assumes such an edge
        // always exists for a name vertex — confirm.
        let source_parent = txn
            .iter_adjacent(
                &channel,
                source,
                EdgeFlags::PARENT | EdgeFlags::FOLDER,
                EdgeFlags::all(),
            )
            .filter(|e| e.flag.contains(EdgeFlags::PARENT | EdgeFlags::FOLDER))
            .next()
            .unwrap()
            .dest;
        // org id tgKUYrZqx9aEX8jZs1AQbH/VCnJJAtXyCdfty6XGk3I=
        if source_parent.change.is_root() {
            // Parent is the repository root.
            return_value = Some(restore_inode(txn, changes, source, dest, Inode::ROOT)?)
        } else if let Some(inode) = txn.get_revinodes(source_parent, None) {
            // Parent is still tracked: restore directly under it.
            return_value = Some(restore_inode(txn, changes, source, dest, inode)?)
        } else {
            // Parent itself is gone: push this entry back, then the
            // parent on top, so the parent is restored first.
            let grandparent = find_youngest_parent(txn, channel, source_parent.inode_vertex())?;
            stack.push((source, dest));
            stack.push((grandparent, source_parent));
        }
    }
    Ok(())
}
// org id 6G9wZJSePFPM3jnKlWo/js7upKaYQwFWH9ep4l2BR8Y=
/// Create a fresh inode for the file named by `source`, register it in
/// the tree/revtree tables under `parent_inode`, and bind it to the
/// graph position `dest`. Returns the new inode.
fn restore_inode<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    changes: &P,
    source: Vertex<ChangeId>,
    dest: Position<ChangeId>,
    parent_inode: Inode,
) -> Result<Inode, anyhow::Error> {
    let inode = crate::fs::create_new_inode(txn);
    // Recover the file's base name from the change that named it.
    let mut name_buf = Vec::new();
    let (_, base) = changes.get_file_name(|h| txn.get_external(h), source, &mut name_buf)?;
    let file_id = OwnedPathId {
        parent_inode,
        basename: SmallString::from_str(base),
    };
    txn.put_tree(file_id.as_file_id(), inode)?;
    txn.put_revtree(inode, file_id.as_file_id())?;
    txn.replace_inodes(inode, dest)?;
    txn.replace_revinodes(dest, inode)?;
    Ok(inode)
}
// org id RM0rj9DnLmh6LYZN5VK6ADMYoieH0mZJcaFzBEZzf9c=
/// Choose a parent vertex of `current` among its FOLDER|PARENT edges:
/// the first alive (non-DELETED) parent if any, else the deleted
/// parent whose introducing change has the largest timestamp on this
/// channel ("youngest").
fn find_youngest_parent<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    current: Vertex<ChangeId>,
) -> Result<Vertex<ChangeId>, crate::Error> {
    // Best candidate so far, as (age, vertex); age 0 marks an alive
    // parent, which always wins (the loop breaks immediately).
    let mut next = None;
    for e in txn
        .iter_adjacent(
            channel,
            current,
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK,
        )
        .filter(|e| e.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT))
    {
        if e.flag.contains(EdgeFlags::DELETED) {
            // Rank deleted parents by the timestamp of the change that
            // introduced the deleting edge.
            let age = txn
                .get_changeset(&channel.changes, e.introduced_by, None)
                .unwrap();
            if let Some((ref mut age0, ref mut v)) = next {
                if age > *age0 {
                    *age0 = age;
                    *v = e.dest
                }
            } else {
                next = Some((age, e.dest))
            }
        } else {
            next = Some((0, e.dest));
            break;
        }
    }
    // NOTE(review): panics if `current` has no FOLDER|PARENT edge at
    // all — presumably guaranteed by the caller (`restore`); confirm.
    txn.find_block_end(channel, next.unwrap().1)
}
// org id T313j+8glo3jbLVV755vMhVhm9EW2lx+7wowRud9Bfo=
/// Undo a file re-insertion (the undeletion of a file): drop the inode
/// mappings of every zero-length target vertex in `newedges`.
pub fn undo_file_reinsertion<T: MutTxnT>(
    txn: &mut T,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    for e in newedges.edges.iter() {
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        // org id HBoHMB0n95+Pap0r6KHw3Kj1HRX0MyKXyd0dYieT2cs=
        // Only zero-length vertices name files.
        if e.to.start_pos() != e.to.end_pos() {
            continue;
        }
        let position = txn.internal_pos(&e.to.start_pos(), change_id)?;
        let inode = match txn.get_revinodes(position, None) {
            Some(inode) => inode.to_owned(),
            None => continue,
        };
        txn.del_revinodes(position, None)?;
        txn.del_inodes(inode, None)?;
    }
    Ok(())
}
// org id IuiCX9taX7EZv/Aqz/UZc5r7v/fH7MJn3/QIj46oHwc=
use crate::apply;
use crate::change::*;
use crate::changestore::*;
use crate::missing_context::*;
use crate::pristine::*;
use crate::Error;
use std::collections::{HashMap, HashSet};
mod working_copy;
/// Unrecord `hash` on `channel`: remove it from the channel's log and
/// unapply its effects on the graph. If no other channel still uses
/// the change, its bookkeeping entries are also deleted and `false` is
/// returned; returns `true` when the change remains known to the
/// pristine, `false` otherwise.
pub fn unrecord<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    changes: &P,
    hash: &Hash,
) -> Result<bool, anyhow::Error> {
    let change = changes.get_change(hash)?;
    let change_id = match txn.get_internal(*hash) {
        Some(id) => id,
        // The pristine does not know this change: nothing to do.
        None => return Ok(false),
    };
    let unused = unused_in_other_channels(txn, &channel, change_id);
    let mut channel = channel.r.borrow_mut();
    del_channel_changes(txn, &mut channel, change_id)?;
    unapply(txn, &mut channel, changes, change_id, &change)?;
    if !unused {
        return Ok(true);
    }
    // No other channel uses the change: garbage-collect its entries.
    assert!(txn.get_revdep(change_id, None).is_none());
    while txn.del_dep(change_id, None)? {}
    txn.del_external(change_id, None)?;
    txn.del_internal(*hash, None)?;
    for dep in change.dependencies.iter() {
        let dep = txn.get_internal(*dep).unwrap();
        txn.del_revdep(dep, Some(change_id))?;
    }
    Ok(false)
}
// org id 4kyMdTywL4dWm7Vw3kXLcabNNGbFzSP3FTUptNMLPwg=
fn del_channel_changes<T: MutTxnT>(
txn: &mut T,
channel: &mut Channel<T>,
change_id: ChangeId,
) -> Result<(), anyhow::Error> {
let timestamp = if let Some(ts) = txn.get_changeset(&channel.changes, change_id, None) {
ts
} else {
return Err((Error::ChangeNotOnChannel { change_id }).into());
};
for (p, d) in txn.iter_revdep(change_id) {
if p < change_id {
continue;
} else if p > change_id {
break;
}
if txn.get_changeset(&channel.changes, d, None).is_some() {
return Err((Error::ChangeIsDependedUpon { change_id }).into());
}
}
txn.del_changes(channel, change_id, timestamp)?;
Ok(())
}
// org id F70s1W/0AmTkJZfg47g3+OXxINmwCgXU7G5j5QZkvh8=
/// `true` iff no channel other than `channel` has `change_id` in its
/// log.
fn unused_in_other_channels<T: TxnT>(
    txn: &mut T,
    channel: &ChannelRef<T>,
    change_id: ChangeId,
) -> bool {
    let channel = channel.borrow();
    for other in txn.iter_channels("") {
        let other = other.borrow();
        if other.name == channel.name {
            continue;
        }
        if txn.get_changeset(&other.changes, change_id, None).is_some() {
            return false;
        }
    }
    true
}
// org id UmzPcrZozUPTljK9LrlZa2FIXJwwvHpPmfRQ9uV4qTs=
/// Undo the effect of `change` on `channel`'s graph, in two passes:
/// first unapply every atom in reverse order (and repair the contexts
/// of removed vertices), then remove zombies and repair the contexts
/// around unapplied edge maps.
fn unapply<T: MutTxnT, C: ChangeStore>(
    txn: &mut T,
    channel: &mut Channel<T>,
    changes: &C,
    change_id: ChangeId,
    change: &Change,
) -> Result<(), anyhow::Error> {
    // Inodes whose forward edges have already been cleaned, so the
    // cleanup runs at most once per inode.
    let mut clean_inodes = HashSet::new();
    let mut ws = Workspace::new();
    // First pass: atoms in reverse order of application.
    for change_ in change.changes.iter().rev().flat_map(|r| r.rev_iter()) {
        match *change_ {
            Atom::EdgeMap(ref newedges) => {
                unapply_edges(changes, txn, channel, change_id, newedges, &mut ws)?
            }
            Atom::NewVertex(ref newvertex) => {
                if clean_inodes.insert(newvertex.inode) {
                    crate::alive::remove_forward_edges(
                        txn,
                        channel,
                        txn.internal_pos(&newvertex.inode, change_id)?,
                    )?;
                }
                unapply_newvertex(txn, channel, change_id, &mut ws, newvertex)?
            }
        }
    }
    repair_newvertex_contexts(txn, channel, &mut ws)?;
    // org id zX78t6A+99OopoXmY9luuPGgXMHwzPEPT8sYitaxNyc=
    // Second pass: with the vertices gone, de-zombify and repair the
    // contexts of the unapplied edge maps.
    for change in change.changes.iter().rev().flat_map(|r| r.rev_iter()) {
        match *change {
            Atom::EdgeMap(ref n) => {
                remove_zombies(txn, channel, change_id, n)?;
                repair_edges_context(
                    changes,
                    txn,
                    channel,
                    &mut ws.apply.missing_context,
                    change_id,
                    n,
                )?
            }
            _ => {}
        }
    }
    crate::apply::clean_obsolete_pseudo_edges(txn, channel, &mut ws.apply)?;
    // NOTE(review): `last_modified` is reset to 0 here — presumably to
    // force downstream consumers to treat the channel as changed;
    // confirm against other users of this field.
    channel.last_modified = 0;
    Ok(())
}
// org id ysIZl8M4RTRZyISrNO1VOKOsKxkQsZFux9RvLLl192Y=
/// Scratch state shared by the unapply passes.
struct Workspace {
    // Vertices found above an unapplied vertex (via alive PARENT,
    // non-FOLDER edges), keyed to the inode of that vertex; their
    // contexts are repaired by `repair_newvertex_contexts`.
    up: HashMap<Vertex<ChangeId>, Position<Option<Hash>>>,
    // Vertices found below an unapplied vertex (via alive non-PARENT
    // edges), with the inode and whether the edge was a FOLDER edge.
    down: HashMap<Vertex<ChangeId>, (Position<Option<Hash>>, bool)>,
    // Edges scheduled for deletion within one iteration of
    // `unapply_newvertex` (drained each time).
    del: Vec<Edge>,
    // Apply-side workspace, reused by the repair helpers.
    apply: crate::apply::Workspace,
}
impl Workspace {
    // Fresh, empty workspace.
    fn new() -> Self {
        Workspace {
            up: HashMap::new(),
            down: HashMap::new(),
            del: Vec::new(),
            apply: crate::apply::Workspace::new(),
        }
    }
}
/// Remove from the graph every vertex split belonging to `new_vertex`,
/// recording in `ws` the alive neighbours above (`ws.up`) and below
/// (`ws.down`) whose contexts will need repair.
fn unapply_newvertex<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    change_id: ChangeId,
    ws: &mut Workspace,
    new_vertex: &NewVertex<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    let mut pos = Position {
        change: change_id,
        pos: new_vertex.start,
    };
    debug!("unapply_newvertex = {:?}", new_vertex);
    // `new_vertex` may have been split into several blocks; walk them
    // in order of position.
    while pos.pos <= new_vertex.end {
        debug!("pos = {:?}", pos);
        let vertex = if let Ok(v) = txn.find_block(channel, pos) {
            v
        } else {
            // This means that the only edges to this block were
            // removed from the graph, which can only happen if this
            // block is the bottommost vertex in the graph.
            if cfg!(debug_assertions) {
                while pos.pos <= new_vertex.end {
                    assert!(txn.find_block(channel, pos).is_err());
                    pos.pos = pos.pos + 1;
                }
            }
            break;
        };
        debug!("vertex = {:?}", vertex);
        // Collect every edge of this block for deletion, remembering
        // alive neighbours for the later context repair.
        for e in txn.iter_adjacent(channel, vertex, EdgeFlags::empty(), EdgeFlags::all()) {
            debug!("e = {:?}", e);
            if !e.flag.contains(EdgeFlags::DELETED) {
                if e.flag.contains(EdgeFlags::PARENT) {
                    if !e.flag.contains(EdgeFlags::FOLDER) {
                        let up_v = txn.find_block_end(channel, e.dest)?;
                        ws.up.insert(up_v, new_vertex.inode);
                    }
                } else {
                    let down_v = txn.find_block(channel, e.dest)?;
                    ws.down.insert(
                        down_v,
                        (new_vertex.inode, e.flag.contains(EdgeFlags::FOLDER)),
                    );
                }
            }
            ws.del.push(e)
        }
        debug!("del = {:#?}", ws.del);
        // The vertex itself is going away: it cannot be a repair point.
        ws.up.remove(&vertex);
        ws.down.remove(&vertex);
        // Delete all in `del`.
        for e in ws.del.drain(..) {
            // Normalize to a (parent, child) pair before deleting.
            let (a, b) = if e.flag.contains(EdgeFlags::PARENT) {
                (txn.find_block_end(channel, e.dest)?, vertex)
            } else {
                (vertex, txn.find_block(channel, e.dest)?)
            };
            txn.del_graph_with_rev(channel, e.flag - EdgeFlags::PARENT, a, b, e.introduced_by)?;
        }
        // Move on to the next split of ~new_vertex~.
        pos.pos = vertex.end
    }
    // An empty-down-context FOLDER vertex is a file addition: undo its
    // inode bookkeeping too.
    if new_vertex.flag.contains(EdgeFlags::FOLDER) {
        if new_vertex.down_context.is_empty() {
            // let mut f = std::fs::File::create("debug_unrecord")?;
            // txn.debug(&channel, &mut f).unwrap();
            working_copy::undo_file_addition(txn, change_id, new_vertex)?;
        }
    }
    Ok(())
}
/// Repair the contexts of the neighbours collected by
/// `unapply_newvertex`: down-contexts of the still-alive `ws.up`
/// vertices, and up-contexts of the dead parents of `ws.down`
/// vertices. Drains both maps.
fn repair_newvertex_contexts<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    debug!("up = {:#?}", ws.up);
    for (up, inode) in ws.up.drain() {
        if txn.is_alive(channel, up) {
            crate::missing_context::repair_missing_down_context(
                txn,
                channel,
                &mut ws.apply.missing_context,
                inode,
                up,
                &[up],
            )?
        }
    }
    debug!("down = {:#?}", ws.down);
    // Reused across iterations; drained at the end of each one.
    let mut parents = HashSet::new();
    for (down, (inode, is_folder)) in ws.down.drain() {
        // Collect the non-deleted parents of `down` that are no longer
        // alive: their up-contexts need repair.
        for parent in txn.iter_adjacent(
            channel,
            down,
            EdgeFlags::PARENT,
            EdgeFlags::all() - EdgeFlags::DELETED,
        ) {
            let parent = txn.find_block_end(channel, parent.dest)?;
            if !txn.is_alive(channel, parent) {
                parents.insert(parent);
            }
        }
        debug!("parents {:#?}", parents);
        for up in parents.drain() {
            crate::missing_context::repair_missing_up_context(
                txn,
                channel,
                &mut ws.apply.missing_context,
                inode,
                up,
                &[down],
                is_folder,
            )?
        }
    }
    Ok(())
}
// org id TtkPh63N/jM5q98a/9yDp97cHPAUzouetXVib5fpVnY=
/// Unapply an `EdgeMap` by re-applying each edge with `previous` and
/// `flag` swapped, i.e. restoring each edge to its pre-change state.
/// Edges are only reintroduced if no parallel change deleted them
/// (see `must_reintroduce`).
fn unapply_edges<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut Channel<T>,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    debug!("newedges = {:#?}", newedges);
    let ext = txn.get_external(change_id).unwrap();
    let mut f = |h, from, to, flag| changes.has_edge(h, from, to, flag);
    for edge in newedges.edges.iter() {
        // This is ok: since this change is applied, this can't fail.
        let int = txn.internal(&edge.introduced_by, change_id).unwrap();
        apply::put_newedge(
            &mut f,
            txn,
            channel,
            &mut ws.apply,
            int,
            newedges.inode,
            // `previous` and `flag` deliberately swapped: this is the
            // inverse of the recorded edge change.
            &NewEdge {
                previous: edge.flag,
                flag: edge.previous,
                from: edge.from,
                to: edge.to,
                introduced_by: Some(ext),
            },
            |txn, channel, a, b| {
                must_reintroduce(
                    txn,
                    channel,
                    changes,
                    a,
                    b,
                    edge.introduced_by.unwrap(),
                    int,
                )
            },
        )?;
    }
    // FOLDER edge maps also need their working-copy bookkeeping
    // undone. NOTE(review): indexing `edges[0]` assumes the map is
    // non-empty and that the first edge's flags are representative —
    // confirm.
    if newedges.edges[0].flag.contains(EdgeFlags::FOLDER) {
        if newedges.edges[0].flag.contains(EdgeFlags::DELETED) {
            working_copy::undo_file_deletion(txn, changes, channel, change_id, newedges)?
        } else {
            working_copy::undo_file_reinsertion(txn, change_id, newedges)?
        }
    }
    Ok(())
}
/// Decide whether the edge `a -> b` (introduced by `intro`/`intro_id`)
/// should be reintroduced when unapplying: it should not be if a
/// change parallel to this one deleted it.
fn must_reintroduce<T: TxnT, C: ChangeStore>(
    txn: &T,
    channel: &Channel<T>,
    changes: &C,
    a: Vertex<ChangeId>,
    b: Vertex<ChangeId>,
    intro: Hash,
    intro_id: ChangeId,
) -> Result<bool, anyhow::Error> {
    debug!("a = {:?}, b = {:?}", a, b);
    // does a patch introduced by an edge parallel to
    // this one remove this edge from the graph?
    let b_ext = Position {
        change: txn.get_external(b.change),
        pos: b.start,
    };
    // Changes that introduced parallel edges; seeds for the
    // `edge_is_in_channel` traversal.
    let mut stack = Vec::new();
    for e in txn
        .iter_adjacent(channel, a, EdgeFlags::empty(), EdgeFlags::all())
        .filter(|e| {
            !e.flag.contains(EdgeFlags::PARENT)
                && e.dest == b.start_pos()
                && !e.introduced_by.is_root()
        })
    {
        // Optimisation to avoid opening change files
        // in the vast majority of cases: if there is
        // an edge `e` parallel to a -> b introduced
        // by the change that introduced a or b, don't
        // reinsert a -> b: that edge was removed by
        // `e`.
        // (Skipped under debug_assertions so the slow path is
        // exercised and can be checked against it.)
        if !cfg!(debug_assertions) && (a.change == intro_id || b.change == intro_id) {
            return Ok(false);
        }
        stack.push(e.introduced_by)
    }
    edge_is_in_channel(txn, changes, b_ext, intro, &mut stack)
}
/// Starting from the changes in `stack`, follow the "deletes position
/// `pos`" relation transitively. Returns `Ok(false)` as soon as
/// `introduced_by` is reached (the edge was deleted by a change in the
/// channel), `Ok(true)` if the traversal exhausts without finding it.
fn edge_is_in_channel<T: TxnT, C: ChangeStore>(
    txn: &T,
    changes: &C,
    pos: Position<Option<Hash>>,
    introduced_by: Hash,
    stack: &mut Vec<ChangeId>,
) -> Result<bool, anyhow::Error> {
    let mut seen = HashSet::new();
    while let Some(current) = stack.pop() {
        // Each change is processed at most once.
        if !seen.insert(current) {
            continue;
        }
        debug!("stack: {:?}", current);
        for next in changes.change_deletes_position(|c| txn.get_external(c), current, pos)? {
            if next == introduced_by {
                return Ok(false);
            }
            if let Some(i) = txn.get_internal(next) {
                stack.push(i)
            }
        }
    }
    Ok(true)
}
/// Remove the zombie edges caused by unapplying `newedges`: walk the
/// graph from each edge target, following deleted edges not introduced
/// by `change_id` and parent-non-block edges, deleting every edge that
/// `change_id` introduced (and reconnecting parents left dangling).
fn remove_zombies<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    change_id: ChangeId,
    newedges: &EdgeMap<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    // (vertex, edge) pairs to delete once the walk for this edge ends.
    let mut del = Vec::new();
    let mut stack = vec![];
    let mut visited = HashSet::new();
    debug!("remove_zombies, change_id = {:?}", change_id);
    for edge in newedges.edges.iter() {
        // Remove zombies caused by this EdgeMap.
        let to = txn.internal_pos(&edge.to.start_pos(), change_id)?;
        stack.push(txn.find_block(channel, to)?);
        visited.clear();
        while let Some(v) = stack.pop() {
            debug!("remove_zombies, v = {:?}", v);
            if !visited.insert(v) {
                continue;
            }
            for e in txn.iter_adjacent(channel, v, EdgeFlags::empty(), EdgeFlags::all()) {
                debug!("e = {:?}", e);
                // If the edge is a parent-non-block edge, go up.
                let mut follow =
                    e.flag.contains(EdgeFlags::DELETED) && e.introduced_by != change_id;
                follow |= e.flag & (EdgeFlags::BLOCK | EdgeFlags::PARENT) == EdgeFlags::PARENT;
                if !follow {
                    // Not followed, and not ours to delete: skip.
                    if e.introduced_by != change_id {
                        continue;
                    }
                }
                if e.flag.contains(EdgeFlags::PARENT) {
                    stack.push(txn.find_block_end(channel, e.dest)?)
                } else {
                    stack.push(txn.find_block(channel, e.dest)?)
                }
                if e.introduced_by == change_id {
                    del.push((v, e))
                }
            }
        }
        debug!("remove_zombies = {:#?}", del);
        for (v, e) in del.drain(..) {
            if e.flag.contains(EdgeFlags::PARENT) {
                let u = txn.find_block_end(channel, e.dest)?;
                txn.del_graph_with_rev(channel, e.flag - EdgeFlags::PARENT, u, v, e.introduced_by)?;
                // If `u` has no remaining alive child edge to `v`,
                // reintroduce a plain edge so `v` stays reachable.
                if txn
                    .iter_adjacent(
                        channel,
                        u,
                        EdgeFlags::empty(),
                        EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
                    )
                    .filter(|e| e.dest == v.start_pos())
                    .next()
                    .is_none()
                {
                    let f = if e.flag.contains(EdgeFlags::FOLDER) {
                        EdgeFlags::FOLDER
                    } else {
                        EdgeFlags::empty()
                    };
                    txn.put_graph_with_rev(channel, f, u, v, u.change)?;
                }
            } else {
                let w = txn.find_block(channel, e.dest)?;
                txn.del_graph_with_rev(channel, e.flag, v, w, e.introduced_by)?;
            }
        }
    }
    Ok(())
}
// org id 4dFFHO4v8n5FooqRmC0cyb1+9EjtjPMqW7wdJCUIRsU=
/// After unapplying edge map `n`, repair the contexts around each
/// (inverted) edge: deleted-context repair when the edge's original
/// state was DELETED, alive-context repair otherwise. The `NewEdge`s
/// built here have `previous`/`flag` swapped, matching the inversion
/// done in `unapply_edges`.
fn repair_edges_context<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut crate::missing_context::Workspace,
    change_id: ChangeId,
    n: &EdgeMap<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    let change_hash = txn.get_external(change_id).unwrap();
    for e in n.edges.iter() {
        assert!(!e.flag.contains(EdgeFlags::PARENT));
        if e.previous.contains(EdgeFlags::DELETED) {
            // The edge used to be deleted: unapplying restored the
            // deletion, so repair the deleted context.
            let e = NewEdge {
                previous: e.flag,
                flag: e.previous,
                from: e.from,
                to: e.to,
                introduced_by: Some(change_hash),
            };
            let intro = txn.internal(&e.introduced_by, change_id).unwrap();
            let intro_ext = e.introduced_by.unwrap_or(change_hash);
            repair_context_deleted(
                txn,
                channel,
                ws,
                n.inode,
                intro,
                |h| changes.knows(&intro_ext, &h).unwrap(),
                &e,
            )?
        } else {
            let intro = txn.internal(&e.introduced_by, change_id).unwrap();
            let intro_ext = e.introduced_by.unwrap_or(change_hash);
            let to = txn.internal_pos(&e.to.start_pos(), change_id)?;
            let to = txn.find_block(channel, to)?;
            // Only repair around targets that are still alive.
            if !txn.is_alive(channel, to) {
                continue;
            }
            repair_context_nondeleted(
                txn,
                channel,
                ws,
                n.inode,
                intro,
                |h| changes.knows(&intro_ext, &h).unwrap(),
                &NewEdge {
                    previous: e.flag,
                    flag: e.previous,
                    from: e.from,
                    to: e.to,
                    introduced_by: Some(change_hash),
                },
            )?
        }
    }
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Add a file, write to it, then fork the branch and unrecord once on
/// one side.
#[test]
fn test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Edit the file and record the edit as `h1`.
    repo.write_file("dir/file", |w| {
        w.write_all(b"a\nx\nb\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    let channel2 = txn.fork(&channel, "main2")?;
    // Unrecord `h1` on `channel` only; `channel2` keeps it.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The working copy must be back to the state recorded by `_h0`.
    let mut buf = Vec::new();
    repo.read_file("dir/file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\nd\n"));
    txn.debug_to_file(&channel, "debug_un")?;
    txn.debug_to_file(&channel2, "debug_un2")?;
    txn.commit()?;
    Ok(())
}
/// Same as `test`, but the recorded edit replaces lines instead of
/// inserting one; unrecording it must restore the original contents.
#[test]
fn replace() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Replace b, c with x, y and record as `h1`.
    repo.write_file("dir/file", |w| {
        w.write_all(b"a\nx\ny\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    let channel2 = txn.fork(&channel, "main2")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The working copy must be back to the pre-`h1` contents.
    let mut buf = Vec::new();
    repo.read_file("dir/file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\nd\n"));
    txn.debug_to_file(&channel, "debug_un")?;
    txn.debug_to_file(&channel2, "debug_un2")?;
    txn.commit()?;
    Ok(())
}
/// Record a file move, unrecord it, and check that the move is still
/// pending (visible in the working copy) until output resets it.
#[test]
fn file_move() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let _h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.rename("file", "dir/file")?;
    txn.move_file("file", "dir/file")?;
    debug!("recording the move");
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    txn.debug_to_file(&channel, "debug_un")?;
    debug!("unrecording the move");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    txn.debug_to_file(&channel, "debug_un2")?;
    // After unrecord, the move is back to "pending" state: both the
    // inode tables and the working copy still show the new name.
    assert_eq!(
        crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|(_, name)| name)
            .collect::<Vec<_>>(),
        vec!["dir", "dir/file"]
    );
    assert_eq!(repo.list_files(), vec!["dir", "dir/file"]);
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    // Outputting without the pending change restores the old name.
    assert_eq!(
        crate::fs::iter_working_copy(&txn, Inode::ROOT)
            .map(|(_, name)| name)
            .collect::<Vec<_>>(),
        vec!["file"]
    );
    assert_eq!(repo.list_files(), vec!["file"]);
    txn.commit()?;
    Ok(())
}
// `reconnect_` with line edits (the file is kept).
#[test]
fn reconnect_lines() -> Result<(), anyhow::Error> {
    reconnect_(false)
}
// `reconnect_` with the whole file deleted.
#[test]
fn reconnect_files() -> Result<(), anyhow::Error> {
    reconnect_(true)
}
/// Shared body of the reconnect tests; `delete_file` chooses between
/// deleting the whole file and deleting only lines.
fn reconnect_(delete_file: bool) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let mut repo2 = working_copy::memory::Memory::new();
    let mut repo3 = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let env3 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut txn2 = env2.mut_txn_begin();
    let mut txn3 = env3.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Seed two more pristines with the same initial change.
    let mut channel2 = txn2.open_or_create_channel("main")?;
    let mut channel3 = txn3.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn2, &mut channel2, h0)?;
    output::output_repository_no_pending(&mut repo2, &changes, &mut txn2, &mut channel2, "", true)?;
    apply::apply_change(&changes, &mut txn3, &mut channel3, h0)?;
    output::output_repository_no_pending(&mut repo3, &changes, &mut txn3, &mut channel3, "", true)?;
    // This test removes a line (in h1), then replaces it with another
    // one (in h2), removes the pseudo-edges (output, below), and then
    // unrecords h2 to delete the connection. Test: do the
    // pseudo-edges reappear?
    ///////////
    if delete_file {
        repo.remove_path("file")?;
    } else {
        repo.write_file("file", |w| {
            w.write_all(b"a\nd\n")?;
            Ok(())
        })?;
    }
    record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    ///////////
    // On the second pristine, insert `x` (h2), then `y` (h3).
    repo2.write_file("file", |w| {
        w.write_all(b"a\nb\nx\nc\nd\n")?;
        Ok(())
    })?;
    let h2 = record_all(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    repo2.write_file("file", |w| {
        w.write_all(b"a\nb\nx\nc\ny\nd\n")?;
        Ok(())
    })?;
    let h3 = record_all(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    ///////////
    // Apply both to the first pristine, then unrecord h2.
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    apply::apply_change(&changes, &mut txn, &mut channel, h3)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    txn.debug_to_file(&channel, "debug_un")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    txn.debug_to_file(&channel, "debug_un2")?;
    Ok(())
}
// `zombie_` with the whole file deleted on one side.
#[test]
fn zombie_file_test() -> Result<(), anyhow::Error> {
    zombie_(None)
}
// `zombie_` with only lines deleted on one side.
#[test]
fn zombie_lines_test() -> Result<(), anyhow::Error> {
    zombie_(Some(b"d\n"))
}
/// One side deletes (lines or the whole file, per `file`), the other
/// inserts into the deleted region; applying both makes zombies.
/// Unrecording the insertion must clean them up, in both directions.
fn zombie_(file: Option<&[u8]>) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let mut repo2 = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut txn2 = env2.mut_txn_begin();
    txn.add_file("file")?;
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    let mut channel2 = txn2.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn2, &mut channel2, h0)?;
    output::output_repository_no_pending(&mut repo2, &changes, &mut txn2, &mut channel2, "", true)?;
    ///////////
    // First side: truncate or delete the file, record as `h1`.
    if let Some(file) = file {
        repo.write_file("file", |w| {
            w.write_all(file)?;
            Ok(())
        })?;
    } else {
        repo.remove_path("file")?;
    }
    let h1 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    txn.debug_to_file(&channel, "debug_a")?;
    ///////////
    // Second side: insert `x` into the region the other side deleted.
    repo2.write_file("file", |w| {
        w.write_all(b"a\nb\nx\nc\nd\n")?;
        Ok(())
    })?;
    let h2 = record_all_output(&mut repo2, &changes, &mut txn2, &mut channel2, "")?;
    txn2.debug_to_file(&channel2, "debug_b")?;
    ///////////
    // Apply the insertion on top of the deletion, then unrecord it.
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    txn.debug_to_file(&channel, "debug_un")?;
    debug!("unrecording");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    txn.debug_to_file(&channel, "debug_un2")?;
    let mut buf = Vec::new();
    if let Some(f) = file {
        // Line deletion: no conflicts, contents back to `h1`'s state.
        if !conflicts.is_empty() {
            panic!("conflicts = {:#?}", conflicts)
        }
        repo.read_file("file", &mut buf)?;
        assert_eq!(&buf[..], f);
    } else {
        // File deletion: exactly one zombie-file conflict remains.
        if conflicts.len() != 1 {
            panic!("conflicts = {:#?}", conflicts)
        }
        match conflicts[0] {
            Conflict::ZombieFile { ref path } => assert_eq!(path, "file"),
            ref c => panic!("c = {:#?}", c),
        }
    }
    // The graph must contain no unreachable or dead-but-alive vertices.
    let (alive_, reachable_) = txn.check_alive(&channel);
    if !alive_.is_empty() {
        panic!("alive: {:?}", alive_);
    }
    if !reachable_.is_empty() {
        panic!("reachable: {:?}", reachable_);
    }
    txn.commit()?;
    // Applying the symmetric.
    apply::apply_change(&changes, &mut txn2, &mut channel2, h1)?;
    txn2.debug_to_file(&channel2, "debug_un3")?;
    debug!("unrecording h1 = {:?}", h1);
    crate::unrecord::unrecord(&mut txn2, &mut channel2, &changes, &h1)?;
    txn2.debug_to_file(&channel2, "debug_un4")?;
    // NOTE(review): this call mixes `txn2` with `channel` (which
    // belongs to the first pristine) and writes into `repo` rather
    // than `repo2` — presumably `channel2`/`repo2` were intended;
    // confirm.
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn2,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    Ok(())
}
// Should fail: we're resurrecting a file in a directory that doesn't
// exist anymore.
#[test]
fn zombie_dir() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c/d", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("a/b/c/d")?;
    let mut channel = txn.open_or_create_channel("main")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Delete the innermost file (h1), then its ancestors (_h2).
    repo.remove_path("a/b/c/d")?;
    let h1 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.remove_path("a/b")?;
    let _h2 = record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    let files = repo.list_files();
    assert_eq!(files, &["a"]);
    debug!("files={:?}", files);
    txn.debug_to_file(&channel, "debug_un")?;
    // Unrecord the file deletion: its parent directories are gone, so
    // they come back as zombies.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    txn.debug_to_file(&channel, "debug_un2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
        ref c => panic!("c = {:?}", c),
    }
    match conflicts[1] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b/c"),
        ref c => panic!("c = {:?}", c),
    }
    // The whole path is back in the working copy.
    let files = repo.list_files();
    debug!("files={:?}", files);
    assert_eq!(files, &["a", "a/b", "a/b/c", "a/b/c/d"]);
    let (alive_, reachable_) = txn.check_alive(&channel);
    if !alive_.is_empty() {
        panic!("alive: {:?}", alive_);
    }
    if !reachable_.is_empty() {
        panic!("reachable: {:?}", reachable_);
    }
    txn.commit()?;
    Ok(())
}
/// Dependency and channel checks: unrecording a depended-upon change
/// fails with `ChangeIsDependedUpon`, unrecording on a channel that
/// doesn't have the change fails with `ChangeNotOnChannel`, and
/// unrecording in reverse order (h1 then h0) empties the repository.
#[test]
fn nodep() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("dir/file", b"a\nb\nc\nd\n".to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file")?;
    txn.debug_inodes();
    let mut channel = txn.open_or_create_channel("main")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file("dir/file", |w| {
        w.write_all(b"a\nx\nb\nd\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    txn.debug_inodes();
    // h1 depends on h0, so unrecording h0 first must fail.
    match crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0) {
        Err(e) => {
            let e = e.downcast();
            if let Ok(Error::ChangeIsDependedUpon { .. }) = e {
            } else {
                unreachable!()
            }
        }
        _ => panic!("Should not be able to unrecord"),
    }
    txn.debug_inodes();
    // A fresh channel never had h0 at all.
    let mut channel2 = txn.open_or_create_channel("main2")?;
    match crate::unrecord::unrecord(&mut txn, &mut channel2, &changes, &h0) {
        Err(e) => {
            let e = e.downcast();
            if let Ok(Error::ChangeNotOnChannel { .. }) = e {
            } else {
                unreachable!()
            }
        }
        _ => panic!("Should not be able to unrecord"),
    }
    for p in txn.log(&channel.borrow(), 0) {
        debug!("p = {:?}", p);
    }
    txn.debug_inodes();
    txn.debug_to_file(&channel, "debug")?;
    // Unrecord in reverse order: h1, then h0.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h1)?;
    for p in txn.log(&channel.borrow(), 0) {
        debug!("p = {:?}", p);
    }
    txn.debug_inodes();
    txn.debug_to_file(&channel, "debug2")?;
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0)?;
    txn.debug_to_file(&channel, "debug3")?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)?;
    // Everything unrecorded: both the working copy and the inode
    // tables must be empty.
    let files = repo.list_files();
    if !files.is_empty() {
        panic!("Files should be empty {:?}", files);
    }
    assert!(crate::fs::iter_working_copy(&txn, Inode::ROOT)
        .next()
        .is_none());
    txn.commit()?;
    Ok(())
}
#[test]
fn file_del() -> Result<(), anyhow::Error> {
    // Unrecord a file deletion (the file must reappear), then unrecord the
    // change that created the file (the working copy must end up empty).
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("file", b"blabla".to_vec());
    txn.add_file("file")?;
    // h0 creates the file; h deletes it.
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.remove_path("file")?;
    let h = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    txn.debug_to_file(&channel, "debug")?;
    debug!("unrecord h");
    // Unrecording the deletion.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h)?;
    txn.debug_to_file(&channel, "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // The deletion was unrecorded, so the file is back in the working copy.
    assert_eq!(repo.list_files(), vec!["file"]);
    // Unrecording the initial change.
    debug!("unrecord h0");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h0)?;
    txn.debug_to_file(&channel, "debug3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Nothing recorded remains, so the working copy must be empty.
    let files = repo.list_files();
    if !files.is_empty() {
        panic!("Files should be empty {:?}", files);
    }
    txn.commit()?;
    Ok(())
}
/// Unrecording a change that edits the file around a conflict marker.
///
/// After the edit is unrecorded, the original order conflict must be
/// output again, byte-for-byte (up to the order of the two sides).
#[test]
fn self_context() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    repo.add_file("file", b"a\nb\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Create an order conflict: insert "x" on ~channel~ and "y" on a fork,
    // then apply the fork's change back onto ~channel~.
    let mut channel2 = txn.fork(&channel, "main2")?;
    repo.write_file("file", |w| Ok(w.write_all(b"a\nx\nb\n")?))?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    repo.write_file("file", |w| Ok(w.write_all(b"a\ny\nb\n")?))?;
    let b = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    apply::apply_change(&changes, &mut txn, &mut channel, b)?;
    txn.debug_to_file(&channel, "debug")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::Order { .. } => {}
        ref c => panic!("c = {:?}", c),
    }
    // Edit the conflicted output: wrap the ">>>" marker line in "bla"/"bli".
    // This records a change whose context is the conflict itself.
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file("file", |w| {
        for l in conflict.iter() {
            if l.starts_with(">>>") {
                writeln!(w, "bla\n{}\nbli", l)?
            } else {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    let c = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    txn.debug_to_file(&channel, "debug2")?;
    // Unrecord the edit: the untouched conflict must come back.
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &c)?;
    txn.debug_to_file(&channel, "debug3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::Order { .. } => {}
        ref c => panic!("c = {:?}", c),
    }
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    // Sort the lines before comparing: the two sides of the conflict may
    // be output in either order.
    let mut conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    conflict.sort();
    assert_eq!(
        conflict,
        vec![
            "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
            "================================",
            ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
            "a",
            "b",
            "x",
            "y"
        ]
    );
    txn.commit()?;
    Ok(())
}
/// Rollback round-trip where the recorded deletion removes lines only.
#[test]
fn rollback_lines() -> Result<(), anyhow::Error> {
    rollback_(false)
}
/// Rollback round-trip where the recorded deletion removes the whole file.
#[test]
fn rollback_file() -> Result<(), anyhow::Error> {
    rollback_(true)
}
/// Shared body of the rollback tests: record a file, record a deletion
/// (lines only, or the whole file when `delete_file` is true), apply the
/// inverse of the deletion (a rollback) and check the contents come back,
/// then unrecord the rollback and check the post-deletion state returns.
fn rollback_(delete_file: bool) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    // Write a-b-c
    repo.add_file("file", b"a\nb\nc\n".to_vec());
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Delete -b-
    if delete_file {
        repo.remove_path("file")?
    } else {
        repo.write_file("file", |w| {
            w.write_all(b"a\nd\n")?;
            Ok(())
        })?;
    }
    let h_del = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    // Rollback the deletion of -b-
    let p_del = changes.get_change(&h_del)?;
    debug!("p_del = {:#?}", p_del);
    let p_inv = p_del.inverse(
        &h_del,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let h_inv = changes.save_change(&p_inv)?;
    apply::apply_change(&changes, &mut txn, &mut channel, h_inv)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    txn.debug_to_file(&channel, "debug")?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    // Applying the inverse restores the original contents.
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    assert_eq!(std::str::from_utf8(&buf), Ok("a\nb\nc\n"));
    // Unrecord the rollback
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h_inv)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    txn.debug_to_file(&channel, "debug2")?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    // Back to the post-deletion state: file missing, or edited to "a\nd\n".
    let mut buf = Vec::new();
    let r = repo.read_file("file", &mut buf);
    if delete_file {
        assert!(r.is_err())
    } else {
        assert_eq!(std::str::from_utf8(&buf), Ok("a\nd\n"));
    }
    txn.commit()?;
    Ok(())
}
/// Delete a line twice on two different channels, merge and unrecord
/// only one of them. Does the deleted edge reappear? It shouldn't.
#[test]
fn double() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    let mut channel2 = txn.open_or_create_channel("main2")?;
    repo.add_file("file", b"blabla\nblibli\nblublu\n".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h0 = {:?}", h0);
    apply::apply_change(&changes, &mut txn, &mut channel2, h0)?;
    // First deletion: h1, recorded on ~channel~, drops the last line.
    repo.write_file("file", |w| {
        write!(w, "blabla\nblibli\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h1 = {:?}", h1);
    txn.debug_to_file(&channel, "debug0")?;
    // Second deletion: h2, recorded on ~channel2~, which never saw h1, so
    // both h1 and h2 delete the "blublu" line independently.
    repo.write_file("file", |w| {
        write!(w, "blabla\n")?;
        Ok(())
    })?;
    let h2 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    debug!("h2 = {:?}", h2);
    // Both deletions together.
    debug!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    txn.debug_to_file(&channel, "debug1a")?;
    txn.debug_to_file(&channel2, "debug1b")?;
    // Unrecord h2 on ~channel~; h1 still deletes the line, so the output
    // must be conflict-free.
    debug!("unrecord h");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    txn.debug_to_file(&channel, "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    txn.commit()?;
    Ok(())
}
/// Same as `double` above, but with a (slightly) more convoluted change
/// dependency graph made by rolling the change back a few times.
#[test]
fn double_convoluted() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main")?;
    let mut channel2 = txn.open_or_create_channel("main2")?;
    repo.add_file("file", b"blabla\nblibli\nblublu\n".to_vec());
    txn.add_file("file")?;
    let h0 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h0 = {:?}", h0);
    apply::apply_change(&changes, &mut txn, &mut channel2, h0)?;
    // First deletion: h1, on ~channel~.
    repo.write_file("file", |w| {
        write!(w, "blabla\nblibli\n")?;
        Ok(())
    })?;
    let h1 = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
    debug!("h1 = {:?}", h1);
    txn.debug_to_file(&channel, "debug0")?;
    // Second deletion: h2, on ~channel2~, overlapping h1's deletion.
    repo.write_file("file", |w| {
        write!(w, "blabla\n")?;
        Ok(())
    })?;
    let h2 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "")?;
    debug!("h2 = {:?}", h2);
    // Both deletions together, then unrecord on ~channel~.
    debug!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, h2)?;
    txn.debug_to_file(&channel, "debug1a")?;
    txn.debug_to_file(&channel2, "debug1b")?;
    debug!("unrecord h");
    crate::unrecord::unrecord(&mut txn, &mut channel, &changes, &h2)?;
    txn.debug_to_file(&channel, "debug2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Same on ~channel2~, but with a few extra layers of rollbacks in between.
    debug!("rolling back");
    apply::apply_change(&changes, &mut txn, &mut channel2, h1)?;
    // Build the inverse of a change (a rollback) and save it to the store.
    let rollback = |h| {
        let p = changes.get_change(&h).unwrap();
        let p_inv = p.inverse(
            &h,
            crate::change::ChangeHeader {
                authors: vec![],
                message: "rollback".to_string(),
                description: None,
                timestamp: chrono::Utc::now(),
            },
            Vec::new(),
        );
        let h_inv = changes.save_change(&p_inv).unwrap();
        h_inv
    };
    // Apply six successive rollbacks-of-rollbacks of h2 onto ~channel2~.
    let mut h = h2;
    for i in 0..6 {
        let r = rollback(h);
        apply::apply_change(&changes, &mut txn, &mut channel2, r).unwrap();
        txn.debug_to_file(&channel2, format!("debug_{}", i))?;
        h = r
    }
    crate::unrecord::unrecord(&mut txn, &mut channel2, &changes, &h1)?;
    txn.debug_to_file(&channel2, "debug_final")?;
    // NOTE(review): this outputs ~channel~, not ~channel2~, although the
    // steps just above operated on ~channel2~ — confirm this is intentional.
    let conflicts = output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts)
    }
    txn.commit()?;
    Ok(())
}
// org id pmxSHD/C7q1dK2EUb0CLkD0B2goCZkjog2twUdtj+t0=
use super::*;
use crate::working_copy::WorkingCopy;
#[test]
fn remove_file() -> Result<(), anyhow::Error> {
    // Bob clones Alice's repository, deletes the directory a/b/c (with the
    // file under it) and records; Alice applies Bob's change and outputs
    // her working copy without error.
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/c/d", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file("a/b/c/d").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Bob removes a/b and records
    repo_bob.remove_path("a/b/c")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    Ok(())
}
// org id yeulGdXdjG310z4jeyybg6GQH3njDFclmYFkTn69Y/o=
use super::*;
use crate::working_copy::WorkingCopy;
// Avoiding quadratic reconnects when possible.
#[test]
fn quadratic_pseudo_edges() -> Result<(), anyhow::Error> {
    // Record n+1 versions that surround the original "XXXXX" line with
    // growing symmetric context, then a final version that deletes it.
    // The graph must not accumulate a quadratic number of (pseudo-)edges:
    // the final edge count is checked against the linear bound n * 8 + 6.
    env_logger::try_init().unwrap_or(());
    let contents = b"XXXXX\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    txn.debug_to_file(&channel, "debug").unwrap();
    let n = 100;
    for i in 0..=n {
        // Version i: lines 0..i, the original line, then lines i-1..0.
        repo.write_file("file", |w| {
            for j in 0..i {
                writeln!(w, "{}", j)?;
            }
            w.write_all(&contents[..])?;
            for j in (0..i).rev() {
                writeln!(w, "{}", j)?;
            }
            Ok(())
        })
        .unwrap();
        record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
        txn.debug_to_file(&channel, &format!("debug{}", i)).unwrap();
    }
    // Final version: same context, but the original "XXXXX" line deleted.
    repo.write_file("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    txn.debug_to_file(&channel, "debug_final").unwrap();
    // Test that not too many edges have been inserted.
    {
        let channel = channel.borrow();
        let m = txn.cursor_graph(&channel.graph, None).count();
        let m0 = n * 8 + 6;
        if m > m0 {
            panic!("{} > {}", m, m0)
        }
    }
    txn.commit().unwrap();
    Ok(())
}
// Avoiding linear context repairs when possible.
use crate::MutTxnTExt;
#[test]
fn linear_context_repair() -> Result<(), anyhow::Error> {
    // Build a channel with n layered context edits, fork it, record p1
    // (inserts YYYYY between XXXXX and ZZZZZ) on the fork and p2 (deletes
    // the XXXXX/ZZZZZ block) on the original, then cross-apply both.
    // Context repair after the cross-apply must keep the edge count of
    // each channel within the linear bound 8 * n + 18.
    env_logger::try_init().unwrap_or(());
    let contents = b"XXXXX\nZZZZZ\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    txn.add_file("file")?;
    record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    txn.debug_to_file(&channel, "debug").unwrap();
    let n = 10;
    for i in 0..=n {
        // Version i: lines 0..i, the original block, then lines i-1..0.
        repo.write_file("file", |w| {
            for j in 0..i {
                writeln!(w, "{}", j)?;
            }
            w.write_all(&contents[..])?;
            for j in (0..i).rev() {
                writeln!(w, "{}", j)?;
            }
            Ok(())
        })
        .unwrap();
        record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
        txn.debug_to_file(&channel, &format!("debug{}", i)).unwrap();
    }
    let mut channel2 = txn.fork(&channel, "fork")?;
    // p1 (on the fork): insert YYYYY inside the block.
    repo.write_file("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        w.write_all(b"XXXXX\nYYYYY\nZZZZZ\n")?;
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    let p1 = record_all(&mut repo, &changes, &mut txn, &mut channel2, "").unwrap();
    txn.debug_to_file(&channel2, "debug_bob0").unwrap();
    // p2 (on the original channel): delete the block entirely.
    repo.write_file("file", |w| {
        for j in 0..n {
            writeln!(w, "{}", j)?;
        }
        for j in (0..n).rev() {
            writeln!(w, "{}", j)?;
        }
        Ok(())
    })
    .unwrap();
    let p2 = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    txn.debug_to_file(&channel, "debug_alice0").unwrap();
    debug!("Applying P1");
    txn.apply_change(&changes, &mut channel, p1)?;
    txn.debug_to_file(&channel, "debug_alice").unwrap();
    debug!("Applying P2");
    txn.apply_change(&changes, &mut channel2, p2)?;
    txn.debug_to_file(&channel2, "debug_bob").unwrap();
    // Test that not too many edges have been inserted.
    {
        let channel = channel.borrow();
        let m = txn.cursor_graph(&channel.graph, None).count();
        debug!("m (channel, alice) = {:?}", m);
        let original_edges = 8 * n + 18;
        if m > original_edges {
            panic!("{} > {}", m, original_edges)
        }
    }
    {
        let channel = channel2.borrow();
        let m = txn.cursor_graph(&channel.graph, None).count();
        debug!("m (channel2, bob) = {:?}", m);
        let original_edges = 8 * n + 18;
        if m > original_edges {
            panic!("{} > {}", m, original_edges)
        }
    }
    txn.commit().unwrap();
    Ok(())
}
// org id gXNh0eP6/JRfVcJHHR8Y4h0m0xEBUNNruLtr0w14rHw=
use crate::fs::*;
use crate::patch::*;
use crate::pristine::*;
use crate::record::*;
use crate::*;
/// Serializes `patch`, corrupts the returned hash, and checks that
/// deserializing under the corrupted hash fails with `PatchHashMismatch`,
/// while the patch file itself still reads back section-by-section equal
/// to the original.
fn hash_mismatch(patch: &Patch3) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    use crate::patch::*;
    let mut buf = tempfile::NamedTempFile::new()?;
    let mut h = patch.serialize(&mut buf)?;
    // Flip the first byte of the hash so it no longer matches the contents.
    if let crate::pristine::Hash::Blake3(ref mut bytes) = h {
        bytes[0] = bytes[0].wrapping_add(1);
    } else {
        unreachable!()
    }
    // Deserialization must detect the mismatch.
    if let Err(e) = Patch3::deserialize(buf.path().to_str().unwrap(), &h) {
        if let Ok(Error::PatchHashMismatch { .. }) = e.downcast() {
        } else {
            unreachable!()
        }
    } else {
        unreachable!()
    }
    // The on-disk patch was not altered: each section reads back intact.
    let mut f = PatchFile::open(buf.path().to_str().unwrap())?;
    assert_eq!(f.read_header()?, patch.header);
    assert_eq!(f.read_dependencies()?, patch.dependencies);
    assert_eq!(f.read_metadata()?, &patch.metadata[..]);
    assert_eq!(f.read_changes()?, patch.changes);
    Ok(())
}
#[test]
fn hash_mism() -> Result<(), anyhow::Error> {
    // Builds a real patch from a two-file repository, applies it, then runs
    // the hash-corruption checks of `hash_mismatch` on it.
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let patches = patchstore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    repo.add_file("file2", contents.to_vec());
    // Fixed: `env` needed no `mut` binding — `mut_txn_begin` is called on a
    // shared borrow here, exactly as in every other test in this file.
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let branch = txn.open_or_create_branch("main")?;
    let mut branch = branch.borrow_mut();
    add_file(&mut txn, "file")?;
    add_file(&mut txn, "file2")?;
    let mut state = Builder::new();
    state
        .record(
            &mut txn,
            Algorithm::Myers,
            &mut branch,
            &mut repo,
            &patches,
            "",
        )
        .unwrap();
    let rec = state.finish();
    // Globalize the recorded actions so they can be stored in a patch.
    let changes: Vec<_> = rec.actions
        .into_iter()
        .flat_map(|x| x.globalize(&txn).into_iter())
        .collect();
    info!("changes = {:?}", changes);
    let patch0 = crate::patch::Patch3::make_patch(
        &txn,
        &branch,
        changes,
        rec.contents,
        crate::patch::PatchHeader {
            name: "test".to_string(),
            authors: vec![],
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    apply::apply_local_patch(&patches, &mut txn, &mut branch, &patch0, &rec.updatables)?;
    // The actual assertions live in `hash_mismatch`.
    hash_mismatch(&patch0)?;
    txn.debug_to_file(&branch, "debug")?;
    Ok(())
}
// org id WSMNvyR4bu2brx+dqb9Z2EL3nECuN/R8COJKX3iDnto=
use super::*;
use crate::working_copy::WorkingCopy;
#[test]
fn partial_clone() -> Result<(), anyhow::Error> {
    // `log_for_path` on directory "d" must list exactly the changes that
    // concern files under it: the record adding d/e/f, the one adding
    // g/h/i (later moved under d), and the move itself — but not the
    // record that added a/b/c.
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c", contents.to_vec());
    repo.add_file("d/e/f", contents.to_vec());
    repo.add_file("g/h/i", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        // One record per added file, plus one for the move.
        txn.add_file("a/b/c")?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        txn.add_file("d/e/f")?;
        let hd = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        txn.add_file("g/h/i")?;
        let hg = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        repo.rename("g/h/i", "d/e/ff")?;
        txn.move_file("g/h/i", "d/e/ff")?;
        let hmove = record_all(&mut repo, &changes, &mut txn, &mut channel, "")?;
        txn.debug_to_file(&channel, "debug").unwrap();
        // Look up the inode key for "d" and collect its change log.
        let inode = crate::fs::find_inode(&txn, "d")?;
        let key = txn.get_inodes(inode, None).unwrap();
        let changes: Vec<_> = txn.log_for_path(&channel.borrow(), key, 0).collect();
        let check = vec![hd, hg, hmove];
        assert_eq!(changes, check)
    }
    txn.commit().unwrap();
    Ok(())
}
#[test]
fn clone_prefixes() -> Result<(), anyhow::Error> {
    // Record three files (h), then record edits under the prefix "a/b/c/d"
    // only (h2). Clone into a fresh pristine: outputting with prefix "e/f"
    // materializes only that subtree; after applying h2 and outputting
    // everything, only a/b/c/d carries the edit.
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("a/b/c/d", contents.to_vec());
    repo.add_file("e/f/g/h", contents.to_vec());
    repo.add_file("i/j/k/l", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    // h: adds all three files.
    let h = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        txn.add_file("a/b/c/d")?;
        txn.add_file("e/f/g/h")?;
        txn.add_file("i/j/k/l")?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "")?
    };
    // h2: both a/b/c/d and e/f/g/h are edited in the working copy, but the
    // record is restricted to the prefix "a/b/c/d".
    let h2 = {
        let mut channel = txn.open_or_create_channel("main").unwrap();
        repo.write_file("a/b/c/d", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        repo.write_file("e/f/g/h", |w| {
            w.write_all(b"edits\n")?;
            Ok(())
        })?;
        record_all(&mut repo, &changes, &mut txn, &mut channel, "a/b/c/d")?
    };
    txn.commit().unwrap();
    // Cloning
    debug!("Cloning");
    let mut repo2 = working_copy::memory::Memory::new();
    let env2 = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn2 = env2.mut_txn_begin();
    {
        let mut channel = txn2.open_or_create_channel("main2").unwrap();
        apply::apply_change(&changes, &mut txn2, &mut channel, h).unwrap();
        // Partial output: only the e/f subtree appears in the working copy.
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "e/f",
            true,
        )?;
        assert_eq!(
            repo2.list_files(),
            ["e", "e/f", "e/f/g", "e/f/g/h"]
                .iter()
                .map(|x| x.to_string())
                .collect::<Vec<_>>()
        );
        apply::apply_change(&changes, &mut txn2, &mut channel, h2).unwrap();
        output::output_repository_no_pending(
            &mut repo2,
            &changes,
            &mut txn2,
            &mut channel,
            "",
            true,
        )?;
        // h2 only recorded the a/b/c/d edit, so e/f/g/h keeps its original
        // contents.
        let mut buf = Vec::new();
        repo2.read_file("a/b/c/d", &mut buf)?;
        assert_eq!(buf, b"edits\n");
        buf.clear();
        repo2.read_file("e/f/g/h", &mut buf)?;
        assert_eq!(buf, contents);
    }
    txn2.commit().unwrap();
    // Reopening the committed pristine must still work.
    let mut txn2 = env2.mut_txn_begin();
    txn2.open_or_create_channel("main2").unwrap();
    Ok(())
}
// org id DQYPBj/aarc6S1hsotGYEzxSk3K5U5+4bF1dfi5WjAY=
use crate::changestore::ChangeStore;
use crate::pristine::*;
use crate::record::{Algorithm, Builder};
use crate::working_copy::WorkingCopy;
use crate::*;
use chrono::*;
mod add_file;
mod change;
mod clone;
mod conflict;
mod file_conflicts;
mod filesystem;
mod missing_context;
mod partial;
mod performance;
mod rm_file;
mod unrecord;
/// Test helper: diffs the working copy under `prefix` against `channel`,
/// packages the result as a change with a fixed "test" header, saves it in
/// `store`, applies it to `channel`, and returns its hash.
fn record_all<'env, T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
    repo: &mut R,
    store: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
) -> Result<Hash, anyhow::Error> {
    let mut builder = Builder::new();
    builder.record(txn, Algorithm::default(), channel, repo, store, prefix)?;
    let recorded = builder.finish();
    // Convert each recorded action to its serializable, global form.
    let actions = recorded
        .actions
        .into_iter()
        .map(|action| action.globalize(txn))
        .collect();
    let change = crate::change::Change::make_change(
        txn,
        &channel,
        actions,
        recorded.contents,
        crate::change::ChangeHeader {
            message: "test".to_string(),
            authors: vec![],
            description: None,
            // Beware of changing the following line: two changes
            // doing the same thing will be equal. Sometimes we don't
            // want that, as in tests::unrecord::unrecord_double.
            timestamp: Utc::now(),
        },
        Vec::new(),
    );
    let hash = store.save_change(&change)?;
    if log_enabled!(log::Level::Debug) {
        // Dump a textual rendering of the change to stderr for debugging.
        let mut stderr = std::io::stderr();
        change
            .write(
                store,
                Some(hash),
                |l, _p| format!("{}:{}", l.path, l.line),
                true,
                &mut stderr,
            )
            .unwrap();
    }
    apply::apply_local_change(txn, channel, &change, hash, &recorded.updatables)?;
    Ok(hash)
}
/// Like `record_all`, but also outputs the repository afterwards.
///
/// NOTE(review): the output step always uses the empty prefix rather than
/// `prefix` — presumably intentional (output everything after a partial
/// record), but worth confirming.
fn record_all_output<'env, T: MutTxnT, R: WorkingCopy, P: ChangeStore + Clone + Send + 'static>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
) -> Result<Hash, anyhow::Error> {
    let hash = record_all(repo, changes, txn, channel, prefix)?;
    output::output_repository_no_pending(repo, changes, txn, channel, "", true).unwrap();
    Ok(hash)
}
// org id Wm0VaE4AsPZ6mzh1X0S76snwx4OGE0BFKf3AML3aYZs=
use super::*;
use crate::working_copy::WorkingCopy;
/// Missing-context conflict where Alice edits the file down to two lines.
#[test]
fn missing_context_newnodes_lines() -> Result<(), anyhow::Error> {
    missing_context_newnodes(Some("a\nf\n"))
}
/// Missing-context conflict where Alice deletes the file entirely.
#[test]
fn missing_context_newnodes_file() -> Result<(), anyhow::Error> {
    missing_context_newnodes(None)
}
/// Shared body for the missing-context tests: Alice deletes lines (or the
/// whole file, when `alice` is `None`) while Bob inserts "xyz" inside the
/// deleted region. Cross-applying produces a conflict rendered with marker
/// lines, which each side then resolves and records.
fn missing_context_newnodes(alice: Option<&str>) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let bob = b"a\nb\nc\nxyz\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main").unwrap();
    txn_alice.add_file("file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )
    .unwrap();
    // Bob edits and records
    repo_bob
        .write_file("file", |w| {
            w.write_all(bob).unwrap();
            Ok(())
        })
        .unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    // Alice edits and records
    if let Some(alice) = alice {
        repo_alice.write_file("file", |w| {
            w.write_all(alice.as_bytes()).unwrap();
            Ok(())
        })?
    } else {
        repo_alice.remove_path("file")?;
    }
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug0")?;
    // Alice applies Bob's change
    debug!("applying Bob's change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob's "xyz" now lives in a deleted context, so Alice's output shows
    // it between conflict markers.
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    if alice.is_some() {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(
                &"a\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nxyz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\nf\n"
                    [..]
            )
        );
    } else {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(&">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nxyz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"[..])
        );
    }
    // Alice solves the conflict by confirming the deads.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_alice.write_file("file", |w| {
        // Keep only short lines (<= 3 chars), dropping the 32-char markers.
        for l in conflict.iter().filter(|l| l.len() <= 3) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    info!("starting fix_deletion");
    let _fix_deletion = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    info!("fix_deletion over");
    // Bob applies Alice's change
    info!("Bob applies Alice's change");
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h).unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    info!("Outputting Bob's working_copy");
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    // Bob sees the same conflict as Alice did.
    let mut buf = Vec::new();
    repo_bob.read_file("file", &mut buf)?;
    if alice.is_some() {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(
                &"a\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nxyz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\nf\n"
                    [..]
            )
        );
    } else {
        assert_eq!(
            std::str::from_utf8(&buf),
            Ok(&">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nxyz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"[..])
        );
    }
    // Bob solves the conflict by deleting the offending line.
    repo_bob.write_file("file", |w| {
        for l in conflict.iter().filter(|&&l| l != "xyz") {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    info!("starting fix_insertion");
    let _fix_insertion = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    info!("fix_insertion over");
    Ok(())
}
#[test]
fn missing_context_newedges() -> Result<(), anyhow::Error> {
    // Alice and Bob delete overlapping line ranges; Alice applies Bob's
    // deletion and then the inverse of Bob's deletion, exercising context
    // repair for edges whose context was deleted on her side.
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let alice = b"d\nf\n";
    let bob = b"a\nb\nc\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main")?;
    txn_alice.add_file("file")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_init")
        .unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main")?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    debug!("Bob edits and records");
    repo_bob.write_file("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Alice edits and records
    debug!("Alice edits and records");
    repo_alice.write_file("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let _alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice0")
        .unwrap();
    // Alice applies Bob's change
    debug!("Alice applies Bob's change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice1")
        .unwrap();
    // Bob reverts his change.
    debug!("Bob reverts");
    let bob_change = changes.get_change(&bob_h)?;
    let inv = bob_change.inverse(
        &bob_h,
        crate::change::ChangeHeader {
            authors: vec![],
            message: "rollback".to_string(),
            description: None,
            timestamp: chrono::Utc::now(),
        },
        Vec::new(),
    );
    let inv_h = changes.save_change(&inv)?;
    // Alice applies Bob's inverse change.
    info!("Applying inverse change");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, inv_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice2")
        .unwrap();
    Ok(())
}
use super::*;
#[test]
fn filesystem() -> Result<(), anyhow::Error> {
    // Record/apply/output round-trip like the in-memory tests, but backed
    // by the on-disk working copy, change store and pristine (tempdirs).
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    repo.write_file("dir/file", |f| Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?))?;
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    // Apply the recorded change onto a second, empty channel and output.
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    info!("applying");
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    // Clean up the on-disk working copy.
    repo.rename("dir/file", "dir/file.old")?;
    repo.remove_path("dir/file.old")?;
    repo.remove_path("dir")?;
    Ok(())
}
#[test]
fn symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());

    // Working copy on disk, containing a regular file plus a symlink to it.
    let repo_dir = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(repo_dir.path());
    let changes_dir = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(changes_dir.path());
    repo.write_file("dir/file", |f| Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?))?;
    std::os::unix::fs::symlink(
        &repo_dir.path().join("dir/file"),
        &repo_dir.path().join("dir/link"),
    )
    .unwrap();

    // On-disk pristine; track both the file and the link.
    let pristine_dir = tempfile::tempdir()?;
    std::fs::create_dir_all(pristine_dir.path())?;
    let env = pristine::sanakirja::Pristine::new(pristine_dir.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    txn.add_file("dir/link").unwrap();

    // Record on "main", replay on "main2", and output.
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let recorded = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel, recorded)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();

    // Exercise rename and removal on the filesystem working copy.
    repo.rename("dir/file", "dir/file.old")?;
    repo.remove_path("dir/file.old")?;
    repo.remove_path("dir")?;
    Ok(())
}
#[test]
fn record_dead_symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());

    // Working copy containing only a dangling symlink ("../file" does
    // not exist).
    let repo_dir = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(repo_dir.path());
    let changes_dir = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(changes_dir.path());
    std::fs::create_dir_all(&repo_dir.path().join("dir")).unwrap();
    std::os::unix::fs::symlink("../file", &repo_dir.path().join("dir/link")).unwrap();

    // On-disk pristine; track the dead link.
    let pristine_dir = tempfile::tempdir()?;
    std::fs::create_dir_all(pristine_dir.path())?;
    let env = pristine::sanakirja::Pristine::new(pristine_dir.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/link").unwrap();

    // Recording the dead symlink on "main" and replaying it on "main2"
    // must both succeed.
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let recorded = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel, recorded)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    Ok(())
}
/// Records a regular file, then replaces it in the working copy with a
/// dangling symlink, and checks that applying and outputting the change
/// on a second channel overwrites the dead link.
#[test]
fn overwrite_dead_symlink() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let r = tempfile::tempdir()?;
    let mut repo = working_copy::filesystem::FileSystem::from_root(r.path());
    let f = tempfile::tempdir()?;
    let changes = changestore::filesystem::FileSystem::from_root(f.path());
    repo.write_file("dir/file", |f| Ok(f.write_all(&b"a\nb\nc\nd\ne\nf\n"[..])?))?;
    let f = tempfile::tempdir()?;
    std::fs::create_dir_all(f.path())?;
    let env = pristine::sanakirja::Pristine::new(f.path())?;
    let mut txn = env.mut_txn_begin();
    txn.add_file("dir/file").unwrap();
    let mut channel = txn.open_or_create_channel("main").unwrap();
    let p = record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
    info!("applying");
    let mut channel = txn.open_or_create_channel("main2").unwrap();
    // Substitute dir/file with a dead symlink
    std::fs::remove_file(&r.path().join("dir/file")).unwrap();
    std::os::unix::fs::symlink("a/b/c/d/file", &r.path().join("dir/file")).unwrap();
    // Fix: stat the file inside the temporary repository rather than a
    // "dir/file" path relative to the process's current directory (which
    // always logged a NotFound error). Note this still follows the link,
    // so for the dangling symlink it logs Err — which is the point.
    debug!("meta = {:?}", std::fs::metadata(&r.path().join("dir/file")));
    // And output.
    apply::apply_change(&changes, &mut txn, &mut channel, p)?;
    output::output_repository_no_pending(&mut repo, &changes, &mut txn, &mut channel, "", true)
        .unwrap();
    txn.commit().unwrap();
    Ok(())
}
use super::*;
use crate::working_copy::WorkingCopy;
/// Rename conflict: Alice and Bob both rename the same file ("file")
/// to two different top-level names ("alice" / "bob").
#[test]
fn same_file_test() -> Result<(), anyhow::Error> {
    same_file_("file", "alice", "bob")
}
/// Rename conflict, variant where the new names live inside
/// directories ("alice/file" / "bob/file") rather than at the root.
#[test]
fn same_file_dirs_test() -> Result<(), anyhow::Error> {
    same_file_("file", "alice/file", "bob/file")
}
/// Core of the rename-conflict tests: Alice and Bob both rename `file`
/// (to `alice` and `bob` respectively), exchange changes, observe the
/// resulting `MultipleNames` conflict, then Bob records a resolution
/// which Alice applies; both sides must end up conflict-free.
fn same_file_(file: &str, alice: &str, bob: &str) -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file(file, contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file(file).unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Alice renames "file" to "alice"
    repo_alice.rename(file, alice)?;
    txn_alice.move_file(file, alice)?;
    // Fixed label: this logs Alice's working copy, not Bob's.
    debug!("repo_alice = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob renames "file" to "bob"
    repo_bob.rename(file, bob)?;
    txn_bob.move_file(file, bob)?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    // Fixed snapshot name: "debug_alice1" was reused here, clobbering
    // the snapshot taken just after Alice's rename.
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    match conflicts[0] {
        Conflict::MultipleNames { .. } => {}
        ref c => panic!("{:#?}", c),
    }
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    // Fixed snapshot name: "debug_bob1" was reused here.
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    // Outputting only Alice's name must not report a conflict…
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        alice,
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // …nor only Bob's name…
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        bob,
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // …but a full output sees the name conflict.
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    match conflicts[0] {
        Conflict::MultipleNames { .. } => {}
        ref c => panic!("{:#?}", c),
    }
    // Bob solves.
    let bob_solution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Alice applies Bob's solution.
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_solution)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    debug!("repo_alice = {:?}", repo_alice.list_files());
    debug!("repo_bob = {:?}", repo_bob.list_files());
    txn_bob.debug_tree("debug_tree")?;
    Ok(())
}
/// Alice and Bob move two different files to the same name. After
/// exchanging changes both see a name conflict (one file keeps "file",
/// the other is output with a "file.…" suffix); Alice resolves by
/// renaming her suffixed copy to "a1", and Bob verifies her solution.
#[test]
fn same_name_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file1", contents.to_vec());
    repo_alice.add_file("file2", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("file1")?;
    txn_alice.add_file("file2")?;
    info!("recording file additions");
    debug!("working_copy = {:?}", repo_alice);
    txn_alice.debug_tree("debug_tree")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Alice renames "file1" to "file"
    repo_alice.rename("file1", "file")?;
    txn_alice.move_file("file1", "file")?;
    // Fixed label: this logs Alice's working copy, not Bob's.
    debug!("repo_alice = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob renames "file2" to "file"
    repo_bob.rename("file2", "file")?;
    txn_bob.move_file("file2", "file")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    // Fixed snapshot name: "debug_alice1" was reused here.
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    // Fixed snapshot name: "debug_bob1" was reused here.
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    assert!(!conflicts.is_empty());
    let mut files_alice = repo_alice.list_files();
    debug!("repo_alice = {:?}", files_alice);
    assert_eq!(files_alice.len(), 2);
    files_alice.sort();
    assert_eq!(files_alice[0], "file");
    assert!(files_alice[1].starts_with("file."));
    // Alice solves it.
    txn_alice.move_file(&files_alice[1], "a1")?;
    repo_alice.rename(&files_alice[0], "file")?;
    repo_alice.rename(&files_alice[1], "a1")?;
    let solution_alice = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    let mut files_bob = repo_bob.list_files();
    debug!("repo_bob = {:?}", files_bob);
    assert_eq!(files_bob.len(), 2);
    files_bob.sort();
    assert_eq!(files_bob[0], "file");
    assert!(files_bob[1].starts_with("file."));
    // Bob applies Alice's solution and checks that it does solve his problem.
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, solution_alice)?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("conflicts = {:#?}", conflicts);
    }
    // Renumbered to follow "debug_bob2" above.
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    let mut files_bob = repo_bob.list_files();
    files_bob.sort();
    assert_eq!(files_bob, vec!["a1", "file"]);
    Ok(())
}
/// Combined name conflicts: Bob renames "file2" -> "file", Alice renames
/// "file1" -> "file" (same-name conflict), and Charlie renames
/// "file1" -> "file3" (giving "file1" two names). All three exchange
/// changes; Alice and Bob each resolve by deleting one conflicting entry.
#[test]
fn file_conflicts_same_name_and_two_names() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut repo_charlie = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file1", contents.to_vec());
    repo_alice.add_file("file2", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
    txn_alice.add_file("file1")?;
    txn_alice.add_file("file2")?;
    info!("recording file additions");
    debug!("working_copy = {:?}", repo_alice);
    txn_alice.debug_tree("debug_tree")?;
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones and renames "file2" to "file"
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    repo_bob.rename("file2", "file")?;
    txn_bob.move_file("file2", "file")?;
    debug!("repo_bob = {:?}", repo_bob.list_files());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice renames "file1" to "file"
    repo_alice.rename("file1", "file")?;
    txn_alice.move_file("file1", "file")?;
    // Fixed label: this logs Alice's working copy, not Bob's.
    debug!("repo_alice = {:?}", repo_alice.list_files());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Charlie clones, moves "file1" to "file3" and applies both
    // Alice's and Bob's change.
    let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_charlie = env_charlie.mut_txn_begin();
    let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    repo_charlie.rename("file1", "file3")?;
    txn_charlie.move_file("file1", "file3")?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
    )
    .unwrap();
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie1")?;
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h)?;
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h)?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie2")?;
    let files_charlie = repo_charlie.list_files();
    debug!("files_charlie {:?}", files_charlie);
    // Alice applies Bob's change and Charlie's change.
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h)?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let files_alice = repo_alice.list_files();
    debug!("files_alice {:?}", files_alice);
    // Alice resolves by removing the conflicting sibling entry.
    repo_alice.remove_path(&files_alice[1]).unwrap();
    let _alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    let files_bob = repo_bob.list_files();
    debug!("files_bob {:?}", files_bob);
    // Bob resolves the same way.
    repo_bob.remove_path(&files_bob[1]).unwrap();
    let _bob_solution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    Ok(())
}
/// Zombie file: Alice deletes the whole "a/b" directory while Bob edits
/// "a/b/c/file". After exchanging changes the edited file must survive
/// as a zombie; Alice records a resolution which Bob then applies.
#[test]
fn zombie_file_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/c/file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file("a/b/c/file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Alice deletes "file"
    repo_alice.remove_path("a/b")?;
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob edits "file"
    repo_bob.write_file("a/b/c/file", |w| {
        w.write_all(contents2)?;
        Ok(())
    })?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    // Fixed snapshot name: "debug_alice1" was reused here; subsequent
    // Alice snapshots are renumbered accordingly.
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    debug!("alice2");
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    // The deleted path must be resurrected by Bob's edit.
    let files_alice = repo_alice.list_files();
    assert_eq!(files_alice, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
    for x in txn_alice.iter_tree(
        OwnedPathId {
            parent_inode: Inode::ROOT,
            basename: crate::small_string::SmallString::new(),
        },
        None,
    ) {
        debug!("x = {:?}", x);
    }
    debug!("recording a solution");
    let alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice4")?;
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    // Fixed snapshot name: "debug_bob1" was reused here.
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("repo_alice = {:?}", repo_alice.list_files());
    debug!("repo_bob = {:?}", repo_bob.list_files());
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    let files_bob = repo_bob.list_files();
    assert_eq!(files_bob, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
    Ok(())
}
/// Zombie file combined with a rename: Alice deletes "a/b" while Bob
/// renames and edits "a/b/c/file" -> "a/b/c/file2". After the exchange,
/// Alice re-applies the rename (best-effort) and records a resolution
/// that Bob must be able to apply, ending with "a/b/c/file2" present.
#[test]
fn rename_zombie_file() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/c/file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file("a/b/c/file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Alice deletes "file"
    repo_alice.remove_path("a/b")?;
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob renames "file"
    repo_bob.rename("a/b/c/file", "a/b/c/file2")?;
    repo_bob.write_file("a/b/c/file2", |w| {
        w.write_all(contents2)?;
        Ok(())
    })?;
    txn_bob.move_file("a/b/c/file", "a/b/c/file2")?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    debug!("alice2");
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    let files_alice = repo_alice.list_files();
    debug!("Alice records {:?}", files_alice);
    // Best-effort re-application of Bob's rename in Alice's working
    // copy and tree: depending on how output resolved the zombie, the
    // file may already be at either name, so failures are ignored.
    repo_alice.rename("a/b/c/file", "a/b/c/file2").unwrap_or(());
    // repo_alice.remove_path("a/b/c/file").unwrap_or(());
    // repo_alice.remove_path("a/b/c/file2").unwrap_or(());
    txn_alice
        .move_file("a/b/c/file", "a/b/c/file2")
        .unwrap_or(());
    let alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice4")?;
    debug!("Alice recorded {:?}", alice_solution);
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("repo_alice = {:?}", repo_alice.list_files());
    debug!("repo_bob = {:?}", repo_bob.list_files());
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // The renamed file and its ancestors must all exist on Bob's side.
    let files_bob = repo_bob.list_files();
    assert!(["a", "a/b", "a/b/c", "a/b/c/file2"]
        .iter()
        .all(|n| files_bob.iter().any(|m| m == n)));
    Ok(())
}
/// Zombie directory: Alice deletes "a/b" while Bob renames the
/// directory "a/b/c" -> "a/b/d" and edits the file inside it. Alice's
/// resolution depends on which name output chose for the zombie dir;
/// Bob must end up with "a/b/d/file" after applying her solution.
#[test]
fn rename_zombie_dir() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("a/b/c/file", contents.to_vec());
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    txn_alice.add_file("a/b/c/file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Alice deletes "file"
    repo_alice.remove_path("a/b")?;
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob renames "file"
    repo_bob.rename("a/b/c", "a/b/d")?;
    repo_bob.write_file("a/b/d/file", |w| {
        w.write_all(contents2)?;
        Ok(())
    })?;
    txn_bob.move_file("a/b/c", "a/b/d")?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    debug!("alice2");
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    // Output may have materialised the zombie under either the old or
    // the renamed directory name; reconcile Alice's tree either way.
    let files_alice = repo_alice.list_files();
    if files_alice.iter().any(|x| x == "a/b/d/file") {
        txn_alice.add_file("a/b/d/file").unwrap_or(());
    } else {
        assert!(files_alice.iter().any(|x| x == "a/b/c/file"));
        txn_alice.move_file("a/b/c", "a/b/d").unwrap();
        repo_alice.rename("a/b/c", "a/b/d").unwrap();
    }
    debug!("Alice records");
    let alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice4")?;
    debug!("Alice recorded {:?}", alice_solution);
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("repo_alice = {:?}", repo_alice.list_files());
    debug!("repo_bob = {:?}", repo_bob.list_files());
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // The renamed directory and its file must exist on Bob's side.
    let files_bob = repo_bob.list_files();
    debug!("files_bob = {:?}", files_bob);
    assert!(["a", "a/b", "a/b/d", "a/b/d/file"]
        .iter()
        .all(|n| files_bob.iter().any(|m| m == n)));
    Ok(())
}
/// Double zombie: Alice deletes "a/b" while both Bob and Charlie edit
/// "a/b/c/file" independently. Each participant applies the other two
/// changes, sees the expected 5 conflicts (headed by a ZombieFile on
/// "a/b"), and Alice's conflict-marker-stripping resolution must clear
/// the conflicts for Bob and Charlie alike.
#[test]
fn double_zombie_file() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    let contents2 = b"a\nb\nc\nx\nd\ne\nf\n";
    let contents3 = b"a\nby\n\nc\nd\ne\nf\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut repo_charlie = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_charlie = env_charlie.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    repo_alice.add_file("a/b/c/file", contents.to_vec());
    txn_alice.add_file("a/b/c/file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Bob and Charlie clone
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie0")?;
    if !conflicts.is_empty() {
        panic!("charlie has conflicts: {:?}", conflicts);
    }
    // Alice deletes "file"
    repo_alice.remove_path("a/b")?;
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )
    .unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob edits "file"
    repo_bob.write_file("a/b/c/file", |w| {
        w.write_all(contents2)?;
        Ok(())
    })?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    // Charlie edits "file"
    repo_charlie.write_file("a/b/c/file", |w| {
        w.write_all(contents3)?;
        Ok(())
    })?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
    )
    .unwrap();
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie1")?;
    // Alice applies Bob's and Charlie's changes
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h)?;
    // Fixed snapshot name: "debug_alice1" was reused here; subsequent
    // Alice snapshots are renumbered accordingly.
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    debug!("alice2");
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    let files_alice = repo_alice.list_files();
    assert_eq!(files_alice, vec!["a", "a/b", "a/b/c", "a/b/c/file"]);
    assert_eq!(conflicts.len(), 5);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    let mut buf = Vec::new();
    repo_alice.read_file("a/b/c/file", &mut buf)?;
    // Alice removes conflict markers: marker lines are long separator
    // runs, so keeping only lines shorter than 10 chars drops them.
    repo_alice.write_file("a/b/c/file", |w| {
        for l in std::str::from_utf8(&buf).unwrap().lines() {
            if l.len() < 10 {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    let alice_solution = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice4")?;
    // Bob applies
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // Fixed snapshot name: "debug_bob2" was reused here; the later
    // snapshot is renumbered accordingly.
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    assert_eq!(conflicts.len(), 5);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_solution)?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob4")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("bob has conflicts: {:?}", conflicts);
    }
    // Charlie applies
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h)?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie2")?;
    debug!("charlie applies Alice's change");
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h)?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    assert_eq!(conflicts.len(), 5);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    debug!("charlie applies Alice's solution");
    apply::apply_change(
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        alice_solution,
    )?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie4")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("charlie has conflicts: {:?}", conflicts);
    }
    Ok(())
}
// Scenario: Alice moves a file, Bob deletes it under its old name, Bob
// records a resolution, and Charlie independently deletes the file under
// its new name. Checks that the zombie-file (and, on Charlie's side,
// multiple-names) conflicts appear and resolve consistently on all three
// replicas.
#[test]
fn zombie_file_post_resolve() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\nd\ne\nf\n";
    // Three independent working copies and pristines, one shared change store.
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut repo_charlie = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let env_charlie = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_charlie = env_charlie.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
    repo_alice.add_file("a/b/c/file", contents.to_vec());
    txn_alice.add_file("a/b/c/file").unwrap();
    let init_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice0")?;
    // Alice renames the file in place and records the move.
    repo_alice.rename("a/b/c/file", "a/b/c/alice")?;
    txn_alice.move_file("a/b/c/file", "a/b/c/alice")?;
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
    // Bob clones
    let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
    // Bob deletes "file"
    repo_bob.remove_path("a/b/c/file")?;
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, alice_h).unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    // Delete vs. rename: the file survives under its new name as a zombie.
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "a/b/c/alice"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    debug!("Bob resolves");
    // Recording the output working copy as-is resolves the zombie.
    let bob_resolution =
        record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("Bob has conflicts: {:?}", conflicts);
    }
    // Alice applies Bob's patch and solution.
    debug!("Alice applies Bob's resolution");
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h).unwrap();
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_resolution).unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
    if !conflicts.is_empty() {
        panic!("Alice has conflicts: {:?}", conflicts);
    }
    // Charlie applies Alice's move and deletes.
    let mut channel_charlie = txn_charlie.open_or_create_channel("charlie").unwrap();
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, init_h).unwrap();
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, alice_h).unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie0")?;
    if !conflicts.is_empty() {
        panic!("charlie has conflicts: {:?}", conflicts);
    }
    debug!("Charlie applies Alice's move and deletes");
    // Charlie deletes the file under its *new* name, concurrently with
    // Bob's delete of the old name and Bob's resolution.
    repo_charlie.remove_path("a/b/c/alice")?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
    )
    .unwrap();
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie1")?;
    //
    apply::apply_change(&changes, &mut txn_charlie, &mut channel_charlie, bob_h).unwrap();
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie2")?;
    apply::apply_change(
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        bob_resolution,
    )
    .unwrap();
    txn_charlie.debug_to_file(&channel_charlie, "debug_charlie3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn_charlie,
        &mut channel_charlie,
        "",
        true,
    )?;
    // Bob's resolution revives the file Charlie deleted (zombie), and the
    // conflict order between the two kinds is not fixed, so both orders
    // are accepted below.
    assert_eq!(conflicts.len(), 2);
    match (&conflicts[0], &conflicts[1]) {
        (Conflict::ZombieFile { ref path }, Conflict::MultipleNames { .. })
        | (Conflict::MultipleNames { .. }, Conflict::ZombieFile { ref path }) => {
            assert!(path == "a/b/c/alice" || path == "a/b/c/file")
        }
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    //
    apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, charlie_h).unwrap();
    txn_alice.debug_to_file(&channel_alice, "debug_alice3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert!(path == "a/b/c/file" || path == "a/b/c/alice"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    //
    apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, charlie_h).unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob3")?;
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert!(path == "a/b/c/file" || path == "a/b/c/alice"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    Ok(())
}
// Scenario: Alice moves "file" into "alice/" while Bob deletes it.
// Applying both sides yields a zombie of the moved file; Bob records a
// resolution which must also clear the conflict for Alice.
#[test]
fn move_vs_delete_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main")?;
    repo_alice.add_file("file", b"a\n".to_vec());
    txn_alice.add_file("file")?;
    let init = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    // Bob clones the initial change into his own pristine.
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main")?;
    txn_bob
        .apply_change(&changes, &mut channel_bob, init)
        .unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // Alice moves "file"
    repo_alice.rename("file", "alice/file").unwrap_or(());
    txn_alice.move_file("file", "alice/file").unwrap_or(());
    let alice_h = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice1")
        .unwrap();
    // Bob deletes "file"
    repo_bob.remove_path("file").unwrap_or(());
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Bob applies Alice's change
    debug!("Bob applies Alice's change");
    txn_bob
        .apply_change(&changes, &mut channel_bob, alice_h)
        .unwrap();
    txn_bob.debug_to_file(&channel_bob, "debug_bob1").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    // Move vs. delete: the moved file comes back as a zombie.
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "alice/file"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    let files = repo_bob.list_files();
    // NOTE(review): only "file"/"alice/file" were ever created in this
    // test, so the "bob" path below looks inherited from a similar
    // two-directory test — confirm `remove_path("bob")` is the intended
    // resolution branch.
    if files.iter().any(|f| f == "alice/file") {
        repo_bob.remove_path("bob").unwrap()
    } else {
        repo_bob.remove_path("alice").unwrap()
    }
    let resolution = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob2").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    if !conflicts.is_empty() {
        panic!("Bob has conflicts: {:?}", conflicts);
    }
    // Alice applies Bob's change
    debug!("Alice applies Bob's change");
    txn_alice
        .apply_change(&changes, &mut channel_alice, bob_h)
        .unwrap();
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice2")
        .unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    // Alice sees the same zombie before applying Bob's resolution.
    assert_eq!(conflicts.len(), 1);
    match conflicts[0] {
        Conflict::ZombieFile { ref path } => assert_eq!(path, "alice/file"),
        ref c => panic!("unexpected conflict {:#?}", c),
    }
    debug!("Alice applies Bob's resolution");
    txn_alice
        .apply_change(&changes, &mut channel_alice, resolution)
        .unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice3")
        .unwrap();
    if !conflicts.is_empty() {
        panic!("Alice has conflicts: {:?}", conflicts);
    }
    Ok(())
}
// Delete the context of an edit inside a file, then delete the file,
// and see if the edit has its context fixed.
#[test]
fn delete_zombie_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut repo_alice = working_copy::memory::Memory::new();
    let mut repo_bob = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    let env_alice = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_alice = env_alice.mut_txn_begin();
    let mut channel_alice = txn_alice.open_or_create_channel("main")?;
    repo_alice.add_file("file", b"a\nb\nc\nd\n".to_vec());
    txn_alice.add_file("file")?;
    let init = record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    // Bob clones the initial change into his own pristine.
    let env_bob = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn_bob = env_bob.mut_txn_begin();
    let mut channel_bob = txn_bob.open_or_create_channel("main")?;
    txn_bob
        .apply_change(&changes, &mut channel_bob, init)
        .unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn_bob,
        &mut channel_bob,
        "",
        true,
    )?;
    // Alice adds a zombie line.
    repo_alice.write_file("file", |w| {
        w.write_all(b"a\nb\nx\nc\nd\n")?;
        Ok(())
    })?;
    // The hash of Alice's own edit is never needed later, so it is dropped.
    record_all(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
    )?;
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice1")
        .unwrap();
    // Bob deletes the context of Alice's new line, and then deletes
    // "file".
    repo_bob.write_file("file", |w| {
        w.write_all(b"a\nd\n")?;
        Ok(())
    })?;
    let bob_h1 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    repo_bob.remove_path("file").unwrap_or(());
    let bob_h2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
    txn_bob.debug_to_file(&channel_bob, "debug_bob1").unwrap();
    // Alice applies Bob's changes.
    debug!("Alice applies Bob's change");
    txn_alice
        .apply_change(&changes, &mut channel_alice, bob_h1)
        .unwrap();
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice2")
        .unwrap();
    debug!("Applying bob_h2");
    txn_alice
        .apply_change(&changes, &mut channel_alice, bob_h2)
        .unwrap();
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice3")
        .unwrap();
    // Graph invariant: after the file deletion, no vertex may remain
    // alive or dangling-reachable in Alice's channel.
    let (alive, reachable) = txn_alice.check_alive(&channel_alice);
    if !alive.is_empty() {
        panic!("alive (bob0): {:?}", alive);
    }
    if !reachable.is_empty() {
        panic!("reachable (bob0): {:?}", reachable);
    }
    // Undo the file deletion and check the repository still outputs cleanly.
    crate::unrecord::unrecord(&mut txn_alice, &mut channel_alice, &changes, &bob_h2).unwrap();
    txn_alice
        .debug_to_file(&channel_alice, "debug_alice4")
        .unwrap();
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn_alice,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    debug!("file = {:?}", std::str::from_utf8(&buf));
    Ok(())
}
// Move-into-deleted-directory scenario: Alice moves "file" into "dir"
// while Bob concurrently deletes "dir". Applying both changes must
// surface "dir" as a single zombie-file conflict on each side, and
// Bob's recorded resolution must clear the conflict for Alice too.
#[test]
fn move_into_deleted_test() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let mut alice_repo = working_copy::memory::Memory::new();
    let mut bob_repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();

    // Alice starts with one file and one (empty) directory.
    let alice_env = pristine::sanakirja::Pristine::new_anon()?;
    let mut alice_txn = alice_env.mut_txn_begin();
    let mut alice_channel = alice_txn.open_or_create_channel("main")?;
    alice_repo.add_file("file", b"a\n".to_vec());
    alice_repo.add_dir("dir");
    alice_txn.add_file("file")?;
    alice_txn.add_dir("dir")?;
    let init = record_all(
        &mut alice_repo,
        &changes,
        &mut alice_txn,
        &mut alice_channel,
        "",
    )?;

    // Bob clones the initial change.
    let bob_env = pristine::sanakirja::Pristine::new_anon()?;
    let mut bob_txn = bob_env.mut_txn_begin();
    let mut bob_channel = bob_txn.open_or_create_channel("main")?;
    bob_txn
        .apply_change(&changes, &mut bob_channel, init)
        .unwrap();
    output::output_repository_no_pending(
        &mut bob_repo,
        &changes,
        &mut bob_txn,
        &mut bob_channel,
        "",
        true,
    )?;

    // Alice moves the file into the directory.
    alice_repo.rename("file", "dir/file").unwrap_or(());
    alice_txn.move_file("file", "dir/file").unwrap_or(());
    let alice_h = record_all(
        &mut alice_repo,
        &changes,
        &mut alice_txn,
        &mut alice_channel,
        "",
    )?;
    alice_txn
        .debug_to_file(&alice_channel, "debug_alice1")
        .unwrap();

    // Bob deletes the directory.
    bob_repo.remove_path("dir").unwrap_or(());
    let bob_h = record_all(&mut bob_repo, &changes, &mut bob_txn, &mut bob_channel, "")?;
    bob_txn.debug_to_file(&bob_channel, "debug_bob0").unwrap();

    // Bob applies Alice's move: the deleted directory becomes a zombie.
    debug!("Bob applies Alice's change");
    bob_txn
        .apply_change(&changes, &mut bob_channel, alice_h)
        .unwrap();
    bob_txn.debug_to_file(&bob_channel, "debug_bob1").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut bob_repo,
        &changes,
        &mut bob_txn,
        &mut bob_channel,
        "",
        true,
    )?;
    debug!("conflicts = {:#?}", conflicts);
    assert_eq!(conflicts.len(), 1);
    match &conflicts[0] {
        Conflict::ZombieFile { path } => assert_eq!(path, "dir"),
        other => panic!("unexpected conflict {:#?}", other),
    }

    // Bob records the output working copy as his resolution; no
    // conflicts may remain afterwards.
    let resolution = record_all(&mut bob_repo, &changes, &mut bob_txn, &mut bob_channel, "")?;
    bob_txn.debug_to_file(&bob_channel, "debug_bob2").unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut bob_repo,
        &changes,
        &mut bob_txn,
        &mut bob_channel,
        "",
        true,
    )?;
    assert!(conflicts.is_empty(), "Bob has conflicts: {:?}", conflicts);

    // Alice applies Bob's delete and sees the same single zombie.
    debug!("Alice applies Bob's change");
    alice_txn
        .apply_change(&changes, &mut alice_channel, bob_h)
        .unwrap();
    alice_txn
        .debug_to_file(&alice_channel, "debug_alice2")
        .unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut alice_repo,
        &changes,
        &mut alice_txn,
        &mut alice_channel,
        "",
        true,
    )?;
    assert_eq!(conflicts.len(), 1);
    match &conflicts[0] {
        Conflict::ZombieFile { path } => assert_eq!(path, "dir"),
        other => panic!("unexpected conflict {:#?}", other),
    }

    // Bob's resolution clears the conflict on Alice's side as well.
    debug!("Alice applies Bob's resolution");
    alice_txn
        .apply_change(&changes, &mut alice_channel, resolution)
        .unwrap();
    let conflicts = output::output_repository_no_pending(
        &mut alice_repo,
        &changes,
        &mut alice_txn,
        &mut alice_channel,
        "",
        true,
    )?;
    alice_txn
        .debug_to_file(&alice_channel, "debug_alice3")
        .unwrap();
    assert!(conflicts.is_empty(), "Alice has conflicts: {:?}", conflicts);
    Ok(())
}
// org id D0balg0/wtKfdDhdphXnGFSzLq/K7vZDjx/dtkoHoyw=
use super::*;
// Two sides insert different runs of lines at the same position,
// producing an order conflict; Alice resolves it by editing inside the
// conflict, the resolution is exchanged, and finally unrecorded on
// Bob's side to check the conflict reappears.
#[test]
fn solve_order_conflict() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\ny\nz\nb\n";
    let bob = b"a\nu\nv\nw\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    // A single pristine/transaction hosts both channels in this test.
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Bob edits and records
    repo_bob.write_file("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
    txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Alice edits and records
    repo_alice.write_file("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    // Checks the conflict layout. Lines 2..9 (the "====" separator plus
    // both inserted runs u/v/w and x/y/z) can appear in either order, so
    // that slice is sorted before comparing against the expected lines.
    let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
        let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
        debug!("{:?}", conflict);
        {
            let mut conflict = conflict.clone();
            (&mut conflict[2..9]).sort();
            assert_eq!(
                conflict,
                vec![
                    "a",
                    ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
                    "================================",
                    "u",
                    "v",
                    "w",
                    "x",
                    "y",
                    "z",
                    "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
                    "b"
                ]
            );
        }
        Ok(())
    };
    // check_conflict(&buf)?;
    txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
    // Alice solves the conflict.
    // Keeps lines 0, 2, 3, 6, 7, 8 and 10 of the conflicted file and
    // appends "bla!" after line 4 — i.e. an edit that both drops lines
    // and adds new text inside the conflict.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_alice.write_file("file", |w| {
        for (n, l) in conflict.iter().enumerate() {
            if n == 0 || n == 2 || n == 3 || n == 7 || n == 8 || n == 10 {
                writeln!(w, "{}", l)?
            } else if n == 4 {
                writeln!(w, "{}\nbla!", l)?
            } else if n == 6 {
                writeln!(w, "{}", l)?
            }
        }
        Ok(())
    })?;
    info!("resolving");
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    check_conflict(&buf)?;
    // Bob applies Alice's resolution
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // No conflict-marker lines (length >= 10) remain after resolution.
    assert!(std::str::from_utf8(&buf)?.lines().all(|l| l.len() < 10));
    // Unrecording the resolution must bring the original conflict back.
    crate::unrecord::unrecord(&mut txn, &mut channel_bob, &changes, &resolution).unwrap();
    txn.debug_to_file(&channel_bob, "debug_bob3").unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    check_conflict(&buf)?;
    Ok(())
}
// org id XADwz9T0kn2XwUanHtR9EYo1Be+wsQRBiLQ2UF6sFgo=
// Three one-line insertions (x, y, z) at the same position: a
// three-sided order conflict between Alice, Bob and Charlie, resolved
// in two steps (Alice first, then Bob), with Charlie converging after
// applying both resolutions.
#[test]
fn order_conflict_simple() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\nb\n";
    let bob = b"a\ny\nb\n";
    let charlie = b"a\nz\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    // One pristine/transaction hosts all three channels.
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    {
        let mut buf = Vec::new();
        repo_bob.read_file("file", &mut buf).unwrap();
        info!("Bob = {:?}", std::str::from_utf8(&buf));
    }
    // Charlie clones
    let mut repo_charlie = working_copy::memory::Memory::new();
    let mut channel_charlie = txn.open_or_create_channel("charlie")?;
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
        true,
    )?;
    // Bob edits and records
    repo_bob.write_file("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
    txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Charlie edits and records
    repo_charlie.write_file("file", |w| {
        w.write_all(charlie).unwrap();
        Ok(())
    })?;
    let charlie_h = record_all(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
    )?;
    txn.debug_to_file(&channel_charlie, "debug_charlie0")
        .unwrap();
    // Alice edits and records
    repo_alice.write_file("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    // Checks the three-sided conflict: positions 2..7 (the two "===="
    // separators plus x, y, z) can appear in any order, so that slice
    // is sorted before comparison.
    let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
        let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
        debug!("{:?}", conflict);
        {
            let mut conflict = conflict.clone();
            (&mut conflict[2..7]).sort();
            assert_eq!(
                conflict,
                vec![
                    "a",
                    ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
                    "================================",
                    "================================",
                    "x",
                    "y",
                    "z",
                    "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
                    "b"
                ]
            );
        }
        Ok(())
    };
    // check_conflict(&buf)?;
    txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
    // Alice solves the conflict.
    // Keeping only the single-character lines drops all conflict markers.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_alice.write_file("file", |w| {
        for l in conflict.iter().filter(|l| l.len() == 1) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    let mut alice_resolution = Vec::new();
    repo_alice.read_file("file", &mut alice_resolution)?;
    info!("resolving");
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    check_conflict(&buf)?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    // After Alice's resolution one separator is gone, but a two-sided
    // conflict remains (only one "====" line below).
    {
        let mut conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
        (&mut conflict[2..6]).sort();
        assert_eq!(
            conflict,
            vec![
                "a",
                ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
                "================================",
                "x",
                "y",
                "z",
                "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
                "b"
            ]
        )
    }
    // Bob resolves the remaining conflict the same way Alice did.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo_bob.write_file("file", |w| {
        for l in conflict.iter().filter(|l| l.len() == 1) {
            writeln!(w, "{}", l)?
        }
        Ok(())
    })?;
    let mut bob_resolution = Vec::new();
    repo_bob.read_file("file", &mut bob_resolution)?;
    info!("resolving");
    let resolution2 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    // Charlie applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h).unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_charlie, "debug_charlie1")
        .unwrap();
    buf.clear();
    repo_charlie.read_file("file", &mut buf)?;
    check_conflict(&buf)?;
    // Applying both resolutions must converge Charlie onto Bob's file.
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution).unwrap();
    apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution2).unwrap();
    output::output_repository_no_pending(
        &mut repo_charlie,
        &changes,
        &mut txn,
        &mut channel_charlie,
        "",
        true,
    )?;
    buf.clear();
    repo_charlie.read_file("file", &mut buf)?;
    assert_eq!(
        std::str::from_utf8(&bob_resolution),
        std::str::from_utf8(&buf)
    );
    Ok(())
}
// Two two-line insertions at the same spot; Alice resolves the order
// conflict by keeping, inside the conflict, only the first line of each
// side (see the `is_conflict` counter below). Bob must then converge
// onto exactly Alice's resolved file.
//
// Fix: the post-resolution debug dump reused the filename "debug_bob1"
// and silently overwrote the pre-resolution dump; it now writes
// "debug_bob2", consistent with `solve_order_conflict`.
#[test]
fn order_conflict_edit() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\n";
    let alice = b"a\nx\ny\nb\n";
    let bob = b"a\nu\nv\nb\n";
    let mut repo_alice = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo_alice.add_file("file", contents.to_vec());
    // One pristine/transaction hosts both channels.
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_init").unwrap();
    // Bob clones
    let mut repo_bob = working_copy::memory::Memory::new();
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    // Bob edits and records
    repo_bob.write_file("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
    txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
    let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
    txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Alice edits and records
    repo_alice.write_file("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo_alice,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
    // Alice solves the conflict: keep text outside the conflict, and
    // only the first single-character line after each marker.
    // `is_conflict` counts content lines seen since the last marker.
    let mut buf = Vec::new();
    repo_alice.read_file("file", &mut buf)?;
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    let mut is_conflict = 0;
    repo_alice.write_file("file", |w| {
        for l in conflict.iter() {
            if l.len() == 1 {
                if is_conflict < 2 {
                    writeln!(w, "{}", l)?
                }
                is_conflict += 1
            } else if l.as_bytes()[0] == b'<' {
                // closing marker: back outside the conflict
                is_conflict = 0
            } else {
                // === or >>>
                is_conflict = 1
            }
        }
        Ok(())
    })?;
    let mut alice_resolution = Vec::new();
    repo_alice.read_file("file", &mut alice_resolution)?;
    info!("resolving {:?}", std::str::from_utf8(&alice_resolution));
    let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
    // Bob applies Alice's change
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
    let mut buf = Vec::new();
    repo_bob.read_file("file", &mut buf)?;
    // Bob applies Alice's resolution and must end up with exactly
    // Alice's resolved file.
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
    output::output_repository_no_pending(
        &mut repo_bob,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
    buf.clear();
    repo_bob.read_file("file", &mut buf)?;
    assert_eq!(alice_resolution, buf);
    Ok(())
}
// Alice edits the conflict-marker lines themselves (wrapping each
// marker in preN/postN lines) and replaces trailing context with "c";
// the recorded resolution must round-trip through output unchanged and
// reach Bob with the same set of lines.
#[test]
fn edit_conflict_sides() -> Result<(), anyhow::Error> {
    env_logger::try_init().unwrap_or(());
    let contents = b"a\nb\nc\n";
    let alice = b"a\nx\nb\nc\n";
    let bob = b"a\ny\nb\nc\n";
    // A single working copy is shared by both channels in this test.
    let mut repo = working_copy::memory::Memory::new();
    let changes = changestore::memory::Memory::new();
    repo.add_file("file", contents.to_vec());
    let env = pristine::sanakirja::Pristine::new_anon()?;
    let mut txn = env.mut_txn_begin();
    let mut channel_alice = txn.open_or_create_channel("alice")?;
    txn.add_file("file")?;
    let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_init").unwrap();
    // Bob clones
    let mut channel_bob = txn.open_or_create_channel("bob")?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    info!("Done outputting Bob's working_copy");
    // Bob edits and records
    repo.write_file("file", |w| {
        w.write_all(bob).unwrap();
        Ok(())
    })?;
    txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
    txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
    let bob_h = record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "")?;
    txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
    // Alice edits and records
    repo.write_file("file", |w| {
        w.write_all(alice).unwrap();
        Ok(())
    })?;
    let alice_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
    // Alice applies Bob's change
    apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    // Alice edits sides of the conflict.
    // Marker lines (len > 5) are wrapped in preN/postN lines; lines
    // after the closing "<" marker are replaced by "c".
    // NOTE(review): the `if true { … } else { … }` below looks like a
    // leftover debugging toggle — the else branch is dead code.
    let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
    repo.write_file("file", |w| {
        let mut ended = false;
        let mut n = 0;
        for l in conflict.iter() {
            debug!("line: {:?}", l);
            if l.len() > 5 {
                if l.as_bytes()[0] == b'<' {
                    ended = true
                }
                if true {
                    writeln!(w, "pre{}\n{}\npost{}", n, l, n)?;
                } else {
                    writeln!(w, "{}", l)?;
                }
                n += 1
            } else if !ended {
                writeln!(w, "{}", l)?
            } else {
                debug!("writing c: {:?}", l);
                writeln!(w, "c")?
            }
        }
        Ok(())
    })?;
    let mut buf = Vec::new();
    repo.read_file("file", &mut buf)?;
    txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
    info!("resolving");
    let resolution = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_alice,
        "",
        true,
    )?;
    let mut buf2 = Vec::new();
    repo.read_file("file", &mut buf2)?;
    info!("{:?}", std::str::from_utf8(&buf2).unwrap());
    txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
    // Outputting the recorded resolution must reproduce exactly the file
    // Alice wrote.
    assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
    // Bob applies
    apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
    apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
    output::output_repository_no_pending(
        &mut repo,
        &changes,
        &mut txn,
        &mut channel_bob,
        "",
        true,
    )?;
    let mut buf3 = Vec::new();
    repo.read_file("file", &mut buf3)?;
    // Bob's output may order independent lines differently, so the two
    // files are compared as sorted line sets.
    let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
    lines2.sort();
    let mut lines3: Vec<_> = std::str::from_utf8(&buf3).unwrap().lines().collect();
    lines3.sort();
    assert_eq!(lines2, lines3);
    Ok(())
}
// Scenario: Alice and Bob record byte-identical insertions ("x\ny\n"
// between "a" and "b"), which still yields an order conflict between
// the two equal hunks. Alice edits lines on both sides of the
// conflict, records a resolution, and Bob must reach the same set of
// lines after applying Alice's change plus her resolution.
// NOTE(review): Alice and Bob share the single working copy `repo`
// here; each party's state is re-materialized by
// output_repository_no_pending before it edits — confirm intentional.
#[test]
fn edit_after_conflict() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice = b"a\nx\ny\nb\nc\n";
// Intentionally identical to `alice`: equal insertions still conflict.
let bob = b"a\nx\ny\nb\nc\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
// Bob edits and records
repo.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
repo.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
// Alice edits sides of the conflict.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo.write_file("file", |w| {
for l in conflict.iter() {
debug!("line: {:?}", l);
// Lines longer than 5 bytes are conflict markers here; wrap
// every marker except the closing "<<<..." one in pre/post.
if l.len() > 5 && l.as_bytes()[0] != b'<' {
writeln!(w, "pre\n{}\npost", l)?;
} else if *l != "b" && *l != "x" {
// Keep the remaining short lines, but delete "b" and "x".
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
info!("resolving");
let resolution = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo.read_file("file", &mut buf2)?;
info!("{:?}", std::str::from_utf8(&buf2).unwrap());
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Outputting the resolved channel must not change the file on disk.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
let mut buf3 = Vec::new();
repo.read_file("file", &mut buf3)?;
// Compare as sorted line sets: residual ordering may differ.
let mut lines2: Vec<_> = std::str::from_utf8(&buf2)?.lines().collect();
lines2.sort();
let mut lines3: Vec<_> = std::str::from_utf8(&buf3)?.lines().collect();
lines3.sort();
assert_eq!(lines2, lines3);
Ok(())
}
// Scenario: Alice and Bob each record two successive insertions on
// the same region, producing a conflict; Alice's resolution deletes
// the last-inserted lines ("z", "w") and inserts an "end" line right
// after the closing conflict marker. Bob pulls everything and both
// sides must converge to the same set of lines.
#[test]
fn delete_before_marker() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice0 = b"a\nx\ny\nb\nc\n";
let alice1 = b"a\nx\ny\nz\nb\nc\n";
let bob0 = b"a\nu\nv\nb\nc\n";
let bob1 = b"a\nu\nv\nw\nb\nc\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
// Bob edits and records: two changes, the second adding "w".
let bob_edits: &[&[u8]] = &[bob0, bob1];
let bob_changes: Vec<_> = bob_edits
.iter()
.map(|bob| {
repo.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})
.unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel_bob, "").unwrap()
})
.collect();
// Alice edits and records: two changes, the second adding "z".
let alice_edits: &[&[u8]] = &[alice0, alice1];
let alice_changes: Vec<_> = alice_edits
.iter()
.map(|alice| {
repo.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})
.unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "").unwrap()
})
.collect();
// Alice applies Bob's changes
for bob_h in bob_changes.iter() {
apply::apply_change(&changes, &mut txn, &mut channel_alice, *bob_h)?;
}
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
// Alice edits sides of the conflict.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo.write_file("file", |w| {
let mut ended = false;
for l in conflict.iter() {
debug!("line: {:?}", l);
if *l == "z" || *l == "w" {
// Drop "z" and "w" (deleting just before the marker).
} else if l.starts_with("<<<") {
writeln!(w, "{}", l)?;
ended = true
} else if ended {
// First line after the closing marker: prepend "end".
writeln!(w, "end\n{}", l)?;
ended = false
} else {
writeln!(w, "{}", l)?;
}
}
Ok(())
})?;
let mut buf = Vec::new();
repo.read_file("file", &mut buf)?;
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
info!("resolving");
let conflict_edits = record_all(&mut repo, &changes, &mut txn, &mut channel_alice, "")?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo.read_file("file", &mut buf2)?;
info!("{:?}", std::str::from_utf8(&buf2).unwrap());
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Outputting the resolved channel must not change the file on disk.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
// Bob pulls
for alice_h in alice_changes.iter() {
apply::apply_change(&changes, &mut txn, &mut channel_bob, *alice_h)?;
}
apply::apply_change(&changes, &mut txn, &mut channel_bob, conflict_edits)?;
output::output_repository_no_pending(
&mut repo,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
buf2.clear();
repo.read_file("file", &mut buf2)?;
// Compare as sorted line sets: residual ordering may differ.
let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
lines.sort();
let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
lines2.sort();
assert_eq!(lines, lines2);
Ok(())
}
// Scenario: Alice and Bob both append a different final line with no
// trailing newline ("x" vs "y"), conflicting on the last line of the
// file. Alice resolves by keeping only the short (non-marker) lines;
// Bob must see the same conflict presentation, then converge to
// Alice's resolved file.
#[test]
fn conflict_last_line() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\n";
// Note: no trailing newline on either edit.
let alice = b"a\nx";
let bob = b"a\ny";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// Asserts the file is exactly the expected conflict. Positions
// 2..5 ("====", "x", "y") are sorted first because which side is
// printed first is not fixed.
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
debug!("{:?}", conflict);
{
let mut conflict = conflict.clone();
(&mut conflict[2..5]).sort();
assert_eq!(
conflict,
vec![
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"================================",
"x",
"y",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
]
);
}
Ok(())
};
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Alice solves the conflict.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file("file", |w| {
// Keep only short lines ("a", "x", "y"); drop the markers.
for l in conflict.iter().filter(|l| l.len() <= 2) {
writeln!(w, "{}", l)?
}
Ok(())
})?;
info!("resolving");
let mut buf_alice = Vec::new();
repo_alice.read_file("file", &mut buf_alice)?;
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob must see the same conflict Alice saw.
check_conflict(&buf)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// After the resolution, Bob's file equals Alice's resolved file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf_alice));
Ok(())
}
// Scenario: Bob empties the file while Alice edits its last line
// ("b" -> "x", no trailing newline). Applying both leaves Alice's
// line as a "zombie" (alive on one side, deleted on the other),
// presented between conflict markers with no "====" separator.
// Alice keeps "x"; Bob must converge to her resolution.
#[test]
fn zombie_last_line() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\nb";
let alice = b"a\nx";
// Bob deletes everything.
let bob = b"";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
// The zombie line "x" is shown alone between markers (no "===="
// separator, unlike a two-sided conflict).
let check_conflict = |buf: &[u8]| -> Result<(), anyhow::Error> {
let conflict: Vec<_> = std::str::from_utf8(buf)?.lines().collect();
assert_eq!(
conflict,
vec![
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"x",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
]
);
Ok(())
};
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
{
// Recording again without touching the working copy must
// produce no actions: the conflict presentation is clean.
let mut state = Builder::new();
state
.record(
&mut txn,
Algorithm::default(),
&mut channel_alice,
&mut repo_alice,
&changes,
"",
)
.unwrap();
let rec = state.finish();
assert!(rec.actions.is_empty())
}
// Alice solves the conflict.
repo_alice.write_file("file", |w| {
// Keep only "x" (no trailing newline, matching her edit).
write!(w, "x")?;
Ok(())
})?;
info!("resolving");
let mut buf_alice = Vec::new();
repo_alice.read_file("file", &mut buf_alice)?;
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob must see the same zombie conflict Alice saw.
check_conflict(&buf)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// After the resolution, Bob's file equals Alice's resolved file.
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf_alice));
Ok(())
}
// Runs the shared post-conflict scenario (Alice inserts 0/1/2, Bob
// inserts 3/4/5 between "a" and "b"), resolving by editing lines
// *after* the conflict block: "a" is duplicated into "a\na'", the
// markers are dropped, and the first line of each side ("0", "3") is
// deleted.
#[test]
fn edit_post_conflict() -> Result<(), anyhow::Error> {
edit_post_conflict_(
|buf| {
// The two sides of the conflict may appear in either order.
let lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
let alice_first = [
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"0",
"1",
"2",
"================================",
"3",
"4",
"5",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b",
];
let bob_first = [
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"3",
"4",
"5",
"================================",
"0",
"1",
"2",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b",
];
assert!(lines == alice_first || lines == bob_first)
},
|buf, w| {
// Keep single-character lines other than "0" and "3"; expand
// "a"; everything else (the markers) is dropped.
for line in std::str::from_utf8(&buf)?.lines() {
if line == "a" {
writeln!(w, "a\na'")?
} else if line.len() == 1 && line != "0" && line != "3" {
writeln!(w, "{}", line)?
}
}
Ok(())
},
)
}
// Runs the shared post-conflict scenario (Alice inserts 0/1/2, Bob
// inserts 3/4/5 between "a" and "b"), but the "resolution" keeps the
// whole conflict intact and only edits *around* it: "a" is duplicated
// into "a\na'" and the trailing "b" is replaced with "c".
#[test]
fn edit_around_conflict() -> Result<(), anyhow::Error> {
edit_post_conflict_(
|buf| {
// The two sides of the conflict may appear in either order.
let lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
let alice_first = [
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"0",
"1",
"2",
"================================",
"3",
"4",
"5",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b",
];
let bob_first = [
"a",
">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>",
"3",
"4",
"5",
"================================",
"0",
"1",
"2",
"<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<",
"b",
];
assert!(lines == alice_first || lines == bob_first)
},
|buf, w| {
// Conflict body and markers pass through untouched.
for line in std::str::from_utf8(&buf)?.lines() {
match line {
"a" => writeln!(w, "a\na'")?,
"b" => writeln!(w, "c")?,
other => writeln!(w, "{}", other)?,
}
}
Ok(())
},
)
}
// Shared driver for the edit_post_conflict / edit_around_conflict
// tests. Alice inserts "0\n1\n2\n" and Bob inserts "3\n4\n5\n"
// between "a" and "b", creating a conflict. `check` is run on both
// Alice's and Bob's conflicted files; `resolve` writes Alice's
// resolution from the conflicted bytes. Finally Bob applies Alice's
// change and resolution and both files must contain the same lines.
//
// - `check`: asserts on the conflict presentation (called twice).
// - `resolve`: writes the resolved file contents to `w`.
fn edit_post_conflict_<
Check: FnMut(&[u8]),
Resolve: FnOnce(&[u8], &mut dyn std::io::Write) -> Result<(), anyhow::Error>,
>(
mut check: Check,
resolve: Resolve,
) -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let alice = b"a\n0\n1\n2\nb\n";
let bob = b"a\n3\n4\n5\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Caller validates the conflict presentation on Alice's side.
check(&buf);
// Alice solves the conflict.
repo_alice.write_file("file", |mut w| {
resolve(&buf, &mut w)?;
Ok(())
})?;
info!("resolving");
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob must see the same conflict shape before resolving.
check(&buf);
// Bob applies Alice's solution.
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
let mut buf2 = Vec::new();
repo_bob.read_file("file", &mut buf2)?;
buf.clear();
repo_alice.read_file("file", &mut buf)?;
// Compare as sorted line sets: residual ordering may differ.
let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
lines.sort();
let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
lines2.sort();
assert_eq!(lines, lines2);
Ok(())
}
// Scenario: Alice and Bob insert different lines ("x" vs "y") at the
// same spot, then *both* record conflicting resolutions of the
// resulting conflict (Alice prefixes "x" with "u", Bob with "i"),
// creating a conflict nested inside a resolution. After exchanging
// both resolutions, the two channels must contain the same lines.
#[test]
fn nested_conflict() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let alice = b"a\nx\nb\n";
let bob = b"a\ny\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
debug!("Alice records");
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change
debug!("Alice applies");
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Alice solves the conflict: insert "u" just before "x", keeping
// the conflict markers in place.
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
repo_alice.write_file("file", |w| {
let buf = std::str::from_utf8(&buf).unwrap();
w.write_all(buf.replace("x\n", "u\nx\n").as_bytes())?;
Ok(())
})?;
info!("resolving");
let resolution_alice = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
// Bob resolves the same conflict differently: "i" before "x".
buf.clear();
repo_bob.read_file("file", &mut buf)?;
repo_bob.write_file("file", |w| {
let buf = std::str::from_utf8(&buf).unwrap();
w.write_all(buf.replace("x\n", "i\nx\n").as_bytes())?;
Ok(())
})?;
info!("resolving");
let resolution_bob = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Alice applies Bob's resolution.
apply::apply_change(&changes, &mut txn, &mut channel_alice, resolution_bob).unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
txn.debug_to_file(&channel_alice, "debug_alice3").unwrap();
buf.clear();
repo_alice.read_file("file", &mut buf)?;
debug!("{}", std::str::from_utf8(&buf).unwrap());
// Bob applies Alice's resolution
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution_alice).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
let mut buf2 = Vec::new();
repo_bob.read_file("file", &mut buf2)?;
// Compare as sorted line sets: residual ordering may differ.
let mut lines: Vec<_> = std::str::from_utf8(&buf).unwrap().lines().collect();
lines.sort();
let mut lines2: Vec<_> = std::str::from_utf8(&buf2).unwrap().lines().collect();
lines2.sort();
assert_eq!(lines, lines2);
Ok(())
}
// org id Bte0vzPV77vHQoub3xRBIm5ALiVSerFDkJRoTZOrQ20=
// Scenario: Alice records five successive versions of a file, the
// last ones deleting everything. Bob clones only the first change,
// inserts "x" before "c" (keeping "c"'s context alive), then applies
// the rest of Alice's changes: "x" survives as a zombie shown alone
// between conflict markers. Bob resolves by restoring "x\nc\n";
// Alice applying Bob's change and resolution must converge.
// Note: unlike most tests in this file, Alice and Bob use separate
// pristines (env_alice / env_bob), so changes are exchanged solely
// through the shared changestore.
#[test]
fn zombie_context_resolution() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
// Alice records one change per successive file state.
txn_alice.add_file("file").unwrap();
repo_alice.add_file("file", b"".to_vec());
let x: &[&[u8]] = &[b"c\n", b"a\nc\n", b"a\nb\nc\n", b"a\n", b""];
let p_alice: Vec<_> = x
.iter()
.map(|c| {
repo_alice
.write_file("file", |w| {
w.write_all(c)?;
Ok(())
})
.unwrap();
record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap()
})
.collect();
// Bob clones (only Alice's first change).
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, p_alice[0]).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
txn_bob.debug_to_file(&channel_bob, "debug_bob")?;
// Bob creates an order conflict just to keep line "c" connected
// to the root.
repo_bob.write_file("file", |w| {
w.write_all(b"x\nc\n")?;
Ok(())
})?;
debug!("bob records conflict");
let p_bob = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
// Bob applies all of Alice's other changes
for (n, p) in (&p_alice[1..]).into_iter().enumerate() {
info!("{}. Applying {:?}", n, p);
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, *p).unwrap();
txn_bob.debug_to_file(&channel_bob, &format!("debug_bob_{}", n))?;
}
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf)?;
debug!("file = {:?}", std::str::from_utf8(&buf));
// Only the zombie "x" remains, alone between conflict markers.
assert_eq!(
std::str::from_utf8(&buf),
Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
);
// Bob resolves by restoring "x\nc\n".
repo_bob.write_file("file", |w| {
w.write_all(b"x\nc\n")?;
Ok(())
})?;
let resolution =
record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
assert_eq!(buf, b"x\nc\n");
// Alice applies Bob's change and resolution.
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, p_bob).unwrap();
txn_alice.debug_to_file(&channel_alice, "debug_alice1")?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo_alice.read_file("file", &mut buf2)?;
// Before the resolution Alice sees the same zombie conflict.
assert_eq!(
std::str::from_utf8(&buf2),
Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
);
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, resolution).unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
txn_alice.debug_to_file(&channel_alice, "debug_alice2")?;
let mut buf2 = Vec::new();
repo_alice.read_file("file", &mut buf2)?;
// Both sides converge to "x\nc\n".
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
Ok(())
}
// org id tXrBFjxSrgRDsKlaKi10s0yufnnu+1r7I5vhdHV/TtY=
// Scenario: like zombie_context_resolution, but Bob inserts three
// lines ("x", "y", "z") between "b" and "c" before applying Alice's
// deletion of the whole file; all three survive as zombies. Bob's
// resolution keeps only part of the zombie block ("z") plus restored
// context ("a", "d"). Alice must converge after applying Bob's
// change and resolution. Separate pristines per party, as above.
#[test]
fn zombie_half_survivor() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice").unwrap();
// Alice records: create the file, then delete everything.
txn_alice.add_file("file").unwrap();
repo_alice.add_file("file", b"".to_vec());
let x: &[&[u8]] = &[b"a\nb\nc\nd\n", b""];
let p_alice: Vec<_> = x
.iter()
.map(|c| {
repo_alice
.write_file("file", |w| {
w.write_all(c)?;
Ok(())
})
.unwrap();
record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)
.unwrap()
})
.collect();
// Bob clones (only Alice's first change).
let mut channel_bob = txn_bob.open_or_create_channel("bob").unwrap();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, p_alice[0]).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
// Bob inserts "x\ny\nz\n" between "b" and "c"; these lines become
// zombies once Alice's deletion is applied below.
repo_bob.write_file("file", |w| {
w.write_all(b"a\nb\nx\ny\nz\nc\nd\n")?;
Ok(())
})?;
let p_bob = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
txn_bob.debug_to_file(&channel_bob, "debug_bob0")?;
// Bob applies all of Alice's other changes
for p in &p_alice[1..] {
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, *p).unwrap();
}
txn_bob.debug_to_file(&channel_bob, "debug_bob1")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
txn_bob.debug_to_file(&channel_bob, "debug_bob1_")?;
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf)?;
// Only the zombie block remains, between conflict markers.
assert_eq!(
std::str::from_utf8(&buf),
Ok(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\nx\ny\nz\n<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n")
);
// Bob's resolution keeps only "z" of the zombies, plus "a"/"d".
repo_bob.write_file("file", |w| {
w.write_all(b"a\nz\nd\n")?;
Ok(())
})?;
let resolution =
record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
txn_bob.debug_to_file(&channel_bob, "debug_bob2")?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
assert_eq!(buf, b"a\nz\nd\n");
// Alice applies Bob's change and resolution.
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, p_bob).unwrap();
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, resolution).unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
let mut buf2 = Vec::new();
repo_alice.read_file("file", &mut buf2)?;
// Both sides converge to "a\nz\nd\n".
assert_eq!(std::str::from_utf8(&buf), std::str::from_utf8(&buf2));
Ok(())
}
// Scenario: three-way zombie. Alice inserts "x" inside the block
// b..c; Bob deletes that block (making "x" a zombie); Charlie
// independently deletes an even larger span (a..d). Alice records a
// resolution keeping "x". The test then applies every change to
// Charlie's channel in sequence. It asserts nothing about Charlie's
// final file contents — it only checks that all applications succeed
// (the debug_to_file calls are for manual inspection).
#[test]
fn three_way_zombie() -> Result<(), anyhow::Error> {
// A logger may already be installed by another test; ignore failure.
env_logger::try_init().unwrap_or(());
let contents = b"u\na\nb\nc\nd\nv\n";
let alice = b"u\na\nb\nx\nc\nd\nv\n";
let bob = b"u\na\nd\nv\n";
// Alice's eventual resolution: keep "x", accept Bob's deletion.
let alice_bob = b"u\na\nx\nd\nv\n";
let charlie = b"u\nv\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Charlie clones
let mut repo_charlie = working_copy::memory::Memory::new();
let mut channel_charlie = txn.open_or_create_channel("charlie")?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
// Alice adds a line.
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Bob deletes the context.
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Charlie also deletes the context.
// Note: Charlie's change hash is deliberately not used afterwards.
repo_charlie.write_file("file", |w| {
w.write_all(charlie).unwrap();
Ok(())
})?;
record_all(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;
txn.debug_to_file(&channel_charlie, "debug_charlie0")
.unwrap();
// Alice applies Bob's change
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
debug!("alice = {:?}", std::str::from_utf8(&buf));
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Alice solves the conflict.
repo_alice.write_file("file", |w| Ok(w.write_all(alice_bob)?))?;
let resolution = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's edits and resolution.
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h)?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, resolution)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
// Charlie applies all changes
/*output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;*/
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h)?;
txn.debug_to_file(&channel_charlie, "debug_charlie1")
.unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h)?;
txn.debug_to_file(&channel_charlie, "debug_charlie2")
.unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, resolution)?;
txn.debug_to_file(&channel_charlie, "debug_charlie3")
.unwrap();
Ok(())
}
// org id apJ3ammGRRUv7jQ3uZqtvWwLtPaJJpl5S0O14CnL8ps=
// Scenario: Alice, Bob and Charlie start from the same one-file repo.
// Alice and Bob make conflicting edits to the same region; each of them
// resolves the conflict by deleting the conflict-marker lines. Bob then
// pulls Charlie's independent change, which re-creates a conflict; he
// resolves it again, and finally unrecords that second resolution to
// check that the conflict reappears.
#[test]
fn cyclic_conflict_resolution() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let alice = b"a\nx\ny\nz\nb\n";
let bob = b"a\nu\nv\nw\nb\n";
let charlie = b"a\nU\nV\nW\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
// Initial change shared by everyone.
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Charlie clones and makes something independent.
let mut repo_charlie = working_copy::memory::Memory::new();
let mut channel_charlie = txn.open_or_create_channel("charlie")?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
repo_charlie.write_file("file", |w| {
w.write_all(charlie).unwrap();
Ok(())
})?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;
info!("Done outputting Charlie's working_copy");
{
let mut buf = Vec::new();
repo_charlie.read_file("file", &mut buf).unwrap();
info!("Charlie = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
txn.debug_to_file(&channel_alice, "debug_alice").unwrap();
txn.debug_to_file(&channel_bob, "debug_bob").unwrap();
let bob_h = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's change; her file now contains a conflict.
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
debug!("alice: {:?}", std::str::from_utf8(&buf));
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Alice solves the conflict by keeping only short lines — conflict
// marker lines are 10 chars or longer, content lines are shorter.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
let alices_resolution =
record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's change
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
debug!("bob: {:?}", std::str::from_utf8(&buf));
// Bob resolves the same conflict the same way as Alice.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_bob.write_file("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
let _bobs_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
// Bob applies Alice's resolution
apply::apply_change(&changes, &mut txn, &mut channel_bob, alices_resolution).unwrap();
// Bob applies Charlie's side
apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
debug!("outputting bob2");
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob3").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Check that there is a conflict (at least one long marker line).
assert!(std::str::from_utf8(&buf)?.lines().any(|l| l.len() >= 10));
debug!("{:?}", std::str::from_utf8(&buf));
// Solve it again, in the same way and output the result.
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_bob.write_file("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
debug!("resolving again");
let second_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
buf.clear();
repo_bob.read_file("file", &mut buf)?;
txn.debug_to_file(&channel_bob, "debug_bob4").unwrap();
// Check that the conflict is gone.
assert!(std::str::from_utf8(&buf)?.lines().all(|l| l.len() < 10));
// Unrecord the second resolution: the conflict must come back.
crate::unrecord::unrecord(&mut txn, &mut channel_bob, &changes, &second_resolution).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob5").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Check that the conflict is back.
assert!(std::str::from_utf8(&buf)?.lines().any(|l| l.len() >= 10));
Ok(())
}
// Scenario: Alice and Bob each make three successive edits to the same
// region, exchange them (creating a conflict), and both resolve it.
// Charlie then deletes the whole conflicted region ("zombie" context).
// After all sides apply everything, `check_alive` must report no dead
// vertices left alive and no unreachable-but-alive vertices, both for
// Bob's and Charlie's channels.
#[test]
fn cyclic_zombies() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\n";
let alice = b"a\nx\ny\nz\nb\nc\n";
let alice2 = b"a\nx\nX\ny\nz\nb\nc\n";
let alice3 = b"a\nx\nX\nY\ny\nz\nb\nc\n";
let bob = b"a\nu\nv\nw\nb\nc\n";
let bob2 = b"a\nu\nU\nv\nw\nb\nc\n";
let bob3 = b"a\nu\nU\nV\nv\nw\nb\nc\n";
let charlie = b"a\nc\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel_alice = txn.open_or_create_channel("alice")?;
txn.add_file("file")?;
let init_h = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_init").unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
let mut channel_bob = txn.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
{
let mut buf = Vec::new();
repo_bob.read_file("file", &mut buf).unwrap();
info!("Bob = {:?}", std::str::from_utf8(&buf));
}
// Bob edits and records three times in a row.
repo_bob.write_file("file", |w| {
w.write_all(bob).unwrap();
Ok(())
})?;
let bob_h1 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
repo_bob.write_file("file", |w| {
w.write_all(bob2).unwrap();
Ok(())
})?;
let bob_h2 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
repo_bob.write_file("file", |w| {
w.write_all(bob3).unwrap();
Ok(())
})?;
let bob_h3 = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
txn.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice edits and records three times in a row.
repo_alice.write_file("file", |w| {
w.write_all(alice).unwrap();
Ok(())
})?;
let alice_h1 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
repo_alice.write_file("file", |w| {
w.write_all(alice2).unwrap();
Ok(())
})?;
let alice_h2 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
repo_alice.write_file("file", |w| {
w.write_all(alice3).unwrap();
Ok(())
})?;
let alice_h3 = record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice0").unwrap();
// Alice applies Bob's changes; her file now conflicts.
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_alice, bob_h3)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn,
&mut channel_alice,
"",
true,
)?;
let mut buf = Vec::new();
repo_alice.read_file("file", &mut buf)?;
debug!("alice: {:?}", std::str::from_utf8(&buf));
txn.debug_to_file(&channel_alice, "debug_alice1").unwrap();
// Alice solves the conflict by dropping marker lines (>= 10 chars).
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_alice.write_file("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
let alices_resolution =
record_all(&mut repo_alice, &changes, &mut txn, &mut channel_alice, "")?;
txn.debug_to_file(&channel_alice, "debug_alice2").unwrap();
// Bob applies Alice's changes
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h1).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h2).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_bob, alice_h3).unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob1").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
debug!("bob: {:?}", std::str::from_utf8(&buf));
let conflict: Vec<_> = std::str::from_utf8(&buf)?.lines().collect();
repo_bob.write_file("file", |w| {
for l in conflict.iter() {
if l.len() < 10 {
writeln!(w, "{}", l)?
}
}
Ok(())
})?;
info!("resolving");
// Bob solves the conflict
let bobs_resolution = record_all(&mut repo_bob, &changes, &mut txn, &mut channel_bob, "")?;
// Charlie clones (all six edits) and deletes the edited region,
// which makes the conflicting lines' context zombies.
let mut repo_charlie = working_copy::memory::Memory::new();
let mut channel_charlie = txn.open_or_create_channel("charlie")?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, init_h)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alice_h3)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h1)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h2)?;
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bob_h3)?;
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
repo_charlie.write_file("file", |w| {
w.write_all(charlie).unwrap();
Ok(())
})?;
let charlie_h = record_all(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
)?;
// Bob applies Alice's resolution
apply::apply_change(&changes, &mut txn, &mut channel_bob, alices_resolution).unwrap();
txn.debug_to_file(&channel_bob, "debug_bob2").unwrap();
debug!("outputting bob2");
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob3").unwrap();
buf.clear();
repo_bob.read_file("file", &mut buf)?;
// Bob applies Charlie's side
debug!("applying charlie's patch");
apply::apply_change(&changes, &mut txn, &mut channel_bob, charlie_h).unwrap();
txn.debug_to_file(&channel_bob, "debug_bob4").unwrap();
// First liveness check right after applying; only logged here, the
// hard failure happens after output (below), where both snapshots
// must be clean.
let (alive_, reachable_) = txn.check_alive(&channel_bob);
if !alive_.is_empty() {
error!("alive (bob0): {:?}", alive_);
}
if !reachable_.is_empty() {
error!("reachable (bob0): {:?}", reachable_);
}
debug!("outputting bob's repo");
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn,
&mut channel_bob,
"",
true,
)?;
txn.debug_to_file(&channel_bob, "debug_bob5").unwrap();
let (alive, reachable) = txn.check_alive(&channel_bob);
if !alive.is_empty() {
panic!("alive (bob1): {:?}", alive);
} else if !alive_.is_empty() {
panic!("alive_ (bob1): {:?}", alive_);
}
if !reachable.is_empty() {
panic!("reachable (bob1): {:?}", reachable);
} else if !reachable_.is_empty() {
panic!("reachable_ (bob1): {:?}", reachable_);
}
// Symmetric: Charlie applies the other sides.
debug!("Charlie applies");
apply::apply_change(&changes, &mut txn, &mut channel_charlie, alices_resolution).unwrap();
apply::apply_change(&changes, &mut txn, &mut channel_charlie, bobs_resolution).unwrap();
txn.debug_to_file(&channel_charlie, "debug_charlie")
.unwrap();
let (alive, reachable) = txn.check_alive(&channel_charlie);
if !alive.is_empty() {
panic!("alive (charlie0): {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable (charlie0): {:?}", reachable);
}
output::output_repository_no_pending(
&mut repo_charlie,
&changes,
&mut txn,
&mut channel_charlie,
"",
true,
)?;
txn.debug_to_file(&channel_charlie, "debug_charlie1")
.unwrap();
let (alive, reachable) = txn.check_alive(&channel_charlie);
if !alive.is_empty() {
panic!("alive (charlie1): {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable (charlie1): {:?}", reachable);
}
Ok(())
}
// Scenario: two directories `a` and `b`. Bob moves `a` into `b` while
// Alice moves `b` into `a`; applying both creates a directory cycle.
// The test checks that outputting the repository resolves the cycle
// (no alive/unreachable vertices left) and that the resolution can be
// recorded. Note: Alice and Bob use separate pristines here.
#[test]
fn cyclic_files() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let mut repo_bob = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a/file", contents.to_vec());
repo_alice.add_file("b/file", contents.to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
txn_alice.add_file("a/file")?;
txn_alice.add_file("b/file")?;
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
txn_alice
.debug_to_file(&channel_alice, "debug_init")
.unwrap();
// Bob clones and moves a -> b/a
let mut channel_bob = txn_bob.open_or_create_channel("bob")?;
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
txn_bob.move_file("a", "b/a").unwrap();
repo_bob.rename("a", "b/a").unwrap();
let ab = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
// Alice moves b -> a/b (the opposite direction: cycle once merged)
txn_alice.move_file("b", "a/b").unwrap();
repo_alice.rename("b", "a/b").unwrap();
let _ba = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
// Alice applies Bob's move on top of hers, creating the cycle.
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, ab)?;
txn_alice.debug_to_file(&channel_alice, "debug").unwrap();
debug!("outputting cycle");
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
let v: Vec<_> = txn_alice.iter_working_copy().collect();
println!("{:?}", v);
// After output, the graph must be consistent again.
let (alive, reachable) = txn_alice.check_alive(&channel_alice);
if !alive.is_empty() {
panic!("alive: {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable: {:?}", reachable);
}
debug!("recording the resolution");
let _resolution = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
txn_alice.debug_to_file(&channel_alice, "debug2").unwrap();
Ok(())
}
// org id QgpTD2RjlNLcS9Q2bivXjqgdZptO4jMCAyAKKy5M4Cw=
// Scenario: Bob moves `a/b/file` to `c/d/file` and deletes the now
// useless `a` directory; Alice applies his change. The test then runs
// `check_tree_inodes` on Alice's channel to verify the inode/tree
// tables stayed consistent with the graph.
#[test]
fn tree_inodes_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\n";
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a/b/file", contents.to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
let mut channel_alice = txn_alice.open_or_create_channel("alice")?;
txn_alice.add_file("a/b/file")?;
// Bob gets his own pristine and tracks the same path.
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_bob = txn_bob.open_or_create_channel("bob")?;
txn_bob.add_file("a/b/file")?;
let init_h = record_all(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
)?;
txn_alice
.debug_to_file(&channel_alice, "debug_init")
.unwrap();
// Bob clones
let mut repo_bob = working_copy::memory::Memory::new();
apply::apply_change(&changes, &mut txn_bob, &mut channel_bob, init_h)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
info!("Done outputting Bob's working_copy");
// Bob moves and deletes a/b
repo_bob.rename("a/b/file", "c/d/file")?;
txn_bob.move_file("a/b/file", "c/d/file")?;
repo_bob.remove_path("a")?;
txn_bob.remove_file("a")?;
let bob_h = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
txn_bob.debug_to_file(&channel_bob, "debug_bob0").unwrap();
// Alice applies
apply::apply_change(&changes, &mut txn_alice, &mut channel_alice, bob_h)?;
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel_alice,
"",
true,
)?;
check_tree_inodes(&txn_alice, &channel_alice.borrow());
Ok(())
}
/// Sanity check on the inode tables of `channel`: every inode returned
/// by `iter_inodes` must (a) reach the root by following `revtree`
/// parent links, and (b) have its vertex alive in the channel's graph.
/// Panics on the first violation (test helper).
///
/// Note: the previous signature declared an unused `'env` lifetime
/// parameter; it has been removed (callers use inference, so this is
/// backward-compatible).
fn check_tree_inodes<T: TxnT>(txn: &T, channel: &Channel<T>) {
// Sanity check
for (inode, vertex) in txn.iter_inodes() {
debug!("inode = {:?}, vertex = {:?}", inode, vertex);
// Walk up the reverse-tree table until we hit the root.
let mut inode_ = inode;
while !inode_.is_root() {
if let Some(next) = txn.get_revtree(inode_, None) {
debug!("next = {:?}", next);
inode_ = next.parent_inode;
} else {
// No parent entry: the tree tables are inconsistent.
panic!("inode = {:?}, inode_ = {:?}", inode, inode_);
}
}
// The inode's vertex must still be alive in the graph; dump its
// adjacent edges before panicking to help debugging.
if !txn.is_alive(&channel, vertex.inode_vertex()) {
for e in txn.iter_adjacent(
&channel,
vertex.inode_vertex(),
EdgeFlags::empty(),
EdgeFlags::all(),
) {
error!("{:?} {:?} {:?}", inode, vertex, e)
}
panic!(
"inode {:?}, vertex {:?}, is not alive, {:?}",
inode,
vertex,
txn.tree_path(vertex)
)
}
}
}
// org id KXO4Q0zwlWvUD/nYh+BNlcg+iAVCRRrq/PjV4cNJH0k=
use super::*;
use crate::working_copy::WorkingCopy;
// Clone test: record two changes on one pristine, read them back from
// the channel log after commit, then apply them one by one to a fresh
// pristine and check the cloned working copy matches the final state.
#[test]
fn clone_simple() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
let contents2 = b"a\nb\n\nc\nd\nx\nf\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut recorded_changes = Vec::new();
let mut txn = env.mut_txn_begin();
{
let mut channel = txn.open_or_create_channel("main").unwrap();
txn.add_file("file")?;
recorded_changes.push(record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap());
txn.debug_to_file(&channel, "debug0").unwrap();
repo.write_file("file", |w| {
w.write_all(contents2).unwrap();
Ok(())
})
.unwrap();
recorded_changes.push(record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap());
txn.debug_to_file(&channel, "debug1").unwrap();
}
txn.commit().unwrap();
// Re-read the log from a fresh read-only txn; it must list exactly
// the changes recorded above, in order.
let mut channel_changes = Vec::new();
{
let txn = env.txn_begin()?;
for channel in txn.iter_channels("") {
for (_, (i, _)) in txn.log(&channel.borrow(), 0) {
channel_changes.push(i)
}
}
}
info!("{:?}", channel_changes);
assert_eq!(channel_changes, recorded_changes);
// "Clone": apply each change to a second, empty pristine, outputting
// the working copy after each application.
let mut repo2 = working_copy::memory::Memory::new();
let env2 = pristine::sanakirja::Pristine::new_anon()?;
let mut txn2 = env2.mut_txn_begin();
{
let mut channel = txn2.open_or_create_channel("main2").unwrap();
for h in channel_changes.iter() {
info!("applying {:?}", h);
apply::apply_change(&changes, &mut txn2, &mut channel, *h).unwrap();
txn2.debug_to_file(&channel, "debug2").unwrap();
output::output_repository_no_pending(
&mut repo2,
&changes,
&mut txn2,
&mut channel,
"",
true,
)
.unwrap();
}
// The clone must contain exactly the final file contents.
assert_eq!(repo2.list_files(), vec!["file".to_string()]);
let mut file = Vec::new();
repo2.read_file("file", &mut file).unwrap();
assert_eq!(file, contents2);
}
txn2.commit().unwrap();
Ok(())
}
// Partial-clone test: record three files, then a second change touching
// two of them but recorded with prefix "a/b/c/d". A clone outputs only
// prefix "e/f" first (checking that just that subtree materialises),
// then applies the prefixed change and outputs everything.
#[test]
fn clone_prefixes() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("a/b/c/d", contents.to_vec());
repo.add_file("e/f/g/h", contents.to_vec());
repo.add_file("i/j/k/l", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let h = {
let mut channel = txn.open_or_create_channel("main").unwrap();
txn.add_file("a/b/c/d")?;
txn.add_file("e/f/g/h")?;
txn.add_file("i/j/k/l")?;
record_all(&mut repo, &changes, &mut txn, &mut channel, "")?
};
// Second change: both files are edited on disk, but recording is
// restricted to the "a/b/c/d" prefix.
let h2 = {
let mut channel = txn.open_or_create_channel("main").unwrap();
repo.write_file("a/b/c/d", |w| {
w.write_all(b"edits\n")?;
Ok(())
})?;
repo.write_file("e/f/g/h", |w| {
w.write_all(b"edits\n")?;
Ok(())
})?;
record_all(&mut repo, &changes, &mut txn, &mut channel, "a/b/c/d")?
};
txn.commit().unwrap();
// Cloning
debug!("Cloning");
let mut repo2 = working_copy::memory::Memory::new();
let env2 = pristine::sanakirja::Pristine::new_anon()?;
let mut txn2 = env2.mut_txn_begin();
{
let mut channel = txn2.open_or_create_channel("main2").unwrap();
apply::apply_change(&changes, &mut txn2, &mut channel, h).unwrap();
// Output only the "e/f" prefix: exactly that subtree must exist.
output::output_repository_no_pending(
&mut repo2,
&changes,
&mut txn2,
&mut channel,
"e/f",
true,
)?;
assert_eq!(
repo2.list_files(),
["e", "e/f", "e/f/g", "e/f/g/h"]
.iter()
.map(|x| x.to_string())
.collect::<Vec<_>>()
);
apply::apply_change(&changes, &mut txn2, &mut channel, h2).unwrap();
output::output_repository_no_pending(
&mut repo2,
&changes,
&mut txn2,
&mut channel,
"",
true,
)?;
// a/b/c/d carries the prefixed edit; e/f/g/h does not (its edit
// was outside the recorded prefix).
let mut buf = Vec::new();
repo2.read_file("a/b/c/d", &mut buf)?;
assert_eq!(buf, b"edits\n");
buf.clear();
repo2.read_file("e/f/g/h", &mut buf)?;
assert_eq!(buf, contents);
}
txn2.commit().unwrap();
// Re-opening the channel after commit must still work.
let mut txn2 = env2.mut_txn_begin();
txn2.open_or_create_channel("main2").unwrap();
Ok(())
}
use crate::change::*;
use crate::changestore::*;
use crate::pristine::*;
use crate::record::*;
use crate::working_copy::*;
use crate::*;
fn hash_mismatch(change: &Change) -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
use crate::change::*;
let mut buf = tempfile::NamedTempFile::new()?;
let mut h = change.serialize(&mut buf)?;
match h {
crate::pristine::Hash::Blake3(ref mut h) => h[0] = h[0].wrapping_add(1),
_ => unreachable!(),
}
match Change::deserialize(buf.path().to_str().unwrap(), Some(&h)) {
Err(e) => {
let e = e.downcast();
if let Ok(Error::ChangeHashMismatch { .. }) = e {
} else {
unreachable!()
}
}
_ => unreachable!(),
}
let f = ChangeFile::open(h, buf.path().to_str().unwrap())?;
assert_eq!(f.hashed(), &change.hashed);
Ok(())
}
// Driver for `hash_mismatch`: records one change via the low-level
// Builder/record API (rather than the `record_all` helper), applies it,
// then feeds it to `hash_mismatch` above.
#[test]
fn hash_mism() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
let mut repo = working_copy::memory::Memory::new();
let store = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
repo.add_file("file2", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
txn.add_file("file")?;
txn.add_file("file2")?;
// Record manually with an explicit diff algorithm (Myers).
let mut state = Builder::new();
state
.record(
&mut txn,
Algorithm::Myers,
&mut channel,
&mut repo,
&store,
"",
)
.unwrap();
let rec = state.finish();
// Globalize the recorded actions into a standalone change.
let changes: Vec<_> = rec
.actions
.into_iter()
.map(|rec| rec.globalize(&txn))
.collect();
info!("changes = {:?}", changes);
let change0 = crate::change::Change::make_change(
&txn,
&channel,
changes,
rec.contents,
crate::change::ChangeHeader {
message: "test".to_string(),
authors: vec![],
description: None,
timestamp: chrono::Utc::now(),
},
Vec::new(),
);
let hash0 = store.save_change(&change0)?;
apply::apply_local_change(&mut txn, &mut channel, &change0, hash0, &rec.updatables)?;
// The actual assertion lives in hash_mismatch.
hash_mismatch(&change0)?;
txn.debug_to_file(&channel, "debug")?;
Ok(())
}
/// Test helper: record every pending modification under `prefix` in
/// `repo` as a single change with a fixed "test" header, save it to
/// `store`, apply it to `channel`, and return the change's hash
/// together with the change itself.
///
/// Note: the previous signature declared an unused `'env` lifetime
/// parameter; it has been removed (callers use inference, so this is
/// backward-compatible).
fn record_all<T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
repo: &mut R,
store: &P,
txn: &mut T,
channel: &mut ChannelRef<T>,
prefix: &str,
) -> Result<(Hash, Change), anyhow::Error> {
let mut state = Builder::new();
state.record(txn, Algorithm::default(), channel, repo, store, prefix)?;
let rec = state.finish();
// Globalize the recorded actions so they can live in a change.
let changes = rec
.actions
.into_iter()
.map(|rec| rec.globalize(txn))
.collect();
let change0 = crate::change::Change::make_change(
txn,
&channel,
changes,
rec.contents,
crate::change::ChangeHeader {
message: "test".to_string(),
authors: vec![],
description: None,
// Beware of changing the following line: two changes
// doing the same thing will be equal. Sometimes we don't
// want that, as in tests::unrecord::unrecord_double.
timestamp: chrono::Utc::now(),
},
Vec::new(),
);
let hash = store.save_change(&change0)?;
apply::apply_local_change(txn, channel, &change0, hash, &rec.updatables)?;
Ok((hash, change0))
}
// Round-trip test for the textual change format (feature
// "text-changes"): every recorded change — file add, edit, deletion,
// rename, and a name-conflict resolution — must survive
// write-to-text / read-from-text unchanged (checked by `text_test`).
#[cfg(feature = "text-changes")]
#[test]
fn text() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let contents = b"a\nb\nc\nd\ne\nf\n";
let mut repo = working_copy::memory::Memory::new();
let store = changestore::memory::Memory::new();
repo.add_file("file", contents.to_vec());
repo.add_file("file2", contents.to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
txn.add_file("file")?;
txn.add_file("file2")?;
// h0: initial add of both files.
let (h0, change0) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
text_test(&store, &change0, h0);
// h1: in-place edit.
repo.write_file("file", |w| {
write!(w, "a\nx\nc\ne\ny\nf\n")?;
Ok(())
})?;
let (h1, change1) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
text_test(&store, &change1, h1);
// h2: file deletion.
repo.remove_path("file2")?;
let (h2, change2) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
text_test(&store, &change2, h2);
// h3: rename.
repo.rename("file", "file3")?;
txn.move_file("file", "file3")?;
let (h3, change3) = record_all(&mut repo, &store, &mut txn, &mut channel, "")?;
text_test(&store, &change3, h3);
// name conflicts: a second pristine renames "file" differently, then
// applies h3 (the other rename) and records the solution.
let env2 = pristine::sanakirja::Pristine::new_anon()?;
let mut txn2 = env2.mut_txn_begin();
let mut channel2 = txn2.open_or_create_channel("main")?;
let mut repo2 = working_copy::memory::Memory::new();
apply::apply_change(&store, &mut txn2, &mut channel2, h0)?;
apply::apply_change(&store, &mut txn2, &mut channel2, h1)?;
apply::apply_change(&store, &mut txn2, &mut channel2, h2)?;
output::output_repository_no_pending(&mut repo2, &store, &mut txn2, &mut channel2, "", true)?;
repo2.rename("file", "file4")?;
txn2.move_file("file", "file4")?;
let (_, _) = record_all(&mut repo2, &store, &mut txn2, &mut channel2, "")?;
apply::apply_change(&store, &mut txn2, &mut channel2, h3)?;
output::output_repository_no_pending(&mut repo2, &store, &mut txn2, &mut channel2, "", true)?;
let (h, solution) = record_all(&mut repo2, &store, &mut txn2, &mut channel2, "")?;
text_test(&store, &solution, h);
Ok(())
}
/// Round-trip helper: write `change0` in its textual representation,
/// read it back, and assert the parsed change equals the original.
/// Field-by-field `error!` logs before the final `assert_eq!` make the
/// failing component easy to spot.
///
/// Note: the previous signature declared an unused `'env` lifetime
/// parameter; it has been removed (callers use inference, so this is
/// backward-compatible). A stale commented-out line was also dropped.
fn text_test<C: ChangeStore>(c: &C, change0: &Change, h: Hash) {
let mut v = Vec::new();
change0
.write(
c,
Some(h),
|l, _p| format!("{}:{}", l.path, l.line),
true,
&mut v,
)
.unwrap();
// Log the textual form for debugging.
for i in std::str::from_utf8(&v).unwrap().lines() {
debug!("{}", i);
}
let change1 = Change::read(std::io::Cursor::new(&v[..]), &mut HashMap::new()).unwrap();
// Compare component by component first, so a mismatch pinpoints the
// offending field before the final assertion fires.
if change0.header != change1.header {
error!("header: {:#?} != {:#?}", change0.header, change1.header);
}
if change0.dependencies != change1.dependencies {
error!(
"deps: {:#?} != {:#?}",
change0.dependencies, change1.dependencies
);
}
if change0.extra_known != change1.extra_known {
error!(
"extra: {:#?} != {:#?}",
change0.extra_known, change1.extra_known
);
}
if change0.metadata != change1.metadata {
error!("meta: {:#?} != {:#?}", change0.metadata, change1.metadata);
}
if change0.changes != change1.changes {
if change0.changes.len() != change1.changes.len() {
error!("change0.changes = {:#?}", change0.changes);
error!("change1.changes = {:#?}", change1.changes);
} else {
for (a, b) in change0.changes.iter().zip(change1.changes.iter()) {
error!("change0: {:#?}", a);
error!("change1: {:#?}", b);
}
}
}
if change0.contents != change1.contents {
error!("change0.contents = {:?}", change0.contents);
error!("change1.contents = {:?}", change1.contents);
}
assert_eq!(change0, &change1);
}
use super::*;
/// Add a simple file and clone, then walk the resulting tree through
/// the low-level fs iterators (`iter_working_copy`,
/// `iter_graph_children`, `iter_paths`, `iter_basenames`) and check
/// each returns exactly the recorded "dir"/"dir/file" structure.
#[test]
fn add_file_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
txn.add_file("dir/file").unwrap();
let mut channel = txn.open_or_create_channel("main").unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
// Adding the inode another time must fail.
assert!(txn.add_file("dir/file").is_err());
txn.debug_to_file(&channel, "debug").unwrap();
txn.commit().unwrap();
}
// Inspect the committed state from a read-only transaction.
{
let txn = env.txn_begin()?;
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
assert_eq!(files, vec!["dir", "dir/file"]);
let channel_ = txn.load_channel("main").unwrap();
let channel = channel_.borrow();
// Root has exactly one child: the directory "dir".
let mut it = crate::fs::iter_graph_children(&txn, &changes, &channel, Position::ROOT);
let (key, meta, file) = it.next().unwrap();
assert!(meta.is_dir());
assert_eq!(file, "dir");
assert!(it.next().is_none());
// "dir" in turn contains the file.
let mut it = crate::fs::iter_graph_children(&txn, &changes, &channel, key);
let (file_key, _, _) = it.next().unwrap();
crate::fs::iter_paths(&txn, &channel_, file_key, |path| {
debug!("begin path");
for path in path {
debug!("path = {:?}", path);
}
debug!("end path");
true
});
txn.debug_to_file(&channel_, "debug2").unwrap();
// The basename of "dir" resolves back to the root position.
let mut it = crate::fs::iter_basenames(&txn, &changes, &channel, key);
let (key, _, name) = it.next().unwrap();
assert_eq!(key, Position::ROOT);
assert_eq!(name, "dir");
assert!(it.next().is_none());
assert!(txn.is_tracked("dir/file"));
}
Ok(())
}
/// Test that we can add a directory with a file in it: `add_dir` on
/// "dir/file" must track both components, and `working_copy_children`
/// must list "dir" under the root and "file" under "dir".
#[test]
fn add_dir_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
txn.add_dir("dir/file")?;
assert!(txn.is_tracked("dir"));
assert!(txn.is_tracked("dir/file"));
// The root's only child is the "dir" directory.
let (name, inode) = crate::fs::working_copy_children(&txn, Inode::ROOT)
.next()
.unwrap();
assert_eq!(name.as_str(), "dir");
assert!(txn.is_directory(inode));
debug!("name = {:?}", inode);
txn.debug_tree("debug_tree")?;
// "dir" contains exactly one entry, "file".
let mut it = crate::fs::working_copy_children(&txn, inode);
let (name, _) = it.next().unwrap();
assert_eq!(name.as_str(), "file");
assert!(it.next().is_none());
Ok(())
}
/// Test that we can delete a file.
#[test]
fn del_file_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
txn.add_file("dir/file")?;
let mut channel = txn.open_or_create_channel("main")?;
repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
txn.debug_to_file(&channel, "debug0").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, file)| file)
.collect();
assert_eq!(files, vec!["dir", "dir/file"]);
// Removing "dir" untracks the directory and its contents.
repo.remove_path("dir/file")?;
txn.remove_file("dir")?;
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
debug!("files = {:?}", files);
assert!(files.is_empty());
// Recording the deletion must keep the working copy empty.
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
txn.debug_to_file(&channel, "debug").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
debug!("files = {:?}", files);
assert!(files.is_empty());
// Test deletions without recording.
txn.add_file("dir2/file")?;
txn.remove_file("dir2")?;
assert!(crate::fs::iter_working_copy(&txn, Inode::ROOT).all(|(_, file)| file != "dir2"));
// Removing an already-removed path must fail.
assert!(txn.remove_file("dir2").is_err());
txn.commit()?;
}
{
// The empty tracked set must persist across the commit.
let txn = env.txn_begin()?;
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
debug!("files = {:?}", files);
assert!(files.is_empty());
}
Ok(())
}
/// Test that we can delete the end of a file.
#[test]
fn del_eof_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
txn.add_file("dir/file")?;
let mut channel = txn.open_or_create_channel("main")?;
repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
txn.debug_to_file(&channel, "debug").unwrap();
// Truncate the file to its first three lines, then re-record.
repo.write_file("dir/file", |w| {
w.write_all(b"a\nb\nc\n")?;
Ok(())
})?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
// Recording + outputting must leave the truncated contents intact.
let mut file = Vec::new();
repo.read_file("dir/file", &mut file).unwrap();
assert_eq!(std::str::from_utf8(&file), Ok("a\nb\nc\n"));
txn.debug_to_file(&channel, "debug").unwrap();
txn.commit()?;
Ok(())
}
/// Just delete a few lines of a file.
#[test]
fn del_nonzombie_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
txn.add_file("dir/file")?;
let mut channel = txn.open_or_create_channel("main")?;
repo.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
// Delete line "d", record.
repo.write_file("dir/file", |w| {
w.write_all(b"a\nb\nc\ne\nf\n")?;
Ok(())
})?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
// Delete line "e", record.
repo.write_file("dir/file", |w| {
w.write_all(b"a\nb\nc\nf\n")?;
Ok(())
})?;
txn.debug_to_file(&channel, "debug0").unwrap();
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
txn.debug_to_file(&channel, "debug1").unwrap();
// Delete line "f", record, and check the surviving contents.
repo.write_file("dir/file", |w| {
w.write_all(b"a\nb\nc\n")?;
Ok(())
})?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
let mut file = Vec::new();
repo.read_file("dir/file", &mut file).unwrap();
assert_eq!(std::str::from_utf8(&file), Ok("a\nb\nc\n"));
txn.debug_to_file(&channel, "debug2").unwrap();
txn.commit()?;
Ok(())
}
/// Are permissions properly recorded?
#[test]
fn permissions_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("file")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
// alice0: initial file; alice1: only a chmod to 0o755.
let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug0").unwrap();
repo_alice.set_permissions("file", 0o755)?;
let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug1").unwrap();
let mut repo_bob = working_copy::memory::Memory::new();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel = txn_bob.open_or_create_channel("main")?;
// After alice0, Bob's output still has the default 0o644.
apply::apply_change(&changes, &mut txn_bob, &mut channel, alice0)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
txn_bob.debug_to_file(&channel, "debug_bob1").unwrap();
let bob_perm = repo_bob.file_metadata("file")?;
assert_eq!(bob_perm.0, 0o644);
// After alice1, Bob's output picks up the 0o755.
apply::apply_change(&changes, &mut txn_bob, &mut channel, alice1)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
txn_bob.debug_to_file(&channel, "debug_bob2").unwrap();
let bob_perm = repo_bob.file_metadata("file")?;
assert_eq!(bob_perm.0, 0o755);
let alice_perm = repo_alice.file_metadata("file")?;
assert_eq!(alice_perm.0, 0o755);
Ok(())
}
/// Move a file to a directory, then delete the file and clone the whole thing.
#[test]
fn move_file_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("file")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice0 = {:?}", alice0);
txn_alice.debug_to_file(&channel, "debug0").unwrap();
// alice1: move "file" into "dir" as "dir/file2".
txn_alice.add_dir("dir")?;
txn_alice.move_file("file", "dir/file2")?;
repo_alice.add_dir("dir");
repo_alice.rename("file", "dir/file2")?;
txn_alice.debug_tree("debug_tree")?;
let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice1 = {:?}", alice1);
txn_alice.debug_to_file(&channel, "debug1").unwrap();
txn_alice.debug_tree("debug_tree")?;
txn_alice.debug_inodes();
debug!("{:?}", repo_alice);
// alice2: delete the moved file.
repo_alice.remove_path("dir/file2")?;
debug!("{:?}", repo_alice);
let alice2 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug2").unwrap();
txn_alice.commit()?;
// Bob replays alice0, alice1, alice2 one by one and checks his file
// list after each output.
let mut repo_bob = working_copy::memory::Memory::new();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel = txn_bob.open_or_create_channel("main")?;
apply::apply_change(&changes, &mut txn_bob, &mut channel, alice0)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
assert_eq!(repo_bob.list_files(), &["file"]);
apply::apply_change(&changes, &mut txn_bob, &mut channel, alice1)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
let mut files = repo_bob.list_files();
files.sort();
assert_eq!(files, &["dir", "dir/file2"]);
apply::apply_change(&changes, &mut txn_bob, &mut channel, alice2)?;
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
assert_eq!(repo_bob.list_files(), &["dir"]);
Ok(())
}
/// Overwrite a file with a move.
#[test]
fn move_file_existing_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
repo_alice.add_file("file2", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("file")?;
txn_alice.add_file("file2")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
// Move "file" onto the already-existing "file2"; after recording,
// only "file2" must remain.
txn_alice.move_file("file", "file2")?;
repo_alice.rename("file", "file2")?;
record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug1").unwrap();
let mut files = repo_alice.list_files();
files.sort();
assert_eq!(files, &["file2"]);
Ok(())
}
// Variant of `move_back_test_` where Bob records his resolution
// before re-outputting his repository.
#[test]
fn move_back_delete_test() -> Result<(), anyhow::Error> {
move_back_test_(true)
}
// Variant of `move_back_test_` where Bob outputs first, then records.
#[test]
fn move_back_test() -> Result<(), anyhow::Error> {
move_back_test_(false)
}
fn move_back_test_(resolve_by_deleting: bool) -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("a", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("a")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
// Alice moves a -> b
txn_alice.move_file("a", "b")?;
repo_alice.rename("a", "b")?;
let alice2 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug1").unwrap();
// Alice moves b back -> a
txn_alice.move_file("b", "a")?;
repo_alice.rename("b", "a")?;
let alice3 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
txn_alice.debug_to_file(&channel, "debug2").unwrap();
// Bob deletes in parallel to the move + moveback
let mut repo_bob = working_copy::memory::Memory::new();
let env_bob = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_bob = env_bob.mut_txn_begin();
let mut channel_bob = txn_bob.open_or_create_channel("main")?;
txn_bob
.apply_change(&changes, &mut channel_bob, alice1)
.unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
repo_bob.remove_path("a")?;
let bob1 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
txn_bob.debug_to_file(&channel_bob, "debug_bob1").unwrap();
txn_bob
.apply_change(&changes, &mut channel_bob, alice2)
.unwrap();
txn_bob.debug_to_file(&channel_bob, "debug_bob2").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
debug!("APPLYING {:?}", alice3);
txn_bob
.apply_change(&changes, &mut channel_bob, alice3)
.unwrap();
txn_bob.debug_to_file(&channel_bob, "debug_bob3").unwrap();
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel,
"",
true,
)?;
if resolve_by_deleting {
debug!("Bob records a solution");
let bob2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
txn_bob.debug_to_file(&channel_bob, "debug_bob4").unwrap();
// Alice applies Bob's patch.
txn_alice
.apply_change(&changes, &mut channel, bob1)
.unwrap();
txn_alice.debug_to_file(&channel, "debug_alice2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
debug!("conflicts = {:?}", conflicts);
assert!(!conflicts.is_empty());
// Alice applies Bob's resolution
txn_alice
.apply_change(&changes, &mut channel, bob2)
.unwrap();
txn_alice.debug_to_file(&channel, "debug_alice3").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
debug!("conflicts = {:?}", conflicts);
assert!(conflicts.is_empty());
// Testing Bob's tree by outputting
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
txn_bob.debug_to_file(&channel_bob, "debug_bob4").unwrap();
debug!("conflicts = {:?}", conflicts);
assert!(conflicts.is_empty());
} else {
output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("Bob records a solution");
let bob2 = record_all(&mut repo_bob, &changes, &mut txn_bob, &mut channel_bob, "")?;
// Alice applies Bob's patch.
txn_alice
.apply_change(&changes, &mut channel, bob1)
.unwrap();
txn_alice.debug_to_file(&channel, "debug_alice2").unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
debug!("conflicts = {:?}", conflicts);
assert_eq!(conflicts.len(), 1);
match conflicts[0] {
Conflict::ZombieFile { ref path } => assert_eq!(path, "a"),
ref c => panic!("unexpected conflict {:#?}", c),
}
// Alice applies Bob's resolution
txn_alice
.apply_change(&changes, &mut channel, bob2)
.unwrap();
let conflicts = output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
debug!("conflicts = {:?}", conflicts);
assert!(conflicts.is_empty());
// Testing Bob's tree by outputting
let conflicts = output::output_repository_no_pending(
&mut repo_bob,
&changes,
&mut txn_bob,
&mut channel_bob,
"",
true,
)?;
debug!("conflicts = {:?}", conflicts);
assert!(conflicts.is_empty());
}
Ok(())
}
// Move a file into a directory, and delete the former parent in the same change.
#[test]
fn move_delete_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo_alice = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo_alice.add_file("dir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
repo_alice.add_file("dir/file2", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env_alice = pristine::sanakirja::Pristine::new_anon()?;
let mut txn_alice = env_alice.mut_txn_begin();
txn_alice.add_file("dir/file")?;
txn_alice.add_file("dir/file2")?;
let mut channel = txn_alice.open_or_create_channel("main")?;
let alice0 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice0 = {:?}", alice0);
txn_alice.debug_to_file(&channel, "debug0").unwrap();
// alice1: move both files from "dir" to "dir2" and delete "dir".
repo_alice.add_dir("dir2");
repo_alice.rename("dir/file", "dir2/file")?;
repo_alice.rename("dir/file2", "dir2/file2")?;
repo_alice.remove_path("dir")?;
txn_alice.move_file("dir/file", "dir2/file")?;
txn_alice.move_file("dir/file2", "dir2/file2")?;
let alice1 = record_all(&mut repo_alice, &changes, &mut txn_alice, &mut channel, "")?;
debug!("alice1 = {:?}", alice1);
txn_alice.debug_to_file(&channel, "debug1").unwrap();
output::output_repository_no_pending(
&mut repo_alice,
&changes,
&mut txn_alice,
&mut channel,
"",
true,
)?;
txn_alice.debug_to_file(&channel, "debug2").unwrap();
// Move everything back (best-effort, failures ignored), delete
// "dir2", and record the prefixes "dir" and "dir2" into a single
// change built by hand with `Builder`.
repo_alice.rename("dir2/file", "dir/file").unwrap_or(());
repo_alice.rename("dir2/file2", "dir/file2").unwrap_or(());
txn_alice.move_file("dir2/file", "dir/file").unwrap_or(());
txn_alice.move_file("dir2/file2", "dir/file2").unwrap_or(());
repo_alice.remove_path("dir2")?;
let mut state = Builder::new();
debug!("recording in dir");
state.record(
&mut txn_alice,
Algorithm::default(),
&mut channel,
&mut repo_alice,
&changes,
"dir",
)?;
debug!("recording in dir2");
state.record(
&mut txn_alice,
Algorithm::default(),
&mut channel,
&mut repo_alice,
&changes,
"dir2",
)?;
let rec = state.finish();
let changes_ = rec
.actions
.into_iter()
.map(|rec| rec.globalize(&txn_alice))
.collect();
let alice2 = crate::change::Change::make_change(
&txn_alice,
&channel,
changes_,
rec.contents,
crate::change::ChangeHeader {
message: "test".to_string(),
authors: vec![],
description: None,
timestamp: Utc::now(),
},
Vec::new(),
);
let h_alice2 = changes.save_change(&alice2)?;
apply::apply_local_change(
&mut txn_alice,
&mut channel,
&alice2,
h_alice2,
&rec.updatables,
)?;
debug!("done {:?}", h_alice2);
txn_alice.debug_to_file(&channel, "debug3").unwrap();
// The resulting graph must pass the liveness check.
let (alive, reachable) = txn_alice.check_alive(&channel);
if !alive.is_empty() {
panic!("alive: {:?}", alive);
}
if !reachable.is_empty() {
panic!("reachable: {:?}", reachable);
}
let mut files = repo_alice.list_files();
files.sort();
assert_eq!(files, &["dir", "dir/file", "dir/file2"]);
Ok(())
}
// A path that was recorded as a file is replaced by a directory of the
// same name containing a file; recording the replacement must succeed.
#[test]
fn file_becomes_dir_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
repo.add_file("filedir", b"a\nb\nc\nd\ne\nf\n".to_vec());
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
txn.add_file("filedir").unwrap();
let mut channel = txn.open_or_create_channel("main").unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
// Replace the file "filedir" by a directory with the same name.
repo.remove_path("filedir").unwrap();
repo.add_file("filedir/file", b"a\nb\nc\nd\ne\nf\n".to_vec());
txn.add_file("filedir/file").unwrap();
record_all(&mut repo, &changes, &mut txn, &mut channel, "").unwrap();
txn.debug_to_file(&channel, "debug").unwrap();
Ok(())
}
// A tracked path that never existed in the working copy is treated as
// deleted by recording: the tracked set ends up empty.
#[test]
fn record_deleted_test() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
// Track "dir/file" without ever creating it in `repo`.
txn.add_file("dir/file")?;
let mut channel = txn.open_or_create_channel("main")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
txn.debug_to_file(&channel, "debug").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
assert!(files.is_empty());
}
Ok(())
}
// Recording an empty repository with the empty prefix succeeds and
// leaves no tracked files.
#[test]
fn record_prefix() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
{
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
txn.debug_to_file(&channel, "debug").unwrap();
let files: Vec<_> = crate::fs::iter_working_copy(&txn, Inode::ROOT)
.map(|(_, name)| name)
.collect();
assert!(files.is_empty());
}
Ok(())
}
// Recording with a prefix ("dir") that is not tracked must fail.
#[test]
fn record_not_in_repo() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
assert!(record_all_output(&mut repo, &changes, &mut txn, &mut channel, "dir").is_err());
Ok(())
}
// Recording twice with no edit in between must succeed both times.
#[test]
fn record_not_modified() -> Result<(), anyhow::Error> {
env_logger::try_init().unwrap_or(());
let mut repo = working_copy::memory::Memory::new();
let changes = changestore::memory::Memory::new();
let env = pristine::sanakirja::Pristine::new_anon()?;
let mut txn = env.mut_txn_begin();
let mut channel = txn.open_or_create_channel("main")?;
repo.add_file("file", b"a\nb\nc\nd\ne\nf\n".to_vec());
txn.add_file("file")?;
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
// Presumably sleeps so second-granularity timestamps differ between
// the two records — TODO confirm against record's change detection.
std::thread::sleep(std::time::Duration::from_secs(1));
record_all_output(&mut repo, &changes, &mut txn, &mut channel, "")?;
Ok(())
}
// org id rP8Kh1k96knLJkWk+H2Hc2r6JSDxQTt2P4qm38mX8mM=
/// Maximum number of bytes a `SmallString` can hold.
pub const MAX_LENGTH: usize = 255;
/// A string of length at most 255, with a more compact on-disk
/// encoding.
#[repr(packed)]
pub struct SmallString {
// Number of valid bytes at the start of `str`.
pub len: u8,
pub str: [u8; MAX_LENGTH],
}
/// A borrowed version of [`SmallString`].
#[derive(Clone, Copy)]
pub struct SmallStr<'a> {
// Points at the length byte of a `SmallString`-shaped buffer
// (see `SmallString::as_small_str`); string bytes follow it.
pub p: *const u8,
pub marker: std::marker::PhantomData<&'a ()>,
}
impl Clone for SmallString {
    /// Deep copy, going through the borrowed view.
    fn clone(&self) -> Self {
        self.as_small_str().to_owned()
    }
}
impl std::fmt::Debug for SmallString {
    /// Formats the contents like `str`'s `Debug` (quoted).
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Debug::fmt(self.as_str(), fmt)
    }
}
impl<'a> PartialEq for SmallStr<'a> {
    /// Equality is by string contents.
    fn eq(&self, x: &SmallStr) -> bool {
        self.as_str() == x.as_str()
    }
}
// Equality is by contents, consistent across owned, cloned and
// borrowed forms.
#[test]
fn eq() {
let s0 = SmallString::from_str("blabla");
let s1 = SmallString::from_str("blabla");
assert_eq!(s0, s1);
assert_eq!(s0, s0);
assert_eq!(s1, s1);
assert_eq!(s0.clone(), s1);
assert_eq!(s0.as_small_str(), s1.as_small_str());
assert_eq!(s0.as_small_str(), s0.as_small_str());
assert_eq!(s1.as_small_str(), s1.as_small_str());
}
// Debug formatting quotes the contents, like `str`'s Debug.
#[test]
fn debug() {
let s = SmallString::from_str("blabla");
assert_eq!(format!("{:?}", s), "\"blabla\"");
assert_eq!(format!("{:?}", s.as_small_str()), "\"blabla\"");
}
impl<'a> Eq for SmallStr<'a> {}
// Equality delegates to the `&str` view, so `Eq` and `Hash` below stay
// mutually consistent.
impl PartialEq for SmallString {
fn eq(&self, x: &SmallString) -> bool {
self.as_str().eq(x.as_str())
}
}
impl Eq for SmallString {}
/*
impl<'a> std::hash::Hash for SmallStr<'a> {
fn hash<H: std::hash::Hasher>(&self, x: &mut H) {
self.as_str().hash(x)
}
}
*/
// Hashes exactly like the underlying `&str`.
impl std::hash::Hash for SmallString {
fn hash<H: std::hash::Hasher>(&self, x: &mut H) {
self.as_str().hash(x)
}
}
impl<'a> PartialOrd for SmallStr<'a> {
    /// Canonical delegation to [`Ord`], so `partial_cmp` and `cmp` can
    /// never disagree (clippy: `non_canonical_partial_ord_impl`).
    /// Behavior is unchanged: `str::partial_cmp` is always `Some(cmp)`.
    fn partial_cmp(&self, x: &SmallStr) -> Option<std::cmp::Ordering> {
        Some(self.cmp(x))
    }
}
impl<'a> Ord for SmallStr<'a> {
    /// Lexicographic byte-wise ordering of the string contents.
    fn cmp(&self, x: &SmallStr) -> std::cmp::Ordering {
        self.as_str().cmp(x.as_str())
    }
}
impl PartialOrd for SmallString {
    /// Canonical delegation to [`Ord`] (see `SmallStr` above).
    fn partial_cmp(&self, x: &SmallString) -> Option<std::cmp::Ordering> {
        Some(self.cmp(x))
    }
}
impl Ord for SmallString {
    /// Lexicographic byte-wise ordering of the string contents.
    fn cmp(&self, x: &SmallString) -> std::cmp::Ordering {
        self.as_str().cmp(x.as_str())
    }
}
// Ordering follows string contents, for both borrowed and owned forms.
#[test]
fn ord() {
let s0 = SmallString::from_str("1234");
let s1 = SmallString::from_str("5678");
assert!(s0.as_small_str() < s1.as_small_str());
assert!(s0 < s1);
assert_eq!(s0.cmp(&s1), std::cmp::Ordering::Less);
}
impl<'a> std::fmt::Debug for SmallStr<'a> {
    /// Delegates to `str`'s quoted `Debug` formatting.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        std::fmt::Debug::fmt(self.as_str(), fmt)
    }
}
impl SmallString {
    /// Creates an empty `SmallString`.
    pub fn new() -> Self {
        SmallString {
            len: 0,
            str: [0; MAX_LENGTH],
        }
    }
    /// Returns the length of the string, in bytes.
    ///
    /// ```ignore
    /// use libpijul::small_string::*;
    /// let mut s = SmallString::from_str("blah!");
    /// assert_eq!(s.len(), s.as_str().len());
    /// ```
    pub fn len(&self) -> usize {
        self.len as usize
    }
    /// ```ignore
    /// use libpijul::small_string::*;
    /// let mut s = SmallString::from_str("blah");
    /// s.clear();
    /// assert_eq!(s.as_str(), "");
    /// assert!(s.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Creates a `SmallString` holding a copy of `s`.
    ///
    /// Panics if `s` is longer than [`MAX_LENGTH`] bytes.
    pub fn from_str(s: &str) -> Self {
        let mut b = SmallString {
            len: s.len() as u8,
            str: [0; MAX_LENGTH],
        };
        b.clone_from_str(s);
        b
    }
    /// Replaces the contents of `self` with a copy of `s`.
    ///
    /// Panics if `s` is longer than [`MAX_LENGTH`] bytes. Previously
    /// this limit was only enforced indirectly: `s.len() as u8`
    /// silently truncated the length byte before the slice copy
    /// panicked with an unrelated out-of-bounds message.
    pub fn clone_from_str(&mut self, s: &str) {
        assert!(s.len() <= MAX_LENGTH, "string too long for SmallString");
        self.len = s.len() as u8;
        (&mut self.str[..s.len()]).copy_from_slice(s.as_bytes());
    }
    /// ```ignore
    /// use libpijul::small_string::*;
    /// let mut s = SmallString::from_str("blah");
    /// s.clear();
    /// assert!(s.is_empty());
    /// ```
    pub fn clear(&mut self) {
        self.len = 0;
    }
    /// Appends `s` to `self`.
    ///
    /// Panics if the result would exceed [`MAX_LENGTH`] bytes (the
    /// constant replaces the former magic `0xff`; both are 255).
    pub fn push_str(&mut self, s: &str) {
        let l = self.len as usize;
        assert!(l + s.len() <= MAX_LENGTH);
        (&mut self.str[l..l + s.len()]).copy_from_slice(s.as_bytes());
        self.len += s.len() as u8;
    }
    /// Borrows `self` as a [`SmallStr`] whose pointer targets the
    /// length byte (the layout `SmallStr` expects).
    ///
    /// ```ignore
    /// use libpijul::small_string::*;
    /// let mut s = SmallString::from_str("blah");
    /// let s_ = s.as_small_str();
    /// let s2_ = s_;
    /// let s3_ = s_.clone();
    /// assert_eq!(s_, s2_);
    /// assert_eq!(s_, s3_);
    /// ```
    pub fn as_small_str(&self) -> SmallStr {
        SmallStr {
            p: self as *const SmallString as *const u8,
            marker: std::marker::PhantomData,
        }
    }
    /// The string contents as `&str`.
    pub fn as_str(&self) -> &str {
        self.as_small_str().as_str()
    }
    /// The string contents as raw bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.as_small_str().as_bytes()
    }
}
impl SmallStr<'static> {
// The empty string: `p` points at a single zero byte, which serves
// as the length; const promotion gives the `[0]` array a 'static
// lifetime.
pub const EMPTY: SmallStr<'static> = SmallStr {
p: [0].as_ptr(),
marker: std::marker::PhantomData,
};
}
impl<'a> SmallStr<'a> {
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("");
/// assert!(s.as_small_str().is_empty());
/// s.push_str("blah");
/// assert!(!s.as_small_str().is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// ```ignore
/// use libpijul::small_string::*;
/// let mut s = SmallString::from_str("blah");
/// assert_eq!(s.as_small_str().len(), "blah".len())
/// ```
pub fn len(&self) -> usize {
// SAFETY: `p` points at the length byte of a `SmallString`-shaped
// buffer (see `SmallString::as_small_str`), so reading one byte
// through it is valid for the lifetime 'a.
unsafe { (*self.p) as usize }
}
pub fn as_str(&self) -> &'a str {
// SAFETY: the bytes were copied from a `&str` by
// `SmallString::clone_from_str`/`push_str`, so they are valid UTF-8.
unsafe { std::str::from_utf8_unchecked(self.as_bytes()) }
}
pub fn as_bytes(&self) -> &'a [u8] {
// SAFETY: `*p` is the length, and that many string bytes follow it
// contiguously (the struct is #[repr(packed)]: `len` then `str`).
unsafe { std::slice::from_raw_parts(self.p.offset(1), *self.p as usize) }
}
// Copies the borrowed contents into an owned `SmallString`.
pub fn to_owned(&self) -> SmallString {
SmallString::from_str(self.as_str())
}
}
/// Faster than running doc tests.
// Mirrors the ```ignore doc examples on `SmallString`/`SmallStr`
// above, one scope per example.
#[test]
fn all_doc_tests() {
{
let s = SmallString::from_str("blah!");
assert_eq!(s.len(), s.as_str().len());
}
{
let mut s = SmallString::from_str("blah");
s.clear();
assert_eq!(s.as_str(), "");
assert!(s.is_empty());
}
{
let mut s = SmallString::from_str("blah");
s.clear();
assert!(s.is_empty());
}
{
let s = SmallString::from_str("blah");
let s_ = s.as_small_str();
let s2_ = s_;
let s3_ = s_.clone();
assert_eq!(s_, s2_);
assert_eq!(s_, s3_);
}
{
let mut s = SmallString::from_str("");
assert!(s.as_small_str().is_empty());
s.push_str("blah");
assert!(!s.as_small_str().is_empty());
}
{
let s = SmallString::from_str("blah");
assert_eq!(s.as_small_str().len(), "blah".len())
}
}
/// An internal "unsafe" version of a [`small_string::SmallStr`], used
/// to circumvent the absence of associated type constructors in Rust
/// (else this would be borrow on a table).
#[derive(Clone, Copy)]
#[doc(hidden)]
pub struct UnsafeSmallStr(*const u8);
impl std::fmt::Debug for UnsafeSmallStr {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
// NOTE(review): assumes the raw pointer is still valid when Debug
// is invoked — the caller must keep the backing data alive.
unsafe { self.to_small_str().fmt(fmt) }
}
}
impl UnsafeSmallStr {
/// Erases the lifetime of `u`; the caller becomes responsible for
/// keeping the backing data alive.
pub fn from_small_str(u: SmallStr) -> UnsafeSmallStr {
UnsafeSmallStr(u.p)
}
/// Reconstructs a borrowed `SmallStr` with a caller-chosen lifetime.
///
/// # Safety
/// The pointed-to length byte and string bytes must be alive and
/// unchanged for the whole of `'a`.
pub unsafe fn to_small_str<'a>(&self) -> SmallStr<'a> {
SmallStr {
p: self.0,
marker: std::marker::PhantomData,
}
}
}
// On-page layout: one length byte followed by `len` bytes of string
// data — the same layout `SmallStr` points into.
impl sanakirja::Representable for UnsafeSmallStr {
fn alignment() -> sanakirja::Alignment {
// Byte-aligned: the encoding is raw bytes with no padding.
sanakirja::Alignment::B1
}
fn onpage_size(&self) -> u16 {
unsafe {
// First byte is the length, so the total size is 1 + len.
let len = (*self.0) as u16;
1 + len
}
}
unsafe fn write_value(&self, p: *mut u8) {
// Copies the length byte plus the string bytes verbatim.
std::ptr::copy(self.0, p, self.onpage_size() as usize)
}
unsafe fn read_value(p: *const u8) -> Self {
// The in-page bytes already have the expected layout, so a raw
// pointer into the page is itself a valid `UnsafeSmallStr`.
UnsafeSmallStr(p)
}
unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
// Same ordering as `SmallStr`'s `Ord`: string comparison.
let a = UnsafeSmallStr(self.0).to_small_str();
let b = x.to_small_str();
a.as_str().cmp(b.as_str())
}
type PageOffsets = std::iter::Empty<u64>;
fn page_offsets(&self) -> Self::PageOffsets {
// Inline value: references no other pages.
std::iter::empty()
}
}
//! Record a change from a pristine and a working copy.
use crate::alive::retrieve;
use crate::change::*;
use crate::changestore::ChangeStore;
pub use crate::diff::Algorithm;
use crate::path::{components, Components};
use crate::pristine::*;
use crate::small_string::SmallString;
use crate::working_copy::WorkingCopy;
use crate::{diff, Error};
use std::collections::{HashMap, HashSet};
/// A change in the process of being recorded. This is typically
/// created using `RecordState::new`.
pub struct Builder {
// The change being accumulated (contents, actions, updates).
pub(crate) rec: Recorded,
pub(crate) redundant: Vec<(Vertex<ChangeId>, Edge)>,
// Inodes already visited by `record`, mapped to their graph
// position, so shared subtrees are only processed once.
recorded_inodes: HashMap<Inode, Position<Option<ChangeId>>>,
deleted_vertices: HashSet<Position<ChangeId>>,
former_parents: Vec<Parent>,
// Starts `true` (see `Builder::new`).
force_rediff: bool,
}
// A (basename, metadata, parent position) triple describing one former
// parent of a recorded item.
#[derive(Debug)]
struct Parent {
basename: String,
metadata: InodeMetadata,
parent: Position<Option<ChangeId>>,
}
/// The result of recording a change:
pub struct Recorded {
/// The "byte contents" of the change.
pub contents: Vec<u8>,
/// The current records, to be later converted into change operations.
pub actions: Vec<Record<Option<ChangeId>, Local>>,
/// The updates that need to be made to the ~tree~ and ~revtree~
/// tables when this change is applied to the local repository.
pub updatables: HashMap<usize, InodeUpdate>,
/// The size of the largest file that was recorded in this change.
pub largest_file: u64,
/// Whether we have recorded binary files.
pub has_binary_files: bool,
}
impl Builder {
    /// Initialise a `Builder` with empty buffers; `force_rediff`
    /// starts out `true`.
    pub fn new() -> Self {
        let rec = Recorded {
            contents: Vec::new(),
            actions: Vec::new(),
            updatables: HashMap::new(),
            largest_file: 0,
            has_binary_files: false,
        };
        Builder {
            rec,
            redundant: Vec::new(),
            recorded_inodes: HashMap::new(),
            deleted_vertices: HashSet::new(),
            former_parents: Vec::new(),
            force_rediff: true,
        }
    }
    /// Finish the recording, consuming the builder and returning the
    /// accumulated [`Recorded`] data.
    pub fn finish(self) -> Recorded {
        self.rec
    }
}
/// An account of the files that have been added, moved or deleted, as
/// returned by record, and used by apply (when applying a change
/// created locally) to update the trees and inodes databases.
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum InodeUpdate {
Add {
/// Inode vertex in the graph.
pos: ChangePosition,
/// `Inode` added by this file addition.
inode: Inode,
},
Deleted {
/// `Inode` of the deleted file.
inode: Inode,
},
}
// One entry of the traversal stack used by `Builder::record`: a path
// in the working copy together with its inode, its parent's inode and
// the parent's position in the graph.
#[derive(Debug, Clone)]
struct RecordItem<'a> {
// Graph position of the parent ("papa").
v_papa: Position<Option<ChangeId>>,
papa: Inode,
inode: Inode,
basename: String,
full_path: String,
metadata: InodeMetadata,
// Remaining path components of the record prefix to descend into.
components: Components<'a>,
}
impl<'a> RecordItem<'a> {
    /// Builds the stack item for the repository root, with `prefix`
    /// split into the components still to traverse.
    fn root(prefix: &'a str) -> Self {
        Self {
            v_papa: Position::OPTION_ROOT,
            papa: Inode::ROOT,
            inode: Inode::ROOT,
            basename: String::new(),
            full_path: String::new(),
            metadata: InodeMetadata::new(0, true),
            components: components(prefix),
        }
    }
}
// Number of leading bytes checked for UTF-8 validity when classifying
// a recorded file as text or binary.
const CHECK_UTF8: usize = 1000;
/// Ignore inodes that are in another channel
fn get_inodes<T: TxnT>(txn: &T, channel: &Channel<T>, inode: Inode) -> Option<Position<ChangeId>> {
    let vertex = txn.get_inodes(inode, None)?;
    // Only keep the vertex if its change is present on this channel.
    if txn
        .get_changeset(&channel.changes, vertex.change, None)
        .is_none()
    {
        return None;
    }
    Some(vertex)
}
impl Builder {
/// Records the differences between `working_copy` and `channel` under
/// `prefix`, accumulating them into `self.rec`.
///
/// Iterative depth-first traversal: each popped item is either already
/// recorded (reuse its position), the root, an inode already present in
/// the channel's graph (diff it), or a new path (record an addition).
pub(crate) fn record<T: MutTxnT, W: WorkingCopy, C: ChangeStore>(
&mut self,
txn: &mut T,
diff_algorithm: diff::Algorithm,
channel: &mut ChannelRef<T>,
working_copy: &mut W,
changes: &C,
prefix: &str,
) -> Result<(), anyhow::Error> {
let now = std::time::Instant::now();
let channel = channel.borrow();
let mut stack = vec![RecordItem::root(prefix)];
while let Some(mut item) = stack.pop() {
debug!("stack.pop() = Some({:?})", item);
// Check for moves and file conflicts.
let vertex = if let Some(vertex) = self.recorded_inodes.get(&item.inode) {
// Already visited through another path: reuse its position.
*vertex
} else if item.inode == Inode::ROOT {
self.recorded_inodes
.insert(Inode::ROOT, Position::OPTION_ROOT);
self.delete_obsolete_children(
txn,
&channel,
working_copy,
&item.full_path,
Position::ROOT,
);
Position::OPTION_ROOT
} else if let Some(vertex) = get_inodes(txn, &channel, item.inode) {
// Inode already in this channel's graph: record its edits.
self.record_existing_file(
txn,
diff_algorithm,
&channel,
working_copy,
changes,
&item,
vertex,
)?;
self.recorded_inodes.insert(item.inode, vertex.to_option());
// NOTE(review): passes `Position::ROOT` here, same as the root
// branch above, rather than `vertex` — confirm this is intended.
self.delete_obsolete_children(
txn,
&channel,
working_copy,
&item.full_path,
Position::ROOT,
);
vertex.to_option()
} else {
match self.add_file(working_copy, item.clone()) {
Ok(Some(vertex)) => {
// Path addition (maybe just a single directory).
self.recorded_inodes.insert(item.inode, vertex);
vertex
}
Ok(None) => continue,
Err(_) => {
// The path could not be read from the working copy:
// stop tracking it (and its descendants).
let parent = txn.get_revtree(item.inode, None).unwrap().to_owned();
info!("recursively deleting {:?} {:?}", parent, item.inode);
crate::fs::rec_delete(txn, parent, item.inode, false)?;
continue;
}
}
};
// Move on to the next step.
self.push_children(
txn,
&channel,
working_copy,
&mut item,
vertex,
&mut stack,
prefix,
)?;
}
crate::TIMERS.lock().unwrap().record += now.elapsed();
Ok(())
}
// org id Xhmtot+omA6Cs8gd3eURvHuvO/QxyZXUuMmXS3KcYvs=
/// Record the addition of a new file or directory described by `item`.
///
/// Pushes the name bytes (2 metadata bytes + basename), an empty
/// "inode" vertex, and (for non-empty files) the file contents into
/// `self.rec.contents`, and pushes a `Record::FileAdd` action.
///
/// Returns `Ok(Some(pos))` with the inode position when `item` is a
/// directory (so children can be attached to it), `Ok(None)` for
/// plain files, and `Err` when the path cannot be read.
fn add_file<W: WorkingCopy>(
    &mut self,
    working_copy: &mut W,
    item: RecordItem,
) -> Result<Option<Position<Option<ChangeId>>>, anyhow::Error> {
    debug!("record_file_addition {:?}", item);
    // org id 0DBdlruBGIG8VfGv40IJofXY3H0uMRRZOXqAd6vDZUk=
    // Name vertex: 2 bytes of metadata followed by the basename.
    let meta = working_copy.file_metadata(&item.full_path)?;
    let name_start = ChangePosition(self.rec.contents.len() as u64);
    meta.write(&mut self.rec.contents).unwrap();
    self.rec.contents.extend(item.basename.as_bytes());
    let name_end = ChangePosition(self.rec.contents.len() as u64);
    // org id 3YhQMj68U/yx+DuSuj48c2C2aIypx8Oo+g8bIX3x9Wk=
    // Empty inode vertex, separated by 0 bytes.
    self.rec.contents.push(0);
    let inode_pos = ChangePosition(self.rec.contents.len() as u64);
    self.rec.contents.push(0);
    let contents = if meta.is_file() {
        let start = ChangePosition(self.rec.contents.len() as u64);
        working_copy.read_file(&item.full_path, &mut self.rec.contents)?;
        let end = ChangePosition(self.rec.contents.len() as u64);
        self.rec.largest_file = self.rec.largest_file.max(end.0 - start.0);
        // Binary heuristic: decode up to CHECK_UTF8 (+4 bytes of slack
        // for a UTF-8 sequence cut at the boundary); the file counts
        // as binary if decoding fails before CHECK_UTF8 bytes.
        self.rec.has_binary_files |= {
            let s = start.0 as usize;
            let e = (end.0 as usize).min(s + CHECK_UTF8 + 4);
            let utf8 = std::str::from_utf8(&self.rec.contents[s..e]);
            debug!("utf8 = {:?}", utf8);
            match utf8 {
                Err(e) => e.valid_up_to() < CHECK_UTF8,
                Ok(_) => false,
            }
        };
        self.rec.contents.push(0);
        if self.rec.contents.len() as u64 > inode_pos.0 + 1 {
            // Non-empty file: add a vertex holding its contents,
            // attached under the inode vertex.
            Some(Atom::NewVertex(NewVertex {
                up_context: vec![Position {
                    change: None,
                    pos: inode_pos,
                }],
                down_context: vec![],
                start,
                end,
                flag: EdgeFlags::empty(),
                inode: Position {
                    change: None,
                    pos: inode_pos,
                },
            }))
        } else {
            None
        }
    } else {
        None
    };
    // org id rGmDCfATnak44N1gQ/6fWM8leRH20nehuPdcnwZH06U=
    self.rec.actions.push(Record::FileAdd {
        // Edge from the parent directory to the new name vertex.
        add_name: Atom::NewVertex(NewVertex {
            up_context: vec![item.v_papa],
            down_context: vec![],
            start: name_start,
            end: name_end,
            flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
            inode: item.v_papa,
        }),
        // Edge from the name vertex to the (empty) inode vertex.
        add_inode: Atom::NewVertex(NewVertex {
            up_context: vec![Position {
                change: None,
                pos: name_end,
            }],
            down_context: vec![],
            start: inode_pos,
            end: inode_pos,
            flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
            inode: item.v_papa,
        }),
        contents,
        path: item.full_path.clone(),
    });
    debug!("{:?}", self.rec.actions.last().unwrap());
    // org id IEO9vkKZ3U+8uhaTOe+3A3ZMCX9FjBgVub1CSotzKkY=
    // Remember to update the inode tables when the change is applied.
    self.rec.updatables.insert(
        self.rec.actions.len(),
        InodeUpdate::Add {
            inode: item.inode,
            pos: inode_pos,
        },
    );
    // org id jPYMsfp8Xwvr+CK+4qpMI3GVICLl/d/DN0IhYY+oXo4=
    if meta.is_dir() {
        Ok(Some(Position {
            change: None,
            pos: inode_pos,
        }))
    } else {
        Ok(None)
    }
}
/// Record changes to a file that already exists in `channel`:
/// moves/renames, metadata changes, deletion, and content diffs.
///
/// `vertex` is the graph position of the file's inode. The former
/// name(s) and parent(s) are read back from the graph and compared
/// with the current `item` to decide what changed.
fn record_existing_file<T: MutTxnT, W: WorkingCopy, C: ChangeStore>(
    &mut self,
    txn: &mut T,
    diff_algorithm: diff::Algorithm,
    channel: &Channel<T>,
    working_copy: &mut W,
    changes: &C,
    item: &RecordItem,
    vertex: Position<ChangeId>,
) -> Result<(), anyhow::Error> {
    debug!(
        "record_existing_file {:?}: {:?} {:?}",
        item.full_path, item.inode, vertex
    );
    // Former parent(s) of vertex
    self.former_parents.clear();
    let f0 = EdgeFlags::FOLDER | EdgeFlags::PARENT;
    let f1 = EdgeFlags::all();
    // BUG FIX: this was initialised to `true`. Nothing in the loop
    // below ever sets it back to `false`, so the "parent has changed"
    // branch further down was taken unconditionally. It must start at
    // `false` and only become `true` when a DELETED name edge is
    // found.
    let mut is_deleted = false;
    for name_ in txn
        .iter_adjacent(&channel, vertex.inode_vertex(), f0, f1)
        .filter(|e| e.flag.contains(EdgeFlags::PARENT))
    {
        debug!("name_ = {:?}", name_);
        if name_.flag.contains(EdgeFlags::DELETED) {
            debug!("is_deleted {:?}: {:?}", item.full_path, name_);
            is_deleted = true;
            break;
        }
        // Decode the former name: 2 metadata bytes + basename.
        let name_dest = txn.find_block_end(&channel, name_.dest).unwrap();
        let mut name = Vec::new();
        changes.get_contents(|p| txn.get_external(p), name_dest, &mut name)?;
        let (metadata, basename) = name.split_at(2);
        let metadata = InodeMetadata::from_basename(metadata);
        let basename = std::str::from_utf8(basename).unwrap().to_string();
        debug!("former basename of {:?}: {:?}", vertex, basename);
        if let Some(v_papa) = txn.iter_adjacent(&channel, name_dest, f0, f1).next() {
            self.former_parents.push(Parent {
                basename,
                metadata,
                parent: v_papa.dest.to_option(),
            })
        }
    }
    debug!(
        "record_existing_file: {:?} {:?} {:?}",
        item, self.former_parents, is_deleted,
    );
    // NOTE(review): if the very first name edge is DELETED, the loop
    // breaks with an empty `former_parents` and this assertion fires —
    // confirm that situation cannot occur here.
    assert!(!self.former_parents.is_empty());
    if let Ok(new_meta) = working_copy.file_metadata(&item.full_path) {
        debug!("new_meta = {:?}", new_meta);
        // Multiple parents (name conflict), or a different
        // name/metadata/parent, or a deleted name: record a move.
        if self.former_parents.len() > 1
            || self.former_parents[0].basename != item.basename
            || self.former_parents[0].metadata != item.metadata
            || self.former_parents[0].parent != item.v_papa
            || is_deleted
        {
            // This parent has changed.
            let new_papa = *self.recorded_inodes.get(&item.papa).unwrap();
            debug!("new_papa = {:?}", new_papa);
            self.record_moved_file(
                changes,
                txn,
                &channel,
                &item,
                vertex,
                new_papa,
                self.former_parents[0].metadata,
            )?
        }
        // Diff file contents, unless the mtime says nothing changed.
        if new_meta.is_file()
            && (self.force_rediff
                || modified_since_last_commit(&channel, working_copy, &item.full_path)?)
        {
            let mut ret = retrieve(txn, &channel, vertex);
            let mut b = Vec::new();
            working_copy.read_file(&item.full_path, &mut b)?;
            debug!("diffing…");
            let len = self.rec.actions.len();
            self.diff(
                changes,
                txn,
                &channel,
                diff_algorithm,
                item.full_path.clone(),
                vertex.to_option(),
                &mut ret,
                &b,
            )?;
            debug!("new actions: {:?}", &self.rec.actions.len() - len);
        }
    } else {
        // The file is gone from the working copy: record a deletion.
        debug!("calling record_deleted_file on {:?}", item.full_path);
        self.record_deleted_file(txn, &channel, working_copy, &item.full_path, vertex)
    }
    Ok(())
}
/// Record deletions for tracked files under `v` whose inodes have
/// disappeared from the tree (i.e. were untracked since the last
/// record).
///
/// Walks children of `v` (name vertices) and their children (inode
/// vertices); any inode vertex with no reverse-tree entry — or no
/// inode at all — is deleted recursively.
fn delete_obsolete_children<'a, T: MutTxnT, W: WorkingCopy>(
    &mut self,
    txn: &T,
    channel: &Channel<T>,
    working_copy: &W,
    full_path: &str,
    v: Position<ChangeId>,
) {
    let f0 = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
    let f1 = f0 | EdgeFlags::PSEUDO;
    debug!("v = {:?}", v);
    for child in txn.iter_adjacent(&channel, v.inode_vertex(), f0, f1) {
        // `child.dest` points at a name vertex; resolve its block.
        let child = txn.find_block(&channel, child.dest).unwrap();
        for grandchild in txn.iter_adjacent(&channel, child, f0, f1) {
            debug!("grandchild {:?}", grandchild);
            let needs_deletion = if let Some(inode) = txn.get_revinodes(grandchild.dest, None) {
                debug!("inode = {:?} {:?}", inode, txn.get_revtree(inode, None));
                // Known inode, but absent from the tree: untracked.
                txn.get_revtree(inode, None).is_none()
            } else {
                // No inode is associated with this position.
                true
            };
            if needs_deletion {
                // delete recursively.
                self.record_deleted_file(
                    txn,
                    &channel,
                    working_copy,
                    full_path,
                    grandchild.dest,
                )
            }
        }
    }
}
/// Push the children of `item` (a directory) onto `stack` for the
/// main `record` loop to visit.
///
/// When `item.components` still yields a component (recording a
/// sub-path of the repository), only the matching child is pushed,
/// and an error is returned if no child matches. Children present in
/// the tree but missing from the working copy are recorded as
/// deletions instead of being pushed.
fn push_children<'a, T: MutTxnT, W: WorkingCopy>(
    &mut self,
    txn: &T,
    channel: &Channel<T>,
    working_copy: &W,
    item: &mut RecordItem<'a>,
    vertex: Position<Option<ChangeId>>,
    stack: &mut Vec<RecordItem<'a>>,
    prefix: &str,
) -> Result<(), anyhow::Error> {
    let comp = item.components.next();
    let full_path = item.full_path.clone();
    let fileid = OwnedPathId {
        parent_inode: item.inode,
        basename: SmallString::new(),
    };
    let mut has_matching_children = false;
    for (fileid_, child_inode) in txn.iter_tree(fileid.clone(), None) {
        // Skip the directory's own entry, and stop as soon as the
        // iterator leaves this parent inode.
        if fileid_.parent_inode < fileid.parent_inode || fileid_.basename.is_empty() {
            continue;
        } else if fileid_.parent_inode > fileid.parent_inode {
            break;
        }
        // Restrict to the requested path component, if any.
        if let Some(comp) = comp {
            if comp != fileid_.basename.as_str() {
                continue;
            }
        }
        has_matching_children = true;
        let basename = fileid_.basename.as_str().to_string();
        let full_path = if full_path.is_empty() {
            basename.clone()
        } else {
            full_path.clone() + "/" + &basename
        };
        debug!("fileid_ {:?} child_inode {:?}", fileid_, child_inode);
        if let Ok(meta) = working_copy.file_metadata(&full_path) {
            stack.push(RecordItem {
                papa: item.inode,
                inode: child_inode,
                v_papa: vertex,
                basename,
                full_path,
                metadata: meta,
                components: item.components.clone(),
            })
        } else if let Some(vertex) = get_inodes(txn, &channel, child_inode) {
            // In the channel but not on disk: record a deletion.
            self.record_deleted_file(txn, &channel, working_copy, &full_path, vertex)
        }
    }
    if comp.is_some() && !has_matching_children {
        // The recorded prefix doesn't exist under this directory.
        return Err((Error::FileNotInRepo {
            path: prefix.to_string(),
        })
        .into());
    }
    Ok(())
}
}
// org id DS+N4kAmqk1FDK+qw7BnysxLa7Ywb9LEvluzB79u6GQ=
/// Heuristic: has `prefix` possibly been modified since the channel
/// was last changed?
///
/// Compares the working-copy mtime (with a 2-second slack for coarse
/// filesystem timestamps) against `channel.last_modified`. When the
/// mtime cannot be read, conservatively returns `true`.
fn modified_since_last_commit<T: TxnT, W: WorkingCopy>(
    channel: &Channel<T>,
    working_copy: &W,
    prefix: &str,
) -> Result<bool, anyhow::Error> {
    let last_modified = match working_copy.modified_time(prefix) {
        Ok(t) => t,
        // Unreadable mtime: assume modified.
        Err(_) => return Ok(true),
    };
    let secs = last_modified
        .duration_since(std::time::UNIX_EPOCH)?
        .as_secs();
    debug!(
        "last_modified = {:?}, channel.last = {:?}",
        secs, channel.last_modified
    );
    Ok(secs + 2 >= channel.last_modified)
}
// org id 5O3+CI2LLrnmA6V30GAG0YsOy5x1t39sAhn6+LHylzs=
impl Builder {
    /// Record that `item` was moved: renamed, re-parented, and/or its
    /// metadata changed (`old_meta` → `item.metadata`).
    ///
    /// Depending on what [`collect_moved_edges`] finds, this pushes a
    /// `FileUndel` (deleted name edges must be resurrected), and then
    /// either a `FileMove` (old names deleted, a new name vertex
    /// added) or a `SolveNameConflict` (duplicate names deleted, no
    /// new name needed).
    fn record_moved_file<T: TxnT, C: ChangeStore>(
        &mut self,
        changes: &C,
        txn: &T,
        channel: &Channel<T>,
        item: &RecordItem,
        vertex: Position<ChangeId>,
        new_papa: Position<Option<ChangeId>>,
        old_meta: InodeMetadata,
    ) -> Result<(), anyhow::Error> {
        debug!("record_moved_file {:?} {:?}", item, old_meta);
        // org id svyyji0bEjJBztjFhnSAfxuRFZoPqdU7oYQxF0xrcAI=
        // Tentatively push the new name (2 metadata bytes + basename)
        // into the contents; truncated below if it ends up unused.
        let name_start = ChangePosition(self.rec.contents.len() as u64);
        item.metadata.write(&mut self.rec.contents)?;
        self.rec.contents.extend(item.basename.as_bytes());
        let name_end = ChangePosition(self.rec.contents.len() as u64);
        self.rec.contents.push(0);
        let name = &self.rec.contents[name_start.0 as usize..name_end.0 as usize];
        // org id oBZg+v5vsRuJuxenoo+uPc3fskCmXq/VrCY3c5p8Kmw=
        let mut moved = collect_moved_edges(
            txn,
            changes,
            channel,
            new_papa,
            vertex,
            item.metadata,
            old_meta,
            name,
        )?;
        // org id B5uLQzVsMdND0cJxfZbH11TZxt5E4FHBpjIrnezZ75E=
        debug!("moved = {:#?}", moved);
        if !moved.resurrect.is_empty() {
            moved.resurrect.extend(moved.alive.into_iter());
            self.rec.actions.push(Record::FileUndel {
                undel: Atom::EdgeMap(EdgeMap {
                    edges: moved.resurrect,
                    inode: item.v_papa,
                }),
                contents: None,
                path: item.full_path.clone(),
            });
        }
        if !moved.edges.is_empty() {
            if moved.need_new_name {
                // Delete the old name edges and add the new name.
                self.rec.actions.push(Record::FileMove {
                    del: Atom::EdgeMap(EdgeMap {
                        edges: moved.edges,
                        inode: item.v_papa,
                    }),
                    add: Atom::NewVertex(NewVertex {
                        up_context: vec![item.v_papa],
                        down_context: vec![vertex.to_option()],
                        start: name_start,
                        end: name_end,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        inode: item.v_papa,
                    }),
                    path: item.full_path.clone(),
                });
            } else {
                // An alive name with the right contents already
                // exists: only delete the duplicates.
                self.rec.actions.push(Record::SolveNameConflict {
                    name: Atom::EdgeMap(EdgeMap {
                        edges: moved.edges,
                        inode: item.v_papa,
                    }),
                    path: item.full_path.clone(),
                });
                // The tentative name bytes are unused: reclaim them.
                self.rec.contents.truncate(name_start.0 as usize)
            }
        } else {
            self.rec.contents.truncate(name_start.0 as usize)
        }
        Ok(())
    }
}
// org id RiIULuSdUr3BfIyIj6wuGxhdiPlgaYi8BRJChkI34Y0=
/// The edge sets produced by [`collect_moved_edges`] when recording a
/// file move, rename or resurrection.
#[derive(Debug)]
struct MovedEdges {
    // Former name edges to delete.
    edges: Vec<NewEdge<Option<ChangeId>>>,
    // Alive edges repeated alongside the deletions/resurrections
    // (used for missing-context detection).
    alive: Vec<NewEdge<Option<ChangeId>>>,
    // Deleted edges to bring back to life.
    resurrect: Vec<NewEdge<Option<ChangeId>>>,
    // Whether a new name vertex must be created (no alive name with
    // the desired contents was found).
    need_new_name: bool,
}
/// Collect the edge changes needed to give the inode at `current_pos`
/// the parent `parent_pos`, name `name` and metadata `new_meta`.
///
/// Walks the parent (name) edges of the inode and, for each, the
/// grandparent (directory) edges of the name vertex, sorting edges
/// into the buckets of [`MovedEdges`]. Duplicate edges between the
/// same `(from, to)` pair — introduced by several changes — are also
/// collected for cleanup at the end.
fn collect_moved_edges<T: TxnT, C: ChangeStore>(
    txn: &T,
    changes: &C,
    channel: &Channel<T>,
    parent_pos: Position<Option<ChangeId>>,
    current_pos: Position<ChangeId>,
    new_meta: InodeMetadata,
    old_meta: InodeMetadata,
    name: &[u8],
) -> Result<MovedEdges, anyhow::Error> {
    debug!("collect_moved_edges {:?}", current_pos);
    let mut moved = MovedEdges {
        edges: Vec::new(),
        alive: Vec::new(),
        resurrect: Vec::new(),
        need_new_name: true,
    };
    // Duplicate deleted / alive edges, keyed by (from, to), with the
    // list of introducing changes seen for each pair.
    let mut del_del = HashMap::new();
    let mut alive = HashMap::new();
    let mut previous_name = Vec::new();
    for parent in txn
        .iter_adjacent(
            channel,
            current_pos.inode_vertex(),
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::all(),
        )
        .filter(|e| e.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT))
    {
        // org id fIIXSike+L92Ume3OTTPmilsrGywr5YQ8AfEvLCP83A=
        // `parent` points up from the inode vertex to a name vertex.
        debug!("parent = {:?}", parent);
        let mut parent_was_resurrected = false;
        if !parent.flag.contains(EdgeFlags::PSEUDO) {
            if parent.flag.contains(EdgeFlags::DELETED) {
                // The name → inode edge was deleted: resurrect it.
                moved.resurrect.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                    from: parent.dest.to_option(),
                    to: current_pos.inode_vertex().to_option(),
                    introduced_by: Some(parent.introduced_by),
                });
                parent_was_resurrected = true;
                let v = alive
                    .entry((parent.dest, current_pos.inode_vertex()))
                    .or_insert(Vec::new());
                v.push(None)
            } else {
                let v = alive
                    .entry((parent.dest, current_pos.inode_vertex()))
                    .or_insert(Vec::new());
                v.push(Some(parent.introduced_by))
            }
        }
        // Read the former name (2 metadata bytes + basename).
        previous_name.clear();
        let parent_dest = txn.find_block_end(&channel, parent.dest).unwrap();
        changes.get_contents(|p| txn.get_external(p), parent_dest, &mut previous_name)?;
        debug!(
            "parent_dest {:?} {:?}",
            parent_dest,
            std::str::from_utf8(&previous_name[2..])
        );
        debug!("new_meta = {:?}, old_meta = {:?}", new_meta, old_meta);
        // Metadata changes are ignored on Windows.
        let name_changed =
            (&previous_name[2..] != &name[2..]) || (new_meta != old_meta && cfg!(not(windows)));
        for grandparent in txn
            .iter_adjacent(
                channel,
                parent_dest,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::all(),
            )
            .filter(|e| {
                e.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT)
                    && !e.flag.contains(EdgeFlags::PSEUDO)
            })
        {
            // org id aFLJipavvFSV6W0GngE9D67Lu59Fibp/NxOo7S4PfBU=
            // `grandparent` points up from the name vertex to the
            // parent directory's inode vertex.
            debug!("grandparent: {:?}", grandparent);
            let grandparent_dest = txn.find_block_end(&channel, grandparent.dest).unwrap();
            assert_eq!(grandparent_dest.start, grandparent_dest.end);
            debug!(
                "grandparent_dest {:?} {:?}",
                grandparent_dest,
                std::str::from_utf8(&previous_name[2..])
            );
            let grandparent_changed = parent_pos != grandparent.dest.to_option();
            debug!("change = {:?}", grandparent_changed || name_changed);
            if grandparent.flag.contains(EdgeFlags::DELETED) {
                if !grandparent_changed && !name_changed {
                    // We resurrect the name
                    moved.resurrect.push(NewEdge {
                        previous: grandparent.flag - EdgeFlags::PARENT,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: grandparent.dest.to_option(),
                        to: parent_dest.to_option(),
                        introduced_by: Some(grandparent.introduced_by),
                    });
                    if !parent_was_resurrected && !parent.flag.contains(EdgeFlags::PSEUDO) {
                        moved.alive.push(NewEdge {
                            previous: parent.flag - EdgeFlags::PARENT,
                            flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                            from: parent.dest.to_option(),
                            to: current_pos.inode_vertex().to_option(),
                            introduced_by: Some(parent.introduced_by),
                        })
                    }
                    moved.need_new_name = false
                } else {
                    // Clean up the extra deleted edges.
                    debug!("cleanup");
                    let v = del_del
                        .entry((grandparent.dest, parent_dest))
                        .or_insert(Vec::new());
                    v.push(Some(grandparent.introduced_by))
                }
            } else if grandparent_changed || name_changed {
                // The file moved or was renamed: delete the old
                // directory → name edge.
                // NOTE(review): `previous` uses `parent.flag` here,
                // while the sibling branches use `grandparent.flag` —
                // confirm this asymmetry is intentional.
                moved.edges.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: grandparent.dest.to_option(),
                    to: parent_dest.to_option(),
                    introduced_by: Some(grandparent.introduced_by),
                });
                // The following is really important in missing context detection:
                if !parent_was_resurrected && !parent.flag.contains(EdgeFlags::PSEUDO) {
                    moved.alive.push(NewEdge {
                        previous: parent.flag - EdgeFlags::PARENT,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: parent.dest.to_option(),
                        to: current_pos.inode_vertex().to_option(),
                        introduced_by: Some(parent.introduced_by),
                    })
                }
            } else {
                // Same parent, same name: nothing moved here.
                let v = alive
                    .entry((grandparent.dest, parent_dest))
                    .or_insert(Vec::new());
                v.push(Some(grandparent.introduced_by));
                moved.need_new_name = false
            }
        }
    }
    // Collapse duplicate deleted edges (same (from, to), several
    // introducing changes).
    for ((from, to), intro) in del_del {
        if intro.len() > 1 {
            for introduced_by in intro {
                if introduced_by.is_some() {
                    moved.edges.push(NewEdge {
                        previous: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                        from: from.to_option(),
                        to: to.to_option(),
                        introduced_by,
                    })
                }
            }
        }
    }
    // Same collapse for duplicate alive edges.
    for ((from, to), intro) in alive {
        if intro.len() > 1 {
            for introduced_by in intro {
                if introduced_by.is_some() {
                    moved.alive.push(NewEdge {
                        previous: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK,
                        from: from.to_option(),
                        to: to.to_option(),
                        introduced_by,
                    })
                }
            }
        }
    }
    Ok(moved)
}
// org id /9wv5iBmVVp4FyMrDp7baUtY5ISICj5WK9XyO8OC7T4=
impl Builder {
    /// Record the deletion of the file at `current_vertex` and,
    /// recursively, of everything reachable under it (directory
    /// entries and file contents), skipping any vertex whose path
    /// still exists in the working copy.
    fn record_deleted_file<T: TxnT, W: WorkingCopy>(
        &mut self,
        txn: &T,
        channel: &Channel<T>,
        working_copy: &W,
        full_path: &str,
        current_vertex: Position<ChangeId>,
    ) {
        debug!("record_deleted_file {:?} {:?}", current_vertex, full_path);
        // Stack entries are (vertex, inode): `inode` is `Some(pos)`
        // when `vertex` holds file contents belonging to that inode.
        let mut stack = vec![(current_vertex.inode_vertex(), None)];
        let mut visited = HashSet::new();
        while let Some((vertex, inode)) = stack.pop() {
            debug!("vertex {:?}, inode {:?}", vertex, inode);
            // If the path still exists on disk, it isn't deleted.
            if let Some(path) = txn.tree_path(vertex.start_pos()) {
                if working_copy.file_metadata(&path).is_ok() {
                    debug!("not deleting {:?}", path);
                    continue;
                }
            }
            // Kill this vertex
            if let Some(inode) = inode {
                // Contents vertex of a deleted file.
                self.delete_file_edge(txn, channel, vertex, inode)
            } else {
                if vertex.start == vertex.end {
                    debug!("delete_recursively {:?}", vertex);
                    // Killing an inode.
                    if !self.deleted_vertices.insert(vertex.start_pos()) {
                        // Already deleted in this record run.
                        continue;
                    }
                    if let Some(inode) = txn.get_revinodes(vertex.start_pos(), None) {
                        debug!(
                            "delete_recursively, vertex = {:?}, inode = {:?}",
                            vertex, inode
                        );
                        self.recorded_inodes
                            .insert(inode, vertex.start_pos().to_option());
                        self.rec
                            .updatables
                            .insert(self.rec.actions.len(), InodeUpdate::Deleted { inode });
                    }
                    self.delete_inode_vertex(txn, channel, vertex, vertex.start_pos(), full_path)
                }
            }
            // Move on to the descendants.
            for edge in txn.iter_adjacent(
                channel,
                vertex,
                EdgeFlags::empty(),
                EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
            ) {
                debug!("delete_recursively, edge: {:?}", edge);
                let dest = txn
                    .find_block(&channel, edge.dest)
                    .expect("delete_recursively, descendants");
                // Non-FOLDER children of an inode vertex are its file
                // contents: remember which inode they belong to.
                let inode = if inode.is_some() {
                    assert!(!edge.flag.contains(EdgeFlags::FOLDER));
                    inode
                } else if edge.flag.contains(EdgeFlags::FOLDER) {
                    None
                } else {
                    assert_eq!(vertex.start, vertex.end);
                    Some(vertex.start_pos())
                };
                if visited.insert(edge.dest) {
                    stack.push((dest, inode))
                }
            }
        }
    }
    /// Push a `FileDel` record deleting the folder edges of the inode
    /// at `vertex`: the directory → name edges of each parent name,
    /// and the name → inode edges (pseudo-edges excluded).
    fn delete_inode_vertex<T: TxnT>(
        &mut self,
        txn: &T,
        channel: &Channel<T>,
        vertex: Vertex<ChangeId>,
        inode: Position<ChangeId>,
        path: &str,
    ) {
        let mut edges = Vec::new();
        for parent in txn
            .iter_adjacent(
                channel,
                vertex,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::all(),
            )
            .filter(|e| e.flag.contains(EdgeFlags::PARENT))
        {
            assert!(parent.flag.contains(EdgeFlags::PARENT));
            assert!(parent.flag.contains(EdgeFlags::FOLDER));
            let parent_dest = txn.find_block_end(&channel, parent.dest).unwrap();
            for grandparent in txn
                .iter_adjacent(
                    channel,
                    parent_dest,
                    EdgeFlags::FOLDER | EdgeFlags::PARENT,
                    EdgeFlags::all(),
                )
                .filter(|e| {
                    e.flag.contains(EdgeFlags::PARENT) && !e.flag.contains(EdgeFlags::PSEUDO)
                })
            {
                assert!(grandparent.flag.contains(EdgeFlags::PARENT));
                assert!(grandparent.flag.contains(EdgeFlags::FOLDER));
                // Delete the directory → name edge.
                edges.push(NewEdge {
                    previous: grandparent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: grandparent.dest.to_option(),
                    to: parent_dest.to_option(),
                    introduced_by: Some(grandparent.introduced_by),
                });
            }
            if !parent.flag.contains(EdgeFlags::PSEUDO) {
                // Delete the name → inode edge.
                edges.push(NewEdge {
                    previous: parent.flag - EdgeFlags::PARENT,
                    flag: EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED,
                    from: parent.dest.to_option(),
                    to: vertex.to_option(),
                    introduced_by: Some(parent.introduced_by),
                });
            }
        }
        debug!("deleting {:?}", edges);
        if !edges.is_empty() {
            self.rec.actions.push(Record::FileDel {
                del: Atom::EdgeMap(EdgeMap {
                    edges: edges,
                    inode: inode.to_option(),
                }),
                contents: None,
                path: path.to_string(),
            })
        }
    }
    /// Append the deletion of the contents vertex `to` (belonging to
    /// the file at `inode`) to the `contents` of the most recent
    /// `FileDel` action. Does nothing when the last action is not a
    /// `FileDel`.
    fn delete_file_edge<T: TxnT>(
        &mut self,
        txn: &T,
        channel: &Channel<T>,
        to: Vertex<ChangeId>,
        inode: Position<ChangeId>,
    ) {
        if let Some(Record::FileDel {
            ref mut contents, ..
        }) = self.rec.actions.last_mut()
        {
            // Lazily create the contents edge map on first use.
            if contents.is_none() {
                *contents = Some(Atom::EdgeMap(EdgeMap {
                    edges: Vec::new(),
                    inode: inode.to_option(),
                }))
            }
            if let Some(Atom::EdgeMap(ref mut e)) = *contents {
                // Delete every alive, non-pseudo parent edge of `to`.
                for parent in txn
                    .iter_adjacent(
                        channel,
                        to,
                        EdgeFlags::PARENT,
                        EdgeFlags::all() - EdgeFlags::DELETED,
                    )
                    .filter(|e| !e.flag.contains(EdgeFlags::PSEUDO))
                {
                    assert!(parent.flag.contains(EdgeFlags::PARENT));
                    assert!(!parent.flag.contains(EdgeFlags::FOLDER));
                    e.edges.push(NewEdge {
                        previous: parent.flag - EdgeFlags::PARENT,
                        flag: EdgeFlags::DELETED | EdgeFlags::BLOCK,
                        from: parent.dest.to_option(),
                        to: to.to_option(),
                        introduced_by: Some(parent.introduced_by),
                    })
                }
            }
        }
    }
}
// org id v9IGAXnDBProMNkxGxCjY4SHSAORGJUle7gFFdK/1kk=
use super::change_id::*;
/// A node in the repository graph, made of a change internal
/// identifier, and a line identifier in that change.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct Vertex<H> {
    /// The change that introduced this node.
    pub change: H,
    /// The line identifier of the node in that change. Here,
    /// "line" does not imply anything on the contents of the
    /// chunk.
    pub start: ChangePosition,
    /// End of the chunk in that change (exclusive).
    pub end: ChangePosition,
}
impl Vertex<ChangeId> {
    /// The node at the root of the repository graph.
    pub const ROOT: Vertex<ChangeId> = Vertex {
        change: ChangeId::ROOT,
        start: ChangePosition::ROOT,
        end: ChangePosition::ROOT,
    };
    /// The virtual "bottom" node of the repository graph (position
    /// `ChangePosition::BOTTOM` in the root change).
    pub(crate) const BOTTOM: Vertex<ChangeId> = Vertex {
        change: ChangeId::ROOT,
        start: ChangePosition::BOTTOM,
        end: ChangePosition::BOTTOM,
    };
    /// Is this the root key? (the root key is all 0s).
    pub fn is_root(&self) -> bool {
        self == &Vertex::ROOT
    }
    /// This vertex with its `ChangeId` wrapped in `Some`.
    pub(crate) fn to_option(&self) -> Vertex<Option<ChangeId>> {
        Vertex {
            change: Some(self.change),
            start: self.start,
            end: self.end,
        }
    }
}
impl<H: Clone> Vertex<H> {
    /// Convenience function to get the start position of a
    /// [`Vertex<ChangeId>`](struct.Vertex.html) as a
    /// [`Position`](struct.Position.html).
    pub fn start_pos(&self) -> Position<H> {
        Position {
            change: self.change.clone(),
            pos: self.start,
        }
    }
    /// Convenience function to get the end position of a
    /// [`Vertex<ChangeId>`](struct.Vertex.html) as a
    /// [`Position`](struct.Position.html).
    pub fn end_pos(&self) -> Position<H> {
        Position {
            change: self.change.clone(),
            pos: self.end,
        }
    }
    /// Length of this key, in bytes.
    pub fn len(&self) -> usize {
        self.end - self.start
    }
}
// org id 7sgxPkcNsFwRPsxCxoYwmtIUucaNKnXOZhKK0683eg0=
/// The position of a byte within a change.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct ChangePosition(pub u64);
impl ChangePosition {
    // Position 0 is reserved for the root vertex.
    pub(crate) const ROOT: ChangePosition = ChangePosition(0);
    // Position 1 is reserved for the virtual "bottom" vertex.
    pub(crate) const BOTTOM: ChangePosition = ChangePosition(1);
}
/// Offset a position forward by `x` bytes.
impl std::ops::Add<usize> for ChangePosition {
    type Output = ChangePosition;
    fn add(self, x: usize) -> Self::Output {
        ChangePosition(self.0 + x as u64)
    }
}
/// Distance in bytes between two positions (`self - x`).
impl std::ops::Sub<ChangePosition> for ChangePosition {
    type Output = usize;
    fn sub(self, x: ChangePosition) -> Self::Output {
        (self.0 - x.0) as usize
    }
}
// org id i14HsT+IsL1ts+DLixuKpjGKOcweKYlyNFdOi0R5Gio=
/// A byte identifier, i.e. a change together with a position.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[doc(hidden)]
pub struct Position<P> {
    /// The change the byte belongs to.
    pub change: P,
    /// The offset of the byte within that change.
    pub pos: ChangePosition,
}
use super::Base32;
use byteorder::{ByteOrder, LittleEndian};
impl<H: super::Base32> Base32 for Position<H> {
    /// Encode as `<change>.<pos>`, where `<pos>` is the unpadded
    /// base32 encoding of the little-endian bytes of `pos`, trailing
    /// zero bytes trimmed (at least 3 bytes are always kept).
    fn to_base32(&self) -> String {
        let mut v = self.change.to_base32();
        let mut bytes = [0; 8];
        LittleEndian::write_u64(&mut bytes, self.pos.0);
        // Trim trailing zero bytes, keeping at least bytes[0..3].
        let mut i = 7;
        while i > 2 && bytes[i] == 0 {
            i -= 1
        }
        i += 1;
        let len = data_encoding::BASE32_NOPAD.encode_len(i);
        let len0 = v.len() + 1;
        // Reserve the '.' separator plus `len` placeholder bytes,
        // which are overwritten in place just below.
        v.push_str("..............");
        v.truncate(len0 + len);
        // SAFETY: BASE32_NOPAD writes only ASCII characters over the
        // single-byte '.' placeholders, so `v` stays valid UTF-8.
        data_encoding::BASE32_NOPAD.encode_mut(&bytes[..i], unsafe {
            v.split_at_mut(len0).1.as_bytes_mut()
        });
        v
    }
    /// Inverse of `to_base32`: split at the first `'.'`, decode the
    /// change with `H::from_base32` and the position with unpadded
    /// base32. Returns `None` on any malformed input.
    fn from_base32(s: &[u8]) -> Option<Self> {
        let n = if let Some(n) = s.iter().position(|c| *c == b'.') {
            n
        } else {
            return None;
        };
        let (s, pos) = s.split_at(n);
        let pos = &pos[1..];
        let change = if let Some(change) = H::from_base32(s) {
            change
        } else {
            return None;
        };
        let mut dec = [0; 8];
        // NOTE(review): a position part longer than 13 characters
        // makes `len > 8` and panics on the slice below — confirm
        // whether such input should be rejected with `None` instead.
        let len = if let Ok(len) = data_encoding::BASE32_NOPAD.decode_len(pos.len()) {
            len
        } else {
            return None;
        };
        let pos = if let Ok(_) = data_encoding::BASE32_NOPAD.decode_mut(pos, &mut dec[..len]) {
            LittleEndian::read_u64(&dec)
        } else {
            return None;
        };
        Some(Position {
            change,
            pos: ChangePosition(pos),
        })
    }
}
/// Offset a position forward by `x` bytes within the same change.
impl<H> std::ops::Add<usize> for Position<H> {
    type Output = Position<H>;
    fn add(self, x: usize) -> Self::Output {
        Position {
            change: self.change,
            pos: self.pos + x,
        }
    }
}
impl Position<ChangeId> {
    /// The empty (zero-length) vertex at this position, used to
    /// represent an inode in the graph.
    pub fn inode_vertex(&self) -> Vertex<ChangeId> {
        Vertex {
            change: self.change,
            start: self.pos,
            end: self.pos,
        }
    }
    /// Is this position in the root change?
    pub fn is_root(&self) -> bool {
        self.change.is_root()
    }
    /// This position with its `ChangeId` wrapped in `Some`.
    pub(crate) fn to_option(&self) -> Position<Option<ChangeId>> {
        Position {
            change: Some(self.change),
            pos: self.pos,
        }
    }
    /// Position of the graph root.
    pub const ROOT: Position<ChangeId> = Position {
        change: ChangeId::ROOT,
        pos: ChangePosition(0),
    };
    /// [`Position::ROOT`] with its `ChangeId` wrapped in `Some`.
    pub(crate) const OPTION_ROOT: Position<Option<ChangeId>> = Position {
        change: Some(ChangeId::ROOT),
        pos: ChangePosition(0),
    };
    /// Position of the virtual "bottom" vertex.
    pub(crate) const BOTTOM: Position<ChangeId> = Position {
        change: ChangeId::ROOT,
        pos: ChangePosition::BOTTOM,
    };
}
// org id IulcnLeMMhhyQMholFGhyFqHxReK9r2I3Au24JVIri0=
use super::*;
use crate::small_string;
use ::sanakirja::*;
use byteorder::{ByteOrder, LittleEndian};
use rand::rngs::ThreadRng;
use rand::thread_rng;
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
/// A Sanakirja pristine.
pub struct Pristine {
    /// The underlying Sanakirja environment.
    pub env: Arc<::sanakirja::Env>,
}
impl Pristine {
    /// Open (or create) the pristine at path `name`, with an initial
    /// size of 1 MiB.
    pub fn new<P: AsRef<Path>>(name: P) -> Result<Self, anyhow::Error> {
        Self::new_with_size(name, 1 << 20)
    }
    /// Same as [`Pristine::new`], without taking the file lock.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that nothing else accesses the
    /// pristine concurrently, since no lock protects it.
    pub unsafe fn new_nolock<P: AsRef<Path>>(name: P) -> Result<Self, anyhow::Error> {
        Self::new_with_size_nolock(name, 1 << 20)
    }
    /// Open (or create) the pristine at path `name`, with an initial
    /// size of `size` bytes.
    pub fn new_with_size<P: AsRef<Path>>(name: P, size: u64) -> Result<Self, anyhow::Error> {
        Ok(Pristine {
            env: Arc::new(sanakirja::Env::new(name, size)?),
        })
    }
    /// Same as [`Pristine::new_with_size`], without taking the file
    /// lock.
    ///
    /// # Safety
    ///
    /// See [`Pristine::new_nolock`].
    pub unsafe fn new_with_size_nolock<P: AsRef<Path>>(
        name: P,
        size: u64,
    ) -> Result<Self, anyhow::Error> {
        Ok(Pristine {
            env: Arc::new(sanakirja::Env::new_nolock(name, size)?),
        })
    }
    /// Create an anonymous pristine (see `sanakirja::Env::new_anon`)
    /// of 1 MiB.
    pub fn new_anon() -> Result<Self, anyhow::Error> {
        Self::new_anon_with_size(1 << 20)
    }
    /// Create an anonymous pristine of `size` bytes.
    pub fn new_anon_with_size(size: u64) -> Result<Self, anyhow::Error> {
        Ok(Pristine {
            env: Arc::new(sanakirja::Env::new_anon(size)?),
        })
    }
}
/// Slots of the Sanakirja root page: each table's root is stored at
/// index `Root::X as usize` (see `txn_begin`/`mut_txn_begin`).
///
/// The discriminants are part of the on-disk layout: never reorder or
/// remove variants, only append new ones.
#[derive(Debug, PartialEq, Clone, Copy)]
#[repr(usize)]
enum Root {
    Tree,
    RevTree,
    Inodes,
    RevInodes,
    Internal,
    External,
    RevDep,
    Channels,
    TouchedFiles,
    Dep,
    RevTouchedFiles,
    Partials,
    Remotes,
}
impl Pristine {
    /// Start an immutable transaction.
    ///
    /// Fails with `Error::PristineCorrupt` if any of the expected
    /// table roots is missing from the environment.
    pub fn txn_begin(&self) -> Result<Txn, anyhow::Error> {
        let txn = ::sanakirja::Env::txn_begin(self.env.clone())?;
        // Inner function so `?` can short-circuit on the `Option`s
        // returned by `root`: one missing root makes the whole `None`.
        fn begin(txn: ::sanakirja::Txn<Arc<::sanakirja::Env>>) -> Option<Txn> {
            Some(Txn {
                channels: txn.root(Root::Channels as usize)?,
                external: txn.root(Root::External as usize)?,
                internal: txn.root(Root::Internal as usize)?,
                inodes: txn.root(Root::Inodes as usize)?,
                revinodes: txn.root(Root::RevInodes as usize)?,
                tree: txn.root(Root::Tree as usize)?,
                revtree: txn.root(Root::RevTree as usize)?,
                revdep: txn.root(Root::RevDep as usize)?,
                touched_files: txn.root(Root::TouchedFiles as usize)?,
                rev_touched_files: txn.root(Root::RevTouchedFiles as usize)?,
                partials: txn.root(Root::Partials as usize)?,
                dep: txn.root(Root::Dep as usize)?,
                remotes: txn.root(Root::Remotes as usize)?,
                rng: thread_rng(),
                open_channels: RefCell::new(HashMap::new()),
                open_remotes: RefCell::new(HashMap::new()),
                txn,
            })
        }
        if let Some(txn) = begin(txn) {
            Ok(txn)
        } else {
            Err(crate::Error::PristineCorrupt.into())
        }
    }
    /// Start a mutable transaction, creating any table whose root is
    /// missing (e.g. on a freshly created pristine).
    pub fn mut_txn_begin(&self) -> MutTxn<()> {
        let mut txn = ::sanakirja::Env::mut_txn_begin(self.env.clone()).unwrap();
        MutTxn {
            channels: txn
                .root(Root::Channels as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            external: txn
                .root(Root::External as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            internal: txn
                .root(Root::Internal as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            inodes: txn
                .root(Root::Inodes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revinodes: txn
                .root(Root::RevInodes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            tree: txn
                .root(Root::Tree as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revtree: txn
                .root(Root::RevTree as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            revdep: txn
                .root(Root::RevDep as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            dep: txn
                .root(Root::Dep as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            touched_files: txn
                .root(Root::TouchedFiles as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            rev_touched_files: txn
                .root(Root::RevTouchedFiles as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            partials: txn
                .root(Root::Partials as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            remotes: txn
                .root(Root::Remotes as usize)
                .unwrap_or_else(|| txn.create_db().unwrap()),
            rng: thread_rng(),
            open_channels: RefCell::new(HashMap::new()),
            open_remotes: RefCell::new(HashMap::new()),
            txn,
        }
    }
}
/// The graph of a channel: vertices and their adjacent edges.
type Graph = ::sanakirja::Db<Vertex<ChangeId>, Edge>;
/// The changes applied to a channel, with their apply timestamps.
type ChangeSet = ::sanakirja::Db<ChangeId, ApplyTimestamp>;
/// Reverse of [`ChangeSet`], also carrying the Merkle state.
type RevChangeSet = ::sanakirja::Db<ApplyTimestamp, (ChangeId, Merkle)>;
/// The states (Merkle hashes) a channel has gone through.
type ChannelStates = ::sanakirja::Db<Merkle, u64>;
/// An immutable transaction on a [`Pristine`].
pub type Txn = GenericTxn<::sanakirja::Txn<Arc<::sanakirja::Env>>>;
/// A mutable transaction on a [`Pristine`].
pub type MutTxn<T> = GenericTxn<::sanakirja::MutTxn<Arc<::sanakirja::Env>, T>>;
/// A transaction, used both for mutable and immutable transactions,
/// depending on type parameter `T`.
///
/// In Sanakirja, both `sanakirja::Txn` and `sanakirja::MutTxn`
/// implement `sanakirja::Transaction`, explaining our implementation
/// of `TxnT` for `Txn<T>` for all `T: sanakirja::Transaction`. This
/// covers both mutable and immutable transactions in a single
/// implementation.
pub struct GenericTxn<T: ::sanakirja::Transaction> {
    txn: T,
    // Hash <-> ChangeId, in both directions.
    internal: Db<Hash, ChangeId>,
    external: Db<ChangeId, Hash>,
    // Inode <-> graph position, in both directions.
    inodes: Db<Inode, Position<ChangeId>>,
    revinodes: Db<Position<ChangeId>, Inode>,
    // (parent inode, basename) <-> inode, in both directions.
    tree: Db<UnsafePathId, Inode>,
    revtree: Db<Inode, UnsafePathId>,
    // Dependencies between changes, in both directions.
    revdep: Db<ChangeId, ChangeId>,
    dep: Db<ChangeId, ChangeId>,
    // Positions touched by changes, in both directions.
    touched_files: Db<Position<ChangeId>, ChangeId>,
    rev_touched_files: Db<ChangeId, Position<ChangeId>>,
    partials: Db<UnsafeSmallStr, Position<ChangeId>>,
    // Channel name -> that channel's tables plus two u64 fields
    // (semantics not established in this file).
    channels: Db<UnsafeSmallStr, (Graph, ChangeSet, RevChangeSet, ChannelStates, u64, u64)>,
    remotes: Db<UnsafeSmallStr, (Db<u64, (Hash, Merkle)>, Db<Hash, u64>, Db<Merkle, u64>)>,
    rng: ThreadRng,
    // Caches of channels/remotes already opened in this transaction.
    open_channels: RefCell<HashMap<SmallString, ChannelRef<Self>>>,
    open_remotes: RefCell<HashMap<SmallString, RemoteRef<Self>>>,
}
/// This is actually safe because the only non-Send fields are
/// `open_channels` and `open_remotes`, but we can't do anything with
/// a `ChannelRef` whose transaction has been moved to another thread.
unsafe impl<T: ::sanakirja::Transaction> Send for GenericTxn<T> {}
/// Summary returned by [`Txn::check_database`].
#[derive(Debug)]
pub struct DatabaseReport {
    /// Number of distinct pages referenced from the table roots.
    pub refs: usize,
    /// Raw Sanakirja allocator statistics.
    pub stats: ::sanakirja::Statistics,
}
impl Txn {
    /// Check the referential integrity of the pristine: collect every
    /// page reachable from the table roots (including per-channel and
    /// per-remote tables), then compare against the allocator's
    /// statistics. Panics on any inconsistency (unreachable,
    /// free-but-referenced or bookkeeping-and-free pages).
    ///
    /// Debugging/maintenance helper: panics instead of returning
    /// errors, and writes a graph dump via `sanakirja::debug_`.
    pub fn check_database(&self) -> DatabaseReport {
        let mut refs = HashMap::new();
        // Collect page references from every top-level table.
        self.txn.references(&mut refs, self.internal);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.external);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.inodes);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revinodes);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.tree);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revtree);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.revdep);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.dep);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.touched_files);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.rev_touched_files);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.partials);
        info!("refs = {:?}", refs);
        self.txn.references(&mut refs, self.channels);
        info!("refs = {:?}", refs);
        // Per-channel tables.
        for (a, (g, c, r, s, _, _)) in self.txn.iter(&self.channels, None) {
            info!("channel = {:?}", a);
            self.txn.references(&mut refs, g);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, c);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, r);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, s);
            info!("refs = {:?}", refs);
        }
        self.txn.references(&mut refs, self.remotes);
        info!("refs = {:?}", refs);
        // Per-remote tables.
        for (a, (u, v, w)) in self.txn.iter(&self.remotes, None) {
            info!("remote = {:?}", a);
            self.txn.references(&mut refs, u);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, v);
            info!("refs = {:?}", refs);
            self.txn.references(&mut refs, w);
            info!("refs = {:?}", refs);
        }
        let stats = self.txn.statistics();
        let report = DatabaseReport {
            refs: refs.len(),
            stats: stats.clone(),
        };
        // Dump the channel tables for offline inspection.
        let mut channel_roots: Vec<UnsafeDb> = Vec::new();
        for (a, (g, c, r, s, _, _)) in self.txn.iter(&self.channels, None) {
            info!("channel: {:?}", a);
            // NOTE(review): the typed `Db`s are transmuted to
            // `UnsafeDb` for the debug dump — assumes both are the
            // same underlying page handle; confirm in sanakirja.
            unsafe {
                channel_roots.push(std::mem::transmute(g));
                channel_roots.push(std::mem::transmute(c));
                channel_roots.push(std::mem::transmute(r));
                channel_roots.push(std::mem::transmute(s));
            }
        }
        ::sanakirja::debug_(&self.txn, &channel_roots[..], "debug_sanakirja", true);
        self.txn.check_references(&mut refs);
        info!("stats = {:?}", stats);
        let occupied_pages =
            stats.total_pages - stats.free_pages.len() - stats.bookkeeping_pages.len();
        // Every page (offset i * 4096) must be either referenced,
        // free, or a bookkeeping page.
        for i in 1..(stats.total_pages as u64) {
            let p = i * 4096;
            if !refs.contains_key(&p)
                && !stats.free_pages.contains(&p)
                && !stats.bookkeeping_pages.contains(&p)
            {
                panic!("does not contain {:?} ({:?})", i, p);
            }
        }
        // check that there is no intersection.
        for (r, _) in refs.iter() {
            if stats.free_pages.contains(r) {
                panic!("referenced page is free: {:?}", r);
            }
            if stats.bookkeeping_pages.contains(r) {
                panic!("referenced page is a bookkeeping page: {:?}", r);
            }
        }
        for p in stats.free_pages.iter() {
            if stats.bookkeeping_pages.contains(p) {
                panic!("bookkeeping inter free: {:?}", p);
            }
        }
        // +1 accounts for the root page itself.
        assert_eq!(1 + refs.len(), occupied_pages);
        report
    }
}
// Read-only half of the transaction API. Most accessors are generated
// by the `sanakirja_*!` macros; the hand-written methods below handle
// the root-change special cases, prefix resolution, and the
// channel/remote caches.
impl<T: ::sanakirja::Transaction> TxnT for GenericTxn<T> {
    #[doc(hidden)]
    type Graph = Db<Vertex<ChangeId>, Edge>;
    sanakirja_cursor_ref!(graph, Vertex<ChangeId>, Edge);
    sanakirja_get!(graph, Vertex<ChangeId>, Edge);
    #[doc(hidden)]
    fn get_external(&self, p: ChangeId) -> Option<Hash> {
        // The root change is not stored in the table: it maps to the
        // reserved `Hash::None`.
        if p.is_root() {
            Some(Hash::None)
        } else {
            self.txn.get(&self.external, p, None)
        }
    }
    #[doc(hidden)]
    fn get_internal(&self, p: Hash) -> Option<ChangeId> {
        // Mirror of `get_external`: `Hash::None` is the root change.
        if let Hash::None = p {
            Some(ChangeId::ROOT)
        } else {
            self.txn.get(&self.internal, p, None)
        }
    }
    /// Resolve a base-32 hash prefix to the unique (hash, internal id)
    /// pair it identifies. Errors if the prefix does not parse, matches
    /// no change, or matches more than one.
    fn hash_from_prefix(&self, s: &str) -> Result<(Hash, ChangeId), anyhow::Error> {
        let h = if let Some(h) = Hash::from_prefix(s) {
            h
        } else {
            return Err((crate::Error::ParseError { s: s.to_string() }).into());
        };
        let mut result = None;
        debug!("h = {:?}", h);
        // Scan from the first key >= the (padded) prefix, and stop as
        // soon as the base-32 rendering no longer starts with `s`.
        for (e, i) in self.txn.iter(&self.internal, Some((h, None))) {
            debug!("{:?} {:?}", e, i);
            if e < h {
                continue;
            } else {
                let b32 = e.to_base32();
                debug!("{:?}", b32);
                let (b32, _) = b32.split_at(s.len().min(b32.len()));
                if b32 != s {
                    break;
                } else if result.is_none() {
                    result = Some((e, i))
                } else {
                    // Second match: the prefix is ambiguous.
                    return Err((crate::Error::AmbiguousHashPrefix {
                        prefix: s.to_string(),
                    })
                    .into());
                }
            }
        }
        if let Some(result) = result {
            Ok(result)
        } else {
            Err((crate::Error::ChangeNotFound {
                hash: s.to_string(),
            })
            .into())
        }
    }
    /// Same prefix resolution as `hash_from_prefix`, but against the
    /// set of changes known to `remote` (its `rev` table).
    fn hash_from_prefix_remote<'txn>(
        &'txn self,
        remote: &RemoteRef<Self>,
        s: &str,
    ) -> Result<Hash, anyhow::Error> {
        let remote = remote.borrow();
        let h = if let Some(h) = Hash::from_prefix(s) {
            h
        } else {
            return Err((crate::Error::ParseError { s: s.to_string() }).into());
        };
        let mut result = None;
        debug!("h = {:?}", h);
        for (e, _) in self.txn.iter(&remote.rev, Some((h, None))) {
            debug!("{:?}", e);
            if e < h {
                continue;
            } else {
                let b32 = e.to_base32();
                debug!("{:?}", b32);
                let (b32, _) = b32.split_at(s.len().min(b32.len()));
                if b32 != s {
                    break;
                } else if result.is_none() {
                    result = Some(e)
                } else {
                    return Err((crate::Error::AmbiguousHashPrefix {
                        prefix: s.to_string(),
                    })
                    .into());
                }
            }
        }
        if let Some(result) = result {
            Ok(result)
        } else {
            Err((crate::Error::ChangeNotFound {
                hash: s.to_string(),
            })
            .into())
        }
    }
    #[doc(hidden)]
    type Inodes = Db<Inode, Position<ChangeId>>;
    sanakirja_table_get!(inodes, Inode, Position<ChangeId>);
    sanakirja_table_get!(revinodes, Position<ChangeId>, Inode);
    // #[cfg(debug_assertions)]
    sanakirja_cursor!(inodes, Inode, Position<ChangeId>);
    #[doc(hidden)]
    type Tree = Db<UnsafePathId, Inode>;
    sanakirja_table_get!(tree, PathId, Inode, (UnsafePathId::from_fileid(key), value),);
    sanakirja_iter!(
        tree,
        OwnedPathId,
        Inode,
        if let Some((ref k, ref v)) = pos {
            info!("tree iter {:?} {:?}", k, v);
            Some((UnsafePathId::from_fileid(k.as_file_id()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafePathId, Inode)| (unsafe { k.to_fileid().to_owned() }, v))
    );
    sanakirja_iter!(
        revtree,
        Inode,
        OwnedPathId,
        if let Some((ref k, ref v)) = pos {
            let v = if let Some(ref v) = *v {
                Some(UnsafePathId::from_fileid(v.as_file_id()))
            } else {
                None
            };
            Some((*k, v))
        } else {
            None
        },
        map(|(k, v): (Inode, UnsafePathId)| (k, unsafe { v.to_fileid().to_owned() }))
    );
    #[doc(hidden)]
    type Revtree = Db<Inode, UnsafePathId>;
    sanakirja_table_get!(
        revtree,
        Inode,
        PathId,
        (
            key,
            if let Some(value) = value {
                Some(UnsafePathId::from_fileid(value))
            } else {
                None
            }
        ),
        map(|value| unsafe { value.to_fileid() })
    );
    #[doc(hidden)]
    type Changeset = Db<ChangeId, u64>;
    #[doc(hidden)]
    type Revchangeset = Db<u64, (ChangeId, Merkle)>;
    #[doc(hidden)]
    type Channelstates = Db<Merkle, u64>;
    sanakirja_get!(changeset, ChangeId, u64);
    sanakirja_get!(revchangeset, u64, (ChangeId, Merkle));
    sanakirja_cursor!(changeset, ChangeId, u64);
    sanakirja_cursor_ref!(revchangeset, u64, (ChangeId, Merkle));
    sanakirja_rev_cursor!(revchangeset, u64, (ChangeId, Merkle));
    #[doc(hidden)]
    type Dep = Db<ChangeId, ChangeId>;
    #[doc(hidden)]
    type Revdep = Db<ChangeId, ChangeId>;
    sanakirja_table_get!(dep, ChangeId, ChangeId);
    sanakirja_table_get!(revdep, ChangeId, ChangeId);
    sanakirja_cursor_ref!(dep, ChangeId, ChangeId);
    sanakirja_table_get!(touched_files, Position<ChangeId>, ChangeId);
    sanakirja_table_get!(rev_touched_files, ChangeId, Position<ChangeId>);
    #[doc(hidden)]
    fn iter_dep_ref<RT: std::ops::Deref<Target = Self> + Clone>(
        txn: RT,
        p: ChangeId,
    ) -> super::Cursor<Self, RT, Self::DepCursor, ChangeId, ChangeId> {
        // Like `iter_dep`, but carries an owned handle on the
        // transaction instead of a borrow.
        Self::cursor_dep_ref(txn.clone(), &txn.dep, Some((p, None)))
    }
    #[doc(hidden)]
    type Touched_files = Db<Position<ChangeId>, ChangeId>;
    #[doc(hidden)]
    type Rev_touched_files = Db<ChangeId, Position<ChangeId>>;
    sanakirja_iter!(touched_files, Position<ChangeId>, ChangeId);
    sanakirja_iter!(rev_touched_files, ChangeId, Position<ChangeId>);
    #[doc(hidden)]
    type Partials = Db<UnsafeSmallStr, Position<ChangeId>>;
    sanakirja_cursor!(
        partials,
        SmallString,
        Position<ChangeId>,
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, Position<ChangeId>)| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    /// Load the channel called `name`, caching the handle in
    /// `open_channels` so that later calls share the same `ChannelRef`.
    /// Returns `None` if no such channel exists.
    fn load_channel(&self, name: &str) -> Option<ChannelRef<Self>> {
        let name = SmallString::from_str(name);
        match self.open_channels.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some((channel, changes, revchanges, states, counter, last_modified)) =
                    self.txn.get(
                        &self.channels,
                        UnsafeSmallStr::from_small_str(name.as_small_str()),
                        None,
                    )
                {
                    let r = ChannelRef {
                        r: Rc::new(RefCell::new(Channel {
                            graph: channel,
                            changes,
                            revchanges,
                            apply_counter: counter,
                            states,
                            name: name.clone(),
                            last_modified,
                        })),
                    };
                    v.insert(r);
                } else {
                    return None;
                }
            }
            Entry::Occupied(_) => {}
        }
        self.open_channels.borrow().get(&name).cloned()
    }
    /// Load the remote called `name`, caching the handle in
    /// `open_remotes`. Returns `None` if no such remote exists.
    fn load_remote(&self, name: &str) -> Option<RemoteRef<Self>> {
        let name = SmallString::from_str(name);
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                ) {
                    let r = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    };
                    v.insert(r);
                } else {
                    return None;
                }
            }
            Entry::Occupied(_) => {}
        }
        self.open_remotes.borrow().get(&name).cloned()
    }
    /// Global table mapping a channel name to its six components:
    /// graph, changes, revchanges and states tables, then the apply
    /// counter and last-modified timestamp (see `load_channel`).
    type Channels = Db<UnsafeSmallStr, (u64, u64, u64, u64, u64, u64)>;
    sanakirja_cursor!(
        channels,
        SmallString,
        (u64, u64, u64, u64, u64, u64),
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, (u64, u64, u64, u64, u64, u64))| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    fn iter_channels<'txn>(&'txn self, start: &str) -> ChannelIterator<'txn, Self> {
        let name = SmallString::from_str(start);
        let name = UnsafeSmallStr::from_small_str(name.as_small_str());
        ChannelIterator {
            cursor: self.txn.set_cursors(&self.channels, Some((name, None))).0,
            txn: self,
        }
    }
    type Remotes = Db<UnsafeSmallStr, (u64, u64, u64)>;
    sanakirja_cursor!(
        remotes,
        SmallString,
        (u64, u64, u64),
        if let Some((ref k, ref v)) = pos {
            Some((UnsafeSmallStr::from_small_str(k.as_small_str()), *v))
        } else {
            None
        },
        map(|(k, v): (UnsafeSmallStr, (u64, u64, u64))| (
            unsafe { k.to_small_str().to_owned() },
            v
        ))
    );
    fn iter_remotes<'txn>(&'txn self, start: &str) -> RemotesIterator<'txn, Self> {
        let name = SmallString::from_str(start);
        let name = UnsafeSmallStr::from_small_str(name.as_small_str());
        RemotesIterator {
            cursor: self.txn.set_cursors(&self.remotes, Some((name, None))).0,
            txn: self,
        }
    }
    // #[cfg(debug_assertions)]
    #[doc(hidden)]
    fn iter_inodes<'txn>(
        &'txn self,
    ) -> super::Cursor<Self, &'txn Self, Self::InodesCursor, Inode, Position<ChangeId>> {
        self.cursor_inodes(&self.inodes, None)
    }
    #[doc(hidden)]
    fn iter_revdep<'txn>(
        &'txn self,
        k: ChangeId,
    ) -> super::Cursor<Self, &'txn Self, Self::DepCursor, ChangeId, ChangeId> {
        self.cursor_dep(&self.revdep, Some((k, None)))
    }
    #[doc(hidden)]
    fn iter_dep<'txn>(
        &'txn self,
        k: ChangeId,
    ) -> super::Cursor<Self, &'txn Self, Self::DepCursor, ChangeId, ChangeId> {
        self.cursor_dep(&self.dep, Some((k, None)))
    }
    #[doc(hidden)]
    fn iter_touched<'txn>(
        &'txn self,
        k: Position<ChangeId>,
    ) -> super::Cursor<Self, &'txn Self, Self::Touched_filesCursor, Position<ChangeId>, ChangeId>
    {
        self.cursor_touched_files(&self.touched_files, Some((k, None)))
    }
    #[doc(hidden)]
    fn iter_rev_touched<'txn>(
        &'txn self,
        k: ChangeId,
    ) -> super::Cursor<Self, &'txn Self, Self::Rev_touched_filesCursor, ChangeId, Position<ChangeId>>
    {
        self.cursor_rev_touched_files(&self.rev_touched_files, Some((k, None)))
    }
    #[doc(hidden)]
    fn iter_partials<'txn>(
        &'txn self,
        k: &str,
    ) -> super::Cursor<Self, &'txn Self, Self::PartialsCursor, SmallString, Position<ChangeId>>
    {
        let k0 = SmallString::from_str(k);
        self.cursor_partials(&self.partials, Some((k0, None)))
    }
    type Remote = Db<u64, (Hash, Merkle)>;
    type Revremote = Db<Hash, u64>;
    type Remotestates = Db<Merkle, u64>;
    sanakirja_cursor!(remote, u64, (Hash, Merkle));
    sanakirja_rev_cursor!(remote, u64, (Hash, Merkle));
    fn iter_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: u64,
    ) -> super::Cursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)> {
        self.cursor_remote(remote, Some((k, None)))
    }
    fn iter_rev_remote<'txn>(
        &'txn self,
        remote: &Self::Remote,
        k: Option<u64>,
    ) -> super::RevCursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)> {
        self.rev_cursor_remote(remote, k.map(|k| (k, None)))
    }
    /// Mutable-receiver variant of `load_remote`, kept for callers that
    /// only hold `&mut self`; the lookup and caching logic is the same.
    fn get_remote(&mut self, name: &str) -> Option<RemoteRef<Self>> {
        let name = SmallString::from_str(name);
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                ) {
                    let r = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    };
                    v.insert(r);
                } else {
                    return None;
                }
            }
            Entry::Occupied(_) => {}
        }
        self.open_remotes.borrow().get(&name).cloned()
    }
    fn last_remote(&self, remote: &Self::Remote) -> Option<(u64, (Hash, Merkle))> {
        self.txn.rev_iter(remote, None).next()
    }
    fn get_remote_state(&self, remote: &Self::Remote, n: u64) -> Option<(u64, (Hash, Merkle))> {
        // First entry with key >= `n` (the cursor may start slightly
        // before the requested position).
        self.txn.iter(remote, Some((n, None))).find(|(k, _)| *k >= n)
    }
    fn remote_has_change(&self, remote: &RemoteRef<Self>, hash: Hash) -> bool {
        self.txn.get(&remote.db.borrow().rev, hash, None).is_some()
    }
    fn remote_has_state(&self, remote: &RemoteRef<Self>, m: Merkle) -> bool {
        self.txn.get(&remote.db.borrow().states, m, None).is_some()
    }
    fn channel_has_state(&self, channel: &ChannelRef<Self>, m: Merkle) -> bool {
        self.txn.get(&channel.borrow().states, m, None).is_some()
    }
}
/*
impl<T> MutTxn<T> {
pub fn with_remote_mut<F: FnMut(&mut Db<u64, (Hash, Merkle)>)>(
&mut self,
name: &str,
mut f: F,
) -> Result<bool, anyhow::Error> {
let name = SmallString::from_str(name);
let name = UnsafeSmallStr::from_small_str(name.as_small_str());
if let Some((mut db0, db1, db2)) = self.txn.get(&self.remotes, name, None) {
f(&mut db0);
let mut db: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64)> =
unsafe { std::mem::transmute(self.remotes) };
self.txn.del(&mut self.rng, &mut db, name, None)?;
self.remotes = unsafe { std::mem::transmute(db) };
self.txn
.put(&mut self.rng, &mut self.remotes, name, (db0, db1, db2))?;
Ok(true)
} else {
Ok(false)
}
}
pub fn del_remote(&mut self, name: &str) -> Result<(), anyhow::Error> {
let name = SmallString::from_str(name);
let name = UnsafeSmallStr::from_small_str(name.as_small_str());
self.txn.del(&mut self.rng, &mut self.remotes, name, None)?;
Ok(())
}
}
*/
// Write half of the transaction API: table insertions/deletions, the
// channel/remote lifecycle, and the final `commit`.
impl MutTxnT for MutTxn<()> {
    sanakirja_put_del!(internal, Hash, ChangeId);
    sanakirja_put_del!(external, ChangeId, Hash);
    sanakirja_put_del!(inodes, Inode, Position<ChangeId>);
    sanakirja_put_del!(revinodes, Position<ChangeId>, Inode);
    sanakirja_put_del!(tree, PathId, Inode, UnsafePathId::from_fileid(k), v);
    sanakirja_put_del!(revtree, Inode, PathId, k, UnsafePathId::from_fileid(v),);
    sanakirja_put_del!(dep, ChangeId, ChangeId);
    sanakirja_put_del!(revdep, ChangeId, ChangeId);
    sanakirja_put_del!(touched_files, Position<ChangeId>, ChangeId);
    sanakirja_put_del!(rev_touched_files, ChangeId, Position<ChangeId>);
    /// Insert the edge `e` at vertex `k` into `graph`.
    #[doc(hidden)]
    fn put_graph(
        &mut self,
        graph: &mut Self::Graph,
        k: Vertex<ChangeId>,
        e: Edge,
    ) -> Result<bool, anyhow::Error> {
        Ok(self.txn.put(&mut self.rng, graph, k, e)?)
    }
    /// Delete the binding `(k, e)` (or any binding at `k` if `e` is
    /// `None`) from `graph`.
    #[doc(hidden)]
    fn del_graph(
        &mut self,
        graph: &mut Self::Graph,
        k: Vertex<ChangeId>,
        e: Option<Edge>,
    ) -> Result<bool, anyhow::Error> {
        debug!("del_graph {:?} {:?}", k, e);
        Ok(self.txn.del(&mut self.rng, graph, k, e)?)
    }
    #[doc(hidden)]
    fn put_partials(&mut self, k: &str, e: Position<ChangeId>) -> Result<bool, anyhow::Error> {
        let k = SmallString::from_str(k);
        Ok(self.txn.put(
            &mut self.rng,
            &mut self.partials,
            UnsafeSmallStr::from_small_str(k.as_small_str()),
            e,
        )?)
    }
    #[doc(hidden)]
    fn del_partials(
        &mut self,
        k: &str,
        e: Option<Position<ChangeId>>,
    ) -> Result<bool, anyhow::Error> {
        let k = SmallString::from_str(k);
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.partials,
            UnsafeSmallStr::from_small_str(k.as_small_str()),
            e,
        )?)
    }
    /// Register change `p` at timestamp `t` in `channel`, extending the
    /// channel's Merkle state with `h`. Returns the new Merkle value,
    /// or `None` if `p` was already present.
    #[doc(hidden)]
    fn put_changes(
        &mut self,
        channel: &mut Channel<Self>,
        p: ChangeId,
        t: ApplyTimestamp,
        h: &Hash,
    ) -> Result<Option<Merkle>, anyhow::Error> {
        if self.get_changeset(&channel.changes, p, None).is_none() {
            channel.apply_counter += 1;
            debug!("put_changes {:?} {:?}", t, p);
            // Current Merkle state = the value stored with the
            // highest-timestamp entry of `revchanges`, or zero for an
            // empty channel.
            let m = if let Some((_, (_, m))) = self.txn.rev_iter(&channel.revchanges, None).next() {
                m
            } else {
                Merkle::zero()
            };
            let m = m.next(h);
            // `t` must be fresh: timestamps are unique per channel.
            assert!(self
                .get_revchangeset(&channel.revchanges, t, None)
                .is_none());
            assert!(self.txn.put(&mut self.rng, &mut channel.changes, p, t)?);
            assert!(self
                .txn
                .put(&mut self.rng, &mut channel.revchanges, t, (p, m))?);
            Ok(Some(m))
        } else {
            Ok(None)
        }
    }
    /// Remove the change registered at timestamp `t`, recomputing the
    /// Merkle value of every later entry (each depends on the removed
    /// one).
    #[doc(hidden)]
    fn del_changes(
        &mut self,
        channel: &mut Channel<Self>,
        p: ChangeId,
        t: ApplyTimestamp,
    ) -> Result<bool, anyhow::Error> {
        // All entries with timestamp >= `t`, in increasing order.
        let mut repl = Vec::new();
        for (t_, (p, _)) in self.txn.iter(&channel.revchanges, Some((t, None))) {
            if t_ >= t {
                repl.push((t_, p))
            }
        }
        // Merkle state just before `t` (zero if `t` is the first entry).
        let mut m = Merkle::zero();
        for (t_, (_, m_)) in self.txn.rev_iter(&channel.revchanges, Some((t, None))) {
            if t_ < t {
                m = m_;
                break;
            }
        }
        for (t_, p) in repl.iter() {
            debug!("del_changes {:?} {:?}", t_, p);
            self.txn
                .del(&mut self.rng, &mut channel.revchanges, *t_, None)?;
            // Re-insert entries strictly after `t` with a Merkle value
            // recomputed from the state preceding `t`; the entry at `t`
            // itself stays deleted.
            if *t_ > t {
                m = m.next(&self.get_external(*p).unwrap());
                self.txn
                    .put(&mut self.rng, &mut channel.revchanges, *t_, (*p, m))?;
            }
        }
        Ok(self
            .txn
            .del(&mut self.rng, &mut channel.changes, p, Some(t))?)
    }
    /// Insert `(k, v)` into all three remote tables: forward
    /// (`remote`), by hash (`rev`) and by Merkle state (`states`).
    fn put_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
        v: (Hash, Merkle),
    ) -> Result<bool, anyhow::Error> {
        let mut remote = remote.borrow_mut();
        self.txn.put(&mut self.rng, &mut remote.remote, k, v)?;
        self.txn.put(&mut self.rng, &mut remote.states, v.1, k)?;
        Ok(self.txn.put(&mut self.rng, &mut remote.rev, v.0, k)?)
    }
    /// Remove entry `k` from all three remote tables; `false` if absent.
    fn del_remote(&mut self, remote: &mut RemoteRef<Self>, k: u64) -> Result<bool, anyhow::Error> {
        let mut remote = remote.borrow_mut();
        if let Some((h, m)) = self.txn.get(&remote.remote, k, None) {
            self.txn.del(&mut self.rng, &mut remote.rev, h, None)?;
            self.txn.del(&mut self.rng, &mut remote.states, m, None)?;
            Ok(self.txn.del(&mut self.rng, &mut remote.remote, k, None)?)
        } else {
            Ok(false)
        }
    }
    /// Fetch channel `name` from the cache or the database, creating
    /// fresh empty tables if it does not exist; a newly created channel
    /// is immediately written back via `put_channel`.
    fn open_or_create_channel(&mut self, name: &str) -> Result<ChannelRef<Self>, anyhow::Error> {
        let name = small_string::SmallString::from_str(name);
        let mut commit = None;
        match self.open_channels.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                let r = if let Some((
                    graph,
                    changes,
                    revchanges,
                    states,
                    apply_counter,
                    last_modified,
                )) = self.txn.get(
                    &self.channels,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                ) {
                    ChannelRef {
                        r: Rc::new(RefCell::new(Channel {
                            graph,
                            changes,
                            revchanges,
                            states,
                            apply_counter,
                            name: name.clone(),
                            last_modified,
                        })),
                    }
                } else {
                    let br = ChannelRef {
                        r: Rc::new(RefCell::new(Channel {
                            graph: self.txn.create_db()?,
                            changes: self.txn.create_db()?,
                            revchanges: self.txn.create_db()?,
                            states: self.txn.create_db()?,
                            apply_counter: 0,
                            name: name.clone(),
                            last_modified: 0,
                        })),
                    };
                    commit = Some(br.clone());
                    br
                };
                v.insert(r);
            }
            Entry::Occupied(_) => {}
        }
        if let Some(commit) = commit {
            self.put_channel(commit.clone())?;
        }
        Ok(self.open_channels.borrow().get(&name).unwrap().clone())
    }
    /// Fork `channel` under `new_name` by forking all four of its
    /// tables; fails if a channel of that name already exists.
    fn fork(
        &mut self,
        channel: &ChannelRef<Self>,
        new_name: &str,
    ) -> Result<ChannelRef<Self>, anyhow::Error> {
        let channel = channel.r.borrow();
        let name = SmallString::from_str(new_name);
        if self
            .txn
            .get(
                &self.channels,
                UnsafeSmallStr::from_small_str(name.as_small_str()),
                None,
            )
            .is_none()
        {
            let br = ChannelRef {
                r: Rc::new(RefCell::new(Channel {
                    graph: self.txn.fork(&mut self.rng, &channel.graph)?,
                    changes: self.txn.fork(&mut self.rng, &channel.changes)?,
                    revchanges: self.txn.fork(&mut self.rng, &channel.revchanges)?,
                    states: self.txn.fork(&mut self.rng, &channel.states)?,
                    name: name.clone(),
                    apply_counter: channel.apply_counter,
                    last_modified: channel.last_modified,
                })),
            };
            self.open_channels
                .borrow_mut()
                .insert(name.clone(), br.clone());
            Ok(br)
        } else {
            Err((crate::Error::ChannelNameExists {
                name: new_name.to_string(),
            })
            .into())
        }
    }
    /// Rename an open channel; fails if `new_name` is already taken.
    fn rename_channel(
        &mut self,
        channel: &mut ChannelRef<Self>,
        new_name: &str,
    ) -> Result<(), anyhow::Error> {
        let name = SmallString::from_str(new_name);
        if self
            .txn
            .get(
                &self.channels,
                UnsafeSmallStr::from_small_str(name.as_small_str()),
                None,
            )
            .is_none()
        {
            std::mem::drop(
                self.open_channels
                    .borrow_mut()
                    .remove(&channel.borrow().name)
                    .unwrap(),
            );
            // `Rc::get_mut` requires `channel` to be the only live
            // handle on this channel; the unwrap panics otherwise.
            std::cell::RefCell::borrow_mut(&std::rc::Rc::get_mut(&mut channel.r).unwrap()).name =
                name.clone();
            self.open_channels
                .borrow_mut()
                .insert(name, channel.clone());
            Ok(())
        } else {
            Err((crate::Error::ChannelNameExists {
                name: new_name.to_string(),
            })
            .into())
        }
    }
    /// Remove `name` from both the cache and the `channels` table;
    /// returns whether a database entry was actually deleted.
    fn drop_channel(&mut self, name: &str) -> Result<bool, anyhow::Error> {
        let name = SmallString::from_str(name);
        self.open_channels.borrow_mut().remove(&name);
        debug!("drop_channel {:?}", name);
        let result = self.txn.del(
            &mut self.rng,
            &mut self.channels,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?;
        debug!("/drop_channel {:?}: {:?}", name, result);
        Ok(result)
    }
    /// Remote counterpart of `open_or_create_channel`.
    fn open_or_create_remote(&mut self, name: &str) -> Result<RemoteRef<Self>, anyhow::Error> {
        let name = small_string::SmallString::from_str(name);
        let mut commit = None;
        match self.open_remotes.borrow_mut().entry(name.clone()) {
            Entry::Vacant(v) => {
                let r = if let Some(remote) = self.txn.get(
                    &self.remotes,
                    UnsafeSmallStr::from_small_str(name.as_small_str()),
                    None,
                ) {
                    RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: remote.0,
                            rev: remote.1,
                            states: remote.2,
                        })),
                        name: name.clone(),
                    }
                } else {
                    let br = RemoteRef {
                        db: Rc::new(RefCell::new(Remote {
                            remote: self.txn.create_db()?,
                            rev: self.txn.create_db()?,
                            states: self.txn.create_db()?,
                        })),
                        name: name.clone(),
                    };
                    commit = Some(br.clone());
                    br
                };
                v.insert(r);
            }
            Entry::Occupied(_) => {}
        }
        if let Some(commit) = commit {
            self.put_remotes(commit.clone())?;
        }
        Ok(self.open_remotes.borrow().get(&name).unwrap().clone())
    }
    /// Delete a remote given a live handle; asserts the handle being
    /// dropped was the last one before removing the table entry.
    fn drop_remote(&mut self, remote: RemoteRef<Self>) -> Result<bool, anyhow::Error> {
        let name = remote.name.clone();
        let r = self.open_remotes.borrow_mut().remove(&name).unwrap();
        std::mem::drop(remote);
        assert_eq!(Rc::strong_count(&r.db), 1);
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?)
    }
    /// Delete a remote by name, whether or not it is currently open.
    fn drop_named_remote(&mut self, name: &str) -> Result<bool, anyhow::Error> {
        let name = SmallString::from_str(name);
        if let Some(r) = self.open_remotes.borrow_mut().remove(&name) {
            assert_eq!(Rc::strong_count(&r.db), 1);
        }
        Ok(self.txn.del(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        )?)
    }
    /// Flush every cached channel and remote back into the global
    /// tables, write all root pages, then commit the underlying
    /// Sanakirja transaction.
    fn commit(mut self) -> Result<(), anyhow::Error> {
        use std::ops::DerefMut;
        {
            // Take ownership of the cache so its entries can be
            // consumed by `commit_channel`.
            let open_channels =
                std::mem::replace(self.open_channels.borrow_mut().deref_mut(), HashMap::new());
            for (name, channel) in open_channels {
                debug!("commit_channel {:?}", name);
                self.commit_channel(channel)?
            }
        }
        {
            let open_remotes =
                std::mem::replace(self.open_remotes.borrow_mut().deref_mut(), HashMap::new());
            for (_, remote) in open_remotes {
                self.commit_remote(remote)?
            }
        }
        self.txn.set_root(Root::Tree as usize, self.tree);
        self.txn.set_root(Root::RevTree as usize, self.revtree);
        self.txn.set_root(Root::Inodes as usize, self.inodes);
        self.txn.set_root(Root::RevInodes as usize, self.revinodes);
        self.txn.set_root(Root::Internal as usize, self.internal);
        self.txn.set_root(Root::External as usize, self.external);
        self.txn.set_root(Root::RevDep as usize, self.revdep);
        self.txn.set_root(Root::Channels as usize, self.channels);
        self.txn.set_root(Root::Remotes as usize, self.remotes);
        self.txn
            .set_root(Root::TouchedFiles as usize, self.touched_files);
        self.txn.set_root(Root::Dep as usize, self.dep);
        self.txn
            .set_root(Root::RevTouchedFiles as usize, self.rev_touched_files);
        self.txn.set_root(Root::Partials as usize, self.partials);
        self.txn.commit()?;
        Ok(())
    }
}
impl Txn {
    /// Load the channel called `name` as a plain `Channel` value,
    /// bypassing the `open_channels` cache (unlike
    /// `TxnT::load_channel`). Returns `None` if no such channel exists.
    pub fn load_const_channel(&self, name: &str) -> Option<Channel<Self>> {
        let name = SmallString::from_str(name);
        if let Some((channel, changes, revchanges, states, counter, last_modified)) = self.txn.get(
            &self.channels,
            UnsafeSmallStr::from_small_str(name.as_small_str()),
            None,
        ) {
            Some(Channel {
                graph: channel,
                changes,
                revchanges,
                states,
                apply_counter: counter,
                // `name` is not used again: move it instead of cloning.
                name,
                last_modified,
            })
        } else {
            None
        }
    }
}
impl<T> MutTxn<T> {
    /// Write `channel`'s current table roots and counters into the
    /// global `channels` table under the channel's name, replacing any
    /// previous entry. Fails if the channel is currently borrowed.
    fn put_channel(&mut self, channel: ChannelRef<Self>) -> Result<(), anyhow::Error> {
        debug!("Commit_channel. This is not too safe.");
        let channel = channel.r.try_borrow()?;
        // Since we are replacing the value, we don't want to
        // decrement its reference counter (which del would do), hence
        // the transmute.
        //
        // This would normally be wrong. The only reason it works is
        // because we know that dbs_channels has never been forked
        // from another database, hence all the reference counts to
        // its elements are 1 (and therefore represented as "not
        // referenced" in Sanakirja).
        let mut dbs_channels: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64, u64, u64, u64)> =
            unsafe { std::mem::transmute(self.channels) };
        debug!("Commit_channel, dbs_channels = {:?}", dbs_channels);
        self.txn.del(
            &mut self.rng,
            &mut dbs_channels,
            UnsafeSmallStr::from_small_str(channel.name.as_small_str()),
            None,
        )?;
        debug!("Commit_channel, dbs_channels = {:?}", dbs_channels);
        // Restore the typed view; `del` may have moved the root page.
        self.channels = unsafe { std::mem::transmute(dbs_channels) };
        self.txn.put(
            &mut self.rng,
            &mut self.channels,
            UnsafeSmallStr::from_small_str(channel.name.as_small_str()),
            (
                channel.graph,
                channel.changes,
                channel.revchanges,
                channel.states,
                channel.apply_counter,
                channel.last_modified,
            ),
        )?;
        debug!("Commit_channel, self.dbs.channels = {:?}", self.channels);
        Ok(())
    }
    /// Remove `channel` from the open-channel cache, then persist it
    /// via `put_channel`.
    fn commit_channel(&mut self, channel: ChannelRef<Self>) -> Result<(), anyhow::Error> {
        debug!("Commit_channel. This is not too safe.");
        std::mem::drop(
            self.open_channels
                .borrow_mut()
                .remove(&channel.r.borrow().name),
        );
        // assert_eq!(Rc::strong_count(&channel.r), 1);
        self.put_channel(channel)
    }
    /// Same replace-without-decrement trick as `put_channel`, for the
    /// global `remotes` table.
    fn put_remotes(&mut self, remote: RemoteRef<Self>) -> Result<(), anyhow::Error> {
        let mut dbs_remotes: ::sanakirja::Db<UnsafeSmallStr, (u64, u64, u64)> =
            unsafe { std::mem::transmute(self.remotes) };
        debug!("Commit_remote, dbs_remotes = {:?}", dbs_remotes);
        self.txn.del(
            &mut self.rng,
            &mut dbs_remotes,
            UnsafeSmallStr::from_small_str(remote.name.as_small_str()),
            None,
        )?;
        debug!("Commit_remote, dbs_remotes = {:?}", dbs_remotes);
        self.remotes = unsafe { std::mem::transmute(dbs_remotes) };
        let r = remote.db.borrow();
        self.txn.put(
            &mut self.rng,
            &mut self.remotes,
            UnsafeSmallStr::from_small_str(remote.name.as_small_str()),
            (r.remote, r.rev, r.states),
        )?;
        debug!("Commit_remote, self.dbs.remotes = {:?}", self.remotes);
        Ok(())
    }
    /// Remove `remote` from the open-remote cache, then persist it.
    fn commit_remote(&mut self, remote: RemoteRef<Self>) -> Result<(), anyhow::Error> {
        std::mem::drop(self.open_remotes.borrow_mut().remove(&remote.name));
        // assert_eq!(Rc::strong_count(&remote.db), 1);
        self.put_remotes(remote)
    }
}
// org id alOSO47wz8kNW1m6QFJRsSfzfMuTUUkYcqcNRWFw78I=
/// On-page size, in bytes, of a serialized `ChangeId` (one `u64`).
const CHANGE_ID_SIZE: usize = 8;
// On-page format: the raw `u64`, little-endian, 8-byte aligned.
impl Representable for ChangeId {
    fn alignment() -> Alignment {
        Alignment::B8
    }
    fn onpage_size(&self) -> u16 {
        CHANGE_ID_SIZE as u16
    }
    // `p` must point to at least 8 writable bytes (required by
    // `slice::from_raw_parts_mut`).
    unsafe fn write_value(&self, p: *mut u8) {
        LittleEndian::write_u64(std::slice::from_raw_parts_mut(p, 8), self.0)
    }
    // `p` must point to at least 8 readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        ChangeId(LittleEndian::read_u64(std::slice::from_raw_parts(p, 8)))
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        // Numeric order on the raw id.
        self.0.cmp(&x.0)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        // A `ChangeId` references no pages of its own.
        std::iter::empty()
    }
}
/// On-page size of a `Vertex<ChangeId>`: change id + two 8-byte positions.
const VERTEX_SIZE: usize = CHANGE_ID_SIZE + 16;
// On-page format: change (8 LE bytes) | start (8) | end (8). Writes go
// through byte slices, so no alignment is needed (B1).
impl Representable for Vertex<ChangeId> {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        VERTEX_SIZE as u16
    }
    // `p` must point to at least `VERTEX_SIZE` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let p = std::slice::from_raw_parts_mut(p, VERTEX_SIZE);
        LittleEndian::write_u64(p, self.change.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE..], self.start.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE + 8..], self.end.0);
    }
    // `p` must point to at least `VERTEX_SIZE` readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        let p = std::slice::from_raw_parts(p, VERTEX_SIZE);
        let change = LittleEndian::read_u64(p);
        let start = LittleEndian::read_u64(&p[CHANGE_ID_SIZE..]);
        let end = LittleEndian::read_u64(&p[CHANGE_ID_SIZE + 8..]);
        Vertex {
            change: ChangeId(change),
            start: ChangePosition(start),
            end: ChangePosition(end),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        // Delegates to `Vertex`'s own `Ord`.
        self.cmp(&x)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page format: change (8 LE bytes) | pos (8); byte-oriented, so B1.
impl Representable for Position<ChangeId> {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        (CHANGE_ID_SIZE + 8) as u16
    }
    // `p` must point to at least `CHANGE_ID_SIZE + 8` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let p = std::slice::from_raw_parts_mut(p, CHANGE_ID_SIZE + 8);
        LittleEndian::write_u64(p, self.change.0);
        LittleEndian::write_u64(&mut p[CHANGE_ID_SIZE..], self.pos.0);
    }
    // `p` must point to at least `CHANGE_ID_SIZE + 8` readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        let p = std::slice::from_raw_parts(p, CHANGE_ID_SIZE + 8);
        let change = LittleEndian::read_u64(p);
        let pos = LittleEndian::read_u64(&p[CHANGE_ID_SIZE..]);
        Position {
            change: ChangeId(change),
            pos: ChangePosition(pos),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.cmp(&x)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page format, 25 bytes: flag (1) | dest.change (8 LE) | dest.pos (8)
// | introduced_by (8).
impl Representable for Edge {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        25
    }
    // `p` must point to at least 25 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        let s = std::slice::from_raw_parts_mut(p, 25);
        s[0] = (*self).flag.bits();
        LittleEndian::write_u64(&mut s[1..], (*self).dest.change.0);
        LittleEndian::write_u64(&mut s[9..], (*self).dest.pos.0);
        LittleEndian::write_u64(&mut s[17..], (*self).introduced_by.0);
    }
    // `p` must point to at least 25 readable bytes. Panics if the flag
    // byte contains bits unknown to `EdgeFlags` (corrupted page).
    unsafe fn read_value(p: *const u8) -> Self {
        let s = std::slice::from_raw_parts(p, 25);
        Edge {
            flag: if let Some(b) = EdgeFlags::from_bits(s[0]) {
                b
            } else {
                panic!("read_value, edge = {:?}", s);
            },
            dest: Position {
                change: ChangeId(LittleEndian::read_u64(&s[1..])),
                pos: ChangePosition(LittleEndian::read_u64(&s[9..])),
            },
            introduced_by: ChangeId(LittleEndian::read_u64(&s[17..])),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        let a: &Edge = self;
        let b: &Edge = &x;
        a.cmp(b)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
/// An internal "unsafe" version of a [`pristine::PathId`], used to
/// circumvent the absence of associated type constructors in Rust
/// (else this would be borrow on a table).
#[derive(Clone, Copy, Debug)]
#[doc(hidden)]
pub struct UnsafePathId {
    /// Directory containing the entry.
    parent_inode: Inode,
    /// Name of the entry, with its borrow lifetime erased (see
    /// `UnsafePathId::to_fileid`, which is `unsafe` for this reason).
    basename: UnsafeSmallStr,
}
impl UnsafePathId {
    /// Build an `UnsafePathId` from a borrowed `PathId`, erasing the
    /// lifetime of `f.basename`. The result must not outlive the data
    /// the name points into.
    pub fn from_fileid(f: PathId) -> UnsafePathId {
        UnsafePathId {
            parent_inode: f.parent_inode,
            basename: UnsafeSmallStr::from_small_str(f.basename),
        }
    }
    /// Reborrow as a `PathId` with a caller-chosen lifetime.
    ///
    /// # Safety
    ///
    /// The lifetime `'a` is invented here: the caller must guarantee
    /// that the buffer `self.basename` refers to is still alive for all
    /// of `'a`.
    pub unsafe fn to_fileid<'a>(&self) -> PathId<'a> {
        PathId {
            parent_inode: self.parent_inode,
            basename: self.basename.to_small_str(),
        }
    }
}
// On-page format: parent inode (`INODE_SIZE` bytes) followed by the
// name; total size is therefore variable.
impl Representable for UnsafePathId {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        INODE_SIZE + self.basename.onpage_size()
    }
    // `p` must point to at least `self.onpage_size()` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        self.parent_inode.write_value(p);
        self.basename.write_value(p.offset(INODE_SIZE as isize));
    }
    // `p` must point to a valid on-page encoding (inode + name).
    unsafe fn read_value(p: *const u8) -> Self {
        UnsafePathId {
            parent_inode: Inode::read_value(p),
            basename: UnsafeSmallStr::read_value(p.offset(INODE_SIZE as isize)),
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        // Compare through the safe view, using `PathId`'s derived
        // `Ord`. Both `to_fileid` borrows only live for this call.
        let a: PathId = self.to_fileid();
        let b: PathId = x.to_fileid();
        a.cmp(&b)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
/// On-page size, in bytes, of a serialized `Inode` (one `u64`).
const INODE_SIZE: u16 = 8;
// On-page format: the raw `u64`, little-endian, 8-byte aligned.
impl Representable for Inode {
    fn alignment() -> Alignment {
        Alignment::B8
    }
    fn onpage_size(&self) -> u16 {
        INODE_SIZE
    }
    // `p` must point to at least 8 writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        LittleEndian::write_u64(std::slice::from_raw_parts_mut(p, 8), self.0)
    }
    // `p` must point to at least 8 readable bytes.
    unsafe fn read_value(p: *const u8) -> Self {
        Inode(LittleEndian::read_u64(std::slice::from_raw_parts(p, 8)))
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.0.cmp(&x.0)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page format: one algorithm tag byte, then the digest (32 bytes for
// Blake3, nothing for `Hash::None`), so the size is variant-dependent.
impl Representable for Hash {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        1 + (match *self {
            Hash::Blake3(_) => 32,
            Hash::None => 0,
        })
    }
    // `p` must point to at least `self.onpage_size()` writable bytes.
    unsafe fn write_value(&self, p: *mut u8) {
        match *self {
            Hash::Blake3(q) => {
                *p = HashAlgorithm::Blake3 as u8;
                std::ptr::copy(q.as_ptr(), p.offset(1), 32)
            }
            Hash::None => *p = HashAlgorithm::None as u8,
        }
    }
    // `p` must point to a valid on-page encoding. The assert bounds the
    // tag byte; the transmute then assumes every value up to
    // `HashAlgorithm::Blake3` is a valid discriminant — TODO confirm
    // `HashAlgorithm`'s repr and discriminant layout.
    unsafe fn read_value(p: *const u8) -> Self {
        assert!(*p <= HashAlgorithm::Blake3 as u8);
        match std::mem::transmute(*p) {
            HashAlgorithm::Blake3 => {
                let mut h = [0; BLAKE3_BYTES];
                std::ptr::copy(p.offset(1), h.as_mut_ptr(), BLAKE3_BYTES);
                Hash::Blake3(h)
            }
            HashAlgorithm::None => Hash::None,
        }
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        self.cmp(&x)
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// On-page format, 33 bytes: algorithm tag (1) followed by the
// compressed Edwards point (32).
impl Representable for Merkle {
    fn alignment() -> Alignment {
        Alignment::B1
    }
    fn onpage_size(&self) -> u16 {
        33
    }
    // `p` must point to at least 33 writable bytes. The point is stored
    // in compressed form.
    unsafe fn write_value(&self, p: *mut u8) {
        match *self {
            Merkle::Ed25519(q) => {
                *p = MerkleAlgorithm::Ed25519 as u8;
                assert_eq!(*p, 1);
                let q = q.compress();
                let q = q.as_bytes();
                std::ptr::copy(q.as_ptr(), p.offset(1), 32);
            }
        }
    }
    // `p` must point to at least 33 readable bytes. Panics if the tag
    // is not Ed25519 or the stored bytes don't decompress to a valid
    // curve point (corrupted page).
    unsafe fn read_value(p: *const u8) -> Self {
        assert_eq!(*p, MerkleAlgorithm::Ed25519 as u8);
        let slice = std::slice::from_raw_parts(p.offset(1), 32);
        Merkle::Ed25519(
            curve25519_dalek::edwards::CompressedEdwardsY::from_slice(slice)
                .decompress()
                .unwrap(),
        )
    }
    unsafe fn cmp_value<T>(&self, _: &T, x: Self) -> std::cmp::Ordering {
        // Ordered by serialized byte representation.
        self.to_bytes().cmp(&x.to_bytes())
    }
    type PageOffsets = std::iter::Empty<u64>;
    fn page_offsets(&self) -> Self::PageOffsets {
        std::iter::empty()
    }
}
// org id iBTHhIzBVluJ4gTIZXlBR1WgC/C6Zw4cKuVCh++X60Y=
use super::inode::*;
use crate::small_string::*;
/// A key in the file tree, i.e. a directory (`parent_inode`) and the
/// name of the child (file or directory).
///
/// This is the owned counterpart of [`PathId`](struct.PathId.html).
#[doc(hidden)]
#[derive(Debug, Hash, Eq, PartialEq, Clone, PartialOrd, Ord)]
pub struct OwnedPathId {
    /// The parent of this path.
    pub parent_inode: Inode,
    /// Name of the file.
    pub basename: SmallString,
}
impl OwnedPathId {
    /// Borrow this owned path id as a [`PathId`](struct.PathId.html),
    /// without copying the name.
    pub fn as_file_id(&self) -> PathId {
        PathId {
            parent_inode: self.parent_inode,
            basename: self.basename.as_small_str(),
        }
    }
}
/// A borrow on a [`OwnedPathId`](struct.OwnedPathId.html).
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Copy)]
#[doc(hidden)]
pub struct PathId<'a> {
    /// The directory containing the named entry.
    pub parent_inode: Inode,
    /// Borrowed name of the file or directory.
    pub basename: SmallStr<'a>,
}
impl<'a> PathId<'a> {
    /// Copy this borrowed `PathId` into an [`OwnedPathId`](struct.OwnedPathId.html).
    pub fn to_owned(&self) -> OwnedPathId {
        let parent_inode = self.parent_inode.clone();
        let basename = self.basename.to_owned();
        OwnedPathId {
            parent_inode,
            basename,
        }
    }
}
// org id pdn0eIhH1QRWfoHMRKOKEyH66N6/XdmqfhCgcomZKKY=
use byteorder::{ByteOrder, LittleEndian};
/// The internal, 64-bit identifier of a change inside the pristine
/// database; the external identity of a change is its [`Hash`].
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct ChangeId(pub u64);
impl std::fmt::Debug for ChangeId {
    /// Render as `ChangeId(<base32>)` rather than the raw integer.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let encoded = self.to_base32();
        write!(f, "ChangeId({})", encoded)
    }
}
impl ChangeId {
    /// The identifier of the root change, 0.
    pub(crate) const ROOT: ChangeId = ChangeId(0);
    /// `true` iff this is [`ChangeId::ROOT`].
    pub fn is_root(&self) -> bool {
        ChangeId::ROOT == *self
    }
    /// Crockford base32 rendering of the little-endian bytes of this id.
    pub fn to_base32(&self) -> String {
        let mut bytes = [0u8; 8];
        LittleEndian::write_u64(&mut bytes, self.0);
        base32::encode(base32::Alphabet::Crockford, &bytes)
    }
}
// org id t9IQhza5ahG0m1/Po2oDcsDHnOGkOwlFVTNlWNtzNDY=
use crate::change::*;
use crate::small_string::*;
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::rc::Rc;
mod change_id;
pub use change_id::*;
mod vertex;
pub use vertex::*;
mod edge;
pub use edge::*;
mod hash;
pub use hash::*;
mod inode;
pub use inode::*;
mod inode_metadata;
pub use inode_metadata::*;
mod path_id;
pub use path_id::*;
mod merkle;
pub use merkle::*;
#[cfg(feature = "dump")]
pub mod channel_dump;
/// Conversion to and from base32 strings.
pub trait Base32: Sized {
    /// Encode `self` as a base32 string.
    fn to_base32(&self) -> String;
    /// Decode from base32 bytes, `None` if `b` is not a valid encoding.
    fn from_base32(b: &[u8]) -> Option<Self>;
}
pub mod sanakirja;
// org id Qkha+M+HuWMXKeU6901VcwYqazAm5xHiO3+xzsK0vkI=
/// Timestamp at which a change was applied to a channel: a counter,
/// incremented per application (see `apply_counter` below).
pub type ApplyTimestamp = u64;
/// A channel, i.e. a pristine graph plus a set of changes.
pub struct Channel<T: TxnT> {
    /// The vertex/edge graph of this channel.
    pub graph: T::Graph,
    /// Map from internal change ids to their application timestamps.
    pub changes: T::Changeset,
    /// Reverse log: timestamps to `(ChangeId, Merkle)` pairs.
    pub revchanges: T::Revchangeset,
    /// States this channel has gone through.
    pub states: T::Channelstates,
    /// Number of changes applied so far.
    pub apply_counter: ApplyTimestamp,
    // Accessed through `Channel::name()`.
    pub(crate) name: SmallString,
    // NOTE(review): presumably seconds since the epoch — not
    // established by this file; confirm against writers.
    pub last_modified: u64,
}
/// A shared, reference-counted handle on a [`Channel`].
pub struct ChannelRef<T: TxnT> {
    pub(crate) r: Rc<RefCell<Channel<T>>>,
}
impl<T: TxnT> Clone for ChannelRef<T> {
fn clone(&self) -> Self {
ChannelRef { r: self.r.clone() }
}
}
impl<T: TxnT> Channel<T> {
    /// The name of this channel.
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
}
impl<T: TxnT> RemoteRef<T> {
    /// The name of this remote.
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
}
impl<T: TxnT> ChannelRef<T> {
    /// Immutably borrow the shared channel. Panics if it is currently
    /// mutably borrowed (standard `RefCell` behaviour).
    pub fn borrow(&self) -> std::cell::Ref<Channel<T>> {
        self.r.borrow()
    }
    /// Mutably borrow the shared channel. Panics if any other borrow
    /// is active.
    pub fn borrow_mut(&mut self) -> std::cell::RefMut<Channel<T>> {
        self.r.borrow_mut()
    }
}
/// The tables describing a remote repository's log and states.
pub struct Remote<T: TxnT> {
    /// Timestamps to `(Hash, Merkle)` pairs (see `iter_remote`).
    pub remote: T::Remote,
    /// Reverse of `remote`.
    pub rev: T::Revremote,
    /// States known for this remote.
    pub states: T::Remotestates,
}
// org id PkITZrEn636gC2V7sqH5O6gX8NI0yzkkg7MSaoaARQg=
/// A shared, reference-counted handle on a [`Remote`], tagged with
/// the remote's name.
pub struct RemoteRef<T: TxnT> {
    db: Rc<RefCell<Remote<T>>>,
    name: SmallString,
}
impl<T: TxnT> Clone for RemoteRef<T> {
fn clone(&self) -> Self {
RemoteRef {
db: self.db.clone(),
name: self.name.clone(),
}
}
}
impl<T: TxnT> RemoteRef<T> {
    /// Immutably borrow the shared remote. Panics if it is currently
    /// mutably borrowed (standard `RefCell` behaviour).
    pub fn borrow(&self) -> std::cell::Ref<Remote<T>> {
        self.db.borrow()
    }
    /// Mutably borrow the shared remote. Panics if any other borrow
    /// is active.
    pub fn borrow_mut(&mut self) -> std::cell::RefMut<Remote<T>> {
        self.db.borrow_mut()
    }
}
// org id Zm7WF9q4bHktcCg0LXmmKlQP7XVt4IkPEDuF9pREDw4=
/// The trait of immutable transactions.
pub trait TxnT: Sized {
table!(graph);
cursor_ref!(graph, Vertex<ChangeId>, Edge);
get!(graph, Vertex<ChangeId>, Edge);
table!(changeset);
get!(changeset, ChangeId, u64);
cursor!(changeset, ChangeId, u64);
table!(revchangeset);
get!(revchangeset, u64, (ChangeId, Merkle));
cursor_ref!(revchangeset, u64, (ChangeId, Merkle));
rev_cursor!(revchangeset, u64, (ChangeId, Merkle));
table!(channelstates);
table!(tree);
table_get!(tree, PathId, Inode);
iter!(tree, OwnedPathId, Inode);
table!(revtree);
table_get!(revtree, Inode, PathId);
iter!(revtree, Inode, OwnedPathId);
table!(inodes);
table_get!(inodes, Inode, Position<ChangeId>);
table_get!(revinodes, Position<ChangeId>, Inode);
table!(partials);
cursor!(partials, SmallString, Position<ChangeId>);
table!(channels);
cursor!(channels, SmallString, (u64, u64, u64, u64, u64, u64));
#[doc(hidden)]
fn iter_partials<'txn>(
&'txn self,
channel: &str,
) -> Cursor<Self, &'txn Self, Self::PartialsCursor, SmallString, Position<ChangeId>>;
table!(revdep);
table!(dep);
table_get!(dep, ChangeId, ChangeId);
cursor_ref!(dep, ChangeId, ChangeId);
table_get!(revdep, ChangeId, ChangeId);
table!(touched_files);
table!(rev_touched_files);
table_get!(touched_files, Position<ChangeId>, ChangeId);
table_get!(rev_touched_files, ChangeId, Position<ChangeId>);
iter!(touched_files, Position<ChangeId>, ChangeId);
iter!(rev_touched_files, ChangeId, Position<ChangeId>);
/// Returns the external hash of an internal change identifier, if
/// the change is known.
#[doc(hidden)]
fn get_external(&self, p: ChangeId) -> Option<Hash>;
/// Returns the internal change identifier of change with external
/// hash `hash`, if the change is known.
#[doc(hidden)]
fn get_internal(&self, p: Hash) -> Option<ChangeId>;
fn hash_from_prefix(&self, prefix: &str) -> Result<(Hash, ChangeId), anyhow::Error>;
fn hash_from_prefix_remote(
&self,
remote: &RemoteRef<Self>,
prefix: &str,
) -> Result<Hash, anyhow::Error>;
/// Returns a handle to the channel with name `name`, if it
/// exists. In order to prevent memory leaks, channels have to be
/// committed after use.
///
/// See the note about the return type of
/// [`MutTxnT::open_or_create_channel`](trait.MutTxnT.html#tymethod.open_or_create_channel).
/// Since a mutable transaction type could implement both this
/// trait and [`MutTxnT`](trait.MutTxnT.html), the return type of
/// this function must use the same logic as the return type of
/// [`MutTxnT::open_or_create_channel`](trait.MutTxnT.html#tymethod.open_or_create_channel).
fn load_channel(&self, name: &str) -> Option<ChannelRef<Self>>;
fn load_remote(&self, name: &str) -> Option<RemoteRef<Self>>;
/// Iterate a function over all channels. The loop stops the first
/// time `f` returns `false`.
fn iter_channels<'txn>(&'txn self, start: &str) -> ChannelIterator<'txn, Self>;
#[doc(hidden)]
fn iter_remotes<'txn>(&'txn self, start: &str) -> RemotesIterator<'txn, Self>;
// Provided methods
/// Iterate the graph between `(key, min_flag)` and `(key,
/// max_flag)`, where both bounds are included.
#[doc(hidden)]
fn iter_adjacent<'db, 'txn: 'db>(
    &'txn self,
    channel: &'db Channel<Self>,
    key: Vertex<ChangeId>,
    min_flag: EdgeFlags,
    max_flag: EdgeFlags,
) -> AdjacentIterator<'txn, Self> {
    // Smallest edge value with flag `min_flag`: positions the cursor
    // at the first candidate edge of `key`.
    let edge = Edge {
        flag: min_flag,
        dest: Position::ROOT,
        introduced_by: ChangeId::ROOT,
    };
    AdjacentIterator {
        it: self.iter_graph(&channel.graph, key, Some(edge)),
        key,
        min_flag,
        max_flag,
    }
}
/// Find the key where a position is.
///
/// Returns the vertex of `channel`'s graph that contains position
/// `p`, or `Error::WrongBlock` if no such vertex exists.
#[doc(hidden)]
fn find_block<'db, 'txn: 'db>(
    &'txn self,
    channel: &'db Channel<Self>,
    p: Position<ChangeId>,
) -> Result<Vertex<ChangeId>, crate::Error> {
    // The root position always resolves to the root vertex.
    if p.change.is_root() {
        return Ok(Vertex::ROOT);
    }
    // Degenerate (empty) vertex used purely as a search key.
    let key = Vertex {
        change: p.change,
        start: p.pos,
        end: p.pos,
    };
    debug!(target: "libpijul::find_block", "find_block {:?}", key);
    let mut cursor = self.cursor_graph(&channel.graph, Some((key, None)));
    let mut k = if let Some((k, _)) = cursor.next() {
        k
    } else {
        return Err(crate::Error::WrongBlock { block: p });
    };
    debug!("k = {:?}", k);
    // The only guarantee here is that k is either the first key
    // >= `key`, or the key just before that. We might need to
    // rewind by one step if key is strictly larger than the
    // result (i.e. if `p` is in the middle of the key).
    while k.change > p.change || (k.change == p.change && k.start > p.pos) {
        debug!(target: "libpijul::find_block", "find_block while {:?}", k);
        if let Some((k_, _)) = cursor.prev() {
            k = k_
        } else {
            break;
        }
    }
    // Scan forwards for a vertex of the same change covering `p.pos`;
    // empty vertices (start == end) match on equality.
    loop {
        debug!(target: "libpijul::find_block", "find_block loop {:?}", k);
        if k.change == p.change && k.start <= p.pos {
            if k.end > p.pos || (k.start == k.end && k.end == p.pos) {
                return Ok(k);
            }
        } else if k.change > p.change {
            // Past all vertices of `p.change`: `p` is not in the graph.
            return Err(crate::Error::WrongBlock { block: p });
        }
        if let Some((k_, _)) = cursor.next() {
            k = k_
        } else {
            break;
        }
    }
    debug!(target: "libpijul::find_block", "find_block None, {:?}", k);
    Err(crate::Error::WrongBlock { block: p })
}
/// Find the key ending at a position.
///
/// Like [`TxnT::find_block`], but resolves `p` to the vertex whose
/// *end* boundary covers it (`start <= p.pos <= end`), which is the
/// vertex an edge ending at `p` should attach to.
#[doc(hidden)]
fn find_block_end<'db, 'txn: 'db>(
    &'txn self,
    channel: &'db Channel<Self>,
    p: Position<ChangeId>,
) -> Result<Vertex<ChangeId>, crate::Error> {
    if p.change.is_root() {
        return Ok(Vertex::ROOT);
    }
    // Degenerate (empty) vertex used purely as a search key.
    let key = Vertex {
        change: p.change,
        start: p.pos,
        end: p.pos,
    };
    debug!(target: "libpijul::find_block_end", "find_block_end {:?}, p.change.0 = {:?}", key, p.change.0);
    let mut cursor = self.cursor_graph(&channel.graph, Some((key, None)));
    let mut k = if let Some((k, _)) = cursor.next() {
        k
    } else {
        return Err(crate::Error::WrongBlock { block: p });
    };
    // The only guarantee here is that k is either the first key
    // before `key`, or the key just before that.
    // First rewind until k is at or before the target position.
    loop {
        debug!(target: "libpijul::find_block_end", "find_block_end loop {:?} k.change.0 = {:?}", k, k.change.0);
        if k.change < p.change {
            break;
        } else if k.change == p.change {
            // Here we want to create an edge pointing between `p`
            // and its successor. If k.start == p.pos, the only
            // case where that's what we want is if k.start ==
            // k.end.
            if k.start == p.pos && k.end == p.pos {
                break;
            } else if k.start < p.pos {
                break;
            }
        }
        if let Some((k_, _)) = cursor.prev() {
            k = k_
        } else {
            break;
        }
    }
    // We also want k.end >= p.pos, so we just call next() until
    // we have that.
    debug!(target: "libpijul::find_block_end", "find_block_end k(0) = {:?} k.change.0 = {:?}", k, k.change.0);
    while k.change < p.change || (k.change == p.change && p.pos > k.end) {
        if let Some((k_, _)) = cursor.next() {
            k = k_
        } else {
            break;
        }
    }
    debug!(target: "libpijul::find_block_end", "find_block_end k(1) = {:?}, k.change.0 = {:?}", k, k.change.0);
    // Success iff the final candidate actually covers `p`.
    if k.change == p.change && k.start <= p.pos && p.pos <= k.end {
        Ok(k)
    } else {
        Err(crate::Error::WrongBlock { block: p })
    }
}
/// Reconstruct the file path of the inode registered at graph
/// position `v`, by walking `revtree` up to the root and joining
/// the collected basenames with `/`.
///
/// Returns `None` if `v` has no inode, if the walk hits a missing
/// `revtree` entry immediately, or if `v`'s inode is the root
/// (which has no path components).
fn tree_path(&self, v: Position<ChangeId>) -> Option<String> {
    let mut inode = self.get_revinodes(v, None)?;
    // Basenames from leaf to root.
    let mut components = Vec::new();
    while !inode.is_root() {
        if let Some(next) = self.get_revtree(inode, None) {
            components.push(next.basename.as_str().to_string());
            inode = next.parent_inode;
        } else {
            // A missing revtree entry is only tolerated on the very
            // first step; anything later is a broken table.
            assert!(components.is_empty());
            return None;
        }
    }
    if components.is_empty() {
        // `v` mapped directly to the root inode: no path.
        None
    } else {
        // Collected leaf-first; reverse, then join in one pass
        // (the previous `result + "/" + c` loop was quadratic).
        components.reverse();
        Some(components.join("/"))
    }
}
#[doc(hidden)]
/// Resolve an optional external hash to an internal change id:
/// `None` means "the change being applied" and maps to `p`,
/// `Some(Hash::None)` is the root, anything else is looked up.
fn internal(&self, h: &Option<Hash>, p: ChangeId) -> Option<ChangeId> {
    match *h {
        Some(Hash::None) => Some(ChangeId::ROOT),
        Some(h) => self.get_internal(h),
        None => Some(p),
    }
}
#[doc(hidden)]
fn internal_pos(
&self,
pos: &Position<Option<Hash>>,
change_id: ChangeId,
) -> Result<Position<ChangeId>, crate::Error> {
Ok(Position {
change: if let Some(p) = pos.change {
if let Some(p) = self.get_internal(p) {
p
} else {
return Err(crate::Error::InconsistentChange);
}
} else {
change_id
},
pos: pos.pos,
})
}
#[doc(hidden)]
/// Cursor over the graph's edges, positioned at `(k, v)`.
fn iter_graph<'txn>(
    &'txn self,
    graph: &Self::Graph,
    k: Vertex<ChangeId>,
    v: Option<Edge>,
) -> Cursor<Self, &'txn Self, Self::GraphCursor, Vertex<ChangeId>, Edge> {
    self.cursor_graph(graph, Some((k, v)))
}
#[doc(hidden)]
/// Like [`TxnT::iter_graph`], but the cursor owns its transaction
/// handle `RT` instead of borrowing `&self`.
fn iter_graph_ref<RT: std::ops::Deref<Target = Self>>(
    txn: RT,
    graph: &Self::Graph,
    k: Vertex<ChangeId>,
    v: Option<Edge>,
) -> Cursor<Self, RT, Self::GraphCursor, Vertex<ChangeId>, Edge> {
    Self::cursor_graph_ref(txn, graph, Some((k, v)))
}
#[doc(hidden)]
fn iter_revdep<'a, 'txn>(
&'txn self,
p: ChangeId,
) -> Cursor<Self, &'txn Self, Self::DepCursor, ChangeId, ChangeId>;
#[doc(hidden)]
fn iter_dep<'txn>(
&'txn self,
p: ChangeId,
) -> Cursor<Self, &'txn Self, Self::DepCursor, ChangeId, ChangeId>;
#[doc(hidden)]
fn iter_dep_ref<RT: std::ops::Deref<Target = Self> + Clone>(
txn: RT,
p: ChangeId,
) -> Cursor<Self, RT, Self::DepCursor, ChangeId, ChangeId>;
#[doc(hidden)]
fn iter_touched<'txn>(
&'txn self,
p: Position<ChangeId>,
) -> Cursor<Self, &'txn Self, Self::Touched_filesCursor, Position<ChangeId>, ChangeId>;
#[doc(hidden)]
fn iter_rev_touched<'txn>(
&'txn self,
p: ChangeId,
) -> Cursor<Self, &'txn Self, Self::Rev_touched_filesCursor, ChangeId, Position<ChangeId>>;
#[doc(hidden)]
/// Cursor over the channel's log, starting at timestamp `from`.
fn changeid_log<'db, 'txn: 'db>(
    &'txn self,
    channel: &'db Channel<Self>,
    from: u64,
) -> Cursor<Self, &'txn Self, Self::RevchangesetCursor, u64, (ChangeId, Merkle)> {
    self.cursor_revchangeset(&channel.revchanges, Some((from, None)))
}
/// The Merkle state after the last change applied to `channel`,
/// i.e. the entry with the largest timestamp, if any.
fn current_state<'db, 'txn: 'db>(&'txn self, channel: &'db Channel<Self>) -> Option<Merkle> {
    let mut newest_first = self.rev_cursor_revchangeset(&channel.revchanges, None);
    let (_, (_, state)) = newest_first.next()?;
    Some(state)
}
#[doc(hidden)]
/// Like [`TxnT::changeid_log`], but the cursor owns its transaction
/// handle `RT` instead of borrowing `&self`.
fn changeid_log_ref<RT: std::ops::Deref<Target = Self>>(
    txn: RT,
    channel: &Channel<Self>,
    from: u64,
) -> Cursor<Self, RT, Self::RevchangesetCursor, u64, (ChangeId, Merkle)> {
    Self::cursor_revchangeset_ref(txn, &channel.revchanges, Some((from, None)))
}
#[doc(hidden)]
/// Reverse cursor over the channel's log, starting at timestamp
/// `from` (or at the end if `None`) and moving backwards.
fn changeid_rev_log<'db, 'txn: 'db>(
    &'txn self,
    channel: &'db Channel<Self>,
    from: Option<u64>,
) -> RevCursor<Self, &'txn Self, Self::RevchangesetCursor, u64, (ChangeId, Merkle)> {
    self.rev_cursor_revchangeset(&channel.revchanges, from.map(|from| (from, None)))
}
#[doc(hidden)]
/// Iterator over the hashes of changes (from timestamp
/// `from_timestamp` onwards) that touched files under the file
/// vertex `key`; see [`PathChangeset`].
fn log_for_path<'txn, 'channel>(
    &'txn self,
    channel: &'channel Channel<Self>,
    key: Position<ChangeId>,
    from_timestamp: u64,
) -> PathChangeset<'channel, 'txn, Self> {
    PathChangeset {
        iter: self.cursor_revchangeset(&channel.revchanges, Some((from_timestamp, None))),
        txn: self,
        channel,
        key,
    }
}
#[doc(hidden)]
/// Same as [`TxnT::log_for_path`], but iterating the log backwards
/// from `from_timestamp`.
fn rev_log_for_path<'txn, 'channel>(
    &'txn self,
    channel: &'channel Channel<Self>,
    key: Position<ChangeId>,
    from_timestamp: u64,
) -> RevPathChangeset<'channel, 'txn, Self> {
    RevPathChangeset {
        iter: self.rev_cursor_revchangeset(&channel.revchanges, Some((from_timestamp, None))),
        txn: self,
        channel,
        key,
    }
}
/// Is there an alive/pseudo edge from `a` to `b`, with flags
/// between `min` and `max` (inclusive)?
#[doc(hidden)]
fn test_edge(
    &self,
    channel: &Channel<Self>,
    a: Position<ChangeId>,
    b: Position<ChangeId>,
    min: EdgeFlags,
    max: EdgeFlags,
) -> bool {
    debug!("is_connected {:?} {:?}", a, b);
    // Degenerate vertex used as a search key for `a`.
    let key = Vertex {
        change: a.change,
        start: a.pos,
        end: a.pos,
    };
    // Smallest candidate edge towards `b`.
    let edge = Edge {
        flag: min,
        dest: b,
        introduced_by: ChangeId::ROOT,
    };
    let mut cursor = self.cursor_graph(&channel.graph, Some((key, Some(edge))));
    // An exhausted cursor means no such edge exists; the previous
    // `.unwrap()` here panicked instead of answering `false`.
    if let Some((a_, b_)) = cursor.next() {
        a_.change == a.change
            && a_.start <= a.pos
            && a_.end >= a.pos
            && b_.flag >= min
            && b_.flag <= max
            && b_.dest == b
    } else {
        false
    }
}
/// Is there any non-deleted parent edge pointing to `a`, counting
/// pseudo-edges? The root vertex is always considered alive.
#[doc(hidden)]
fn is_alive_or_pseudo(&self, channel: &Channel<Self>, a: Vertex<ChangeId>) -> bool {
    a.is_root()
        || self
            .iter_adjacent(
                channel,
                a,
                EdgeFlags::PARENT,
                EdgeFlags::all() - EdgeFlags::DELETED,
            )
            .next()
            .is_some()
}
/// Is there a non-deleted parent edge pointing to `a`, where
/// pseudo-edges only count if they are also FOLDER edges? The root
/// vertex is always considered alive.
#[doc(hidden)]
fn is_alive(&self, channel: &Channel<Self>, a: Vertex<ChangeId>) -> bool {
    a.is_root()
        || self
            .iter_adjacent(
                channel,
                a,
                EdgeFlags::PARENT,
                EdgeFlags::all() - EdgeFlags::DELETED,
            )
            .filter(|e| {
                e.flag.contains(EdgeFlags::FOLDER) || !e.flag.contains(EdgeFlags::PSEUDO)
            })
            .next()
            .is_some()
}
// org id aY8inFkVguxv2TAAh/FWE+cCIjp5Jj3P0m2rXHfKTtg=
#[doc(hidden)]
/// Choose an internal id for the change with external hash `h`:
/// reuse the registered one if present, else derive it from the
/// first 8 bytes of the hash, re-rolling randomly on collision.
fn make_changeid(&self, h: &Hash) -> ChangeId {
    if let Some(h) = self.get_internal(*h) {
        return h;
    }
    use byteorder::{ByteOrder, LittleEndian};
    use rand::Rng;
    let mut p = match h {
        Hash::None => return ChangeId::ROOT,
        Hash::Blake3(ref s) => ChangeId(LittleEndian::read_u64(&s[..])),
    };
    // Extremely unlikely loop: only runs on an 8-byte prefix collision.
    while self.get_external(p).is_some() {
        p = ChangeId(rand::thread_rng().gen());
    }
    p
}
// org id fPNhuKxfbhMSEoBk5AN8BJbDzTyyY8utYN4ZfwRiivg=
#[doc(hidden)]
/// Draw a fresh random internal change id, retrying until it does
/// not collide with any registered change.
fn make_random_changeid(&self) -> ChangeId {
    use rand::Rng;
    let mut rng = rand::thread_rng();
    let mut id = ChangeId(rng.gen());
    while self.get_external(id).is_some() {
        id = ChangeId(rng.gen());
    }
    id
}
// org id YXeNpJpzZclmYauw5rMn/+5ReOquucJhXct+DCSCfHY=
table!(remotes);
cursor!(remotes, SmallString, (u64, u64, u64));
table!(remote);
table!(revremote);
table!(remotestates);
cursor!(remote, u64, (Hash, Merkle));
rev_cursor!(remote, u64, (Hash, Merkle));
#[doc(hidden)]
fn iter_remote<'txn>(
&'txn self,
remote: &Self::Remote,
k: u64,
) -> Cursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>;
fn iter_rev_remote<'txn>(
&'txn self,
remote: &Self::Remote,
k: Option<u64>,
) -> RevCursor<Self, &'txn Self, Self::RemoteCursor, u64, (Hash, Merkle)>;
fn get_remote(&mut self, name: &str) -> Option<RemoteRef<Self>>;
fn last_remote(&self, remote: &Self::Remote) -> Option<(u64, (Hash, Merkle))>;
fn get_remote_state(&self, remote: &Self::Remote, n: u64) -> Option<(u64, (Hash, Merkle))>;
fn remote_has_change(&self, remote: &RemoteRef<Self>, hash: Hash) -> bool;
fn remote_has_state(&self, remote: &RemoteRef<Self>, hash: Merkle) -> bool;
fn channel_has_state(&self, channel: &ChannelRef<Self>, hash: Merkle) -> bool;
// org id oGZWRYu9yo9VKkz4S9hipd5OpFx6kQiF5q/T9jSqp5A=
/// Write the graph of a channel to file `f` in graphviz
/// format. **Warning:** this can be really large on old channels.
///
/// Returns whether the graph contained at least one edge.
#[doc(hidden)]
fn debug_to_file<P: AsRef<std::path::Path>>(
    &self,
    channel: &ChannelRef<Self>,
    f: P,
) -> Result<bool, anyhow::Error> {
    info!("debug {:?}", f.as_ref());
    let mut f = std::fs::File::create(f)?;
    let channel = channel.r.borrow();
    let done = self.debug(&channel, &mut f)?;
    f.flush()?;
    info!("done debugging {:?}", done);
    Ok(done)
}
#[doc(hidden)]
/// Dump the entire `tree` table (path id -> inode) to `file`, one
/// `Debug`-formatted entry per line.
fn debug_tree<P: AsRef<std::path::Path>>(&self, file: P) -> Result<(), anyhow::Error> {
    // Iterate from the smallest possible key: root inode, empty name.
    let root = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::from_str(""),
    };
    // Buffer the writes: one syscall per entry would dominate on big
    // trees. Flush explicitly so write errors aren't swallowed by Drop.
    let mut f = std::io::BufWriter::new(std::fs::File::create(file)?);
    for t in self.iter_tree(root, None) {
        writeln!(f, "{:?}", t)?
    }
    f.flush()?;
    Ok(())
}
#[doc(hidden)]
/// Dump the entire `revtree` table (inode -> path id) to `file`,
/// one `Debug`-formatted entry per line.
fn debug_revtree<P: AsRef<std::path::Path>>(&self, file: P) -> Result<(), anyhow::Error> {
    // Buffer the writes (see `debug_tree`) and flush explicitly so
    // write errors aren't swallowed by Drop.
    let mut f = std::io::BufWriter::new(std::fs::File::create(file)?);
    for t in self.iter_revtree(Inode::ROOT, None) {
        writeln!(f, "{:?}", t)?
    }
    f.flush()?;
    Ok(())
}
// #[cfg(debug_assertions)]
cursor!(inodes, Inode, Position<ChangeId>);
#[doc(hidden)]
// #[cfg(debug_assertions)]
fn iter_inodes<'txn>(
&'txn self,
) -> Cursor<Self, &'txn Self, Self::InodesCursor, Inode, Position<ChangeId>>;
#[cfg(debug_assertions)]
#[doc(hidden)]
/// Log every entry of the `inodes` table at `debug!` level.
/// Compiled to a no-op in release builds (see below).
fn debug_inodes(&self) {
    debug!("debug_inodes");
    for t in self.iter_inodes() {
        debug!("debug_inodes = {:?}", t)
    }
    debug!("/debug_inodes");
}
#[cfg(not(debug_assertions))]
#[doc(hidden)]
// Release-mode stub of the debug-only diagnostic above.
fn debug_inodes(&self) {}
/// Write the graph of a channel to write `W` in graphviz
/// format. **Warning:** this can be really large on old channels.
///
/// Returns `true` iff at least one `(vertex, edge)` pair was written.
#[doc(hidden)]
fn debug<W: Write>(&self, channel: &Channel<Self>, mut f: W) -> Result<bool, anyhow::Error> {
    let mut cursor = self.cursor_graph(&channel.graph, None);
    writeln!(f, "digraph {{")?;
    // A vertex appears once per outgoing edge; only emit its node
    // declaration the first time we see it.
    let mut keys = std::collections::HashSet::new();
    let mut at_least_one = false;
    while let Some((k, v)) = cursor.next() {
        at_least_one = true;
        debug!("debug {:?} {:?}", k, v);
        if keys.insert(k) {
            debug_vertex(&mut f, k)?
        }
        debug_edge(self, channel, &mut f, k, v)?
    }
    writeln!(f, "}}")?;
    Ok(at_least_one)
}
#[doc(hidden)]
/// Consistency check: `changes` and `revchanges` must be exact
/// inverses of each other. Panics on the first mismatch.
fn check_channel_log(&self, channel: &ChannelRef<Self>) {
    let channel = channel.r.borrow();
    // Every (timestamp -> change) entry must map back.
    for (t, (ch, _)) in self.cursor_revchangeset(&channel.revchanges, None) {
        if self.get_changeset(&channel.changes, ch, None) != Some(t) {
            panic!(
                "ch = {:?}, {:?}, t = {:?}",
                ch,
                self.get_changeset(&channel.changes, ch, None),
                Some(t)
            );
        }
    }
    // And every (change -> timestamp) entry must map back too.
    for (ch, t) in self.cursor_changeset(&channel.changes, None) {
        if self
            .get_revchangeset(&channel.revchanges, t, None)
            .unwrap()
            .0
            != ch
        {
            panic!(
                "t = {:?}, {:?}, ch = {:?}",
                t,
                self.get_revchangeset(&channel.revchanges, t, None),
                Some(ch)
            );
        }
    }
}
#[doc(hidden)]
/// Consistency check: `tree` and `revtree` must be inverses, and no
/// inode may appear under two different path ids. Panics/asserts on
/// the first violation.
fn check_tree_revtree(&self) {
    // Smallest possible key: root inode with an empty name.
    let zero = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::new(),
    };
    // Every (path -> inode) entry must map back through revtree
    // (the empty-basename root entry is exempt).
    for (a, b) in self.iter_tree(zero, None) {
        if !a.basename.is_empty() {
            assert_eq!(
                self.get_revtree(b, Some(a.as_file_id())),
                Some(a.as_file_id())
            )
        }
    }
    let mut inodes = Vec::new();
    for (a, b) in self.iter_revtree(Inode::ROOT, None) {
        inodes.clear();
        // Collect every tree entry with key exactly `b` whose value
        // is `a`; the cursor starts at `b` but may run past it.
        for (c, d) in self.iter_tree(b.clone(), None) {
            if c > b {
                break;
            } else if c < b {
                continue;
            }
            if d == a {
                inodes.push(d)
            }
        }
        if inodes.len() > 1 {
            panic!("inodes for {:?} {:?} = {:?}", a, b, inodes);
        }
    }
}
#[doc(hidden)]
/// Consistency check: returns (1) a map of alive vertices that are
/// not reachable from the root, and (2) vertices kept alive only by
/// pseudo parent edges — each paired with the enclosing file vertex
/// found above them, if any.
fn check_alive(
    &self,
    channel: &ChannelRef<Self>,
) -> (
    HashMap<Vertex<ChangeId>, Option<Vertex<ChangeId>>>,
    Vec<(Vertex<ChangeId>, Option<Vertex<ChangeId>>)>,
) {
    let channel = channel.r.borrow();
    // Find the reachable with a DFS.
    let mut reachable = HashSet::new();
    let mut stack = vec![Vertex::ROOT];
    while let Some(v) = stack.pop() {
        if !reachable.insert(v) {
            continue;
        }
        // Follow only non-deleted, non-parent edges downwards.
        for e in self.iter_adjacent(
            &channel,
            v,
            EdgeFlags::empty(),
            EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
        ) {
            stack.push(self.find_block(&channel, e.dest).unwrap());
        }
    }
    debug!("reachable = {:#?}", reachable);
    // Find the alive
    let mut alive_unreachable = HashMap::new();
    let mut cursor = self.cursor_graph(&channel.graph, None);
    let mut visited = HashSet::new();
    // State for the vertex currently being scanned; all edges of a
    // vertex come out of the cursor consecutively.
    let mut k0 = Vertex::ROOT;
    let mut k0_has_pseudo_parents = false;
    let mut k0_has_regular_parents = false;
    let mut reachable_pseudo = Vec::new();
    while let Some((k, v)) = cursor.next() {
        debug!("check_alive, k = {:?}, v = {:?}", k, v);
        if k0 != k {
            // Done scanning k0's edges: record it if only pseudo
            // parent edges kept it alive.
            if k0_has_pseudo_parents && !k0_has_regular_parents {
                reachable_pseudo
                    .push((k0, self.find_file(&channel, k0, &mut stack, &mut visited)))
            }
            k0 = k;
            k0_has_pseudo_parents = false;
            k0_has_regular_parents = false;
        }
        if v.flag.contains(EdgeFlags::PARENT)
            && !v.flag.contains(EdgeFlags::FOLDER)
            && !v.flag.contains(EdgeFlags::DELETED)
        {
            if v.flag.contains(EdgeFlags::PSEUDO) {
                k0_has_pseudo_parents = true
            } else {
                k0_has_regular_parents = true
            }
        }
        if v.flag.contains(EdgeFlags::PARENT) && !v.flag.contains(EdgeFlags::DELETED) {
            // `k` is alive; flag it if the DFS never reached it.
            if !reachable.contains(&k) {
                let file = self.find_file(&channel, k, &mut stack, &mut visited);
                alive_unreachable.insert(k, file);
            }
        }
    }
    // Don't forget the last vertex the cursor scanned.
    if !k0.is_root() && k0_has_pseudo_parents && !k0_has_regular_parents {
        reachable_pseudo.push((k0, self.find_file(&channel, k0, &mut stack, &mut visited)));
    }
    (alive_unreachable, reachable_pseudo)
}
#[doc(hidden)]
/// Walk parent edges up from `k` until a FOLDER edge is found, and
/// return the vertex it was found on (i.e. a vertex belonging to the
/// enclosing file), or `None` if no FOLDER edge is reachable.
///
/// `stack` and `visited` are caller-provided scratch buffers; both
/// are cleared on entry.
fn find_file(
    &self,
    channel: &Channel<Self>,
    k: Vertex<ChangeId>,
    stack: &mut Vec<Vertex<ChangeId>>,
    visited: &mut HashSet<Vertex<ChangeId>>,
) -> Option<Vertex<ChangeId>> {
    let mut file = None;
    stack.clear();
    stack.push(k);
    visited.clear();
    'outer: while let Some(kk) = stack.pop() {
        if !visited.insert(kk) {
            continue;
        }
        for e in self.iter_adjacent(&channel, kk, EdgeFlags::PARENT, EdgeFlags::all()) {
            if e.flag.contains(EdgeFlags::PARENT) {
                if e.flag.contains(EdgeFlags::FOLDER) {
                    // First FOLDER parent found: stop the whole search.
                    file = Some(kk);
                    break 'outer;
                }
                stack.push(self.find_block_end(&channel, e.dest).unwrap());
            }
        }
    }
    file
}
#[doc(hidden)]
/// Write to `f`, in graphviz format, the subgraph reachable from
/// `root` by following parent edges always, and non-parent edges
/// only while not ascending through a FOLDER edge.
fn debug_root<W: Write>(
    &self,
    channel: &ChannelRef<Self>,
    root: Vertex<ChangeId>,
    mut f: W,
) -> Result<(), anyhow::Error> {
    let channel = channel.r.borrow();
    writeln!(f, "digraph {{")?;
    // Find the reachable with a DFS.
    let mut visited = HashSet::new();
    // The boolean tracks whether we arrived via a FOLDER parent edge.
    let mut stack = vec![(root, false)];
    while let Some((v, is_going_up)) = stack.pop() {
        if !visited.insert(v) {
            continue;
        }
        debug_vertex(&mut f, v)?;
        for e in self.iter_adjacent(&channel, v, EdgeFlags::empty(), EdgeFlags::all()) {
            if e.flag.contains(EdgeFlags::PARENT) {
                debug_edge(self, &channel, &mut f, v, e)?;
                let v = self.find_block_end(&channel, e.dest).unwrap();
                stack.push((v, e.flag.contains(EdgeFlags::FOLDER)));
            } else if !is_going_up {
                debug_edge(self, &channel, &mut f, v, e)?;
                let v = self.find_block(&channel, e.dest).unwrap();
                stack.push((v, false));
            }
        }
    }
    writeln!(f, "}}")?;
    Ok(())
}
#[doc(hidden)]
/// Write to `f`, in graphviz format, the subgraph reachable from
/// `root` by following parent edges only (ancestors of `root`).
fn debug_root_rev<W: Write>(
    &self,
    channel: &Channel<Self>,
    root: Vertex<ChangeId>,
    mut f: W,
) -> Result<(), anyhow::Error> {
    writeln!(f, "digraph {{")?;
    let mut visited = HashSet::new();
    let mut stack = vec![root];
    while let Some(v) = stack.pop() {
        if !visited.insert(v) {
            continue;
        }
        debug_vertex(&mut f, v)?;
        for e in self.iter_adjacent(&channel, v, EdgeFlags::empty(), EdgeFlags::all()) {
            if e.flag.contains(EdgeFlags::PARENT) {
                debug_edge(self, &channel, &mut f, v, e)?;
                let v = self.find_block_end(&channel, e.dest).unwrap();
                stack.push(v);
            }
        }
    }
    writeln!(f, "}}")?;
    Ok(())
}
}
/// Write one graphviz node for vertex `k`, named
/// `node_<change>_<start>_<end>` and labelled with the same triple.
fn debug_vertex<W: std::io::Write>(mut f: W, k: Vertex<ChangeId>) -> Result<(), std::io::Error> {
    let change = k.change.to_base32();
    let (start, end) = (k.start.0, k.end.0);
    writeln!(
        f,
        "node_{}_{}_{}[label=\"{} [{};{}[\"];",
        change, start, end, change, start, end,
    )
}
/// Write one graphviz edge for `v`, leaving vertex `k`.
///
/// Style encodes the edge flags (dashed = deleted, dotted = pseudo),
/// colour encodes PARENT/FOLDER, and a BLOCK flag wraps the
/// `introduced_by` label in brackets.
fn debug_edge<T: TxnT, W: std::io::Write>(
    txn: &T,
    channel: &Channel<T>,
    mut f: W,
    k: Vertex<ChangeId>,
    v: Edge,
) -> Result<(), std::io::Error> {
    let style = if v.flag.contains(EdgeFlags::DELETED) {
        ", style=dashed"
    } else if v.flag.contains(EdgeFlags::PSEUDO) {
        ", style=dotted"
    } else {
        ""
    };
    let color = if v.flag.contains(EdgeFlags::PARENT) {
        if v.flag.contains(EdgeFlags::FOLDER) {
            "orange"
        } else {
            "red"
        }
    } else if v.flag.contains(EdgeFlags::FOLDER) {
        "royalblue"
    } else {
        "forestgreen"
    };
    // BLOCK edges get their `introduced_by` label bracketed. Computed
    // once here instead of repeating the same `if` in every branch.
    let (open, close) = if v.flag.contains(EdgeFlags::BLOCK) {
        ("[", "]")
    } else {
        ("", "")
    };
    let label = format!("{}{}{}", open, v.introduced_by.to_base32(), close);
    if v.flag.contains(EdgeFlags::PARENT) {
        let dest = if v.dest.change.is_root() {
            Vertex::ROOT
        } else {
            txn.find_block_end(channel, v.dest).unwrap()
        };
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{}_{} [label=\"{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            dest.change.to_base32(),
            dest.start.0,
            dest.end.0,
            label,
            color,
            style
        )?;
    } else if let Ok(dest) = txn.find_block(channel, v.dest) {
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{}_{} [label=\"{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            dest.change.to_base32(),
            dest.start.0,
            dest.end.0,
            label,
            color,
            style
        )?;
    } else {
        // Destination doesn't resolve to a block: point at the raw
        // position (two-coordinate node name) instead.
        writeln!(
            f,
            "node_{}_{}_{} -> node_{}_{} [label=\"{}\", color=\"{}\"{}];",
            k.change.to_base32(),
            k.start.0,
            k.end.0,
            v.dest.change.to_base32(),
            v.dest.pos.0,
            label,
            color,
            style
        )?;
    }
    Ok(())
}
// org id TGVKwTRghwUbLyTsjKgneqVbyqqe6FeSa91J5Bse/kw=
/// A cursor over a table, initialised at a certain value.
///
/// `RT` is any `Deref`-to-`T` handle on the transaction (e.g. `&T`),
/// so a cursor can either borrow or own its transaction.
#[doc(hidden)]
pub struct Cursor<T: TxnT, RT: std::ops::Deref<Target = T>, Cursor, K, V> {
    pub(crate) cursor: Cursor,
    pub(crate) txn: RT,
    // K and V never appear in the fields; PhantomData ties them (and
    // T) to the type for the iterator impls.
    pub(crate) marker: std::marker::PhantomData<(T, K, V)>,
}
/// Like [`Cursor`], but iterating backwards.
#[doc(hidden)]
pub struct RevCursor<T: TxnT, RT: std::ops::Deref<Target = T>, Cursor, K, V> {
    pub(crate) cursor: Cursor,
    pub(crate) txn: RT,
    pub(crate) marker: std::marker::PhantomData<(T, K, V)>,
}
initialized_cursor!(graph, Vertex<ChangeId>, Edge);
initialized_cursor!(changeset, ChangeId, u64);
initialized_cursor!(revchangeset, u64, (ChangeId, Merkle));
initialized_rev_cursor!(revchangeset, u64, (ChangeId, Merkle));
initialized_cursor!(tree, OwnedPathId, Inode);
initialized_cursor!(revtree, Inode, OwnedPathId);
initialized_cursor!(dep, ChangeId, ChangeId);
initialized_cursor!(partials, SmallString, Position<ChangeId>);
initialized_cursor!(rev_touched_files, ChangeId, Position<ChangeId>);
initialized_cursor!(touched_files, Position<ChangeId>, ChangeId);
initialized_cursor!(remote, u64, (Hash, Merkle));
initialized_rev_cursor!(remote, u64, (Hash, Merkle));
// #[cfg(debug_assertions)]
initialized_cursor!(inodes, Inode, Position<ChangeId>);
/// An iterator for nodes adjacent to `key` through an edge with flags smaller than `max_flag`.
#[doc(hidden)]
pub struct AdjacentIterator<'txn, T: TxnT> {
    // Underlying graph cursor, positioned at (key, min edge) by
    // `TxnT::iter_adjacent`.
    it: Cursor<T, &'txn T, T::GraphCursor, Vertex<ChangeId>, Edge>,
    key: Vertex<ChangeId>,
    // only yield edges whose flag is at least this
    min_flag: EdgeFlags,
    /// iter as long as the flag is smaller than this
    max_flag: EdgeFlags,
}
impl<'txn, T: TxnT> Iterator for AdjacentIterator<'txn, T> {
    type Item = Edge;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if let Some((v, e)) = self.it.next() {
                debug!("adjacent iterator: {:?} {:?}", v, e);
                if v == self.key {
                    if e.flag >= self.min_flag {
                        if e.flag <= self.max_flag {
                            return Some(e);
                        } else {
                            // Beyond max_flag on `key`: nothing more
                            // to yield (edges come in flag order).
                            return None;
                        }
                    }
                    // e.flag < min_flag: skip and keep scanning.
                } else if v > self.key {
                    // Past all edges of `key`.
                    return None;
                }
                // v < self.key: not at `key` yet, keep scanning.
            } else {
                debug!("adjacent iterator: over");
                return None;
            }
        }
    }
}
/// Iterator over the hashes of the changes that touched a file,
/// walking the channel log forwards; see [`TxnT::log_for_path`].
pub struct PathChangeset<'channel, 'txn: 'channel, T: TxnT> {
    txn: &'txn T,
    channel: &'channel Channel<T>,
    iter: Cursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
    // The file vertex position being filtered for.
    key: Position<ChangeId>,
}
/// Like [`PathChangeset`], but walking the channel log backwards;
/// see [`TxnT::rev_log_for_path`].
pub struct RevPathChangeset<'channel, 'txn: 'channel, T: TxnT> {
    txn: &'txn T,
    channel: &'channel Channel<T>,
    iter: RevCursor<T, &'txn T, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
    key: Position<ChangeId>,
}
impl<'channel, 'txn: 'channel, T: TxnT> Iterator for PathChangeset<'channel, 'txn, T> {
    type Item = Hash;
    fn next(&mut self) -> Option<Self::Item> {
        // For each change in the log, scan the positions it touched;
        // yield its external hash when one lies under `self.key`.
        while let Some((_, (changeid, _))) = self.iter.next() {
            for (p, touched) in self.txn.iter_rev_touched_files(changeid, None) {
                // The cursor starts at `changeid` but may run past it:
                // keep only entries keyed exactly by `changeid`.
                if p > changeid {
                    break;
                } else if p < changeid {
                    continue;
                }
                if is_ancestor_of(self.txn, self.channel, self.key, touched) {
                    return self.txn.get_external(changeid);
                }
            }
        }
        None
    }
}
impl<'channel, 'txn: 'channel, T: TxnT> Iterator for RevPathChangeset<'channel, 'txn, T> {
    type Item = Hash;
    fn next(&mut self) -> Option<Self::Item> {
        // Same filtering as PathChangeset, but `self.iter` walks the
        // channel log backwards.
        while let Some((_, (changeid, _))) = self.iter.next() {
            for (p, touched) in self.txn.iter_rev_touched_files(changeid, None) {
                // Keep only entries keyed exactly by `changeid`.
                if p > changeid {
                    break;
                } else if p < changeid {
                    continue;
                }
                if is_ancestor_of(self.txn, self.channel, self.key, touched) {
                    return self.txn.get_external(changeid);
                }
            }
        }
        None
    }
}
/// Is position `a` an ancestor of position `b` in the file tree of
/// `channel`? Walks FOLDER|PARENT edges upwards from `b` with a DFS.
fn is_ancestor_of<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    a: Position<ChangeId>,
    b: Position<ChangeId>,
) -> bool {
    let mut stack = vec![b];
    let mut visited = std::collections::HashSet::new();
    debug!("a = {:?}", a);
    while let Some(b) = stack.pop() {
        debug!("pop {:?}", b);
        if a == b {
            return true;
        }
        if !visited.insert(b) {
            continue;
        }
        // Step up: from `b`'s name vertex to its parent directory...
        for p in txn.iter_adjacent(
            channel,
            b.inode_vertex(),
            EdgeFlags::FOLDER | EdgeFlags::PARENT,
            EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
        ) {
            // Ok, since `p` is in the channel.
            let parent = txn.find_block_end(channel, p.dest).unwrap();
            // ...then from that directory to its own parents.
            for pp in txn.iter_adjacent(
                channel,
                parent,
                EdgeFlags::FOLDER | EdgeFlags::PARENT,
                EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
            ) {
                if pp.dest == a {
                    return true;
                }
                stack.push(pp.dest)
            }
        }
    }
    false
}
/// Iterator over the channels of a pristine, in name order; see
/// [`TxnT::iter_channels`].
pub struct ChannelIterator<'txn, T: TxnT> {
    txn: &'txn T,
    cursor: T::ChannelsCursor,
}
impl<'txn, T: TxnT> Iterator for ChannelIterator<'txn, T> {
    type Item = ChannelRef<T>;
    /// Advance the channels cursor and load the next channel by name.
    fn next(&mut self) -> Option<Self::Item> {
        let (name, _) = self.txn.cursor_channels_next(&mut self.cursor)?;
        self.txn.load_channel(name.as_str())
    }
}
/// Iterator over the remotes of a pristine, in name order; see
/// [`TxnT::iter_remotes`].
pub struct RemotesIterator<'txn, T: TxnT> {
    txn: &'txn T,
    cursor: T::RemotesCursor,
}
impl<'txn, T: TxnT> Iterator for RemotesIterator<'txn, T> {
    type Item = RemoteRef<T>;
    /// Advance the remotes cursor and load the next remote by name.
    fn next(&mut self) -> Option<Self::Item> {
        let (name, _) = self.txn.cursor_remotes_next(&mut self.cursor)?;
        self.txn.load_remote(name.as_str())
    }
}
// org id IOLey2qYHN/TyndNM5KGmqRqElipLz8mK4wbB7Tviqc=
/// The trait of mutable transactions.
///
/// NOTE(review): the original doc said "immutable", which contradicts
/// the `put_*`/`del_*` methods and `commit` below — corrected.
pub trait MutTxnT: TxnT {
    // `put_del!` generates a `put_<name>`/`del_<name>` pair for each table.
    put_del!(internal, Hash, ChangeId);
    put_del!(external, ChangeId, Hash);
    put_del!(inodes, Inode, Position<ChangeId>);
    put_del!(revinodes, Position<ChangeId>, Inode);
    put_del!(tree, PathId, Inode);
    put_del!(revtree, Inode, PathId);
    put_del!(dep, ChangeId, ChangeId);
    put_del!(revdep, ChangeId, ChangeId);
    put_del!(touched_files, Position<ChangeId>, ChangeId);
    put_del!(rev_touched_files, ChangeId, Position<ChangeId>);
    /// Replace the binding of `inode` in the inodes table (delete, then put).
    #[doc(hidden)]
    fn replace_inodes(
        &mut self,
        inode: Inode,
        header: Position<ChangeId>,
    ) -> Result<bool, anyhow::Error> {
        self.del_inodes(inode, None)?;
        Ok(self.put_inodes(inode, header)?)
    }
    /// Replace the binding of `key` in the reverse inodes table.
    #[doc(hidden)]
    fn replace_revinodes(
        &mut self,
        key: Position<ChangeId>,
        inode: Inode,
    ) -> Result<bool, anyhow::Error> {
        self.del_revinodes(key, None)?;
        Ok(self.put_revinodes(key, inode)?)
    }
    /// Replace the `inode ↔ position` binding in both directions.
    #[doc(hidden)]
    fn replace_inodes_with_rev(
        &mut self,
        inode: Inode,
        position: Position<ChangeId>,
    ) -> Result<(), anyhow::Error> {
        self.del_inodes(inode, None)?;
        self.del_revinodes(position, None)?;
        self.put_inodes_with_rev(inode, position)?;
        Ok(())
    }
    /// Insert the `inode ↔ position` binding in both directions.
    #[doc(hidden)]
    fn put_inodes_with_rev(
        &mut self,
        inode: Inode,
        position: Position<ChangeId>,
    ) -> Result<(), anyhow::Error> {
        self.put_inodes(inode, position)?;
        self.put_revinodes(position, inode)?;
        Ok(())
    }
    /// Delete the `inode ↔ position` binding in both directions.
    #[doc(hidden)]
    fn del_inodes_with_rev(
        &mut self,
        inode: Inode,
        position: Position<ChangeId>,
    ) -> Result<(), anyhow::Error> {
        self.del_inodes(inode, None)?;
        self.del_revinodes(position, None)?;
        Ok(())
    }
    /// Remove `inode` from the tree, along with its reverse binding.
    /// A no-op when `inode` has no parent in the reverse tree.
    #[doc(hidden)]
    fn del_tree_with_rev(&mut self, inode: Inode) -> Result<(), anyhow::Error> {
        if let Some(parent) = self.get_revtree(inode, None) {
            let parent = parent.to_owned();
            // Both directions must exist if either does.
            assert!(self.del_tree(parent.as_file_id(), Some(inode))?);
            assert!(self.del_revtree(inode, Some(parent.as_file_id()))?);
        }
        Ok(())
    }
    /// Insert a key and a value to a graph. Returns `false` if and only if `(k, v)` was already in the graph, in which case no insertion happened.
    #[doc(hidden)]
    fn put_graph(
        &mut self,
        channel: &mut Self::Graph,
        k: Vertex<ChangeId>,
        v: Edge,
    ) -> Result<bool, anyhow::Error>;
    /// Delete a key and a value from a graph. Returns `true` if and only if `(k, v)` was in the graph.
    #[doc(hidden)]
    fn del_graph(
        &mut self,
        channel: &mut Self::Graph,
        k: Vertex<ChangeId>,
        v: Option<Edge>,
    ) -> Result<bool, anyhow::Error>;
    /// Add a change and a timestamp to a change table. Returns `None` if and only if `(p, t)` was already in the change table, in which case no insertion happened. Returns the new state else.
    #[doc(hidden)]
    fn put_changes(
        &mut self,
        channel: &mut Channel<Self>,
        p: ChangeId,
        t: ApplyTimestamp,
        h: &Hash,
    ) -> Result<Option<Merkle>, anyhow::Error>;
    /// Delete a change from a change table. Returns `true` if and only if `(p, t)` was in the change table.
    #[doc(hidden)]
    fn del_changes(
        &mut self,
        channel: &mut Channel<Self>,
        p: ChangeId,
        t: ApplyTimestamp,
    ) -> Result<bool, anyhow::Error>;
    /// Open a channel, creating it if it is missing. The return type
    /// is a `Rc<RefCell<…>>` in order to avoid:
    /// - opening the same channel twice. Since a channel contains pointers, that could potentially lead to double-borrow issues. We absolutely have to check that at runtime (hence the `RefCell`).
    /// - writing the channel to disk (if the backend is written on the disk) for every minor operation on the channel.
    ///
    /// Additionally, the `Rc` is used to:
    /// - avoid having to commit channels explicitly (channels are
    /// committed automatically upon committing the transaction), and
    /// - to return a value that doesn't borrow the transaction, so
    /// that the channel can actually be used in a mutable transaction.
    fn open_or_create_channel(&mut self, name: &str) -> Result<ChannelRef<Self>, anyhow::Error>;
    /// Create a new channel named `name` with the same contents as `channel`.
    fn fork(
        &mut self,
        channel: &ChannelRef<Self>,
        name: &str,
    ) -> Result<ChannelRef<Self>, anyhow::Error>;
    /// Rename `channel` to `name`.
    fn rename_channel(
        &mut self,
        channel: &mut ChannelRef<Self>,
        name: &str,
    ) -> Result<(), anyhow::Error>;
    /// Delete the channel named `name`.
    fn drop_channel(&mut self, name: &str) -> Result<bool, anyhow::Error>;
    /// Commit this transaction.
    fn commit(self) -> Result<(), anyhow::Error>;
    // Provided methods
    /// Split a key `[a, b[` at position `pos`, yielding two keys `[a,
    /// pos[` and `[pos, b[` linked by an edge.
    #[doc(hidden)]
    fn split_block<'db, 'txn: 'db>(
        &'txn mut self,
        channel: &'db mut Channel<Self>,
        key: Vertex<ChangeId>,
        pos: ChangePosition,
    ) -> Result<(), anyhow::Error> {
        trace!("key = {:?}, pos = {:?}", key, pos);
        // Collect all edges adjacent to `key` before mutating the graph.
        let adjacent: Vec<_> = self
            .cursor_graph(&channel.graph, Some((key, None)))
            .take_while(|&(k, _)| k <= key)
            .filter(|&(k, _)| k == key)
            .map(|(_, e)| e)
            .collect();
        debug!("adjacent {:?}", adjacent);
        for chi in adjacent {
            assert!(chi.introduced_by != ChangeId::ROOT || chi.flag.contains(EdgeFlags::PSEUDO));
            // For BLOCK parent edges, link the two halves of the split.
            if chi.flag.contains(EdgeFlags::PARENT | EdgeFlags::BLOCK) {
                self.put_graph_with_rev(
                    channel,
                    chi.flag - EdgeFlags::PARENT,
                    Vertex {
                        change: key.change,
                        start: key.start,
                        end: pos,
                    },
                    Vertex {
                        change: key.change,
                        start: pos,
                        end: key.end,
                    },
                    chi.introduced_by,
                )?;
            }
            // Move the edge from the whole key to the relevant half:
            // parent edges belong to the first half, children to the second.
            self.del_graph(&mut channel.graph, key, Some(chi))?;
            self.put_graph(
                &mut channel.graph,
                if chi.flag.contains(EdgeFlags::PARENT) {
                    Vertex {
                        change: key.change,
                        start: key.start,
                        end: pos,
                    }
                } else {
                    Vertex {
                        change: key.change,
                        start: pos,
                        end: key.end,
                    }
                },
                chi,
            )?;
        }
        Ok(())
    }
    /// Delete an edge and its reverse. `flag` may carry `PARENT`, in
    /// which case the endpoints are swapped first so that `k0 → k1` is
    /// always the forward direction. Returns whether the pair existed.
    #[doc(hidden)]
    fn del_graph_with_rev(
        &mut self,
        channel: &mut Channel<Self>,
        mut flag: EdgeFlags,
        mut k0: Vertex<ChangeId>,
        mut k1: Vertex<ChangeId>,
        introduced_by: ChangeId,
    ) -> Result<bool, anyhow::Error> {
        if flag.contains(EdgeFlags::PARENT) {
            std::mem::swap(&mut k0, &mut k1);
            flag -= EdgeFlags::PARENT
        }
        debug!("del_graph_with_rev {:?} {:?} {:?}", flag, k0, k1);
        let a = self.del_graph(
            &mut channel.graph,
            k0,
            Some(Edge {
                flag: flag,
                dest: Position {
                    change: k1.change,
                    pos: k1.start,
                },
                introduced_by,
            }),
        )?;
        let b = self.del_graph(
            &mut channel.graph,
            k1,
            Some(Edge {
                flag: flag | EdgeFlags::PARENT,
                dest: Position {
                    change: k0.change,
                    pos: k0.end,
                },
                introduced_by,
            }),
        )?;
        // Forward and reverse edges must exist (or not) together.
        assert!((a && b) || (!a && !b));
        Ok(a && b)
    }
    /// Insert an edge `k0 → k1` together with its reverse (PARENT) edge.
    /// `flag` must not contain `PARENT`. Returns whether the pair was new.
    #[doc(hidden)]
    fn put_graph_with_rev(
        &mut self,
        channel: &mut Channel<Self>,
        flag: EdgeFlags,
        k0: Vertex<ChangeId>,
        k1: Vertex<ChangeId>,
        introduced_by: ChangeId,
    ) -> Result<bool, anyhow::Error> {
        debug_assert!(!flag.contains(EdgeFlags::PARENT));
        if k0.change == k1.change {
            assert_ne!(k0.start_pos(), k1.start_pos());
        }
        if introduced_by == ChangeId::ROOT {
            assert!(flag.contains(EdgeFlags::PSEUDO));
        }
        debug!("put_graph_with_rev {:?} {:?} {:?}", k0, k1, flag);
        let a = self.put_graph(
            &mut channel.graph,
            k0,
            Edge {
                flag: flag,
                dest: Position {
                    change: k1.change,
                    pos: k1.start,
                },
                introduced_by,
            },
        )?;
        let b = self.put_graph(
            &mut channel.graph,
            k1,
            Edge {
                flag: flag ^ EdgeFlags::PARENT,
                dest: Position {
                    change: k0.change,
                    pos: k0.end,
                },
                introduced_by,
            },
        )?;
        // Invariant check for FOLDER edges: they must link an empty inode
        // vertex with a non-trivial name vertex. On violation, dump the
        // graph to a `folder_debug` file and panic (debugging aid).
        if flag.contains(EdgeFlags::FOLDER) {
            if !((k0.len() == 0 && k1.len() > 2) || (k0.len() > 2 && k1.len() == 0)) {
                let mut f = std::fs::File::create("folder_debug")?;
                self.debug(channel, &mut f)?;
                panic!("{:?} {:?}", k0, k1);
            }
        }
        assert!((a && b) || (!a && !b));
        Ok(a && b)
    }
    /// Record a freshly applied change: internal/external ids, both
    /// dependency directions, and both touched-file directions.
    ///
    /// Panics (`unwrap`) if a dependency has no internal id yet, i.e.
    /// dependencies must be registered before their dependents.
    #[doc(hidden)]
    fn register_change(
        &mut self,
        internal: ChangeId,
        hash: Hash,
        change: &Change,
    ) -> Result<(), anyhow::Error> {
        self.put_external(internal, hash)?;
        self.put_internal(hash, internal)?;
        for dep in change.dependencies.iter() {
            debug!(target:"libpijul::register_change", "dep = {:?}", dep);
            let dep_internal = self.get_internal(*dep).unwrap();
            debug!(target:"libpijul::register_change", "{:?} depends on {:?}", internal, dep_internal);
            self.put_revdep(dep_internal, internal)?;
            self.put_dep(internal, dep_internal)?;
        }
        for hunk in change.changes.iter().flat_map(|r| r.iter()) {
            // Every atom carries the inode it touches.
            let inode = match *hunk {
                Atom::NewVertex(NewVertex { ref inode, .. }) => inode,
                Atom::EdgeMap(EdgeMap { ref inode, .. }) => inode,
            };
            // A missing change id in the inode means "this change".
            let inode = Position {
                change: inode
                    .change
                    .and_then(|change| self.get_internal(change))
                    .unwrap_or(internal),
                pos: inode.pos,
            };
            debug!(target:"libpijul::register_change", "touched: {:?} {:?}", inode, internal);
            self.put_touched_files(inode, internal)?;
            self.put_rev_touched_files(internal, inode)?;
        }
        Ok(())
    }
    /// Record that `k` is a partial check-out rooted at `e`.
    #[doc(hidden)]
    fn put_partials(&mut self, k: &str, e: Position<ChangeId>) -> Result<bool, anyhow::Error>;
    /// Delete a partial check-out binding (all bindings for `k` when `e` is `None`).
    #[doc(hidden)]
    fn del_partials(
        &mut self,
        k: &str,
        e: Option<Position<ChangeId>>,
    ) -> Result<bool, anyhow::Error>;
    /// Open a remote by name, creating it if missing.
    fn open_or_create_remote(&mut self, name: &str) -> Result<RemoteRef<Self>, anyhow::Error>;
    /// Record entry `k → (hash, state)` in a remote's cache.
    fn put_remote(
        &mut self,
        remote: &mut RemoteRef<Self>,
        k: u64,
        v: (Hash, Merkle),
    ) -> Result<bool, anyhow::Error>;
    /// Delete entry `k` from a remote's cache.
    fn del_remote(&mut self, remote: &mut RemoteRef<Self>, k: u64) -> Result<bool, anyhow::Error>;
    /// Delete a remote (by reference).
    fn drop_remote(&mut self, remote: RemoteRef<Self>) -> Result<bool, anyhow::Error>;
    /// Delete a remote (by name).
    fn drop_named_remote(&mut self, remote: &str) -> Result<bool, anyhow::Error>;
}
// org id YyhFvpusTGqxPZa63sNkcmQoKpeib01dP/RcY0ylnE4=
use super::Base32;
use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;
#[doc(hidden)]
/// The Merkle state of a channel: a running product of Ed25519 points,
/// one factor per applied change (see `Merkle::next`).
#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Merkle {
    Ed25519(curve25519_dalek::edwards::EdwardsPoint),
}
#[doc(hidden)]
/// Algorithm tag serialized as the last byte of a `Merkle`'s base-32 form.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[repr(u8)]
pub enum MerkleAlgorithm {
    Ed25519 = 1,
}
impl std::fmt::Debug for Merkle {
    /// Debug output is the base-32 form, matching what users see elsewhere.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        let b32 = self.to_base32();
        write!(fmt, "{:?}", b32)
    }
}
impl Merkle {
    /// The initial state: the Ed25519 base point.
    pub fn zero() -> Self {
        Merkle::Ed25519(ED25519_BASEPOINT_POINT)
    }
    /// The state after applying a change with hash `h` on top of `self`:
    /// multiply the current point by the scalar derived from `h`.
    pub fn next(&self, h: &super::Hash) -> Self {
        match *self {
            Merkle::Ed25519(ref point) => {
                let scalar = match *h {
                    super::Hash::Blake3(bytes) => {
                        curve25519_dalek::scalar::Scalar::from_bytes_mod_order(bytes)
                    }
                    // Only BLAKE3 hashes take part in Merkle states.
                    _ => unreachable!(),
                };
                Merkle::Ed25519(point * scalar)
            }
        }
    }
    /// The 32-byte compressed form of the current point.
    pub fn to_bytes(&self) -> [u8; 32] {
        match *self {
            Merkle::Ed25519(ref point) => point.compress().to_bytes(),
        }
    }
}
impl super::Base32 for Merkle {
    /// Base-32 of the 32-byte compressed point followed by a one-byte
    /// algorithm tag (`MerkleAlgorithm::Ed25519`).
    fn to_base32(&self) -> String {
        match *self {
            Merkle::Ed25519(ref s) => {
                let mut hash = [0; 33];
                (&mut hash[..32]).clone_from_slice(s.compress().as_bytes());
                hash[32] = MerkleAlgorithm::Ed25519 as u8;
                data_encoding::BASE32_NOPAD.encode(&hash)
            }
        }
    }
    /// Parses a base-32 string into a `Merkle`.
    ///
    /// Returns `None` on invalid base-32, on a payload that is not
    /// exactly 33 bytes, on an unknown algorithm tag, or on bytes that
    /// do not decompress to a valid Edwards point. (The original
    /// version panicked on empty input via `last().unwrap()`, on short
    /// input via `&bytes[..32]`, and on invalid points via `.unwrap()`.)
    fn from_base32(s: &[u8]) -> Option<Self> {
        let bytes = data_encoding::BASE32_NOPAD.decode(s).ok()?;
        // Exactly 32 bytes of compressed point plus 1 tag byte.
        if bytes.len() != 33 || bytes[32] != MerkleAlgorithm::Ed25519 as u8 {
            return None;
        }
        curve25519_dalek::edwards::CompressedEdwardsY::from_slice(&bytes[..32])
            .decompress()
            .map(Merkle::Ed25519)
    }
}
impl std::str::FromStr for Merkle {
type Err = crate::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Some(b) = Self::from_base32(s.as_bytes()) {
Ok(b)
} else {
Err(crate::Error::ParseError { s: s.to_string() })
}
}
}
// org id 1pskDu0oEKm/5I0zevJAOBnZFHtRF4KVaKdOQIr5xOA=
use super::vertex::*;
use super::inode_metadata::*;
use super::change_id::*;
/// A file-system node in the graph: its metadata together with the
/// position of its inode vertex.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)]
#[doc(hidden)]
pub struct InodeVertex {
    /// Permissions and directory bit.
    pub metadata: InodeMetadata,
    /// Position of the inode vertex in the graph.
    pub position: Position<ChangeId>,
}
// org id 2aGUHdwMqOBrXQnmbPEW3sQRVHl+A3LxrYYZVTbmaDA=
/// Metadata about an inode, including unix-style permissions and
/// whether this inode is a directory.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)]
#[doc(hidden)]
pub struct InodeMetadata(pub u16);
// Bit marking a directory; the low 9 bits (0o777) are the permissions.
const DIR_BIT: u16 = 0x200;
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
impl InodeMetadata {
    /// Read the file metadata from the file name encoded in the
    /// repository (a big-endian `u16`).
    ///
    /// # Panics
    /// Panics if `p` is shorter than two bytes.
    pub fn from_basename(p: &[u8]) -> Self {
        debug_assert!(p.len() == 2);
        InodeMetadata(u16::from_be_bytes([p[0], p[1]]))
    }
    /// Create a new file metadata with the given Unix permissions,
    /// and "is directory" bit.
    pub fn new(perm: usize, is_dir: bool) -> Self {
        let mut m = InodeMetadata(0);
        m.set_permissions(perm as u16);
        if is_dir {
            m.set_dir()
        } else {
            m.unset_dir()
        }
        m
    }
    /// Permissions of this inode (as in Unix): the low 9 bits.
    ///
    /// NOTE(review): the accessors treat `self.0` as little-endian
    /// encoded, while `from_basename`/`write` use big-endian; these
    /// agree on little-endian hosts — confirm intent for big-endian.
    pub fn permissions(&self) -> u16 {
        u16::from_le(self.0) & 0x1ff
    }
    /// Set the permissions to the supplied parameters.
    pub fn set_permissions(&mut self, perm: u16) {
        let bits = u16::from_le(self.0);
        // Mask to the 9 Unix permission bits: the original code did not
        // mask `perm`, so an out-of-range value could clobber `DIR_BIT`.
        let perm = (bits & !0x1ff) | (perm & 0x1ff);
        self.0 = perm.to_le()
    }
    /// Tell whether this `InodeMetadata` is a directory.
    pub fn is_dir(&self) -> bool {
        u16::from_le(self.0) & DIR_BIT != 0
    }
    /// Tell whether this `InodeMetadata` is a file.
    pub fn is_file(&self) -> bool {
        u16::from_le(self.0) & DIR_BIT == 0
    }
    /// Set the metadata to be a directory.
    pub fn set_dir(&mut self) {
        let bits = u16::from_le(self.0);
        self.0 = (bits | DIR_BIT).to_le()
    }
    /// Set the metadata to be a file.
    pub fn unset_dir(&mut self) {
        let bits = u16::from_le(self.0);
        self.0 = (bits & !DIR_BIT).to_le()
    }
    /// Serialize as two big-endian bytes (inverse of `from_basename`).
    pub fn write<W: std::io::Write>(&self, mut w: W) -> std::io::Result<()> {
        w.write_all(&self.0.to_be_bytes())
    }
}
// org id husqv+oHDFDeBrRtm5ymelT1H84bAvU87dV7ig64UFo=
/// A unique identifier for files or directories in the actual
/// file system, to map "files from the graph" to real files.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct Inode(pub(in crate::pristine) u64);
impl std::fmt::Debug for Inode {
    /// Shown as `Inode(<base-32 of the little-endian bytes>)`.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        use byteorder::{ByteOrder, LittleEndian};
        let mut bytes = [0; 8];
        LittleEndian::write_u64(&mut bytes, self.0);
        let b32 = data_encoding::BASE32_NOPAD.encode(&bytes);
        write!(fmt, "Inode({})", b32)
    }
}
impl Inode {
    /// The root of the working copy.
    pub const ROOT: Inode = Inode(0);
    /// Whether this inode is [`Inode::ROOT`].
    pub fn is_root(&self) -> bool {
        *self == Inode::ROOT
    }
    /// Draw a fresh random inode identifier.
    pub(crate) fn random() -> Self {
        Inode(rand::random())
    }
}
// org id prMUSvcE1piODcnTI1gHUSSFp1wjMaYw2OCSrvp1k4o=
use super::Base32;
// Length in bytes of a BLAKE3 digest.
pub(crate) const BLAKE3_BYTES: usize = 32;
// Length in characters of the unpadded base-32 form of 33 bytes
// (digest + algorithm byte): ceil(33 * 8 / 5) = 53.
pub(crate) const BASE32_BYTES: usize = 53;
/// The external hash of changes.
#[derive(Serialize, Deserialize, Clone, Copy, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub enum Hash {
    /// None is the hash of the "null change", which introduced a
    /// single root vertex at the beginning of the repository.
    None,
    Blake3([u8; BLAKE3_BYTES]),
}
/// Incremental hasher used to compute change hashes.
pub(crate) enum Hasher {
    Blake3(blake3::Hasher),
}
impl Default for Hasher {
    /// A fresh hasher; currently always BLAKE3.
    fn default() -> Self {
        Self::Blake3(blake3::Hasher::new())
    }
}
impl Hasher {
    /// Feed `bytes` into the running hash.
    pub(crate) fn update(&mut self, bytes: &[u8]) {
        match *self {
            Hasher::Blake3(ref mut state) => {
                state.update(bytes);
            }
        }
    }
    /// Finalize (without consuming) and return the digest as a `Hash`.
    pub(crate) fn finish(&self) -> Hash {
        match *self {
            Hasher::Blake3(ref state) => {
                let digest = state.finalize();
                let mut out = [0; BLAKE3_BYTES];
                out.clone_from_slice(digest.as_bytes());
                Hash::Blake3(out)
            }
        }
    }
}
impl std::fmt::Debug for Hash {
    /// Debug output is the base-32 form, matching user-visible output.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        let b32 = self.to_base32();
        write!(fmt, "{:?}", b32)
    }
}
/// Algorithm used to compute change hashes.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[repr(u8)]
pub enum HashAlgorithm {
    None = 0,
    Blake3 = 1,
}
impl Hash {
    /// Serialize: one algorithm byte followed by the digest bytes.
    ///
    /// # Panics
    /// `Hash::None` has no byte form (`unimplemented!`).
    pub fn to_bytes(&self) -> [u8; 1 + BLAKE3_BYTES] {
        match *self {
            Hash::None => unimplemented!(),
            Hash::Blake3(ref s) => {
                let mut out = [0; 1 + BLAKE3_BYTES];
                out[0] = HashAlgorithm::Blake3 as u8;
                (&mut out[1..]).clone_from_slice(s);
                out
            }
        }
    }
    /// Inverse of [`Hash::to_bytes`]; trailing bytes past the 33 used
    /// are ignored. Returns `None` on a short buffer or unknown tag.
    pub fn from_bytes(s: &[u8]) -> Option<Self> {
        if s.len() >= 1 + BLAKE3_BYTES && s[0] == HashAlgorithm::Blake3 as u8 {
            let mut out = [0; BLAKE3_BYTES];
            // Take exactly BLAKE3_BYTES: the original `&s[1..]` made
            // `clone_from_slice` panic whenever `s` was longer than 33.
            out.clone_from_slice(&s[1..1 + BLAKE3_BYTES]);
            Some(Hash::Blake3(out))
        } else {
            None
        }
    }
    /// Parse a (possibly partial) base-32 hash, completing it with
    /// `'A'` characters (which decode to zero bits).
    pub fn from_prefix(s: &str) -> Option<Self> {
        let mut b32 = [b'A'; BASE32_BYTES];
        if s.len() > BASE32_BYTES {
            return None;
        }
        (&mut b32[..s.len()]).clone_from_slice(s.as_bytes());
        let bytes = if let Ok(bytes) = data_encoding::BASE32_NOPAD.decode(&b32) {
            bytes
        } else {
            return None;
        };
        // 53 base-32 chars always decode to 33 bytes; keep the digest part.
        let mut hash = [0; BLAKE3_BYTES];
        hash.clone_from_slice(&bytes[..BLAKE3_BYTES]);
        Some(Hash::Blake3(hash))
    }
}
impl super::Base32 for Hash {
    /// Returns the base-32 representation of a hash: the digest bytes
    /// followed by a one-byte algorithm tag, or `[0]` for `Hash::None`.
    fn to_base32(&self) -> String {
        match *self {
            Hash::None => data_encoding::BASE32_NOPAD.encode(&[0]),
            Hash::Blake3(ref payload) => {
                let mut buf = [0; 1 + BLAKE3_BYTES];
                buf[BLAKE3_BYTES] = HashAlgorithm::Blake3 as u8;
                (&mut buf[..BLAKE3_BYTES]).clone_from_slice(payload);
                data_encoding::BASE32_NOPAD.encode(&buf)
            }
        }
    }
    /// Parses a base-32 string into a hash.
    fn from_base32(s: &[u8]) -> Option<Self> {
        let decoded = match data_encoding::BASE32_NOPAD.decode(s) {
            Ok(d) => d,
            Err(_) => return None,
        };
        if decoded == [0] {
            Some(Hash::None)
        } else if decoded.len() == BLAKE3_BYTES + 1
            && decoded[BLAKE3_BYTES] == HashAlgorithm::Blake3 as u8
        {
            let mut out = [0; BLAKE3_BYTES];
            out.clone_from_slice(&decoded[..BLAKE3_BYTES]);
            Some(Hash::Blake3(out))
        } else {
            None
        }
    }
}
impl std::str::FromStr for Hash {
type Err = crate::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Some(b) = Self::from_base32(s.as_bytes()) {
Ok(b)
} else {
Err(crate::Error::ParseError { s: s.to_string() })
}
}
}
// Round-trip test: to_base32/from_base32 are inverses for both hash
// variants, and decoding a wrong-length payload yields `None`.
#[test]
fn from_to() {
    let mut h = Hasher::default();
    h.update(b"blabla");
    let h = h.finish();
    assert_eq!(Hash::from_base32(&h.to_base32().as_bytes()), Some(h));
    let h = Hash::None;
    assert_eq!(Hash::from_base32(&h.to_base32().as_bytes()), Some(h));
    // Three bytes is neither the `None` form nor a digest+tag form.
    let b = data_encoding::BASE32_NOPAD.encode(&[19, 18, 17]);
    assert_eq!(Hash::from_base32(&b.as_bytes()), None);
}
// org id Robvmv7MKVA9nhridLr+yEw9Z+03849XIXfaIMnJ8jI=
use super::change_id::*;
use super::vertex::*;
bitflags! {
    /// Possible flags of edges.
    #[derive(Serialize, Deserialize)]
    pub struct EdgeFlags: u8 {
        /// The target vertex is a direct extension of the source
        /// (split-block edge).
        const BLOCK = 1;
        /// A pseudo-edge, computed when applying the change to
        /// restore connectivity, and/or mark conflicts.
        const PSEUDO = 4;
        /// An edge encoding file system hierarchy.
        const FOLDER = 16;
        /// A "reverse" edge (all edges in the graph have a reverse edge).
        const PARENT = 32;
        /// An edge whose target (if not also `PARENT`) or
        /// source (if also `PARENT`) is marked as deleted.
        const DELETED = 128;
    }
}
/// The target half of an edge in the repository graph.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[doc(hidden)]
pub struct Edge {
    /// Flags of this edge.
    pub flag: EdgeFlags,
    /// Target of this edge.
    pub dest: Position<ChangeId>,
    /// Change that introduced this edge (possibly as a
    /// pseudo-edge, i.e. not explicitly in the change, but
    /// computed from it).
    pub introduced_by: ChangeId,
}
use super::*;
use byteorder::ByteOrder;
// State machine of `DumpChannel`: first stream the change log (with the
// dependencies of each change), then stream the graph edges.
enum DumpChannelState<T: super::TxnT, RT: std::ops::Deref<Target = T>> {
    Changes {
        // Cursor over the channel's log, ordered by timestamp.
        log: crate::pristine::Cursor<T, RT, T::RevchangesetCursor, u64, (ChangeId, Merkle)>,
        // Change whose dependencies are currently being streamed.
        current: ChangeId,
        // Cursor over `current`'s dependencies, if started.
        deps: Option<crate::pristine::Cursor<T, RT, T::DepCursor, ChangeId, ChangeId>>,
    },
    Graph {
        // Cursor over the channel's graph, ordered by vertex.
        graph: crate::pristine::Cursor<T, RT, T::GraphCursor, Vertex<ChangeId>, Edge>,
        // Last vertex for which a full (vertex + edge) chunk was sent.
        current: Vertex<ChangeId>,
        // Whether no vertex chunk has been emitted yet.
        is_first: bool,
    },
}
/// Iterator serializing a whole channel into wire-format chunks
/// (see `DumpChunk` and the `Msg` tags).
pub struct DumpChannel<
    T: super::TxnT,
    RT: std::ops::Deref<Target = T>,
    C: std::ops::Deref<Target = crate::pristine::Channel<T>>,
> {
    // `None` once the dump is finished.
    state: Option<DumpChannelState<T, RT>>,
    txn: RT,
    channel: C,
}
/// One wire-format message of a channel dump; each variant is a
/// fixed-size byte buffer (see the `Msg` tag byte at offset 0).
pub enum DumpChunk {
    /// Tag + 8-byte dependency id.
    Dep([u8; 9]),
    /// Tag + 33-byte hash + 8-byte change id.
    Hash([u8; 42]),
    /// Tag + 24-byte vertex + 25-byte edge.
    Edge([u8; 50]),
    /// Either a bare 25-byte edge, or the final BOTTOM vertex marker.
    End([u8; 25]),
}
impl std::ops::Deref for DumpChunk {
    type Target = [u8];

    /// All variants dereference to their raw byte payload.
    fn deref(&self) -> &Self::Target {
        match self {
            DumpChunk::Dep(b) => &b[..],
            DumpChunk::Hash(b) => &b[..],
            DumpChunk::Edge(b) => &b[..],
            DumpChunk::End(b) => &b[..],
        }
    }
}
// Tag byte identifying each chunk type on the wire; any other value is
// interpreted by the reader as a bare edge chunk.
#[repr(u8)]
enum Msg {
    Hash = 253,
    Dep = 254,
    Vertex = 255,
}
impl<
    T: super::TxnT,
    RT: std::ops::Deref<Target = T> + Clone,
    C: std::ops::Deref<Target = crate::pristine::Channel<T>>,
> Iterator for DumpChannel<T, RT, C>
{
    type Item = DumpChunk;
    /// Produces the next wire chunk. The state is `take`n at each step
    /// and put back before returning, so dropping mid-iteration is safe.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            match self.state.take() {
                Some(DumpChannelState::Changes {
                    mut log,
                    current,
                    deps,
                }) => {
                    // First drain the dependencies of `current`, if any.
                    if let Some(mut deps) = deps {
                        while let Some((h, dep)) = deps.next() {
                            // The dep table is keyed by change id: skip
                            // foreign entries, stop once past `current`.
                            if h > current {
                                break;
                            } else if h < current {
                                continue;
                            }
                            let mut msg = [Msg::Dep as u8; 9];
                            byteorder::BigEndian::write_u64(&mut msg[1..], dep.0);
                            self.state = Some(DumpChannelState::Changes {
                                log,
                                current,
                                deps: Some(deps),
                            });
                            debug!("dep msg = {:?}", msg);
                            return Some(DumpChunk::Dep(msg));
                        }
                    }
                    // Then move to the next change in the log, announcing
                    // its hash and internal id, and queueing its deps.
                    if let Some((_, (h, _))) = log.next() {
                        let deps = Some(T::iter_dep_ref(self.txn.clone(), h));
                        self.state = Some(DumpChannelState::Changes {
                            log,
                            current: h,
                            deps,
                        });
                        let ext = self.txn.get_external(h).unwrap();
                        let mut msg = [Msg::Hash as u8; 1 + 33 + 8];
                        (&mut msg[1..34]).clone_from_slice(&ext.to_bytes()[..]);
                        byteorder::BigEndian::write_u64(&mut msg[34..], h.0);
                        return Some(DumpChunk::Hash(msg));
                    } else {
                        // Change log exhausted: switch to streaming the graph.
                        self.state = Some(DumpChannelState::Graph {
                            graph: T::iter_graph_ref(
                                self.txn.clone(),
                                &self.channel.graph,
                                Vertex::ROOT,
                                None,
                            ),
                            current: Vertex::ROOT,
                            is_first: true,
                        })
                    }
                }
                Some(DumpChannelState::Graph {
                    mut graph,
                    mut current,
                    is_first,
                }) => {
                    if let Some((v, e)) = graph.next() {
                        // Only PARENT edges are sent; the receiver
                        // reconstructs the forward edges itself.
                        if !e.flag.contains(EdgeFlags::PARENT) {
                            self.state = Some(DumpChannelState::Graph {
                                graph,
                                current,
                                is_first,
                            });
                            continue;
                        }
                        if v != current || is_first {
                            // New source vertex: send vertex + edge together.
                            let mut buf = [Msg::Vertex as u8; 50];
                            byteorder::LittleEndian::write_u64(&mut buf[1..], v.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[9..], v.start.0);
                            byteorder::LittleEndian::write_u64(&mut buf[17..], v.end.0);
                            current = v;
                            buf[25] = e.flag.bits();
                            byteorder::LittleEndian::write_u64(&mut buf[26..], e.dest.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[34..], e.dest.pos.0);
                            byteorder::LittleEndian::write_u64(&mut buf[42..], e.introduced_by.0);
                            self.state = Some(DumpChannelState::Graph {
                                graph,
                                current,
                                is_first: false,
                            });
                            debug!("sending {:?}", &buf[..]);
                            return Some(DumpChunk::Edge(buf));
                        } else {
                            // Same source vertex: send the bare edge.
                            let mut buf = [0; 25];
                            buf[0] = e.flag.bits();
                            byteorder::LittleEndian::write_u64(&mut buf[1..], e.dest.change.0);
                            byteorder::LittleEndian::write_u64(&mut buf[9..], e.dest.pos.0);
                            byteorder::LittleEndian::write_u64(&mut buf[17..], e.introduced_by.0);
                            self.state = Some(DumpChannelState::Graph {
                                graph,
                                current,
                                is_first: false,
                            });
                            debug!("sending {:?}", &buf[..]);
                            return Some(DumpChunk::End(buf));
                        }
                    } else {
                        // Graph exhausted: terminate with the BOTTOM vertex.
                        self.state = None;
                        let mut buf = [Msg::Vertex as u8; 25];
                        byteorder::LittleEndian::write_u64(&mut buf[1..], Vertex::BOTTOM.change.0);
                        byteorder::LittleEndian::write_u64(&mut buf[9..], Vertex::BOTTOM.start.0);
                        byteorder::LittleEndian::write_u64(&mut buf[17..], Vertex::BOTTOM.end.0);
                        debug!("sending {:?}", buf);
                        return Some(DumpChunk::End(buf));
                    }
                }
                None => return None,
            }
        }
    }
}
/// Start a dump of `channel`, beginning with its change log from
/// timestamp 0 (dependencies stream lazily, per change).
pub fn dump_channel<
    T: super::TxnT,
    RT: std::ops::Deref<Target = T> + Clone,
    C: std::ops::Deref<Target = crate::pristine::Channel<T>>,
>(
    txn: RT,
    channel: C,
) -> DumpChannel<T, RT, C> {
    DumpChannel {
        state: Some(DumpChannelState::Changes {
            log: T::changeid_log_ref(txn.clone(), &channel, 0),
            current: ChangeId::ROOT,
            deps: None,
        }),
        txn,
        channel,
    }
}
/// Receiver side of a channel dump: parses the chunks produced by
/// `DumpChannel` and rebuilds the channel in a mutable transaction.
pub struct ChannelFromDump<'a, T: super::MutTxnT> {
    txn: &'a mut T,
    channel: ChannelRef<T>,
    // Incremental byte buffer for chunks split across reads.
    buf: Buf,
    // Source vertex of the edges currently being received.
    current: Vertex<ChangeId>,
    // Edges received so far; their reverses are added in `finish_channel`.
    reverses: Vec<(Vertex<ChangeId>, Edge)>,
    // Maps a vertex's end position to its start, to reconstruct
    // source vertices of reverse edges.
    starts: HashMap<Position<ChangeId>, ChangePosition>,
    // Change announced by the last `Msg::Hash` chunk.
    current_changeid: Option<ChangeId>,
    // Remote change id -> local change id, for ids already taken locally.
    local_changeid: HashMap<ChangeId, ChangeId>,
    /// Changes that introduced at least one non-deleted edge.
    pub alive: HashSet<ChangeId>,
}
/// The following type does zero-copy buffering: if there are enough
/// bytes in the part we just read, we just return these bytes. Else,
/// we copy the bytes in cache, and complete the cache once we have
/// enough bytes.
///
/// The size of the cache could be generic, but since edges and
/// vertices take 25 bytes, hashes 33, and [u8; 33] doesn't implement
/// AsMut<[u8]>, we just use a fixed-sized array of length 33.
pub struct Buf {
    /// Internal cache.
    buf: [u8; 68],
    /// Length of the internal cache that is currently used.
    buf_len: usize,
    /// Position in the last buffer that was read. The `read` method
    /// must be called with the same buffer until that method returns
    /// `None`.
    pos: usize,
}
impl Buf {
    /// Create a new buffer.
    fn new() -> Self {
        Buf {
            buf: [0; 68],
            buf_len: 0,
            pos: 0,
        }
    }
    /// Read `wanted` number of bytes from `bytes` using the internal
    /// cache if needed. This method must be called with the same
    /// `bytes` buffer until it returns `None`.
    ///
    /// Invariant relied upon below: whenever `buf_len > 0`, `pos == 0`
    /// (every branch that leaves bytes in the cache resets `pos`, and
    /// every branch that consumes the cache resets `buf_len`), so the
    /// `clone_from_slice` length arithmetic in the first branch is
    /// consistent.
    fn read<'a>(&'a mut self, bytes: &'a [u8], wanted: usize) -> Option<&'a [u8]> {
        trace!(
            "bytes = {:?}, self.buf = {:?} {:?} {:?}",
            bytes,
            &self.buf[..],
            self.buf_len,
            self.pos
        );
        assert!(wanted < self.buf.len());
        if self.buf_len > 0 {
            let needs = wanted - self.buf_len;
            if self.pos + needs > bytes.len() {
                // Not enough bytes to complete the internal buffer,
                // we need to save these extra bytes.
                let len = self.buf_len + bytes.len();
                (&mut self.buf[self.buf_len..len]).clone_from_slice(&bytes[self.pos..]);
                self.buf_len = len;
                self.pos = 0;
                None
            } else {
                // There are enough bytes, output them.
                (&mut self.buf[self.buf_len..wanted])
                    .clone_from_slice(&bytes[self.pos..self.pos + needs]);
                self.buf_len = 0;
                self.pos += needs;
                Some(&self.buf[..wanted])
            }
        } else if bytes.len() - self.pos >= wanted {
            // The internal buffer is empty, and `bytes` is long enough.
            let buf = &bytes[self.pos..self.pos + wanted];
            self.pos += wanted;
            Some(buf)
        } else {
            // The internal buffer is empty and `bytes` is too short,
            // save the extra bytes.
            self.buf_len = bytes.len() - self.pos;
            (&mut self.buf[..self.buf_len]).clone_from_slice(&bytes[self.pos..]);
            self.pos = 0;
            None
        }
    }
    /// Peek at the next unconsumed byte of `bytes`, if any.
    fn current<'a>(&self, bytes: &'a [u8]) -> Option<&'a u8> {
        // debug!("self.pos = {:?}", self.pos);
        bytes.get(self.pos)
    }
}
impl<'a, T: super::MutTxnT> ChannelFromDump<'a, T> {
    /// Start receiving a dump into `channel`, under transaction `txn`.
    pub fn new(txn: &'a mut T, channel: ChannelRef<T>) -> Self {
        let mut starts = HashMap::with_capacity(4096);
        // The root vertex is zero-length: its end is its start.
        starts.insert(Position::ROOT, Position::ROOT.pos);
        ChannelFromDump {
            txn,
            channel,
            buf: Buf::new(),
            current: Vertex::ROOT,
            reverses: Vec::with_capacity(4096),
            starts,
            current_changeid: None,
            local_changeid: HashMap::new(),
            alive: HashSet::new(),
        }
    }
    /// Feed one network read into the parser. Returns `Ok(true)` once
    /// the terminating BOTTOM vertex has been seen (channel complete).
    /// Chunks split across reads are completed via `self.buf`.
    pub fn read(&mut self, bytes: &[u8]) -> Result<bool, anyhow::Error> {
        let mut channel = self.channel.borrow_mut();
        while let Some(&cur) = self.buf.current(bytes) {
            debug!("cur = {:?}", cur);
            if cur == Msg::Hash as u8 {
                // Hash chunk: 1 tag + 33 hash bytes + 8-byte change id.
                if let Some(buf) = self.buf.read(bytes, 42) {
                    let hash = Hash::from_bytes(&buf[1..34]).unwrap();
                    let mut p = ChangeId(byteorder::BigEndian::read_u64(&buf[34..]));
                    // Test if `p` is already taken for another hash.
                    if let Some(hh) = self.txn.get_external(p) {
                        if hh != hash {
                            // Allocate a fresh local id and remember the
                            // remapping for subsequent chunks.
                            let pp = self.txn.make_changeid(&hash);
                            self.local_changeid.insert(p, pp);
                            p = pp
                        }
                    }
                    let t = channel.apply_counter;
                    debug!("hash = {:?} {:?}", hash, p);
                    self.txn.put_external(p, hash)?;
                    self.txn.put_internal(hash, p)?;
                    self.txn.put_changes(&mut channel, p, t, &hash)?;
                    self.current_changeid = Some(p);
                }
            } else if cur == Msg::Dep as u8 {
                debug!("dep");
                // Dependency chunk: 1 tag + 8-byte change id, relative
                // to the change announced by the last Hash chunk.
                if let Some(buf) = self.buf.read(bytes, 9) {
                    let mut a = ChangeId(byteorder::BigEndian::read_u64(&buf[1..9]));
                    if let Some(aa) = self.local_changeid.get(&a) {
                        a = *aa
                    }
                    if let Some(cur) = self.current_changeid {
                        self.txn.put_dep(cur, a)?;
                        self.txn.put_revdep(a, cur)?;
                    }
                    debug!("cur = {:?}, a = {:?}", self.current_changeid, a);
                }
            } else if cur == Msg::Vertex as u8 {
                // New vertex
                if let Some(buf) = self.buf.read(bytes, 25) {
                    self.current = read_vertex(buf);
                    debug!("new vertex {:?}", self.current);
                    // BOTTOM marks the end of the dump.
                    if self.current == Vertex::BOTTOM {
                        finish_channel(self.txn, &mut channel, &self.reverses, &self.starts)?;
                        return Ok(true);
                    }
                    if let Some(aa) = self.local_changeid.get(&self.current.change) {
                        self.current.change = *aa
                    }
                } else {
                    break;
                }
            } else if let Some(buf) = self.buf.read(bytes, 25) {
                // Edge
                let mut edge = read_edge(buf);
                // Remap remote change ids to local ones if needed.
                if let Some(aa) = self.local_changeid.get(&edge.dest.change) {
                    edge.dest.change = *aa
                }
                if let Some(aa) = self.local_changeid.get(&edge.introduced_by) {
                    edge.introduced_by = *aa
                }
                if !edge.flag.contains(EdgeFlags::DELETED) {
                    self.alive.insert(edge.dest.change);
                }
                debug!("put edge {:?} {:?}", self.current, edge);
                self.txn.put_graph(&mut channel.graph, self.current, edge)?;
                self.reverses.push((self.current, edge));
                self.starts
                    .insert(self.current.end_pos(), self.current.start);
            } else {
                break;
            }
        }
        // The whole read has been consumed (possibly into `self.buf`);
        // the next call starts at the beginning of its buffer.
        self.buf.pos = 0;
        Ok(false)
    }
    /// The edges received so far (source vertex and forward edge).
    pub fn edges(&self) -> &[(Vertex<ChangeId>, Edge)] {
        &self.reverses[..]
    }
}
/// Complete a dumped channel: for every received (forward) edge, insert
/// the corresponding reverse (PARENT-toggled) edge, reconstructing the
/// source vertex from the recorded `ends` map.
fn finish_channel<T: super::MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    reverses: &[(Vertex<ChangeId>, Edge)],
    ends: &HashMap<Position<ChangeId>, ChangePosition>,
) -> Result<(), anyhow::Error> {
    debug!("ends: {:?}", ends);
    for &(v, e) in reverses {
        debug!("{:?}", e);
        // The reverse edge starts from the vertex ending at `e.dest`.
        let u = Vertex {
            change: e.dest.change,
            start: *ends.get(&e.dest).unwrap(),
            end: e.dest.pos,
        };
        // `Edge` is `Copy`: mutate a plain copy (the original called
        // `.clone()` on a `Copy` type).
        let mut rev = e;
        rev.flag = rev.flag ^ EdgeFlags::PARENT;
        rev.dest = v.start_pos();
        txn.put_graph(&mut channel.graph, u, rev)?;
    }
    Ok(())
}
/// Decode a 25-byte vertex chunk:
/// `[tag][change: u64 LE][start: u64 LE][end: u64 LE]`.
fn read_vertex(bytes: &[u8]) -> Vertex<ChangeId> {
    Vertex {
        change: ChangeId(byteorder::LittleEndian::read_u64(&bytes[1..])),
        start: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[9..])),
        end: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[17..])),
    }
}
/// Decode a 25-byte edge chunk:
/// `[flags: u8][dest.change: u64 LE][dest.pos: u64 LE][introduced_by: u64 LE]`.
/// Panics (`unwrap`) on unknown flag bits.
fn read_edge(bytes: &[u8]) -> Edge {
    Edge {
        flag: EdgeFlags::from_bits(bytes[0]).unwrap(),
        dest: Position {
            change: ChangeId(byteorder::LittleEndian::read_u64(&bytes[1..])),
            pos: ChangePosition(byteorder::LittleEndian::read_u64(&bytes[9..])),
        },
        introduced_by: ChangeId(byteorder::LittleEndian::read_u64(&bytes[17..])),
    }
}
// org id 0h+Mdm9ufwAz5PQmkGsma0Y/jIa5PpdEABXNibwSCRI=
use super::Base32;
use byteorder::{ByteOrder, LittleEndian};
/// Internal (per-pristine) identifier of a change; the repository-wide
/// identifier is the external `Hash`.
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
#[doc(hidden)]
pub struct ChangeId(pub u64);
impl std::fmt::Debug for ChangeId {
    /// Shown as `ChangeId(<base-32>)`.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        let b32 = self.to_base32();
        write!(fmt, "ChangeId({})", b32)
    }
}
impl ChangeId {
    /// The identifier of the null change introducing the root vertex.
    pub(crate) const ROOT: ChangeId = ChangeId(0);
    /// Whether this is [`ChangeId::ROOT`].
    pub fn is_root(&self) -> bool {
        *self == ChangeId::ROOT
    }
}
impl super::Base32 for ChangeId {
    /// Base-32 of the little-endian bytes of the id.
    fn to_base32(&self) -> String {
        let mut b = [0; 8];
        LittleEndian::write_u64(&mut b, self.0);
        data_encoding::BASE32_NOPAD.encode(&b)
    }
    /// Decode a base-32 `ChangeId`, returning `None` on any invalid
    /// input. (The original called `.unwrap()` on `decode_len`,
    /// panicking on lengths that are not valid unpadded base-32, and
    /// sliced the 8-byte buffer with the decoded length, panicking
    /// whenever the input encoded more than 8 bytes.)
    fn from_base32(b: &[u8]) -> Option<Self> {
        let mut dec = [0; 8];
        let len = data_encoding::BASE32_NOPAD.decode_len(b.len()).ok()?;
        if len > dec.len() {
            return None;
        }
        if data_encoding::BASE32_NOPAD
            .decode_mut(b, &mut dec[..len])
            .is_ok()
        {
            Some(ChangeId(LittleEndian::read_u64(&dec)))
        } else {
            None
        }
    }
}
// org id l+m3DECe179CylswisBkgJHs8Kb25EVmZSnj3a+UnHw=
//! Treating strings as paths. For portability reasons, paths must
//! internally be treated as strings, and converted to paths only by
//! the backend, if required (in-memory backends will typically not
//! need that conversion).
/// Returns the parent of the path, if it exists. This function tries
/// to replicate the behaviour of `std::path::Path::parent`, but with
/// `&str` instead of `Path`.
///
/// ```ignore
/// use libpijul::path::parent;
/// assert_eq!(parent("/foo/bar"), Some("/foo"));
/// assert_eq!(parent("foo"), Some(""));
/// assert_eq!(parent("/"), None);
/// assert_eq!(parent(""), None);
/// ```
pub fn parent(mut path: &str) -> Option<&str> {
    loop {
        // The root and the empty path have no parent.
        if path.is_empty() || path == "/" {
            return None;
        }
        match path.rfind('/') {
            // A bare file name: its parent is the empty path.
            None => return Some(""),
            Some(i) => {
                let (head, tail) = path.split_at(i);
                if tail == "/" {
                    // Trailing separator: strip it and retry.
                    path = head;
                } else {
                    return Some(head);
                }
            }
        }
    }
}
/// Returns the file name of the path, if it exists. This function
/// tries to replicate the behaviour of `std::path::Path::file_name`,
/// but with `&str` instead of `Path`.
///
/// Like the original, returns `None` if the path terminates in `..`.
///
/// ```ignore
/// use libpijul::path::file_name;
/// assert_eq!(file_name("/usr/bin/"), Some("bin"));
/// assert_eq!(file_name("tmp/foo.txt"), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/."), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/.//"), Some("foo.txt"));
/// assert_eq!(file_name("foo.txt/.."), None);
/// assert_eq!(file_name("/"), None);
/// ```
pub fn file_name(mut path: &str) -> Option<&str> {
    if path.is_empty() || path == "/" {
        return None;
    }
    loop {
        let i = match path.rfind('/') {
            Some(i) => i,
            // No separator left: the whole remainder is the file name.
            None => return Some(path),
        };
        let f = &path[i + 1..];
        match f {
            ".." => return None,
            // Trailing `/` or `.`: drop this component and keep looking.
            "" | "." => path = &path[..i],
            _ => return Some(f),
        }
    }
}
// Mirrors the doc examples on `file_name` (which are `ignore`d in
// rustdoc), so they stay actually tested.
#[test]
fn test_file_name() {
    assert_eq!(file_name("/usr/bin/"), Some("bin"));
    assert_eq!(file_name("tmp/foo.txt"), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/."), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/.//"), Some("foo.txt"));
    assert_eq!(file_name("foo.txt/.."), None);
    assert_eq!(file_name("/"), None);
}
/// Returns an iterator over the non-empty components of a path,
/// delimited by `/`. Note that `.` and `..` are yielded like any
/// other component.
pub fn components<'a>(path: &'a str) -> Components<'a> {
    Components(path.split('/'))
}

/// Iterator over the non-empty `/`-separated components of a path;
/// see [`components`].
#[derive(Clone)]
pub struct Components<'a>(std::str::Split<'a, char>);

impl<'a> std::fmt::Debug for Components<'a> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        // The inner split iterator is not informative; keep it opaque.
        fmt.write_str("Components { .. }")
    }
}

impl<'a> Iterator for Components<'a> {
    type Item = &'a str;
    fn next(&mut self) -> Option<Self::Item> {
        // Skip the empty segments produced by leading, trailing or
        // doubled slashes.
        self.0.find(|c| !c.is_empty())
    }
}
/// Appends path component `extra` to `path`, inserting a `/`
/// separator when needed. Only works if `extra` is a relative path.
/// ```ignore
/// use libpijul::path::push;
/// let mut s = "a".to_string();
/// push(&mut s, "b");
/// assert_eq!(s, "a/b");
/// push(&mut s, "c");
/// assert_eq!(s, "a/b/c");
/// ```
pub fn push(path: &mut String, extra: &str) {
    // An absolute `extra` would silently discard the meaning of
    // `path`; forbid it.
    assert!(!extra.starts_with('/'));
    let needs_separator = !path.is_empty() && !path.ends_with('/');
    if needs_separator {
        path.push('/');
    }
    path.push_str(extra);
}
/// Removes the last component of `path`; a path with a single
/// component becomes empty.
/// ```ignore
/// use libpijul::path::pop;
/// let mut s = "a/b/c".to_string();
/// pop(&mut s);
/// assert_eq!(s, "a/b");
/// pop(&mut s);
/// assert_eq!(s, "a");
/// pop(&mut s);
/// assert_eq!(s, "");
/// ```
pub fn pop(path: &mut String) {
    match path.rfind('/') {
        // Cut at the last separator (the separator itself goes too).
        Some(i) => path.truncate(i),
        // Single component: the result is the empty path.
        None => path.clear(),
    }
}
// org id YWj26SRzNLfGg2Qeglz9QVnhMTU+0+RMuMvr6DwSA38=
//! Output the pristine to the working copy, synchronising file
//! changes (file additions, deletions and renames) in the process.
use super::{collect_children, OutputItem};
use crate::alive::retrieve;
use crate::changestore::ChangeStore;
use crate::fs::{create_new_inode, inode_filename};
use crate::pristine::*;
use crate::small_string::SmallString;
use crate::working_copy::WorkingCopy;
use crate::{alive, path, vertex_buffer};
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
// org id LRtRE+PMuzBEsI/KgD1sy2W/K0f0HAN70rZlknlfCWs=
/// A conflict encountered while outputting a file tree.
#[derive(Debug)]
pub enum Conflict {
    /// Several competing names were found for the same path (pushed
    /// when a second candidate shows up for `path`).
    Name { path: String },
    /// The file at `path` is simultaneously alive and deleted
    /// (its `OutputItem` was flagged `is_zombie`).
    ZombieFile { path: String },
    /// The same graph position was reached under more than one name.
    MultipleNames { pos: Position<ChangeId> },
    // The three variants below are not produced in this module —
    // presumably emitted by `vertex_buffer::ConflictsWriter` while
    // rendering file contents; confirm there.
    Zombie { path: String, line: usize },
    Cyclic { path: String, line: usize },
    Order { path: String, line: usize },
}
// org id 0ve7DFbo4xqHFBVPHbChT6lFMPwRxbNcxRs4Yzsb6rs=
/// Output updates the working copy after applying changes, including
/// the graph-file correspondence.
///
/// **WARNING:** This overwrites the working copy, cancelling any
/// unrecorded change.
pub fn output_repository_no_pending<T: MutTxnT, R: WorkingCopy, P: ChangeStore>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    prefix: &str,
    output_name_conflicts: bool,
) -> Result<Vec<Conflict>, anyhow::Error> {
    // `ChangeId::ROOT` stands in for "no pending change": see the
    // `pending_change_id` comparison in `collect_dead_files`.
    output_repository(
        repo,
        changes,
        txn,
        &mut channel.r.borrow_mut(),
        ChangeId::ROOT,
        &mut crate::path::components(prefix),
        output_name_conflicts,
    )
}
// org id MpMlEfciwimf8weu0yPEV+/uIP9ErgimOTSV8nipLRA=
/// Outputs the graph of `channel` into the working copy `repo`,
/// restricted to the paths under `prefix`, returning the conflicts
/// encountered. Dead files are deleted, existing files are renamed
/// into place, and new files are created.
fn output_repository<
    'a,
    T: MutTxnT,
    R: WorkingCopy,
    P: ChangeStore,
    I: Iterator<Item = &'a str>,
>(
    repo: &mut R,
    changes: &P,
    txn: &mut T,
    channel: &mut Channel<T>,
    pending_change_id: ChangeId,
    prefix: &mut I,
    output_name_conflicts: bool,
) -> Result<Vec<Conflict>, anyhow::Error> {
    // org id lpPIIMEW5CckD8tHjyvaRg/qw+G7csmIRQ9jxvSlOrk=
    let mut conflicts = Vec::new();
    // Breadth-first traversal state: `files` holds the candidates at
    // the current depth, keyed by path; `next_files` collects the next
    // depth level.
    let mut files = HashMap::new();
    let mut next_files = HashMap::new();
    let mut next_prefix_basename = prefix.next();
    collect_children(
        txn,
        changes,
        channel,
        Position::ROOT,
        Inode::ROOT,
        "",
        next_prefix_basename,
        &mut files,
    )?;
    let mut is_first_none = true;
    if next_prefix_basename.is_none() {
        // The prefix is exhausted at the root: dead files below the
        // root can be cleaned up immediately.
        let dead = collect_dead_files(txn, channel, pending_change_id, Inode::ROOT);
        debug!("dead (line {}) = {:?}", line!(), dead);
        kill_dead_files(txn, repo, &dead)?;
        is_first_none = false;
    }
    debug!("done collecting: {:?}", files);
    // `done` records the name vertex chosen for each graph position;
    // `done_inodes` prevents outputting the same inode twice.
    let mut done = HashMap::new();
    let mut done_inodes = HashSet::new();
    // Actual moves are deferred to avoid a situation where we have two
    // files a and b, first rename a -> b, and then b -> c.
    let mut actual_moves = Vec::new();
    // org id gsYVU8LjyrWgVuIqAeIYTdCP2GY2jK+dVUaY5EM8DZU=
    while !files.is_empty() {
        debug!("files {:?}", files.len());
        next_files.clear();
        next_prefix_basename = prefix.next();
        for (a, mut b) in files.drain() {
            debug!("files: {:?} {:?}", a, b);
            // Sort the candidates for this path by their change's
            // position in the channel, so the winner is deterministic.
            b.sort_by(|u, v| {
                txn.get_changeset(&channel.changes, u.0.change, None)
                    .unwrap()
                    .cmp(
                        &txn.get_changeset(&channel.changes, v.0.change, None)
                            .unwrap(),
                    )
            });
            let mut is_first_name = true;
            for (name_key, mut output_item) in b {
                match done.entry(output_item.pos) {
                    Entry::Occupied(e) => {
                        debug!("pos already visited: {:?} {:?}", a, output_item.pos);
                        // Reaching the same position under a different
                        // name is a multiple-names conflict.
                        if *e.get() != name_key {
                            conflicts.push(Conflict::MultipleNames {
                                pos: output_item.pos,
                            });
                        }
                        continue;
                    }
                    Entry::Vacant(e) => {
                        e.insert(name_key);
                    }
                }
                if let Some((inode, _)) = output_item.inode {
                    if !done_inodes.insert(inode) {
                        debug!("inode already visited: {:?} {:?}", a, inode);
                        continue;
                    }
                }
                let name = if !is_first_name {
                    // A second candidate for the same path: name
                    // conflict. Either output it under a decorated
                    // name, or just record the conflict.
                    if output_name_conflicts {
                        let name = make_conflicting_name(&a, name_key);
                        conflicts.push(Conflict::Name { path: name.clone() });
                        name
                    } else {
                        debug!("not outputting {:?} {:?}", a, name_key);
                        conflicts.push(Conflict::Name {
                            path: a.to_string(),
                        });
                        break;
                    }
                } else {
                    is_first_name = false;
                    a.clone()
                };
                let file_name = path::file_name(&name).unwrap();
                path::push(&mut output_item.path, file_name);
                // NOTE(review): this empties `output_item.path`, so the
                // `output_file` call below sees an empty path — confirm
                // intended.
                let path = std::mem::replace(&mut output_item.path, String::new());
                // org id 17m71heSzJMzN/R3aNLLqJlxXhZYglRTUOrHQjDCwVs=
                let inode = move_or_create(
                    txn,
                    repo,
                    &output_item,
                    &path,
                    &file_name,
                    &mut actual_moves,
                )?;
                if next_prefix_basename.is_none() && is_first_none {
                    // First level past the prefix: clean the dead files
                    // below this inode, once.
                    let dead = collect_dead_files(txn, channel, pending_change_id, inode);
                    debug!("dead (line {}) = {:?}", line!(), dead);
                    kill_dead_files(txn, repo, &dead)?;
                    is_first_none = false;
                }
                // org id 3dDmtOVITWsw6WH+WZ2gvHHNSFGeGJOf7xMvsclLv0M=
                if output_item.meta.is_dir() {
                    repo.create_dir_all(&path)?;
                    // Queue the directory's children for the next level.
                    collect_children(
                        txn,
                        changes,
                        channel,
                        output_item.pos,
                        inode,
                        &path,
                        next_prefix_basename,
                        &mut next_files,
                    )?
                } else {
                    repo.write_file(&path, |w: &mut dyn std::io::Write| {
                        output_file(txn, channel, changes, &output_item, &mut conflicts, w)
                    })?;
                }
                if output_item.is_zombie {
                    conflicts.push(Conflict::ZombieFile {
                        path: name.to_string(),
                    })
                }
                // org id 2XIp9np3LpR5M9OuJWMtiEOx57jFCeckeuAd61x3hKU=
                repo.set_permissions(&path, output_item.meta.permissions())?;
            }
        }
        std::mem::swap(&mut files, &mut next_files);
    }
    // Replay the deferred renames now that every target is final.
    for (a, b) in actual_moves.iter() {
        repo.rename(a, b)?
    }
    Ok(conflicts)
}
// org id zB9X/hdfYcIVLvilzZg7Z+hMNTdYgDxn8Yv0ibiwW9s=
/// Builds a decorated name for a conflicting file: the original
/// basename with the base32 id of the change that introduced
/// `name_key` appended after a dot.
fn make_conflicting_name(name: &str, name_key: Vertex<ChangeId>) -> String {
    let base = path::file_name(name).unwrap();
    let suffix = name_key.change.to_base32();
    let mut result = path::parent(name).unwrap().to_string();
    path::push(&mut result, &format!("{}.{}", base, suffix));
    result
}
// org id xBUt7AHfxqCxXNwSBliMDFiMWRNF3c0qpG6MYcEiSH4=
/// Ensures the working copy has an inode for `output_item` at `path`:
/// either renames the existing file into place, or creates a fresh
/// inode, updating the tree/revtree and inodes/revinodes tables.
///
/// Renames are not performed directly: the file is first moved to a
/// random temporary name, and the final rename is appended to
/// `actual_moves`, to be replayed by the caller once every target
/// path is final (avoids clobbering chains like a -> b, b -> c).
fn move_or_create<T: MutTxnT, R: WorkingCopy>(
    txn: &mut T,
    repo: &mut R,
    output_item: &OutputItem,
    path: &str,
    file_name: &str,
    actual_moves: &mut Vec<(String, String)>,
) -> Result<Inode, anyhow::Error> {
    let file_id = OwnedPathId {
        parent_inode: output_item.parent,
        basename: SmallString::from_str(&file_name),
    };
    let file_id_ = file_id.as_file_id();
    debug!("move_or_create {:?}", file_id_);
    // org id xS/j7ZPMIOAF9NgL2r1xaKrwJI6haZ2eBL0EQyNBgyo=
    if let Some((inode, _)) = output_item.inode {
        // The file already exists in the working copy: find its
        // current name and rename it if that name differs from `path`.
        if let Some(ref current_name) = inode_filename(txn, inode) {
            debug!("current_name = {:?}, path = {:?}", current_name, path);
            if current_name != path {
                let parent = txn.get_revtree(inode, None).unwrap().to_owned();
                debug!("parent = {:?}, inode = {:?}", parent, inode);
                assert!(txn.del_revtree(inode, Some(parent.as_file_id()))?);
                assert!(txn.del_tree(parent.as_file_id(), Some(inode))?);
                // Move to a random temporary sibling first; the final
                // rename is deferred via `actual_moves`.
                let mut tmp_path = path.to_string();
                crate::path::pop(&mut tmp_path);
                use rand::Rng;
                let s: String = rand::thread_rng()
                    .sample_iter(&rand::distributions::Alphanumeric)
                    .take(30)
                    .collect();
                crate::path::push(&mut tmp_path, &s);
                // Fix: `&current_name` had been mangled into `¤t_name`
                // (an HTML-entity `&curren;` decoding accident), which
                // does not compile.
                repo.rename(&current_name, &tmp_path)?;
                actual_moves.push((tmp_path, path.to_string()));
                // If the new location is overwriting an existing one,
                // actually overwrite.
                if let Some(inode) = txn.get_tree(file_id_, None) {
                    crate::fs::rec_delete(txn, file_id.clone(), inode, true)?;
                }
                txn.put_inodes(inode, output_item.pos)?;
                txn.put_revinodes(output_item.pos, inode)?;
                txn.put_tree(file_id_, inode)?;
                txn.put_revtree(inode, file_id_)?;
            }
        } else {
            debug!("no current name, inserting {:?} {:?}", file_id_, inode);
            // The inode has no on-disk name yet: overwrite whatever
            // occupies the slot, then register the correspondence.
            if let Some(inode) = txn.get_tree(file_id_, None) {
                crate::fs::rec_delete(txn, file_id.clone(), inode, true)?;
            }
            txn.put_inodes(inode, output_item.pos)?;
            txn.put_revinodes(output_item.pos, inode)?;
            txn.put_tree(file_id_, inode)?;
            txn.put_revtree(inode, file_id_)?;
        }
        Ok(inode)
    // org id uwWMiVIILtuAREerIsrWeLRjSBFCxhZtjJRLJ1BhHqQ=
    } else {
        // No existing inode: overwrite whatever occupies the slot,
        // then create a brand new inode for this file.
        if let Some(inode) = txn.get_tree(file_id_, None) {
            crate::fs::rec_delete(txn, file_id.clone(), inode, true)?;
        }
        let inode = create_new_inode(txn);
        debug!(
            "created new inode {:?} {:?} {:?}",
            inode, output_item.pos, file_id_
        );
        txn.put_inodes(inode, output_item.pos)?;
        txn.put_revinodes(output_item.pos, inode)?;
        txn.put_tree(file_id_, inode)?;
        txn.put_revtree(inode, file_id_)?;
        Ok(inode)
    }
}
// org id GN2HnMRazrBbQEPBgPOj2kiaHsrz9CfnCmvfQnMs+gU=
/// Writes the contents of `output_item` to `w`, rendering conflict
/// markers into `conflicts`, then deletes the forward edges reported
/// by the graph output.
fn output_file<T: MutTxnT, P: ChangeStore>(
    txn: &mut T,
    channel: &mut Channel<T>,
    changes: &P,
    output_item: &OutputItem,
    conflicts: &mut Vec<Conflict>,
    w: &mut dyn std::io::Write,
) -> Result<(), anyhow::Error> {
    let mut l = retrieve(txn, channel, output_item.pos);
    let mut f = vertex_buffer::ConflictsWriter::new(w, &output_item.path, conflicts);
    // `forward` is filled by `output_graph` with edges to remove.
    let mut forward = Vec::new();
    alive::output_graph(changes, txn, channel, &mut f, &mut l, &mut forward)?;
    for &(vertex, edge) in forward.iter() {
        // Unwrap ok since `edge` is in the channel.
        let dest = txn.find_block(&channel, edge.dest).unwrap();
        debug!("deleting forward edge {:?} {:?} {:?}", vertex, dest, edge);
        txn.del_graph_with_rev(channel, edge.flag, vertex, dest, edge.introduced_by)?;
    }
    Ok(())
}
// org id 4uIws8SX4Pc6k8n8Gg5W8ukCH1XvjAWIViBruFTA6rg=
/// Walks the tree below `inode` and collects the entries whose graph
/// vertex is no longer alive (and was not introduced by the pending
/// change), together with their current working-copy names.
fn collect_dead_files<T: MutTxnT>(
    txn: &mut T,
    channel: &Channel<T>,
    pending_change_id: ChangeId,
    inode: Inode,
) -> HashMap<OwnedPathId, (Inode, Option<String>)> {
    // Breadth-first traversal; the boolean records whether the parent
    // is dead (children of dead directories are dead as well).
    let mut inodes = vec![(inode, false)];
    let mut next_inodes = Vec::new();
    let mut dead = HashMap::new();
    while !inodes.is_empty() {
        for (inode, parent_is_dead) in inodes.drain(..) {
            // Scan tree entries starting at (inode, ""): the empty
            // basename sorts first among this parent's entries.
            for (id, inode_) in txn.iter_tree(
                OwnedPathId {
                    parent_inode: inode,
                    basename: SmallString::from_str(""),
                },
                None,
            ) {
                // Stop once past this parent; skip anything before it.
                if id.parent_inode > inode {
                    break;
                } else if id.parent_inode < inode {
                    continue;
                }
                let is_dead = parent_is_dead || {
                    if let Some(vertex) = txn.get_inodes(inode_, None) {
                        // Dead unless pending or still alive in the
                        // channel's graph.
                        vertex.change != pending_change_id
                            && !txn.is_alive(channel, vertex.inode_vertex())
                    } else {
                        // No graph vertex at all: dead by definition.
                        true
                    }
                };
                if is_dead {
                    dead.insert(id.to_owned(), (inode_, inode_filename(txn, inode_)));
                }
                if inode_ != inode {
                    next_inodes.push((inode_, is_dead))
                }
            }
        }
        std::mem::swap(&mut inodes, &mut next_inodes)
    }
    dead
}
// org id qtncQESQX2X6uNqryalEkYZlRV7d5PeDxHnU2aIx/XA=
/// Removes the given dead files from the tree/inode tables and from
/// the working copy `repo`.
fn kill_dead_files<T: MutTxnT, W: WorkingCopy>(
    txn: &mut T,
    repo: &mut W,
    dead: &HashMap<OwnedPathId, (Inode, Option<String>)>,
) -> Result<(), anyhow::Error> {
    for (fileid, (inode, ref name)) in dead.iter() {
        debug!("killing {:?} {:?} {:?}", fileid, inode, name);
        // The asserts check table invariants: these entries must exist.
        assert!(txn.del_tree(fileid.as_file_id(), Some(*inode))?);
        if !fileid.basename.is_empty() {
            assert!(txn.del_revtree(*inode, Some(fileid.as_file_id()))?);
        }
        if let Some(vertex) = txn.get_inodes(*inode, None) {
            assert!(txn.del_inodes(*inode, Some(vertex))?);
            assert!(txn.del_revinodes(vertex, Some(*inode))?);
        }
        // Only files that still have a name on disk get removed from
        // the working copy.
        if let Some(name) = name {
            repo.remove_path(&name)?
        }
    }
    Ok(())
}
use crate::changestore::ChangeStore;
use crate::path;
use crate::pristine::*;
use std::collections::HashMap;
mod output;
pub use output::*;
mod archive;
pub use archive::*;
/// One directory entry discovered by `collect_children`, waiting to be
/// written to the working copy or an archive.
#[derive(Debug)]
struct OutputItem {
    // Working-copy inode of the parent directory.
    parent: Inode,
    // Path of the parent directory; the basename is pushed later by
    // the consumer.
    path: String,
    // Metadata (permissions, directory bit) decoded from the name
    // vertex contents.
    meta: InodeMetadata,
    // Graph position of this entry's inode vertex.
    pos: Position<ChangeId>,
    // Existing working-copy inode and its recorded graph position, if
    // this entry is already tracked.
    inode: Option<(Inode, Position<ChangeId>)>,
    // True if the entry is both alive and deleted (see `is_zombie`).
    is_zombie: bool,
}
/// Collects the children of the directory at `inode_pos` into `files`,
/// keyed by their full path. When `prefix_basename` is `Some`, only
/// the child with that basename is collected.
fn collect_children<T: TxnT, P: ChangeStore>(
    txn: &T,
    changes: &P,
    channel: &Channel<T>,
    inode_pos: Position<ChangeId>,
    inode: Inode,
    path: &str,
    prefix_basename: Option<&str>,
    files: &mut HashMap<String, Vec<(Vertex<ChangeId>, OutputItem)>>,
) -> Result<(), anyhow::Error> {
    debug!("path = {:?}", path);
    // Debug-only dump of every edge adjacent to the inode vertex.
    for e in txn.iter_adjacent(
        &channel,
        inode_pos.inode_vertex(),
        EdgeFlags::empty(),
        EdgeFlags::all(),
    ) {
        debug!("inode_pos, e = {:?}", e);
    }
    // Each outgoing FOLDER edge leads to a name vertex (one child).
    for e in txn.iter_adjacent(
        &channel,
        inode_pos.inode_vertex(),
        EdgeFlags::FOLDER,
        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
    ) {
        // org id Gb4FgVTFFkCPgh6xtUjeL/ntA6wAsIKhRiiX/8b4Jy8=
        // This unwrap is ok since e.dest is in the channel.
        let name_vertex = txn.find_block(&channel, e.dest).unwrap();
        // A name vertex stores 2 bytes of metadata followed by the
        // UTF-8 basename.
        let mut name_buf = Vec::new();
        changes.get_contents(|h| txn.get_external(h), name_vertex, &mut name_buf)?;
        let (perms, basename) = name_buf.as_slice().split_at(2);
        let (perms, basename) = (
            InodeMetadata::from_basename(perms),
            std::str::from_utf8(basename).unwrap(),
        );
        debug!("filename: {:?} {:?}", perms, basename);
        let mut name = path.to_string();
        // Filter by the requested prefix component, if any.
        if let Some(next) = prefix_basename {
            if next != basename {
                continue;
            }
        }
        path::push(&mut name, basename);
        // org id RFCdVpFfGZuILPerwkNtNS2jmRrxrMOGl0kh7aVMVmM=
        debug!("name_vertex: {:?} {:?}", e, name_vertex);
        for e in txn.iter_adjacent(&channel, name_vertex, EdgeFlags::empty(), EdgeFlags::all()) {
            debug!("e = {:?}", e);
        }
        // The child's inode vertex hangs below the name vertex.
        let child = if let Some(child) = txn
            .iter_adjacent(
                &channel,
                name_vertex,
                EdgeFlags::FOLDER,
                EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::PSEUDO,
            )
            .next()
        {
            child
        } else {
            // Inconsistent graph: dump it to "debug_output" and abort.
            let child = txn
                .iter_adjacent(&channel, name_vertex, EdgeFlags::FOLDER, EdgeFlags::all())
                .filter(|e| !e.flag.contains(EdgeFlags::PARENT))
                .next()
                .unwrap();
            let mut f = std::fs::File::create("debug_output").unwrap();
            txn.debug_root_rev(channel, child.dest.inode_vertex(), &mut f)
                .unwrap();
            panic!("no child");
        };
        debug!("child: {:?}", child);
        // Look up the working-copy inode already mapped to this
        // position, if any.
        let child_inode = txn
            .get_revinodes(child.dest, None)
            .map(|inode| (inode, txn.get_inodes(inode, None).unwrap()));
        debug!("child_inode: {:?}", child_inode);
        let v = files.entry(name).or_insert(Vec::new());
        v.push((
            name_vertex,
            OutputItem {
                parent: inode,
                path: path.to_string(),
                meta: perms,
                pos: child.dest,
                inode: child_inode,
                is_zombie: is_zombie(txn, channel, child.dest),
            },
        ));
    }
    Ok(())
}
/// True if `pos`'s inode vertex has an incoming FOLDER edge that is
/// also DELETED, i.e. the file was deleted by some change but is still
/// present in the graph.
fn is_zombie<T: TxnT>(txn: &T, channel: &Channel<T>, pos: Position<ChangeId>) -> bool {
    let flags = EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::DELETED;
    let mut edges = txn.iter_adjacent(&channel, pos.inode_vertex(), flags, flags | EdgeFlags::BLOCK);
    edges.next().is_some()
}
use super::*;
use crate::changestore::ChangeStore;
use crate::Conflict;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
/// Abstraction over an archive being written (e.g. a tarball): files
/// are created, filled through `std::io::Write`, then closed.
pub trait Archive {
    type File: std::io::Write;
    /// Starts a new file at `path` with the given permission bits.
    fn create_file(&mut self, path: &str, perm: u16) -> Self::File;
    /// Finalizes `f`, committing its contents to the archive.
    fn close_file(&mut self, f: Self::File) -> Result<(), anyhow::Error>;
}
/// [`Archive`] implementation producing a gzip-compressed tarball.
#[cfg(feature = "tarball")]
pub struct Tarball<W: std::io::Write> {
    pub archive: tar::Builder<flate2::write::GzEncoder<W>>,
    // Optional prefix prepended to every path stored in the archive.
    pub prefix: Option<String>,
    // Scratch buffer recycled between files (see `create_file` /
    // `close_file`) to avoid reallocating per file.
    pub buffer: Vec<u8>,
}
/// An in-memory file being accumulated before it is appended to the
/// tarball by `Tarball::close_file`.
#[cfg(feature = "tarball")]
pub struct File {
    // File contents, written through the `std::io::Write` impl below.
    buf: Vec<u8>,
    // Final path inside the archive (prefix already applied).
    path: String,
    // Unix permission bits for the tar header.
    permissions: u16,
}
#[cfg(feature = "tarball")]
impl std::io::Write for File {
    fn write(&mut self, data: &[u8]) -> Result<usize, std::io::Error> {
        // Delegate to Vec<u8>'s Write impl: appends everything and
        // reports the full length.
        std::io::Write::write(&mut self.buf, data)
    }
    fn flush(&mut self) -> Result<(), std::io::Error> {
        // Purely in-memory: nothing to flush.
        Ok(())
    }
}
#[cfg(feature = "tarball")]
impl<W: std::io::Write> Tarball<W> {
    /// Wraps `w` in a gzip encoder (best compression) driving a fresh
    /// tar builder. `prefix`, if any, is prepended to archived paths.
    pub fn new(w: W, prefix: Option<String>) -> Self {
        Tarball {
            archive: tar::Builder::new(flate2::write::GzEncoder::new(
                w,
                flate2::Compression::best(),
            )),
            prefix,
            buffer: Vec::new(),
        }
    }
}
#[cfg(feature = "tarball")]
impl<W: std::io::Write> Archive for Tarball<W> {
    type File = File;
    /// Starts an in-memory file; the scratch buffer is handed over and
    /// comes back in `close_file`.
    fn create_file(&mut self, path: &str, permissions: u16) -> Self::File {
        self.buffer.clear();
        File {
            buf: std::mem::replace(&mut self.buffer, Vec::new()),
            path: if let Some(ref prefix) = self.prefix {
                prefix.clone() + path
            } else {
                path.to_string()
            },
            permissions,
        }
    }
    /// Appends the finished file to the tar stream and recycles its
    /// buffer.
    fn close_file(&mut self, file: Self::File) -> Result<(), anyhow::Error> {
        let mut header = tar::Header::new_gnu();
        header.set_path(&file.path)?;
        header.set_size(file.buf.len() as u64);
        header.set_mode(file.permissions as u32);
        // Checksum must be set last, after all other header fields.
        header.set_cksum();
        self.archive.append(&header, &file.buf[..])?;
        // Take the buffer back for the next `create_file`.
        self.buffer = file.buf;
        Ok(())
    }
}
/// Writes the channel's tree (restricted to `prefix`) into `arch`,
/// returning the conflicts encountered. Mirrors `output_repository`,
/// but writes to an [`Archive`] instead of a working copy and never
/// renames or deletes anything.
pub(crate) fn archive<'a, T: TxnT, P: ChangeStore, I: Iterator<Item = &'a str>, A: Archive>(
    changes: &P,
    txn: &T,
    channel: &ChannelRef<T>,
    prefix: &mut I,
    arch: &mut A,
) -> Result<Vec<Conflict>, anyhow::Error> {
    let channel = channel.borrow();
    let mut conflicts = Vec::new();
    // Breadth-first traversal state, one depth level per outer loop
    // iteration.
    let mut files = HashMap::new();
    let mut next_files = HashMap::new();
    let mut next_prefix_basename = prefix.next();
    collect_children(
        txn,
        changes,
        &channel,
        Position::ROOT,
        Inode::ROOT,
        "",
        next_prefix_basename,
        &mut files,
    )?;
    // `done`: chosen name vertex per position; `done_inodes`: inodes
    // already archived.
    let mut done = HashMap::new();
    let mut done_inodes = HashSet::new();
    while !files.is_empty() {
        debug!("files {:?}", files.len());
        next_files.clear();
        next_prefix_basename = prefix.next();
        for (a, mut b) in files.drain() {
            debug!("files: {:?} {:?}", a, b);
            // Sort candidates by their change's position in the
            // channel, so the winner is deterministic.
            b.sort_by(|u, v| {
                txn.get_changeset(&channel.changes, u.0.change, None)
                    .unwrap()
                    .cmp(
                        &txn.get_changeset(&channel.changes, v.0.change, None)
                            .unwrap(),
                    )
            });
            let mut is_first_name = true;
            for (name_key, mut output_item) in b {
                match done.entry(output_item.pos) {
                    Entry::Occupied(e) => {
                        debug!("pos already visited: {:?} {:?}", a, output_item.pos);
                        if *e.get() != name_key {
                            conflicts.push(Conflict::MultipleNames {
                                pos: output_item.pos,
                            });
                        }
                        continue;
                    }
                    Entry::Vacant(e) => {
                        e.insert(name_key);
                    }
                }
                if let Some((inode, _)) = output_item.inode {
                    if !done_inodes.insert(inode) {
                        debug!("inode already visited: {:?} {:?}", a, inode);
                        continue;
                    }
                }
                let name = if !is_first_name {
                    // Unlike `output_repository`, archives never emit
                    // decorated conflicting names: just record the
                    // conflict and skip the remaining candidates.
                    conflicts.push(Conflict::Name {
                        path: a.to_string(),
                    });
                    break;
                } else {
                    is_first_name = false;
                    a.clone()
                };
                let file_name = path::file_name(&name).unwrap();
                path::push(&mut output_item.path, file_name);
                let path = std::mem::replace(&mut output_item.path, String::new());
                if output_item.meta.is_dir() {
                    // Queue the directory's children for the next level.
                    collect_children(
                        txn,
                        changes,
                        &channel,
                        output_item.pos,
                        Inode::ROOT, // unused
                        &path,
                        next_prefix_basename,
                        &mut next_files,
                    )?
                } else {
                    let mut l = crate::alive::retrieve(txn, &channel, output_item.pos);
                    let mut f = arch.create_file(&path, output_item.meta.permissions());
                    {
                        // NOTE(review): `output_item.path` was emptied
                        // by the `replace` above, so the conflict
                        // writer sees an empty path here — confirm
                        // intended.
                        let mut f = crate::vertex_buffer::ConflictsWriter::new(
                            &mut f,
                            &output_item.path,
                            &mut conflicts,
                        );
                        crate::alive::output_graph(
                            changes,
                            txn,
                            &channel,
                            &mut f,
                            &mut l,
                            &mut Vec::new(),
                        )?;
                    }
                    arch.close_file(f)?;
                }
                if output_item.is_zombie {
                    conflicts.push(Conflict::ZombieFile {
                        path: name.to_string(),
                    })
                }
            }
        }
        std::mem::swap(&mut files, &mut next_files);
    }
    Ok(conflicts)
}
// org id 58v3AKimretz5he0vs+xqV3r0G0SNrO0HFWAzfAI/50=
use crate::alive::{Graph, VertexId};
use crate::change::*;
use crate::find_alive::*;
use crate::pristine::*;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
impl Workspace {
    /// Retrieves (or loads and caches) the graph for `inode`, together
    /// with a map from each vertex to its `VertexId` in that graph.
    /// Returns `Ok(None)` when `inode.change` is `None`.
    pub(crate) fn load_graph<T: TxnT>(
        &mut self,
        txn: &T,
        channel: &Channel<T>,
        inode: Position<Option<Hash>>,
    ) -> Result<Option<&(Graph, HashMap<Vertex<ChangeId>, VertexId>)>, crate::Error> {
        if let Some(change) = inode.change {
            match self.graphs.entry(inode) {
                // Already cached.
                Entry::Occupied(e) => Ok(Some(e.into_mut())),
                Entry::Vacant(v) => {
                    let pos = Position {
                        // Translate the external hash to an internal id.
                        change: if let Some(i) = txn.get_internal(change) {
                            i
                        } else {
                            return Err(crate::Error::InconsistentChange.into());
                        },
                        pos: inode.pos,
                    };
                    let mut graph = crate::alive::retrieve(txn, channel, pos);
                    // `tarjan` presumably computes the graph's strongly
                    // connected components — see `alive::Graph`.
                    graph.tarjan();
                    // Index every line's vertex by its position.
                    let mut ids = HashMap::new();
                    for (i, l) in graph.lines.iter().enumerate() {
                        ids.insert(l.vertex, VertexId(i));
                    }
                    Ok(Some(v.insert((graph, ids))))
                }
            }
        } else {
            Ok(None)
        }
    }
}
// org id CwY6M+Aa91RfVAJkIYG3StPNDxg5S2/Q0RJB2eWlhFg=
/// Reconnects vertex `c` upwards: finds `c`'s closest alive ancestors
/// and adds PSEUDO edges from each of them to every vertex in `d`.
/// FOLDER pseudo-edges are used when `d_is_folder`.
pub(crate) fn repair_missing_up_context<
    'a,
    T: MutTxnT,
    I: IntoIterator<Item = &'a Vertex<ChangeId>>,
>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    c: Vertex<ChangeId>,
    d: I,
    d_is_folder: bool,
) -> Result<(), anyhow::Error> {
    let now = std::time::Instant::now();
    let (mut alive, files) = find_alive_up(txn, channel, c)?;
    crate::TIMERS.lock().unwrap().find_alive += now.elapsed();
    // Make sure the graph for `inode` is cached in the workspace.
    ws.load_graph(txn, channel, inode)?;
    debug!("repair_missing_up_context, alive = {:?}", alive);
    debug!("repair_missing_up_context, files = {:?}", files);
    for &d in d {
        // Drop ancestors already reachable through covered parents.
        if let Some((graph, vids)) = ws.graphs.get(&inode) {
            crate::alive::remove_redundant_parents(
                graph,
                vids,
                &mut alive,
                &mut ws.covered_parents,
                d,
            );
        }
        for &ancestor in alive.iter() {
            debug!("put_graph_with_rev {:?} -> {:?}", ancestor, d);
            if ancestor == d {
                // A self-edge would be meaningless; skip it.
                info!(
                    "repair_missing_up_context, alive: {:?} == {:?}",
                    ancestor, d
                );
                continue;
            }
            debug!("repair_missing_up {:?} {:?}", ancestor, d);
            txn.put_graph_with_rev(
                channel,
                if d_is_folder {
                    EdgeFlags::PSEUDO | EdgeFlags::FOLDER
                } else {
                    EdgeFlags::PSEUDO
                },
                ancestor,
                d,
                ChangeId::ROOT,
            )?;
        }
        // Folders are additionally connected directly from `c`.
        if d_is_folder && c != d {
            txn.put_graph_with_rev(
                channel,
                EdgeFlags::PSEUDO | EdgeFlags::FOLDER,
                c,
                d,
                ChangeId::ROOT,
            )?;
        }
    }
    // Also reconnect the file edges collected by `find_alive_up`.
    repair_files_up(txn, channel, &files)?;
    Ok(())
}
/// Adds FOLDER pseudo-edges for the `(vertex, edge)` pairs collected
/// by `find_alive_up` (see `repair_missing_up_context`).
fn repair_files_up<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    files: &[(Vertex<ChangeId>, Edge)],
) -> Result<(), anyhow::Error> {
    for &(a, b) in files {
        let b = if a.start == a.end {
            // `a` is an empty vertex: resolve the block ending at
            // `b.dest` instead.
            txn.find_block_end(channel, b.dest)?
        } else {
            b.dest.inode_vertex()
        };
        debug!("put_graph_with_rev (files) {:?} -> {:?}", b, a);
        // A self-edge here would indicate an inconsistent graph.
        assert_ne!(a, b);
        txn.put_graph_with_rev(
            channel,
            EdgeFlags::FOLDER | EdgeFlags::PSEUDO,
            b,
            a,
            ChangeId::ROOT,
        )?;
    }
    Ok(())
}
// org id 33iwKoFwpXhCt1MdikUmkOn7apYc7yMUk23JBOUn5mg=
/// Reconnects vertex `c` downwards: finds `c`'s closest alive
/// descendants and adds PSEUDO edges from every vertex in `d` to each
/// of them.
pub(crate) fn repair_missing_down_context<
    'a,
    T: MutTxnT,
    I: IntoIterator<Item = &'a Vertex<ChangeId>>,
>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    c: Vertex<ChangeId>,
    d: I,
) -> Result<(), anyhow::Error> {
    let now = std::time::Instant::now();
    let mut alive = find_alive_down(txn, channel, c)?;
    crate::TIMERS.lock().unwrap().find_alive += now.elapsed();
    // Make sure the graph for `inode` is cached in the workspace.
    ws.load_graph(txn, channel, inode)?;
    // Drop descendants already reachable through other children.
    if let Some((graph, vids)) = ws.graphs.get(&inode) {
        crate::alive::remove_redundant_children(graph, vids, &mut alive, c);
    }
    if !alive.is_empty() {
        debug!("repair_missing_down_context alive = {:#?}", alive);
    }
    for &d in d {
        for &descendant in alive.iter() {
            if d == descendant {
                // A self-edge would be meaningless; skip it.
                info!(
                    "repair_missing_down_context, alive: {:?} == {:?}",
                    d, descendant
                );
                continue;
            }
            debug!("repair_missing_down {:?} {:?}", d, descendant);
            txn.put_graph_with_rev(channel, EdgeFlags::PSEUDO, d, descendant, ChangeId::ROOT)?;
        }
    }
    Ok(())
}
// org id GMesFpbMkVsOgAOjes5yxc060bclzWVMLh5KLd/KQc8=
/// Repairs the context of a non-deleting `NewEdge` `e`: if the edge's
/// source or target lost its connection to the alive graph because of
/// changes unknown to `known`, pseudo-edges are added to keep the
/// target reachable.
pub(crate) fn repair_context_nondeleted<T: MutTxnT, K>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
    e: &NewEdge<Option<Hash>>,
) -> Result<(), anyhow::Error>
where
    K: FnMut(Hash) -> bool,
{
    let source = txn.internal_pos(&e.from, change_id)?;
    let source = txn.find_block_end(&channel, source)?;
    let target = txn.internal_pos(&e.to.start_pos(), change_id)?;
    let target = txn.find_block(&channel, target)?;
    debug!(
        "repair_context_nondeleted: source = {:?}, target: {:?}",
        source, target
    );
    // Fixing the connection from root to the source: if the source is
    // deleted by an unknown patch, or if the target is deleted
    // (necessarily by an unknown patch), then `target` might be
    // disconnected from the alive graph, so we need to fix the context.
    let mut deleted_by_unknown = false;
    let mut target_is_folder = false;
    for v in txn.iter_adjacent(channel, source, EdgeFlags::empty(), EdgeFlags::all()) {
        if !v.flag.contains(EdgeFlags::PARENT) {
            // Non-parent edge: only used to detect whether the context
            // is a folder.
            target_is_folder = v.flag.contains(EdgeFlags::FOLDER);
            continue;
        }
        if !v.flag.contains(EdgeFlags::DELETED) {
            continue;
        }
        if v.introduced_by == change_id || v.dest.change.is_root() || v.introduced_by.is_root() {
            continue;
        }
        // This unwrap is ok, since `v` is in the channel.
        let intro = txn.get_external(v.introduced_by).unwrap();
        if known(intro) {
            // If a known change also deletes the context, we're good.
            deleted_by_unknown = false;
            break;
        } else {
            // If an unknown change deletes the context, wait: maybe a
            // known change will delete it too.
            deleted_by_unknown = true;
        }
    }
    debug!("deleted_by_unknown {:?}", deleted_by_unknown);
    if deleted_by_unknown {
        repair_missing_up_context(txn, channel, ws, inode, source, &[target], target_is_folder)?;
    }
    // Now, maybe ~source~ was deleted by known changes, but
    // accessibility to ~target~ was provided by other edges that got
    // deleted by unknown changes.
    let mut unknown = HashSet::new();
    for v in txn.iter_adjacent(channel, target, EdgeFlags::empty(), EdgeFlags::all()) {
        if !v.flag.contains(EdgeFlags::PARENT) || !v.flag.contains(EdgeFlags::DELETED) {
            continue;
        }
        if v.introduced_by == change_id || v.dest.change.is_root() || v.introduced_by.is_root() {
            continue;
        }
        // Else change ~v.introduced_by~ is a change we don't know,
        // since no change can create a conflict.
        unknown.insert(txn.find_block_end(channel, v.dest)?);
    }
    for up in unknown.drain() {
        repair_missing_up_context(txn, channel, ws, inode, up, &[target], target_is_folder)?;
    }
    // Finally, we fix the down context of target, i.e. collecting
    // unknown deleted children.
    let mut children_are_deleted_by_unknown = false;
    for v in txn.iter_adjacent(channel, target, EdgeFlags::empty(), EdgeFlags::all()) {
        if !v.flag.contains(EdgeFlags::DELETED)
            || v.flag.contains(EdgeFlags::PARENT)
            || v.introduced_by == change_id
            || v.dest.change.is_root()
            || v.introduced_by.is_root()
        {
            continue;
        }
        // This unwrap is ok, since `v` is in the channel.
        let intro = txn.get_external(v.introduced_by).unwrap();
        if !known(intro) {
            children_are_deleted_by_unknown = true;
            break;
        }
    }
    if children_are_deleted_by_unknown {
        repair_missing_down_context(txn, channel, ws, inode, target, &[target])?;
    }
    Ok(())
}
// org id OfylSf5taky9lfC0119yeNkCZSw55LxD7HNIT7E0TO8=
/// Repairs the contexts of a deleting `NewEdge` `e`: every block of
/// the deleted range gets its unknown children handled by
/// `repair_children_of_deleted`.
pub(crate) fn repair_context_deleted<T: MutTxnT, K>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
    e: &NewEdge<Option<Hash>>,
) -> Result<(), anyhow::Error>
where
    K: FnMut(Hash) -> bool,
{
    debug!("repair_context_deleted {:?}", e);
    // Walk the consecutive blocks covering the deleted range.
    let mut pos = txn.internal_pos(&e.to.start_pos(), change_id)?;
    let end_pos = txn.internal_pos(&e.to.end_pos(), change_id)?;
    while let Ok(dest_vertex) = txn.find_block(&channel, pos) {
        debug!("repair_context_deleted, dest_vertex = {:?}", dest_vertex);
        repair_children_of_deleted(txn, channel, ws, inode, &mut known, change_id, dest_vertex)?;
        if dest_vertex.end < end_pos.pos {
            // Continue with the next block of the range.
            pos.pos = dest_vertex.end
        } else {
            break;
        }
    }
    Ok(())
}
// org id ueoLxafjHlJdBaAESsgdkbWhSKPGMpMkuFiROTxbrSM=
/// Scratch state shared by the context-repair functions, reused across
/// calls to avoid reallocations (see `clear`).
pub struct Workspace {
    // Parent edges introduced by unknown changes, with the inode they
    // belong to and their flags; filled by `zombify`.
    unknown_parents: Vec<(
        Vertex<ChangeId>,
        Vertex<ChangeId>,
        Position<Option<Hash>>,
        EdgeFlags,
    )>,
    // Unknown child edges collected by `collect_unknown_children`.
    unknown: Vec<Edge>,
    // NOTE(review): written/cleared but not read anywhere in this
    // chunk — presumably consumed elsewhere; confirm.
    parents: HashSet<Edge>,
    // Pseudo-edges (introduced_by == ROOT) that may need deleting;
    // drained by `delete_pseudo_edges`.
    pseudo: Vec<(Vertex<ChangeId>, Edge)>,
    // Cache of graphs per inode, filled by `load_graph`.
    pub(crate) graphs:
        HashMap<Position<Option<Hash>>, (Graph, HashMap<Vertex<ChangeId>, crate::alive::VertexId>)>,
    // Parent pairs already handled by `remove_redundant_parents`.
    pub(crate) covered_parents: HashSet<(Vertex<ChangeId>, Vertex<ChangeId>)>,
}
impl Workspace {
    /// Creates an empty workspace.
    pub fn new() -> Self {
        Self {
            unknown_parents: Vec::new(),
            unknown: Vec::new(),
            parents: HashSet::new(),
            pseudo: Vec::new(),
            graphs: HashMap::new(),
            covered_parents: HashSet::new(),
        }
    }
    /// Empties every collection, keeping their allocations for reuse.
    pub fn clear(&mut self) {
        // Destructuring guarantees no field is forgotten if one is
        // added later.
        let Workspace {
            unknown_parents,
            unknown,
            parents,
            pseudo,
            graphs,
            covered_parents,
        } = self;
        unknown_parents.clear();
        unknown.clear();
        parents.clear();
        pseudo.clear();
        graphs.clear();
        covered_parents.clear();
    }
    /// Panics unless the workspace has been fully drained.
    pub fn assert_empty(&self) {
        assert!(self.unknown.is_empty());
        assert!(self.unknown_parents.is_empty());
        assert!(self.pseudo.is_empty());
        assert!(self.parents.is_empty());
        assert!(self.graphs.is_empty());
        assert!(self.covered_parents.is_empty());
    }
}
/// Collects into `ws` the live, non-parent edges out of `dest_vertex`
/// that were introduced by changes unknown to `known`: pseudo-edges go
/// to `ws.pseudo`, genuinely unknown edges to `ws.unknown`.
fn collect_unknown_children<T: TxnT, K>(
    txn: &T,
    channel: &Channel<T>,
    ws: &mut Workspace,
    dest_vertex: Vertex<ChangeId>,
    change_id: ChangeId,
    known: &mut K,
) where
    K: FnMut(Hash) -> bool,
{
    for v in txn.iter_adjacent(
        channel,
        dest_vertex,
        EdgeFlags::empty(),
        EdgeFlags::all() - EdgeFlags::DELETED - EdgeFlags::PARENT,
    ) {
        trace!("v = {:?}", v);
        if v.introduced_by == change_id || v.dest.change.is_root() {
            continue;
        }
        if v.introduced_by.is_root() {
            // Pseudo-edge: remember it for possible deletion later.
            ws.pseudo.push((dest_vertex, v));
            continue;
        }
        // If `dest_vertex` is not also deleted by the same change
        // that introduced unknown edge `v`, then `v` is really
        // a missing context.
        if !txn
            .iter_adjacent(
                channel,
                dest_vertex,
                EdgeFlags::PARENT | EdgeFlags::DELETED,
                EdgeFlags::all(),
            )
            .any(|e| e.introduced_by == v.introduced_by)
        {
            // This unwrap is ok, since `v` is in the channel.
            let intro = txn.get_external(v.introduced_by).unwrap();
            if !known(intro) {
                ws.unknown.push(v);
            }
        }
    }
}
/// Propagates a deletion to the unknown children of `dest_vertex`:
/// each unknown (non-FOLDER) child edge is re-added as DELETED|BLOCK,
/// the chain of blocks below it is deleted too, and the children's up
/// context is repaired.
fn repair_children_of_deleted<T: MutTxnT, K>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    mut known: K,
    change_id: ChangeId,
    dest_vertex: Vertex<ChangeId>,
) -> Result<(), anyhow::Error>
where
    K: FnMut(Hash) -> bool,
{
    // org id HPoxuN+RIwX+UvJZERkZAFK6+F5MEAFM39nnpnXs9YY=
    trace!("repair_children_of_deleted {:?}", dest_vertex);
    collect_unknown_children(txn, channel, ws, dest_vertex, change_id, &mut known);
    // Take the buffer out of `ws` so `ws` can be re-borrowed below; it
    // is restored at the end to keep its allocation.
    let mut unknown = std::mem::replace(&mut ws.unknown, Vec::new());
    if unknown.is_empty() {
        trace!("unknown = []")
    } else {
        debug!("dest_vertex = {:?}, unknown = {:?}", dest_vertex, unknown)
    }
    for edge in unknown.drain(..) {
        let p = txn.find_block(channel, edge.dest)?;
        if !edge.flag.contains(EdgeFlags::FOLDER) {
            // org id izc8DKE2hSSCYLTpux0cTTOqoMvRdn35r2Hfrdyq82w=
            debug!("dest_vertex {:?}, p {:?}", dest_vertex, p);
            txn.put_graph_with_rev(
                channel,
                EdgeFlags::DELETED | EdgeFlags::BLOCK,
                dest_vertex,
                p,
                change_id,
            )?;
            // Also delete each subsequent block in the chain after `p`.
            let mut u = p;
            while let Ok(v) = txn.find_block(channel, u.end_pos()) {
                if u != v {
                    debug!("repair_children_of_deleted: {:?} -> {:?}", u, v);
                    txn.put_graph_with_rev(
                        channel,
                        EdgeFlags::DELETED | EdgeFlags::BLOCK,
                        u,
                        v,
                        change_id,
                    )?;
                    u = v
                } else {
                    break;
                }
            }
        }
        // org id Hux8KrdJakJleVXFgumk9t/dqB3Cl95Q0HD49nlsmD4=
        if txn.is_alive(channel, p) || edge.flag.contains(EdgeFlags::FOLDER) {
            let p_is_folder = edge.flag.contains(EdgeFlags::FOLDER);
            repair_missing_up_context(txn, channel, ws, inode, dest_vertex, &[p], p_is_folder)?;
        } else {
            // `p` is dead: reconnect to its alive descendants instead.
            let alive = find_alive_down(txn, channel, p)?;
            repair_missing_up_context(txn, channel, ws, inode, dest_vertex, &alive, false)?;
        }
    }
    // Put the (now empty) buffer back for reuse.
    ws.unknown = unknown;
    Ok(())
}
// org id MAjl4KAOsUjeORSkZWk6W/QbgBD/9TDZ1Usr0XnzTFg=
/// Deletes the pseudo-edges collected in `ws.pseudo` whose endpoints
/// are no longer alive.
pub(crate) fn delete_pseudo_edges<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    if ws.pseudo.is_empty() {
        debug!("no pseudo edges")
    }
    for (dest_vertex, e) in ws.pseudo.drain(..) {
        debug!("repair_context_deleted, deleting {:?} {:?}", dest_vertex, e);
        if !txn.is_alive(channel, dest_vertex) {
            if e.flag.contains(EdgeFlags::PARENT) {
                // `e` points to the parent: delete the forward edge
                // parent -> dest_vertex (hence dropping PARENT).
                let p = txn.find_block_end(channel, e.dest)?;
                if !txn.is_alive(channel, p) {
                    txn.del_graph_with_rev(
                        channel,
                        e.flag - EdgeFlags::PARENT,
                        p,
                        dest_vertex,
                        e.introduced_by,
                    )?;
                }
            } else {
                let p = txn.find_block(channel, e.dest)?;
                if !txn.is_alive(channel, p) {
                    txn.del_graph_with_rev(channel, e.flag, dest_vertex, p, e.introduced_by)?;
                }
            }
        }
    }
    Ok(())
}
/// Scan the target interval of the new edge `n` and schedule repairs
/// for context introduced by changes not accepted by `known`.
///
/// Walks the blocks covering `n.to.start_pos()..n.to.end_pos()`. For
/// every live adjacent edge `v` of each block:
/// - edges introduced by `change_id` itself, or pointing at the root
///   change, are skipped;
/// - pseudo-edges (introduced by the root) are queued in `ws.pseudo`
///   for `delete_pseudo_edges`;
/// - parent edges whose introducer is rejected by `known` are
///   collected and handed to [`zombify`].
///
/// No-op for FOLDER edges.
pub(crate) fn collect_zombie_context<T: MutTxnT, K>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    inode: Position<Option<Hash>>,
    n: &NewEdge<Option<Hash>>,
    change_id: ChangeId,
    mut known: K,
) -> Result<(), anyhow::Error>
where
    K: FnMut(Hash) -> bool,
{
    if n.flag.contains(EdgeFlags::FOLDER) {
        return Ok(());
    }
    let mut pos = txn.internal_pos(&n.to.start_pos(), change_id)?;
    let end_pos = txn.internal_pos(&n.to.end_pos(), change_id)?;
    let mut unknown_parents = Vec::new();
    while let Ok(dest_vertex) = txn.find_block(&channel, pos) {
        for v in txn.iter_adjacent(
            channel,
            dest_vertex,
            EdgeFlags::empty(),
            EdgeFlags::all() - EdgeFlags::DELETED,
        ) {
            if v.introduced_by == change_id || v.dest.change.is_root() {
                continue;
            }
            if v.introduced_by.is_root() {
                ws.pseudo.push((dest_vertex, v));
                continue;
            }
            if v.flag.contains(EdgeFlags::PARENT) {
                // Unwrap ok, since `v` is in the channel.
                let intro = txn.get_external(v.introduced_by).unwrap();
                if !known(intro) {
                    unknown_parents.push((dest_vertex, v))
                }
            }
        }
        // NOTE(review): `unknown_parents` is not cleared between
        // iterations, so entries found at earlier blocks are passed to
        // `zombify` again for each subsequent block — presumably
        // idempotent (re-inserting the same edges); confirm.
        zombify(txn, channel, ws, change_id, inode, n.flag, &unknown_parents)?;
        // Advance to the next block of the interval, if any.
        if dest_vertex.end < end_pos.pos {
            pos.pos = dest_vertex.end
        } else {
            break;
        }
    }
    Ok(())
}
/// Turn the vertices in `unknown` into "zombies": reachable through
/// DELETED|BLOCK edges so they stay addressable even though their
/// introducing context is unknown.
///
/// For each `(dest_vertex, edge)`:
/// 1. record `(dest_vertex, parent, inode, edge.flag)` in
///    `ws.unknown_parents`, to be fixed later by
///    `repair_parents_of_deleted`;
/// 2. for non-FOLDER edges, disconnect the live parent edge;
/// 3. insert a DELETED|BLOCK edge from the parent block down to
///    `dest_vertex`, then walk up through the blocks of the same split
///    vertex (via `find_block_end`) inserting DELETED|BLOCK edges
///    between consecutive blocks;
/// 4. re-attach the first chunk of the split to all of its non-pseudo
///    parents with DELETED|BLOCK (plus FOLDER when the parent edge is
///    a folder edge).
fn zombify<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    change_id: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    unknown: &[(Vertex<ChangeId>, Edge)],
) -> Result<(), anyhow::Error> {
    for &(dest_vertex, edge) in unknown.iter() {
        let p = txn.find_block_end(channel, edge.dest)?;
        ws.unknown_parents.push((dest_vertex, p, inode, edge.flag));
        if !edge.flag.contains(EdgeFlags::FOLDER) {
            // If ~dest_vertex~ is a folder, there's no way to repair
            // its down context, so we can't disconnect here.
            let f = edge.flag - EdgeFlags::PARENT;
            txn.del_graph_with_rev(channel, f, p, dest_vertex, edge.introduced_by)?;
        }
        // Propagate only the FOLDER bit of the new edge's flags.
        let fold = flag & EdgeFlags::FOLDER;
        debug!("p {:?}, dest_vertex {:?}", p, dest_vertex);
        txn.put_graph_with_rev(
            channel,
            EdgeFlags::DELETED | EdgeFlags::BLOCK | fold,
            p,
            dest_vertex,
            change_id,
        )?;
        // Walk up the chain of blocks that make up the split vertex,
        // chaining them with DELETED|BLOCK edges.
        let mut v = p;
        while let Ok(u) = txn.find_block_end(channel, v.start_pos()) {
            if u != v {
                debug!("u = {:?}, v = {:?}", u, v);
                txn.put_graph_with_rev(
                    channel,
                    EdgeFlags::DELETED | EdgeFlags::BLOCK | fold,
                    u,
                    v,
                    change_id,
                )?;
                v = u
            } else {
                break;
            }
        }
        // Zombify the first chunk of the split.
        for parent in txn.iter_adjacent(
            channel,
            v,
            EdgeFlags::PARENT,
            EdgeFlags::all() - EdgeFlags::DELETED,
        ) {
            if !parent.flag.contains(EdgeFlags::PSEUDO) {
                ws.parents.insert(parent);
            }
        }
        debug!("ws.parents = {:?}", ws.parents);
        for parent in ws.parents.drain() {
            let parent_dest = txn.find_block_end(channel, parent.dest)?;
            let mut flag = EdgeFlags::DELETED | EdgeFlags::BLOCK;
            if parent.flag.contains(EdgeFlags::FOLDER) {
                flag |= EdgeFlags::FOLDER
            }
            txn.put_graph_with_rev(channel, flag, parent_dest, v, change_id)?;
        }
    }
    Ok(())
}
/// Repair the down context of the vertices queued in
/// `ws.unknown_parents` by [`zombify`], draining the queue (the
/// emptied vector is put back to reuse its allocation).
///
/// For FOLDER entries the context is the vertex itself; otherwise it
/// is the recorded parent.
pub(crate) fn repair_parents_of_deleted<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    debug!("repair_parents_of_deleted");
    let mut queue = std::mem::replace(&mut ws.unknown_parents, Vec::new());
    for (dest_vertex, parent, inode, flag) in queue.drain(..) {
        let folder = flag.contains(EdgeFlags::FOLDER);
        let context = [if folder { dest_vertex } else { parent }];
        repair_missing_down_context(txn, channel, ws, inode, dest_vertex, &context)?
    }
    ws.unknown_parents = queue;
    Ok(())
}
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate pijul_macros;
#[macro_use]
extern crate thiserror;
#[macro_use]
extern crate lazy_static;
pub mod alive;
mod apply;
pub mod change;
pub mod changestore;
mod diff;
mod find_alive;
#[doc(hidden)]
pub mod fs;
mod missing_context;
pub mod output;
pub mod path;
pub mod pristine;
pub mod record;
pub mod small_string;
mod unrecord;
mod vector2;
pub mod vertex_buffer;
pub mod working_copy;
#[cfg(test)]
mod tests;
pub const DOT_DIR: &'static str = ".pijul";
/// Errors produced by this crate. Display messages are generated by
/// the `thiserror` derive from the `#[error(...)]` attributes; callers
/// (such as the `pijul` CLI) match on the variants through
/// `anyhow::Error::downcast`.
#[derive(Debug, Error)]
pub enum Error {
    #[error("File {:?} already in repository", path)]
    FileAlreadyInRepo { path: String },
    #[error("File {:?} not in repository", path)]
    FileNotInRepo { path: String },
    #[error("Cannot record from unrecorded prefix {:?}", path)]
    UnrecordedPath { path: String },
    #[error("File {:?} not found", path)]
    FileNotFound { path: String },
    #[error("Change not on channel {:?}", change_id)]
    ChangeNotOnChannel { change_id: pristine::ChangeId },
    #[error("Change is depended upon {:?}", change_id)]
    ChangeIsDependedUpon { change_id: pristine::ChangeId },
    #[error("Change not found: {:?}", hash)]
    ChangeNotFound { hash: String },
    #[error("State not found: {:?}", state)]
    StateNotFound { state: pristine::Merkle },
    // A change file whose contents do not hash to the claimed value.
    #[error("Change hash mismatch: {:?} != {:?}", claimed, computed)]
    ChangeHashMismatch {
        claimed: pristine::Hash,
        computed: pristine::Hash,
    },
    #[error("Contents hash mismatch: {:?} != {:?}", claimed, computed)]
    ContentsHashMismatch {
        claimed: pristine::Hash,
        computed: pristine::Hash,
    },
    #[error("Change already on channel: {:?}", hash)]
    ChangeAlreadyOnChannel { hash: pristine::Hash },
    #[error("Dependency missing: {:?}", hash)]
    DependencyMissing { hash: pristine::Hash },
    #[error("Channel name already taken: {:?}", name)]
    ChannelNameExists { name: String },
    #[error("Parse error: {:?}", s)]
    ParseError { s: String },
    #[error("Verify error, public key = {:?}", pk)]
    VerifyError { pk: String },
    #[error("Ambiguous hash prefix: {}", prefix)]
    AmbiguousHashPrefix { prefix: String },
    #[error("Inconsistent references in change")]
    InconsistentChange,
    #[error("Missing change contents: {}", hash)]
    MissingContents { hash: String },
    #[error("Wrong block: {:?}", block)]
    WrongBlock {
        block: pristine::Position<pristine::ChangeId>,
    },
    #[error("Pristine corrupt")]
    PristineCorrupt,
    #[error("Change version mismatch, please run `pijul upgrade`.")]
    VersionMismatch,
}
pub use crate::apply::Workspace as ApplyWorkspace;
pub use crate::fs::WorkingCopyIterator;
pub use crate::output::{Archive, Conflict};
pub use crate::pristine::Vertex;
pub use crate::record::Builder as RecordBuilder;
pub use crate::record::{Algorithm, InodeUpdate};
use std::collections::HashMap;
// Wire the convenience extension traits to the sanakirja backends:
// mutable transactions get both read and write helpers, read-only
// transactions get only `TxnTExt`.
impl MutTxnTExt for pristine::sanakirja::MutTxn<()> {}
impl TxnTExt for pristine::sanakirja::MutTxn<()> {}
impl TxnTExt for pristine::sanakirja::Txn {}
/// Convenience methods for mutable pristine transactions: applying,
/// recording and unrecording changes, registering/moving/removing
/// files in the working copy, and archiving. All methods are thin
/// wrappers delegating to the `apply`, `record`, `unrecord`, `fs` and
/// `output` modules.
pub trait MutTxnTExt: pristine::MutTxnT {
    /// Apply change `hash` to `channel`, reusing `workspace`'s
    /// allocations; returns the new log length and channel state.
    fn apply_change_ws<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: crate::pristine::Hash,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(u64, pristine::Merkle), anyhow::Error> {
        crate::apply::apply_change_ws(changes, self, channel, hash, workspace)
    }
    /// Recursive variant of [`MutTxnTExt::apply_change_ws`] (the final
    /// `false` flag is passed through to `apply::apply_change_rec_ws`).
    fn apply_change_rec_ws<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: crate::pristine::Hash,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(), anyhow::Error> {
        crate::apply::apply_change_rec_ws(changes, self, channel, hash, workspace, false)
    }
    /// Apply change `hash` to `channel`; returns the new log length
    /// and channel state.
    fn apply_change<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(u64, pristine::Merkle), anyhow::Error> {
        crate::apply::apply_change(changes, self, channel, hash)
    }
    /// Recursive variant of [`MutTxnTExt::apply_change`].
    fn apply_change_rec<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(), anyhow::Error> {
        crate::apply::apply_change_rec(changes, self, channel, hash, false)
    }
    /// Same call as [`MutTxnTExt::apply_change_rec`] but with the last
    /// flag set to `true` — presumably "dependencies only"; confirm
    /// against `apply::apply_change_rec`.
    fn apply_deps_rec<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Result<(), anyhow::Error> {
        crate::apply::apply_change_rec(changes, self, channel, hash, true)
    }
    /// Apply a locally-recorded change (already in memory), updating
    /// the inode tables from `inode_updates`; reuses `workspace`.
    fn apply_local_change_ws(
        &mut self,
        channel: &mut pristine::ChannelRef<Self>,
        change: &change::Change,
        hash: pristine::Hash,
        inode_updates: &HashMap<usize, InodeUpdate>,
        workspace: &mut ApplyWorkspace,
    ) -> Result<(u64, pristine::Merkle), anyhow::Error> {
        crate::apply::apply_local_change_ws(self, channel, change, hash, inode_updates, workspace)
    }
    /// Apply a locally-recorded change (already in memory), updating
    /// the inode tables from `inode_updates`.
    fn apply_local_change(
        &mut self,
        channel: &mut crate::pristine::ChannelRef<Self>,
        change: &crate::change::Change,
        hash: pristine::Hash,
        inode_updates: &HashMap<usize, InodeUpdate>,
    ) -> Result<(u64, pristine::Merkle), anyhow::Error> {
        crate::apply::apply_local_change(self, channel, change, hash, inode_updates)
    }
    /// Diff `prefix` of the working copy against `channel`,
    /// accumulating the result into `builder`.
    fn record<W: crate::working_copy::WorkingCopy, C: crate::changestore::ChangeStore>(
        &mut self,
        builder: &mut RecordBuilder,
        diff_algorithm: Algorithm,
        channel: &mut pristine::ChannelRef<Self>,
        working_copy: &mut W,
        changes: &C,
        prefix: &str,
    ) -> Result<(), anyhow::Error> {
        builder.record(self, diff_algorithm, channel, working_copy, changes, prefix)
    }
    /// Like [`MutTxnTExt::record`], with a fresh builder; returns the
    /// finished recording.
    fn record_all<W: crate::working_copy::WorkingCopy, C: crate::changestore::ChangeStore>(
        &mut self,
        diff_algorithm: Algorithm,
        channel: &mut pristine::ChannelRef<Self>,
        working_copy: &mut W,
        changes: &C,
        prefix: &str,
    ) -> Result<record::Recorded, anyhow::Error> {
        let mut builder = crate::record::Builder::new();
        builder.record(self, diff_algorithm, channel, working_copy, changes, prefix)?;
        Ok(builder.finish())
    }
    /// Build a change out of `recorded` (hashing its contents,
    /// globalizing its actions, empty metadata/dependencies/header),
    /// save it to `changestore`, apply it locally and return its hash.
    fn apply_recorded<C: changestore::ChangeStore>(
        &mut self,
        channel: &mut pristine::ChannelRef<Self>,
        recorded: record::Recorded,
        changestore: &C,
    ) -> Result<pristine::Hash, anyhow::Error> {
        use std::collections::BTreeSet;
        let contents_hash = {
            let mut hasher = pristine::Hasher::default();
            hasher.update(&recorded.contents);
            hasher.finish()
        };
        let change = change::LocalChange {
            offsets: change::Offsets::default(),
            hashed: change::Hashed {
                version: change::VERSION,
                contents_hash,
                changes: recorded
                    .actions
                    .into_iter()
                    .map(|rec| rec.globalize(self))
                    .collect(),
                metadata: Vec::new(),
                dependencies: BTreeSet::new(),
                extra_known: BTreeSet::new(),
                header: change::ChangeHeader::default(),
            },
            unhashed: None,
            contents: recorded.contents,
        };
        let hash = changestore.save_change(&change)?;
        apply::apply_local_change(self, channel, &change, hash.clone(), &recorded.updatables)?;
        Ok(hash)
    }
    /// Remove change `hash` from `channel` (the inverse of apply);
    /// the returned boolean comes from `unrecord::unrecord`.
    fn unrecord<C: changestore::ChangeStore>(
        &mut self,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        hash: &pristine::Hash,
    ) -> Result<bool, anyhow::Error> {
        unrecord::unrecord(self, channel, changes, hash)
    }
    /// Output `channel` under `prefix` to the working copy `repo`,
    /// returning the conflicts encountered.
    fn output_repository_no_pending<R: working_copy::WorkingCopy, C: changestore::ChangeStore>(
        &mut self,
        repo: &mut R,
        changes: &C,
        channel: &mut pristine::ChannelRef<Self>,
        prefix: &str,
        output_name_conflicts: bool,
    ) -> Result<Vec<output::Conflict>, anyhow::Error> {
        output::output_repository_no_pending(
            repo,
            changes,
            self,
            channel,
            prefix,
            output_name_conflicts,
        )
    }
    /// Register a file in the working copy, where the file is given by
    /// its path from the root of the repository, where the components of
    /// the path are separated by `/` (example path: `a/b/c`).
    fn add_file(&mut self, path: &str) -> Result<(), anyhow::Error> {
        fs::add_inode(self, None, path, false)
    }
    /// Register a directory in the working copy, where the directory is
    /// given by its path from the root of the repository, where the
    /// components of the path are separated by `/` (example path:
    /// `a/b/c`).
    fn add_dir(&mut self, path: &str) -> Result<(), anyhow::Error> {
        fs::add_inode(self, None, path, true)
    }
    /// Register a file or directory in the working copy, given by its
    /// path from the root of the repository, where the components of the
    /// path are separated by `/` (example path: `a/b/c`).
    fn add(&mut self, path: &str, is_dir: bool) -> Result<(), anyhow::Error> {
        fs::add_inode(self, None, path, is_dir)
    }
    /// Rename `a` to `b` in the working copy (both are full paths).
    fn move_file(&mut self, a: &str, b: &str) -> Result<(), anyhow::Error> {
        fs::move_file(self, a, b)
    }
    /// Unregister `a` (and, if a directory, its contents) from the
    /// working copy.
    fn remove_file(&mut self, a: &str) -> Result<(), anyhow::Error> {
        fs::remove_file(self, a)
    }
    /// Create channel `name` and return a handle for loading it from a
    /// dump; fails with [`Error::ChannelNameExists`] if the name is
    /// taken.
    #[cfg(feature = "dump")]
    fn channel_from_dump<'a>(
        &'a mut self,
        name: &str,
    ) -> Result<pristine::channel_dump::ChannelFromDump<'a, Self>, anyhow::Error> {
        if self.load_channel(name).is_none() {
            let channel = self.open_or_create_channel(name)?;
            Ok(pristine::channel_dump::ChannelFromDump::new(self, channel))
        } else {
            Err((Error::ChannelNameExists {
                name: name.to_string(),
            })
            .into())
        }
    }
    /// Same as [`MutTxnTExt::archive_prefix_with_state`] with an empty
    /// prefix (the whole repository); the same warning applies.
    fn archive_with_state<'a, P: changestore::ChangeStore, A: Archive>(
        &mut self,
        changes: &P,
        channel: &mut pristine::ChannelRef<Self>,
        state: pristine::Merkle,
        extra: &[pristine::Hash],
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, anyhow::Error> {
        self.archive_prefix_with_state(
            changes,
            channel,
            state,
            extra,
            &mut std::iter::empty(),
            arch,
        )
    }
    /// Archive `channel` at `state` (after additionally applying the
    /// `extra` changes), restricted to `prefix`.
    ///
    /// Warning: this method unrecords changes until finding the
    /// state. If this is not wanted, please fork the channel before
    /// calling. Fails with [`Error::StateNotFound`] if `state` is not
    /// in the channel's log.
    fn archive_prefix_with_state<
        'a,
        P: changestore::ChangeStore,
        A: Archive,
        I: Iterator<Item = &'a str>,
    >(
        &mut self,
        changes: &P,
        channel: &mut pristine::ChannelRef<Self>,
        state: pristine::Merkle,
        extra: &[pristine::Hash],
        prefix: &mut I,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, anyhow::Error> {
        // Collect the changes recorded after `state`, newest first.
        let mut unrecord = Vec::new();
        let mut found = false;
        for (_, (h, m)) in self.changeid_rev_log(&channel.borrow(), None) {
            if m == state {
                found = true;
                break;
            } else {
                unrecord.push(h)
            }
        }
        debug!("unrecord = {:?}", unrecord);
        if found {
            for h in unrecord.drain(..) {
                let h = self.get_external(h).unwrap();
                self.unrecord(changes, channel, &h)?;
            }
            for app in extra.iter() {
                self.apply_change_rec(changes, channel, *app)?
            }
            output::archive(changes, self, channel, prefix, arch)
        } else {
            Err((Error::StateNotFound { state }).into())
        }
    }
}
/// Read-only convenience methods available on any pristine
/// transaction: working-copy queries, log iteration, path lookups and
/// archiving. All methods delegate to the `fs`, `alive` and `output`
/// modules or to the base `TxnT` tables.
pub trait TxnTExt: pristine::TxnT {
    /// Whether `inode` is registered as a directory in the working
    /// copy (the root always is).
    fn is_directory(&self, inode: pristine::Inode) -> bool {
        fs::is_directory(self, inode)
    }
    /// Whether `path` is registered in the working copy.
    fn is_tracked(&self, path: &str) -> bool {
        fs::is_tracked(self, path)
    }
    /// Iterator over all the files of the working copy, from the root.
    fn iter_working_copy<'txn>(&'txn self) -> WorkingCopyIterator<'txn, Self> {
        fs::iter_working_copy(self, pristine::Inode::ROOT)
    }
    /// Position of `hash` in the channel's log, or `None` if the
    /// change is not on the channel.
    fn has_change(
        &self,
        channel: &pristine::ChannelRef<Self>,
        hash: pristine::Hash,
    ) -> Option<u64> {
        let cid = if let Some(c) = self.get_internal(hash) {
            c
        } else {
            return None;
        };
        self.get_changeset(&channel.borrow().changes, cid, None)
    }
    /// Iterate the channel's log in increasing order, starting at
    /// position `from`.
    fn log<'channel, 'txn>(
        &'txn self,
        channel: &'channel pristine::Channel<Self>,
        from: u64,
    ) -> Log<'txn, Self> {
        Log {
            txn: self,
            iter: self.changeid_log(channel, from),
        }
    }
    /// Iterate the channel's log in decreasing order, optionally
    /// starting at position `from`.
    fn reverse_log<'channel, 'txn>(
        &'txn self,
        channel: &'channel pristine::Channel<Self>,
        from: Option<u64>,
    ) -> RevLog<'txn, Self> {
        RevLog {
            txn: self,
            iter: self.changeid_rev_log(channel, from),
        }
    }
    /// Hash and state at log position `n` of the channel, if any.
    fn get_changes(
        &self,
        channel: &pristine::ChannelRef<Self>,
        n: u64,
    ) -> Option<(pristine::Hash, pristine::Merkle)> {
        self.get_revchangeset(&channel.borrow().revchanges, n, None)
            .map(|(h, m)| (self.get_external(h).unwrap(), m))
    }
    /// Log position of change `h` on the channel, if present.
    fn get_revchanges(
        &self,
        channel: &pristine::ChannelRef<Self>,
        h: pristine::Hash,
    ) -> Option<u64> {
        if let Some(h) = self.get_internal(h) {
            self.get_changeset(&channel.borrow().changes, h, None)
        } else {
            None
        }
    }
    /// Iterator over the file positions touched by change `h`, or
    /// `None` if `h` is unknown to this pristine.
    fn touched_files<'txn>(&'txn self, h: pristine::Hash) -> Option<Touched<'txn, Self>> {
        if let Some(id) = self.get_internal(h) {
            Some(Touched {
                txn: self,
                iter: self.iter_rev_touched_files(id, None),
                id,
            })
        } else {
            None
        }
    }
    /// Full path of the file at `position`, following the oldest
    /// names; the `bool` is whatever `fs::find_path` reports
    /// (presumably "is a directory" — confirm there).
    fn find_oldest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        position: pristine::Position<pristine::Hash>,
    ) -> Result<(String, bool), anyhow::Error> {
        let position = pristine::Position {
            change: self.get_internal(position.change).unwrap(),
            pos: position.pos,
        };
        fs::find_path(changes, self, &channel.borrow(), false, position)
    }
    /// Same as [`TxnTExt::find_oldest_path`], following the youngest
    /// names instead.
    fn find_youngest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        position: pristine::Position<pristine::Hash>,
    ) -> Result<(String, bool), anyhow::Error> {
        let position = pristine::Position {
            change: self.get_internal(position.change).unwrap(),
            pos: position.pos,
        };
        fs::find_path(changes, self, &channel.borrow(), true, position)
    }
    /// Graph position of the file at `path` in the channel, following
    /// the oldest names; the `bool` comes from
    /// `fs::follow_oldest_path`.
    fn follow_oldest_path<C: changestore::ChangeStore>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        path: &str,
    ) -> Result<(pristine::Position<pristine::ChangeId>, bool), anyhow::Error> {
        fs::follow_oldest_path(changes, self, channel, path)
    }
    /// Write the contents of the file rooted at `v0` into `out`,
    /// retrieving its alive graph and outputting it (conflicts and
    /// all).
    fn output_file<C: changestore::ChangeStore, V: vertex_buffer::VertexBuffer>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        v0: pristine::Position<pristine::ChangeId>,
        out: &mut V,
    ) -> Result<(), anyhow::Error> {
        let mut forward = Vec::new();
        let channel = channel.borrow();
        let mut graph = alive::retrieve(self, &channel, v0);
        alive::output_graph(changes, self, &channel, out, &mut graph, &mut forward)?;
        Ok(())
    }
    /// Archive the whole channel into `arch`, returning the conflicts.
    fn archive<'a, C: changestore::ChangeStore, A: Archive>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, anyhow::Error> {
        output::archive(changes, self, channel, &mut std::iter::empty(), arch)
    }
    /// Archive only the part of the channel under `prefix` into
    /// `arch`, returning the conflicts.
    fn archive_prefix<'a, C: changestore::ChangeStore, I: Iterator<Item = &'a str>, A: Archive>(
        &self,
        changes: &C,
        channel: &pristine::ChannelRef<Self>,
        prefix: &mut I,
        arch: &mut A,
    ) -> Result<Vec<output::Conflict>, anyhow::Error> {
        output::archive(changes, self, channel, prefix, arch)
    }
}
/// Iterator over a channel's log in increasing order, yielding
/// `(position, (hash, state))`. Created by [`TxnTExt::log`].
pub struct Log<'txn, T: pristine::TxnT> {
    // Used to translate internal change ids to external hashes.
    txn: &'txn T,
    // Cursor over the channel's changeset table: n -> (id, state).
    iter: pristine::Cursor<
        T,
        &'txn T,
        T::RevchangesetCursor,
        u64,
        (pristine::ChangeId, pristine::Merkle),
    >,
}
impl<'txn, T: pristine::TxnT> Iterator for Log<'txn, T> {
    type Item = (u64, (pristine::Hash, pristine::Merkle));
    fn next(&mut self) -> Option<Self::Item> {
        // Translate the internal change id to its external hash.
        let (n, (id, state)) = self.iter.next()?;
        let hash = self.txn.get_external(id).unwrap();
        Some((n, (hash, state)))
    }
}
/// Iterator over a channel's log in decreasing order, yielding
/// `(position, (hash, state))`. Created by [`TxnTExt::reverse_log`].
pub struct RevLog<'txn, T: pristine::TxnT> {
    // Used to translate internal change ids to external hashes.
    txn: &'txn T,
    // Reverse cursor over the channel's changeset table.
    iter: pristine::RevCursor<
        T,
        &'txn T,
        T::RevchangesetCursor,
        u64,
        (pristine::ChangeId, pristine::Merkle),
    >,
}
impl<'txn, T: pristine::TxnT> Iterator for RevLog<'txn, T> {
    type Item = (u64, (pristine::Hash, pristine::Merkle));
    fn next(&mut self) -> Option<Self::Item> {
        // Translate the internal change id to its external hash.
        let (n, (id, state)) = self.iter.next()?;
        let hash = self.txn.get_external(id).unwrap();
        Some((n, (hash, state)))
    }
}
/// Iterator over the file positions touched by one change, created by
/// [`TxnTExt::touched_files`]; only entries whose change id equals
/// `id` are yielded.
pub struct Touched<'txn, T: pristine::TxnT> {
    txn: &'txn T,
    // Cursor over `iter_rev_touched_files(id, None)`.
    iter: pristine::Cursor<
        T,
        &'txn T,
        T::Rev_touched_filesCursor,
        pristine::ChangeId,
        pristine::Position<pristine::ChangeId>,
    >,
    // The change whose touched files we are listing.
    id: pristine::ChangeId,
}
impl<'txn, T: pristine::TxnT> Iterator for Touched<'txn, T> {
    type Item = pristine::Position<pristine::Hash>;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let (cid, file) = self.iter.next()?;
            // The cursor is ordered by change id: once past `id`,
            // there is nothing left to yield.
            if cid > self.id {
                return None;
            }
            if cid == self.id {
                let change = self.txn.get_external(file.change).unwrap();
                return Some(pristine::Position {
                    change,
                    pos: file.pos,
                });
            }
            // cid < self.id: skip.
        }
    }
}
// Wall-clock durations accumulated for the main phases of output,
// record and apply; stored in the global `TIMERS` and read back with
// `get_timers`.
#[doc(hidden)]
#[derive(Debug, Default, Clone)]
pub struct Timers {
    pub alive_output: std::time::Duration,
    pub alive_graph: std::time::Duration,
    pub alive_retrieve: std::time::Duration,
    pub alive_contents: std::time::Duration,
    pub alive_write: std::time::Duration,
    pub record: std::time::Duration,
    pub apply: std::time::Duration,
    pub repair_context: std::time::Duration,
    pub check_cyclic_paths: std::time::Duration,
    pub find_alive: std::time::Duration,
}
use std::sync::Mutex;
lazy_static! {
    // Global profiling counters. `Timers` derives `Default` and
    // `Duration::default()` is the zero duration, so the derived
    // default replaces the ten hand-written zero fields.
    pub static ref TIMERS: Mutex<Timers> = Mutex::new(Timers::default());
}
/// Reset all global profiling counters to zero.
#[doc(hidden)]
pub fn reset_timers() {
    let mut timers = TIMERS.lock().unwrap();
    *timers = Timers::default();
}
/// Snapshot of the global profiling counters.
#[doc(hidden)]
pub fn get_timers() -> Timers {
    let timers = TIMERS.lock().unwrap();
    timers.clone()
}
// org id GEhwQUzK6088geMhP32RRVZ2gKcVn8vpgw30CsBQVAw=
//! Manipulating the internal representation of files and directories
//! tracked by Pijul (i.e. adding files, removing files, getting file
//! names…).
//!
//! Pijul tracks files in two different ways: one is the *graph*,
//! where changes are applied. The other one is the *working copy*,
//! where some filesystem changes are not yet recorded. The purpose of
//! this double representation is to be able to compare a file from
//! the graph with its version in the working copy, even if its name
//! has changed in the working copy.
//!
//! The functions of this module work at exactly one of these two
//! levels. Changing the graph is done by recording and applying a
//! change, and changing the working copy is done either by some of the
//! functions in this module, or by outputting the graph to the
//! working copy (using the [output module](../output/index.html)).
use crate::changestore::*;
use crate::pristine::*;
use crate::small_string::*;
use crate::Error;
use std::iter::Iterator;
// org id QA+Q9LiwMr0rzpggManDGnt+j0KyJb22KISVyvLG2TA=
/// Draw random inodes until one is found that is not already used in
/// the `revtree` table, and return it.
pub(crate) fn create_new_inode<T: MutTxnT>(txn: &mut T) -> Inode {
    loop {
        let candidate = Inode::random();
        if txn.get_revtree(candidate, None).is_none() {
            return candidate;
        }
    }
}
// org id O4dv274a9XaSxWj15wJRno8kQXVuvjg34lczIGWAQYg=
/// Test whether `inode` is the inode of a directory (as opposed to a
/// file). The root is always a directory; any other inode is a
/// directory iff the tree contains an entry keyed by `(inode, "")`.
pub fn is_directory<T: TxnT>(txn: &T, inode: Inode) -> bool {
    if inode == Inode::ROOT {
        return true;
    }
    let dir_id = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    for (pid, _) in txn.iter_tree(dir_id.clone(), None) {
        if pid > dir_id {
            break;
        }
        if pid == dir_id {
            return true;
        }
        // pid < dir_id: not there yet, keep scanning.
    }
    false
}
// org id N+C5y4G7uvo/yoq/w35d2HQzUzbee5SZHjhgfE6nCpU=
/// Find the deepest ancestor of `path` (possibly `path` itself) that
/// is registered in the tree, returning its inode together with a
/// peekable iterator positioned at the first component of `path` that
/// was NOT found (exhausted when the whole path is tracked).
fn closest_in_repo_ancestor<'a, T: TxnT>(
    txn: &T,
    path: &'a str,
) -> (Inode, std::iter::Peekable<crate::path::Components<'a>>) {
    let mut components = crate::path::components(path).peekable();
    let mut fileid = OwnedPathId {
        parent_inode: Inode::ROOT,
        basename: SmallString::new(),
    };
    while let Some(c) = components.peek() {
        trace!("component {:?}", c);
        fileid.basename = SmallString::from_str(c);
        trace!("{:?}", fileid);
        let mut found = false;
        for (id, inode) in txn.iter_tree(fileid.clone(), None) {
            trace!(
                "id = {:?}, inode = {:?}, cmp = {:?}",
                id,
                inode,
                id.cmp(&fileid)
            );
            if id > fileid {
                break;
            } else if id < fileid {
                continue;
            }
            // NOTE(review): only the first matching entry updates the
            // parent — presumably entries with the same key cannot
            // occur, or the first one is canonical; confirm.
            if !found {
                fileid.parent_inode = inode;
            }
            found = true;
        }
        if found {
            // Descend into the matched component.
            components.next();
        } else {
            break;
        }
    }
    (fileid.parent_inode, components)
}
// org id J1ElC104DNMWRPiHY0S0/Y2xKqcs64b1hM3Rk/NXMCA=
/// Find the inode corresponding to `path`, or return
/// [`Error::FileNotInRepo`] if some component of the path is not
/// tracked.
pub fn find_inode<T: TxnT>(txn: &T, path: &str) -> Result<Inode, anyhow::Error> {
    debug!("find_inode");
    let (inode, mut rest) = closest_in_repo_ancestor(txn, path);
    debug!("/find_inode");
    match rest.next() {
        // No component left over: the whole path was found.
        None => Ok(inode),
        Some(c) => {
            debug!("c = {:?}", c);
            Err((Error::FileNotInRepo {
                path: path.to_string(),
            })
            .into())
        }
    }
}
/// Returns whether `path` is registered in the working copy, i.e.
/// whether every one of its components is found in the tree.
pub fn is_tracked<T: TxnT>(txn: &T, path: &str) -> bool {
    debug!("is_tracked {:?}", path);
    let (_, mut rest) = closest_in_repo_ancestor(txn, path);
    debug!("/is_tracked {:?}", path);
    rest.peek().is_none()
}
// org id 53QzTGnAYBDED5pTf+Al3IPoKJmELcsESzJyUp4g66c=
/// Find the path leading from the root to `inode`, or `None` if some
/// ancestor of `inode` (or `inode` itself) is not in the `revtree`
/// table.
pub fn inode_filename<T: TxnT>(txn: &T, inode: Inode) -> Option<String> {
    // Collect basenames from `inode` up to (but excluding) the root.
    let mut names = Vec::new();
    let mut current = inode;
    loop {
        let entry = match txn.get_revtree(current, None) {
            Some(e) => e,
            None => {
                debug!("filename_of_inode: not in tree");
                return None;
            }
        };
        names.push(entry.basename);
        current = entry.parent_inode.clone();
        if current == Inode::ROOT {
            break;
        }
    }
    // Join them root-first, separated by '/'.
    let mut path = String::new();
    for name in names.iter().rev() {
        if !path.is_empty() {
            path.push('/')
        }
        path.push_str(name.as_str());
    }
    Some(path)
}
// org id egPsRv38yVvBRxGgYmBSSTLNQQCxnftKGTT3V8Xdigw=
/// Record the information that `parent_inode` is now a parent of
/// file `filename`, and `filename` has inode `child_inode` (a fresh
/// inode is created when `child_inode` is `None`).
///
/// If `(parent_inode, filename)` already maps to some inode:
/// - with no requested `child_inode`, fail with
///   [`Error::FileAlreadyInRepo`];
/// - with a matching `child_inode`, do nothing;
/// - otherwise rebind the entry to `child_inode`, dropping the old
///   inode's tree/revtree/inodes records.
fn make_new_child<T: MutTxnT>(
    txn: &mut T,
    parent_inode: Inode,
    filename: &str,
    is_dir: bool,
    child_inode: Option<Inode>,
) -> Result<Inode, anyhow::Error> {
    let parent_id = OwnedPathId {
        parent_inode: parent_inode,
        basename: SmallString::from_str(filename),
    };
    // org id oDi033pRbuuYaxa6hoMWYfl5G39MtiA81W8UfjTembg=
    if let Some(inode) = txn.get_tree(parent_id.as_file_id(), None) {
        debug!("inode = {:?}", inode);
        if let Some(child) = child_inode {
            if child == inode {
                // No need to do anything.
                Ok(inode)
            } else {
                // Rebind the name to `child`, removing every record of
                // the previous inode (including its graph mapping, if
                // any).
                assert!(txn.del_tree(parent_id.as_file_id(), Some(inode))?);
                assert!(txn.del_revtree(inode, Some(parent_id.as_file_id()))?);
                if let Some(vertex) = txn.get_inodes(inode, None) {
                    assert!(txn.del_revinodes(vertex, Some(inode))?);
                    assert!(txn.del_inodes(inode, Some(vertex))?);
                }
                txn.put_tree(parent_id.as_file_id(), child)?;
                txn.put_revtree(child, parent_id.as_file_id())?;
                Ok(child)
            }
        } else {
            Err((Error::FileAlreadyInRepo {
                path: filename.to_string(),
            })
            .into())
        }
    // org id 9qLOcUCA1wN2oqHAae8FEFP1IFvAMJsxj5HJGbaZk4w=
    } else {
        let child_inode = match child_inode {
            None => create_new_inode(txn),
            Some(i) => i,
        };
        debug!("make_new_child: {:?} {:?}", parent_id, child_inode);
        txn.put_tree(parent_id.as_file_id(), child_inode)?;
        txn.put_revtree(child_inode, parent_id.as_file_id())?;
        if is_dir {
            // Directories carry a self-entry keyed by the empty
            // basename (this is what `is_directory` tests).
            let dir_id = OwnedPathId {
                parent_inode: child_inode,
                basename: SmallString::new(),
            };
            txn.put_tree(dir_id.as_file_id(), child_inode)?;
        };
        Ok(child_inode)
    }
}
// org id 9pW024HoHtvMwC3XQr2YNg7YduAvhri45EFSm45bglc=
/// Register `path` in the working copy with inode `inode` (a fresh
/// one when `None`), creating any untracked intermediate directories.
/// Does nothing for a path with no parent.
pub(crate) fn add_inode<T: MutTxnT>(
    txn: &mut T,
    inode: Option<Inode>,
    path: &str,
    is_dir: bool,
) -> Result<(), anyhow::Error> {
    debug!("add_inode");
    let parent = match crate::path::parent(path) {
        Some(p) => p,
        None => return Ok(()),
    };
    let (mut current, missing) = closest_in_repo_ancestor(txn, parent);
    debug!("add_inode: closest = {:?}", current);
    // Create the directories between the closest tracked ancestor and
    // the parent of `path`.
    for component in missing {
        debug!("unrecorded: {:?}", component);
        current = make_new_child(txn, current, component, true, None)?
    }
    let file_name = crate::path::file_name(path).unwrap();
    debug!("add_inode: file_name = {:?}", file_name);
    make_new_child(txn, current, file_name, is_dir, inode)?;
    Ok(())
}
// org id YyZGd9COjGYaWuRNjKpg1URE0qTN3e/46D8VKKBgNSY=
/// Move an inode (file or directory) from `origin` to `destination`
/// in the working copy.
///
/// **Warning**: both `origin` and `destination` must be full paths to
/// the inode being moved (unlike e.g. in the `mv` Unix command).
pub fn move_file<T: MutTxnT>(
    txn: &mut T,
    origin: &str,
    destination: &str,
) -> Result<(), anyhow::Error> {
    debug!("move_file: {},{}", origin, destination);
    let inode = find_inode(txn, origin)?;
    move_file_by_inode(txn, inode, destination)?;
    Ok(())
}
/// Move `inode` to the full path `destination` in the working copy:
/// unlink it from its current parent, then re-register it under the
/// new path (preserving its directory-ness).
pub fn move_file_by_inode<T: MutTxnT>(
    txn: &mut T,
    inode: Inode,
    destination: &str,
) -> Result<(), anyhow::Error> {
    let fileref = txn.get_revtree(inode, None).unwrap().to_owned();
    debug!("fileref = {:?}", fileref);
    // Remove the old name binding in both directions.
    assert!(txn.del_tree(fileref.as_file_id(), Some(inode))?);
    assert!(txn.del_revtree(inode, Some(fileref.as_file_id()))?);
    debug!("inode={:?} destination={}", inode, destination);
    // A directory is recognised by its empty-basename self-entry.
    let dir_id = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    let is_dir = txn.get_tree(dir_id.as_file_id(), None).is_some();
    add_inode(txn, Some(inode), destination, is_dir)?;
    Ok(())
}
// org id ePI6/NVKoAiLIji81YVx8XqpcwvbR7EnYAHpEOm8byE=
/// Recursively delete `inode` (registered in `parent`) from the
/// working-copy tables: children first, then the directory
/// self-entry, then the `(parent, inode)` binding itself. When
/// `update_inodes` is true, the inode's graph mapping
/// (`inodes`/`revinodes`) is removed as well.
pub(crate) fn rec_delete<T: MutTxnT>(
    txn: &mut T,
    parent: OwnedPathId,
    inode: Inode,
    update_inodes: bool,
) -> Result<(), anyhow::Error> {
    let file_id = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    // Collect the children of `inode`; seeing any entry keyed by
    // `inode` (including the empty self-entry) means it is a
    // directory.
    let mut children = Vec::new();
    let mut is_dir = false;
    for (k, inode) in txn.iter_tree(file_id.clone(), None) {
        if k.parent_inode > file_id.parent_inode {
            break;
        } else if k.parent_inode < file_id.parent_inode {
            continue;
        }
        is_dir = true;
        if !k.basename.is_empty() {
            children.push((k, inode))
        }
    }
    // org id k45Kc67uPC/E/7XAO9HQAtOFcuseeeANPdTBlqsYfUo=
    for (k, inode_) in children {
        assert_ne!(inode, inode_);
        rec_delete(txn, k, inode_, update_inodes)?;
    }
    // org id 9uWiC1zSHDrVjjYHHo9FTeMcd5TthRVfRwqIGnootns=
    debug!(
        "rec_delete: {:?}, {:?}, {:?}, {:?}",
        parent, file_id, inode, is_dir
    );
    if is_dir {
        // Remove the directory's empty-basename self-entry (missing
        // entries are only logged, not fatal).
        if !txn.del_tree(file_id.as_file_id(), Some(inode))? {
            debug!(
                "rec_delete (is_dir): {:?} {:?} not present",
                file_id.as_file_id(),
                inode
            );
        }
    }
    if txn.del_tree(parent.as_file_id(), Some(inode))? {
        assert!(txn.del_revtree(inode, Some(parent.as_file_id()))?);
        if update_inodes {
            if let Some(vertex) = txn.get_inodes(inode, None) {
                assert!(txn.del_revinodes(vertex, Some(inode))?);
                assert!(txn.del_inodes(inode, Some(vertex))?);
            }
        }
    } else {
        debug!(
            "rec_delete: {:?} {:?} not present",
            parent.as_file_id(),
            inode
        );
    }
    Ok(())
}
/// Removes a file (or directory, recursively) from the working copy,
/// leaving the graph mappings untouched.
pub fn remove_file<T: MutTxnT>(txn: &mut T, path: &str) -> Result<(), anyhow::Error> {
    debug!("remove file {:?}", path);
    let inode = find_inode(txn, path)?;
    let parent = txn.get_revtree(inode, None).unwrap().to_owned();
    debug!("remove file {:?} {:?}", parent, inode);
    rec_delete(txn, parent, inode, false)
}
// org id WTGyRZtOoRkPm6hdNGUvlEreGTfHbbGS+69KXoOpWTg=
/// An iterator over the children (i.e. one level down) of an inode in
/// the working copy, yielding `(basename, inode)` pairs.
///
/// Constructed using
/// [`working_copy_children`](fn.working_copy_children.html).
pub struct WorkingCopyChildren<'txn, T: TxnT> {
    // Tree cursor starting at `(inode, "")`.
    iter: crate::pristine::Cursor<T, &'txn T, T::TreeCursor, OwnedPathId, Inode>,
    // The parent whose children are listed.
    inode: Inode,
}
impl<'txn, T: TxnT> Iterator for WorkingCopyChildren<'txn, T> {
    type Item = (SmallString, Inode);
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let (k, v) = self.iter.next()?;
            // Entries are ordered by parent inode: past `self.inode`
            // there are no more children.
            if k.parent_inode > self.inode {
                return None;
            }
            // Skip the directory's own empty-basename entry.
            if k.parent_inode == self.inode && !k.basename.is_empty() {
                return Some((k.basename, v));
            }
        }
    }
}
/// Returns an iterator over the children of `inode` in the working
/// copy.
pub fn working_copy_children<'txn, T: TxnT>(
    txn: &'txn T,
    inode: Inode,
) -> WorkingCopyChildren<'txn, T> {
    // Position the tree cursor at the first entry of `inode`.
    let start = OwnedPathId {
        parent_inode: inode,
        basename: SmallString::new(),
    };
    WorkingCopyChildren {
        iter: txn.iter_tree(start, None),
        inode,
    }
}
// org id +fM8wS/o0J3Rh8yD3ffhbr/hOC/s5aXjUcR53gJGXm0=
/// An iterator over all the paths in the working copy, yielding
/// `(inode, path)` pairs via a depth-first traversal.
///
/// Constructed using [`iter_working_copy`](fn.iter_working_copy.html).
pub struct WorkingCopyIterator<'txn, T: TxnT> {
    // DFS stack of inodes not yet expanded, with their full paths.
    stack: Vec<(Inode, String)>,
    txn: &'txn T,
}
impl<'txn, T: TxnT> Iterator for WorkingCopyIterator<'txn, T> {
    type Item = (Inode, String);
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let (inode, name) = self.stack.pop()?;
            // Push all children of `inode` for later expansion.
            let fileid = OwnedPathId {
                parent_inode: inode,
                basename: SmallString::from_str(""),
            };
            for (k, v) in self.txn.iter_tree(fileid, None) {
                if k.parent_inode < inode {
                    continue;
                }
                if k.parent_inode > inode {
                    break;
                }
                // Skip the directory's empty-basename self-entry.
                if k.basename.len() > 0 {
                    let mut child_name = name.clone();
                    crate::path::push(&mut child_name, k.basename.as_str());
                    self.stack.push((v, child_name))
                }
            }
            // The root's name is empty; it is expanded but not yielded.
            if !name.is_empty() {
                return Some((inode, name));
            }
        }
    }
}
/// Returns an iterator over all the files of the working copy below
/// `root`.
pub fn iter_working_copy<'txn, T: TxnT>(txn: &'txn T, root: Inode) -> WorkingCopyIterator<'txn, T> {
    let stack = vec![(root, String::new())];
    WorkingCopyIterator { stack, txn }
}
// org id s2dYh4owv/S89blo/x6pHrZe7en3+nGbN3oJNhxce4E=
/// An iterator over the children (i.e. a single level down) of an
/// inode key in the graph, yielding `(position, metadata, basename)`.
///
/// Constructed using
/// [`iter_graph_children`](fn.iter_graph_children.html).
pub struct GraphChildren<'txn, 'channel, 'changes, T: TxnT, P: ChangeStore + 'changes> {
    txn: &'txn T,
    channel: &'channel Channel<T>,
    // FOLDER edges out of the parent inode vertex.
    adj: AdjacentIterator<'txn, T>,
    changes: &'changes P,
    // Scratch buffer reused across `next` calls for name contents.
    buf: Vec<u8>,
}
impl<'txn, 'channel, 'changes, T: TxnT, P: ChangeStore + 'changes> Iterator
    for GraphChildren<'txn, 'channel, 'changes, T, P>
{
    type Item = (Position<ChangeId>, InodeMetadata, String);
    // For each FOLDER edge: load the name vertex's contents (2 bytes of
    // metadata followed by the basename), then follow one more FOLDER
    // edge to the grandchild — the actual file vertex.
    fn next(&mut self) -> Option<Self::Item> {
        self.adj.next().map(move |child| {
            let dest = self.txn.find_block(&self.channel, child.dest).unwrap();
            // Take the buffer out while `self.changes` borrows it.
            let mut buf = std::mem::replace(&mut self.buf, Vec::new());
            self.changes
                .get_contents(|p| self.txn.get_external(p), dest, &mut buf)
                .unwrap();
            self.buf = buf;
            let (perms, basename) = self.buf.split_at(2);
            let perms = InodeMetadata::from_basename(perms);
            let basename = std::str::from_utf8(basename).unwrap();
            let grandchild = self
                .txn
                .iter_adjacent(
                    &self.channel,
                    dest,
                    EdgeFlags::FOLDER,
                    EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
                )
                .next()
                .unwrap();
            (grandchild.dest, perms, basename.to_string())
        })
    }
}
/// Returns a list of files under the given key. The root key is
/// [`pristine::Vertex::ROOT`](../pristine/constant.Vertex::ROOT.html).
pub fn iter_graph_children<'txn, 'channel, 'changes, T, P>(
    txn: &'txn T,
    changes: &'changes P,
    channel: &'channel Channel<T>,
    key: Position<ChangeId>,
) -> GraphChildren<'txn, 'channel, 'changes, T, P>
where
    T: TxnT,
    P: ChangeStore,
{
    // Follow the FOLDER edges (possibly pseudo or block) leaving the
    // inode vertex of `key`.
    let adj = txn.iter_adjacent(
        &channel,
        key.inode_vertex(),
        EdgeFlags::FOLDER,
        EdgeFlags::FOLDER | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
    );
    GraphChildren {
        txn,
        channel,
        changes,
        adj,
        buf: Vec::new(),
    }
}
// org id 0cSAIupDOdaN83IzVNuArF2/MYb6cb1keVCSWXpfod0=
/// An iterator over the basenames of an "inode key" in the graph.
///
/// See [`iter_basenames`](fn.iter_basenames.html).
pub struct GraphBasenames<'txn, 'channel, 'changes, T: TxnT, P: ChangeStore + 'changes> {
    txn: &'txn T,
    channel: &'channel Channel<T>,
    // Iterator over the FOLDER | PARENT edges leaving the inode
    // vertex.
    adj: AdjacentIterator<'txn, T>,
    changes: &'changes P,
    // Scratch buffer for reading name-vertex contents, reused across
    // calls to `next`.
    buf: Vec<u8>,
}
impl<'txn, 'channel, 'changes, T: TxnT, P: ChangeStore + 'changes> Iterator
    for GraphBasenames<'txn, 'channel, 'changes, T, P>
{
    type Item = (Position<ChangeId>, InodeMetadata, String);
    /// For each FOLDER | PARENT edge out of the inode, loads the
    /// parent name vertex and yields the grandparent inode position
    /// together with the decoded permissions and basename.
    fn next(&mut self) -> Option<Self::Item> {
        self.adj.next().map(move |parent| {
            let dest = self.txn.find_block_end(&self.channel, parent.dest).unwrap();
            // Temporarily move `self.buf` out (leaving an empty Vec)
            // so it can be filled while `self.txn` is borrowed by the
            // closure below; `mem::take` is the idiomatic form of
            // `mem::replace(_, Vec::new())`.
            let mut buf = std::mem::take(&mut self.buf);
            self.changes
                .get_contents(|p| self.txn.get_external(p), dest, &mut buf)
                .unwrap();
            self.buf = buf;
            // A name vertex is two bytes of permissions followed by
            // the UTF-8 basename.
            let (perms, basename) = self.buf.split_at(2);
            let perms = InodeMetadata::from_basename(perms);
            let basename = std::str::from_utf8(basename).unwrap().to_string();
            let grandparent = self
                .txn
                .iter_adjacent(
                    &self.channel,
                    dest,
                    EdgeFlags::FOLDER | EdgeFlags::PARENT,
                    EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
                )
                .next()
                .unwrap();
            (grandparent.dest, perms, basename)
        })
    }
}
/// List all the basenames of an "inode key" in the graph (more than
/// one name means a conflict).
///
/// See also [`iter_paths`](fn.iter_paths.html).
pub fn iter_basenames<'txn, 'channel, 'changes, T, P>(
    txn: &'txn T,
    changes: &'changes P,
    channel: &'channel Channel<T>,
    pos: Position<ChangeId>,
) -> GraphBasenames<'txn, 'channel, 'changes, T, P>
where
    T: TxnT,
    P: ChangeStore,
{
    // Follow the FOLDER | PARENT edges (possibly pseudo) leaving the
    // inode vertex of `pos`.
    let adj = txn.iter_adjacent(
        &channel,
        pos.inode_vertex(),
        EdgeFlags::FOLDER | EdgeFlags::PARENT,
        EdgeFlags::FOLDER | EdgeFlags::PARENT | EdgeFlags::PSEUDO,
    );
    GraphBasenames {
        txn,
        channel,
        changes,
        adj,
        buf: Vec::new(),
    }
}
// org id bspD6KoukTup9yOup0ORHD0RACO5Ww3RiQDvvBs9i/k=
/// Traverse the paths in the graph to a key. **Warning:** there might
/// be a number of paths exponential in the number of conflicts.
///
/// This function takes a closure `f`, which gets called on each path
/// with an iterator over the keys from the root to `key`. This
/// function stops when `f` returns `false` (or when all the paths
/// have been traversed).
///
/// See also [`iter_basenames`](fn.iter_basenames.html).
pub fn iter_paths<T: TxnT, F: FnMut(&mut dyn Iterator<Item = Position<ChangeId>>) -> bool>(
    txn: &T,
    channel: &ChannelRef<T>,
    key: Position<ChangeId>,
    mut f: F,
) {
    let channel = channel.r.borrow();
    // Depth-first search from `key` towards the root. The boolean
    // marks whether an entry has already been expanded and is part of
    // the current path (`true`), or is still waiting to be explored
    // (`false`).
    //
    // BUG FIX: the initial entry must be unexplored. It was previously
    // pushed as `(key, true)`, but the only expansion branch below is
    // `else if !on_stack`, so a non-root `key` was popped and dropped
    // immediately and no path was ever traversed.
    let mut stack: Vec<(Position<ChangeId>, bool)> = vec![(key, false)];
    while let Some((cur_key, on_stack)) = stack.pop() {
        if cur_key.is_root() {
            // A complete path was found: report the on-path entries
            // remaining on the stack, and stop if `f` returns false.
            if !f(&mut stack
                .iter()
                .filter_map(|(key, on_path)| if *on_path { Some(*key) } else { None }))
            {
                break;
            }
        } else if !on_stack {
            // Lower bound for the graph iterations below: FOLDER |
            // PARENT edges, any destination.
            let e = Edge {
                flag: EdgeFlags::FOLDER | EdgeFlags::PARENT,
                dest: Position::ROOT,
                introduced_by: ChangeId::ROOT,
            };
            // Mark `cur_key` as part of the current path, then push
            // its unexplored grandparents (from each parent name
            // vertex to the inode containing that name).
            stack.push((cur_key, true));
            let len = stack.len();
            for (_, parent) in txn
                .iter_graph(&channel.graph, cur_key.inode_vertex(), Some(e))
                .take_while(|&(k, _)| k == cur_key.inode_vertex())
                .filter(|&(_, ref v)| v.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT))
            {
                let parent_dest = txn.find_block_end(&channel, parent.dest).unwrap();
                for (_, grandparent) in txn
                    .iter_graph(&channel.graph, parent_dest, Some(e))
                    .take_while(|&(k, _)| k == parent_dest)
                    .filter(|&(_, ref v)| v.flag.contains(EdgeFlags::FOLDER | EdgeFlags::PARENT))
                {
                    stack.push((grandparent.dest, false))
                }
            }
            // Dead end (no grandparent pushed): remove `cur_key` from
            // the path again.
            if stack.len() == len {
                stack.pop();
            }
        }
        // Entries popped with `on_stack == true` are simply dropped:
        // that is the backtracking step.
    }
}
/// Follows `path` down from the root of the graph, resolving each
/// component to the matching name vertex introduced by the *oldest*
/// change. Returns the final inode position and whether any component
/// was ambiguous (matched by more than one name, i.e. a conflict).
pub(crate) fn follow_oldest_path<T: TxnT, C: ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &ChannelRef<T>,
    path: &str,
) -> Result<(Position<ChangeId>, bool), anyhow::Error> {
    use crate::pristine::*;
    let channel = channel.borrow();
    debug!("follow_oldest_path = {:?}", path);
    let mut current = Position::ROOT;
    let flag0 = EdgeFlags::FOLDER;
    let flag1 = flag0 | EdgeFlags::BLOCK | EdgeFlags::PSEUDO;
    let mut name_buf = Vec::new();
    // Set as soon as a component matches more than one name vertex.
    let mut ambiguous = false;
    for c in crate::path::components(path) {
        // Among the folder children of `current`, find the name vertex
        // whose basename equals `c`, preferring the oldest change.
        let mut next = None;
        for name in txn.iter_adjacent(&channel, current.inode_vertex(), flag0, flag1) {
            let name_dest = txn.find_block(&channel, name.dest).unwrap();
            name_buf.clear();
            debug!("getting contents {:?}", name);
            changes.get_contents(|h| txn.get_external(h), name_dest, &mut name_buf)?;
            // A name vertex stores two permission bytes before the
            // UTF-8 basename (see `GraphChildren::next`).
            if std::str::from_utf8(&name_buf[2..]) == Ok(c) {
                let age = txn
                    .get_changeset(&channel.changes, name.dest.change, None)
                    .unwrap();
                if let Some((ref mut next, ref mut next_age)) = next {
                    // Second (or later) candidate: ambiguous; keep the
                    // one with the smallest age, i.e. the oldest.
                    ambiguous = true;
                    if age < *next_age {
                        *next = name_dest;
                        *next_age = age;
                    }
                } else {
                    next = Some((name_dest, age));
                }
            }
        }
        if let Some((next, _)) = next {
            // Descend from the chosen name vertex to the inode it
            // names.
            current = txn
                .iter_adjacent(&channel, next, flag0, flag1)
                .next()
                .unwrap()
                .dest
        } else {
            return Err((Error::FileNotInRepo {
                path: path.to_string(),
            })
            .into());
        }
    }
    Ok((current, ambiguous))
}
// org id kjvsilNXRBZpH7gJoN20ycppeDLC3F/GrbO0OVhQF+4=
/// Reconstructs a path from position `v` up to the root of the graph,
/// at each level following the parent name introduced by the oldest
/// change (or the youngest, if `youngest` is true). Returns the path
/// joined with `/`, and whether every step was alive (no DELETED
/// parent edge was chosen).
pub fn find_path<T: TxnT, C: ChangeStore>(
    changes: &C,
    txn: &T,
    channel: &Channel<T>,
    youngest: bool,
    mut v: Position<ChangeId>,
) -> Result<(String, bool), anyhow::Error> {
    debug!("oldest_path = {:?}", v);
    let mut path = Vec::new();
    let mut name_buf = Vec::new();
    let flag0 = EdgeFlags::FOLDER | EdgeFlags::PARENT;
    let flag1 = EdgeFlags::all();
    let mut all_alive = true;
    while !v.change.is_root() {
        let mut next_v = None;
        let mut alive = false;
        let inode_vertex = txn.find_block_end(&channel, v).unwrap();
        assert_eq!(inode_vertex, v.inode_vertex());
        for name in txn.iter_adjacent(channel, v.inode_vertex(), flag0, flag1) {
            if !name.flag.contains(EdgeFlags::PARENT) {
                continue;
            }
            debug!("oldest_path, name = {:?}", name);
            let age = txn
                .get_changeset(&channel.changes, name.dest.change, None)
                .unwrap();
            let name_dest = txn.find_block_end(&channel, name.dest).unwrap();
            debug!("name_dest = {:?}", name_dest);
            if let Some(next) = txn
                .iter_adjacent(channel, name_dest, flag0, flag1)
                .filter(|e| e.flag.contains(EdgeFlags::PARENT | EdgeFlags::FOLDER))
                .next()
            {
                debug!("oldest_path, next = {:?}", next);
                if !next.flag.contains(EdgeFlags::DELETED) {
                    alive = true;
                } else if alive {
                    // A live parent was already found; deleted ones
                    // cannot win any more.
                    break;
                } else {
                    all_alive = false
                }
                // Keep the extremal age: smallest when looking for the
                // oldest parent, largest when `youngest` is set.
                if let Some((_, p_age, _)) = next_v {
                    if (age > p_age) ^ youngest {
                        continue;
                    }
                }
                next_v = Some((name_dest, age, next.dest));
            }
        }
        // NOTE(review): panics if `v` has no FOLDER|PARENT parent —
        // assumes the graph is well-formed for non-root positions.
        let (name, _, next) = next_v.unwrap();
        if alive {
            name_buf.clear();
            debug!("getting contents {:?}", name);
            changes.get_contents(|h| txn.get_external(h), name, &mut name_buf)?;
            // Skip the two permission bytes at the start of a name
            // vertex.
            path.push(std::str::from_utf8(&name_buf[2..]).unwrap().to_string());
        }
        debug!("next = {:?}", next);
        v = next;
    }
    // Components were collected leaf-to-root.
    path.reverse();
    Ok((path.join("/"), all_alive))
}
// org id MEBHKxb8oK7gGUqwl4XgvF1rhw0vYcS2SvEcXbaqMok=
use crate::pristine::{ChangeId, Channel, Edge, EdgeFlags, TxnT, Vertex};
use std::collections::HashSet;
// org id 7GC7Q2pCnb0s8d1hQ5JvScuZvi24ZElhM713+gpHgeY=
/// Depth-first search from `vertex0`, following non-FOLDER edges
/// "down" the graph, collecting the first alive vertices found (those
/// with a non-deleted, non-pseudo parent edge).
pub(crate) fn find_alive_down<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    vertex0: Vertex<ChangeId>,
) -> Result<HashSet<Vertex<ChangeId>>, crate::Error> {
    let mut stack = vec![Edge {
        dest: vertex0.start_pos(),
        introduced_by: ChangeId::ROOT,
        flag: EdgeFlags::empty(),
    }];
    let mut visited = HashSet::new();
    let mut alive = HashSet::new();
    // org id naTw4q63bu2EvVqiMcyTpsR2XgGPIlo+Xg0pAHxty50=
    while let Some(elt) = stack.pop() {
        if !visited.insert(elt.dest) {
            continue;
        }
        let vertex = txn.find_block(&channel, elt.dest)?;
        debug!("elt = {:?}, vertex = {:?}", elt, vertex);
        // Children pushed for this vertex start at `elt_index`; they
        // are discarded if the vertex turns out to be alive.
        let elt_index = stack.len();
        // org id rATmHVXe7ujnLJm9BtP1kGj7iFT2vIEqTj1NygT3jKg=
        for v in txn.iter_adjacent(&channel, vertex, EdgeFlags::empty(), EdgeFlags::all()) {
            if v.flag.contains(EdgeFlags::FOLDER) {
                continue;
            }
            // org id zyrR2nOtEiDH3D+9lWuTt+nzSRQoz8jgKSCV9Pn6rBE=
            debug!("v = {:?}", v);
            if v.flag.contains(EdgeFlags::PARENT) {
                // A non-deleted, non-pseudo parent edge means `vertex`
                // is alive: record it and stop exploring this branch.
                if !v.flag.contains(EdgeFlags::DELETED)
                    && vertex != vertex0
                    && !v.flag.contains(EdgeFlags::PSEUDO)
                {
                    alive.insert(vertex);
                    stack.truncate(elt_index);
                    break;
                } else {
                    continue;
                }
            }
            // org id uQvQHlOvPqlL+IP8xnqczr279XI5bz+CUrzAi0CkA+c=
            // Non-parent edge: keep going down.
            stack.push(v)
        }
    }
    Ok(alive)
}
// org id Pw320u7FKY4pwoQyoNx3dm1mLfOcikfMt+ObiMN9njo=
/// Depth-first search from `vertex0`, following PARENT edges "up" the
/// graph. Returns the alive vertices found, and the (vertex, folder
/// edge) pairs for the deleted folder edges that were followed.
pub(crate) fn find_alive_up<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    vertex0: Vertex<ChangeId>,
) -> Result<(HashSet<Vertex<ChangeId>>, Vec<(Vertex<ChangeId>, Edge)>), crate::Error> {
    let mut alive = HashSet::new();
    let mut files = Vec::new();
    let mut stack = vec![Edge {
        dest: vertex0.end_pos(),
        introduced_by: ChangeId::ROOT,
        flag: EdgeFlags::empty(),
    }];
    let mut visited = HashSet::new();
    // org id 4NHenXps5de4P2CKb7tHKFazsveVGn9JlzWwLKcaDfY=
    while let Some(elt) = stack.pop() {
        if elt.dest.is_root() {
            continue;
        }
        if !visited.insert(elt.dest) {
            continue;
        }
        let vertex = txn.find_block_end(&channel, elt.dest)?;
        debug!("find_alive_up: elt = {:?}, vertex = {:?}", elt, vertex);
        // Entries pushed for this vertex start at `elt_index`; they
        // are discarded if the vertex turns out to be alive.
        let elt_index = stack.len();
        // org id yUDemNuncRZ3JF72y4FgQuPPYbQhsyj+3Dw9xom4PyQ=
        // Among the deleted FOLDER parents of `vertex`, only the
        // youngest one is followed; `next_file` holds the current
        // candidate together with its age.
        let mut next_file = None;
        for v in txn.iter_adjacent(&channel, vertex, EdgeFlags::PARENT, EdgeFlags::all()) {
            debug!("find_alive_up: v = {:?}", v);
            if !v.flag.contains(EdgeFlags::PARENT) || v.flag.contains(EdgeFlags::PSEUDO) {
                continue;
            }
            // org id wquayIcqrmfRuMmqB4vd/wlbkHCGqWHFKFrwWY7st9g=
            if !v.flag.contains(EdgeFlags::DELETED) {
                // An alive parent edge: stop exploring this branch.
                if v.flag.contains(EdgeFlags::FOLDER) {
                    // check whether `vertex` is a "file" inode,
                    // i.e. if it has non-folder children. We're never
                    // in the case of an empty file if we call this
                    // function.
                    if next_file.is_none() {
                        if txn
                            .iter_adjacent(&channel, vertex, EdgeFlags::empty(), EdgeFlags::all())
                            .any(|e| {
                                !e.flag.contains(EdgeFlags::PARENT)
                                    && !e.flag.contains(EdgeFlags::FOLDER)
                            })
                        {
                            alive.insert(vertex);
                        }
                    }
                    next_file = None;
                } else {
                    alive.insert(vertex);
                }
                stack.truncate(elt_index);
                break;
            }
            // org id S8/CnGSbU1qxCbYJGuK142RZkxIMPWg2k8LfVRnevOY=
            if v.flag.contains(EdgeFlags::FOLDER) {
                let v_age = txn
                    .get_changeset(&channel.changes, v.introduced_by, None)
                    .unwrap();
                if let Some((ref mut file, ref mut age)) = next_file {
                    // Keep the youngest deleted folder edge.
                    if v_age > *age {
                        *age = v_age;
                        *file = (vertex, v)
                    }
                } else {
                    next_file = Some(((vertex, v), v_age));
                    // If `vertex` is a "file" inode (as opposed to a
                    // "folder" inode), mark it "alive".
                    if txn
                        .iter_adjacent(&channel, vertex, EdgeFlags::empty(), EdgeFlags::all())
                        .any(|e| {
                            !e.flag.contains(EdgeFlags::PARENT)
                                && !e.flag.contains(EdgeFlags::FOLDER)
                        })
                    {
                        alive.insert(vertex);
                    }
                }
            } else {
                // Deleted non-folder parent: keep going up.
                stack.push(v)
            }
        }
        // Follow the chosen deleted folder edge upwards.
        if let Some((file, _)) = next_file {
            files.push(file);
            stack.push(file.1)
        }
    }
    Ok((alive, files))
}
// org id CuLytHOw5MhIUxraqhZp3RiCvAXraOTYajmAlakyEYY=
use crate::pristine::*;
use crate::vertex_buffer;
use std::collections::{HashMap, HashSet};
// org id JJDR/iA4tUvr8bHYeAudspj8wfK5zy20N2fayiMztIQ=
/// State accumulated while flattening the pristine graph of a file,
/// used as the "a" side of a diff against the working copy.
pub(super) struct Diff {
    // Scratch buffer for reading vertex contents.
    pub buf: Vec<u8>,
    // org id O4woWVifU6a3TB5v+SW/IS06RN6LDal4OOyCtAcxrLg=
    // Position of the file's inode in the graph.
    pub inode: Position<Option<ChangeId>>,
    pub path: String,
    // org id znv/E/xSBDaD0tw5tDrdhOrfa00e2i6hoX4Y0P0M1yU=
    // Flattened contents of side "a", and the graph vertices backing
    // each byte range of it (in increasing `pos` order).
    pub contents_a: Vec<u8>,
    pub pos_a: Vec<Vertex>,
    // org id VrcGilQ4pnxnCqaZ30f0AYxuMW5/PAjPKYEV0W8OS3o=
    // Byte offsets in `contents_a` where an end-of-line was counted
    // but not actually present (see `begin_conflict_` and
    // `LineSplit`).
    pub missing_eol: HashSet<usize>,
    // org id tHKWG2wUrgq0sWiE8x5vHspQufWcVCF0VBQMdJ/E6PI=
    // Conflict markers, keyed by byte offset in `contents_a`.
    pub marker: HashMap<usize, ConflictMarker>,
    // org id RtX+AmeBXlMmux4krn3hr2TC5YAQ4dUD0aQ9cG0WmPE=
    // Currently open conflicts, innermost last (always contains at
    // least the `Root` sentinel).
    conflict_stack: Vec<Conflict>,
    pub conflict_ends: Vec<ConflictEnds>,
    pub solved_conflicts: HashSet<usize>,
    // Byte ranges of `contents_a` covered by cyclic conflicts.
    pub cyclic_conflict_bytes: Vec<(usize, usize)>,
}
// org id BJp9ZUxNcNZGXbeuELu/m0hxjcCh0Lrworivp2DNz/I=
/// An open conflict on `Diff::conflict_stack`.
#[derive(Debug, Clone)]
pub struct Conflict {
    // Index of this conflict in `Diff::conflict_ends`.
    pub counter: usize,
    // Number of `Next` separators seen so far in this conflict.
    pub side: usize,
    pub conflict_type: ConflictType,
}
/// The kind of a conflict. `Root` is the sentinel used for the
/// implicit, always-open outermost "conflict".
#[derive(Debug, Clone, Copy)]
pub enum ConflictType {
    Root,
    Order,
    Zombie,
    Cyclic,
}
// org id x2fKAxT97bw7IkKwnYnx5G3Ys3zfwC7SQzd3J/n1vPU=
/// Boundaries of a recorded conflict: `start` and `end` are indices
/// into `Diff::pos_a`, `end_pos` is the byte offset of the end marker
/// in `Diff::contents_a`.
#[derive(Debug)]
pub struct ConflictEnds {
    pub start: usize,
    pub end: usize,
    pub end_pos: usize,
    pub conflict_type: ConflictType,
}
// org id CBYn0QVUhTLd+1DPhJGxeuxcQPrCqfoCj5QdyKDc7Hg=
/// The kind of conflict marker found at a given byte offset of
/// `Diff::contents_a` (see `Diff::marker`).
#[derive(Debug, PartialEq, Eq)]
pub enum ConflictMarker {
    Begin,
    Next,
    End,
}
// org id a0AuyLQJdOXmDjcbzuLKji89Suyb79UP94XH0DGe0rs=
/// A graph vertex together with its placement in the flattened
/// contents of a `Diff`.
#[derive(Debug)]
pub struct Vertex {
    // Byte offset of this vertex's contents in `Diff::contents_a`.
    pub pos: usize,
    pub vertex: crate::pristine::Vertex<ChangeId>,
    // Set when the next entry in `pos_a` is a conflict marker.
    pub before_conflict: bool,
    // Index into `Diff::conflict_ends` of the innermost conflict this
    // vertex belongs to.
    pub conflict: usize,
}
// org id pMXpkacg64vkNGPaLA/BBNw5jtn7ZI+FySQGEkPivNo=
impl Diff {
    /// Creates an empty `Diff` for the file at `path`, rooted at
    /// `inode`, with buffers pre-sized from the alive graph.
    pub fn new(
        inode: Position<Option<ChangeId>>,
        path: String,
        graph: &crate::alive::Graph,
    ) -> Self {
        // Both the conflict-end list and the conflict stack start with
        // a "root" sentinel, so `last()` and indexing by conflict
        // counter never fail.
        let root_ends = ConflictEnds {
            start: 0,
            end: 0,
            end_pos: 0,
            conflict_type: ConflictType::Root,
        };
        let root_conflict = Conflict {
            counter: 0,
            side: 0,
            conflict_type: ConflictType::Root,
        };
        Diff {
            inode,
            path,
            buf: Vec::with_capacity(graph.len_bytes()),
            contents_a: Vec::with_capacity(graph.len_bytes()),
            pos_a: Vec::with_capacity(2 * graph.len_vertices()),
            missing_eol: HashSet::new(),
            marker: HashMap::new(),
            conflict_ends: vec![root_ends],
            conflict_stack: vec![root_conflict],
            cyclic_conflict_bytes: Vec::new(),
            solved_conflicts: HashSet::new(),
        }
    }
}
// org id NnHmVJ1BuujNsYVWbu4O4GAsabVxPG61uPjBjwLDGXY=
impl Diff {
    /// Returns the graph vertex corresponding to the byte range
    /// `pos..end_pos` inside the `i`-th entry of `pos_a`, with its
    /// change positions shifted by the offset into the entry.
    pub fn vertex(
        &self,
        i: usize,
        pos: usize,
        end_pos: usize,
    ) -> crate::pristine::Vertex<ChangeId> {
        let mut v = self.pos_a[i].vertex;
        assert!(!v.is_root());
        if pos > self.pos_a[i].pos {
            v.start =
                ChangePosition(self.pos_a[i].vertex.start.0 + (pos - self.pos_a[i].pos) as u64)
        }
        // Only truncate the end when `end_pos` falls before the next
        // entry (or this is the last entry).
        if i + 1 >= self.pos_a.len() || end_pos < self.pos_a[i + 1].pos {
            v.end =
                ChangePosition(self.pos_a[i].vertex.start.0 + (end_pos - self.pos_a[i].pos) as u64)
        }
        v
    }
    /// Like `vertex`, but returns only the position of byte `pos`
    /// inside the `i`-th entry of `pos_a`.
    pub fn position(&self, i: usize, pos: usize) -> crate::pristine::Position<ChangeId> {
        let mut v = self.pos_a[i].vertex.start_pos();
        if pos > self.pos_a[i].pos {
            v.pos = ChangePosition(self.pos_a[i].vertex.start.0 + (pos - self.pos_a[i].pos) as u64)
        }
        v
    }
}
// org id Pk25wSBAZ0t4Oeyx6ZTm1xqxX4h54qK8APBYvN+cjoo=
impl Diff {
    /// Opens a new conflict: pushes it on the conflict stack, records
    /// provisional boundaries in `conflict_ends`, and places a `Begin`
    /// marker at the current (line-aligned) byte position.
    fn begin_conflict_(&mut self, conflict_type: ConflictType) {
        // The conflict's counter is its index in `conflict_ends` (the
        // entry itself is pushed just below).
        let counter = self.conflict_ends.len();
        self.conflict_stack.push(Conflict {
            counter,
            side: 0,
            conflict_type,
        });
        // The marker must sit at a line boundary: if the contents do
        // not end with '\n', record the missing end-of-line and count
        // one extra byte.
        let marker_pos = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        self.conflict_ends.push(ConflictEnds {
            start: self.pos_a.len(),
            end: self.pos_a.len(),
            end_pos: marker_pos,
            conflict_type,
        });
        self.marker.insert(marker_pos, ConflictMarker::Begin);
    }
}
impl vertex_buffer::VertexBuffer for Diff {
    /// Appends the contents of vertex `v` (produced by `c`) to
    /// `contents_a`, recording its byte offset and the innermost open
    /// conflict in `pos_a`.
    fn output_line<C: FnOnce(&mut Vec<u8>) -> Result<(), anyhow::Error>>(
        &mut self,
        v: crate::pristine::Vertex<ChangeId>,
        c: C,
    ) -> Result<(), anyhow::Error> {
        if v == crate::pristine::Vertex::BOTTOM {
            return Ok(());
        }
        // `buf` is a reusable scratch buffer.
        self.buf.clear();
        c(&mut self.buf)?;
        self.pos_a.push(Vertex {
            pos: self.contents_a.len(),
            vertex: v,
            before_conflict: false,
            conflict: self.conflict_stack.last().unwrap().counter,
        });
        self.contents_a.extend(&self.buf);
        Ok(())
    }
    // org id 0I3Htm1AUtdNytI0fsyZC9DaKebRuVAOWnCAp1eM4As=
    fn begin_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.begin_conflict_(ConflictType::Order);
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    fn begin_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        // Record where the cyclic conflict starts; the end of the
        // range is patched in `end_cyclic_conflict`.
        let len = self.contents_a.len();
        self.begin_conflict_(ConflictType::Cyclic);
        self.cyclic_conflict_bytes.push((len, len));
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    fn begin_zombie_conflict(&mut self) -> Result<(), anyhow::Error> {
        self.begin_conflict_(ConflictType::Zombie);
        self.output_conflict_marker(vertex_buffer::START_MARKER)
    }
    // org id zJgaktpjBecKJlgqzyViT+wt457sbkOEgjcJ7OGLOcU=
    /// Closes the innermost conflict: writes the end marker and
    /// patches the conflict's recorded end index and byte offset.
    fn end_conflict(&mut self) -> Result<(), anyhow::Error> {
        // Line-aligned offset of the end marker (accounting for a
        // possibly missing final '\n', as in `begin_conflict_`).
        let len = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        let chunk = self.pos_a.len();
        self.output_conflict_marker(vertex_buffer::END_MARKER)?;
        let conflict = self.conflict_stack.pop().unwrap();
        self.marker.insert(len, ConflictMarker::End);
        self.conflict_ends[conflict.counter].end_pos = len;
        self.conflict_ends[conflict.counter].end = chunk;
        Ok(())
    }
    fn end_cyclic_conflict(&mut self) -> Result<(), anyhow::Error> {
        debug!("end_cyclic_conflict");
        self.end_conflict()?;
        // Patch the byte range opened in `begin_cyclic_conflict`.
        self.cyclic_conflict_bytes.last_mut().unwrap().1 = self.contents_a.len();
        Ok(())
    }
    // org id W0ejcsiNdGBeNc6omb0+BUSF8MKjBtY+mLMyyqcvPBU=
    /// Starts the next side of the innermost conflict.
    fn conflict_next(&mut self) -> Result<(), anyhow::Error> {
        let len = match self.contents_a.last() {
            Some(&b'\n') | None => self.contents_a.len(),
            _ => {
                self.missing_eol.insert(self.contents_a.len());
                self.contents_a.len() + 1
            }
        };
        self.conflict_stack.last_mut().unwrap().side += 1;
        self.marker.insert(len, ConflictMarker::Next);
        self.output_conflict_marker(vertex_buffer::SEPARATOR)
    }
    // org id kwhiQGVdomF6A7ajQbYvVGPQrMY24ay2R9148GsaXPw=
    /// Appends a conflict marker line to `contents_a`, represented in
    /// `pos_a` by a ROOT vertex.
    fn output_conflict_marker(&mut self, marker: &str) -> Result<(), anyhow::Error> {
        if let Some(line) = self.pos_a.last_mut() {
            line.before_conflict = true
        }
        debug!(
            "output_conflict_marker {:?} {:?}",
            self.contents_a.last(),
            marker
        );
        // The marker's first byte is skipped when the contents already
        // end with '\n' (presumably the marker string starts with a
        // newline — TODO confirm against the marker constants).
        let pos = match self.contents_a.last() {
            Some(&b'\n') | None => {
                let len = self.contents_a.len();
                self.contents_a.extend(marker.as_bytes().iter().skip(1));
                len
            }
            _ => {
                let len = self.contents_a.len() + 1;
                self.contents_a.extend(marker.as_bytes().iter());
                len
            }
        };
        self.pos_a.push(Vertex {
            pos,
            vertex: crate::pristine::Vertex::ROOT,
            before_conflict: false,
            conflict: self.conflict_stack.last().unwrap().counter,
        });
        Ok(())
    }
}
// org id JBb+2glaECLGIRa7HRodwcjfm5JxXetKLaq3zZ/Nmlk=
impl Diff {
    /// Index of the entry of `pos_a` containing byte `pos`, preferring
    /// the last entry among those sharing the same byte offset.
    pub fn last_vertex_containing(&self, pos: usize) -> usize {
        match self.pos_a.binary_search_by(|l| l.pos.cmp(&pos)) {
            Ok(mut i) => {
                // Several entries can share a byte offset (empty
                // vertices, markers): move forward to the last one.
                while i + 1 < self.pos_a.len() && self.pos_a[i + 1].pos == self.pos_a[i].pos {
                    i += 1
                }
                i
            }
            Err(i) => {
                // `pos` falls strictly inside an entry; it cannot be
                // before the first one.
                assert!(i > 0);
                i - 1
            }
        }
    }
    // org id oVu2Ds9U4nGNutRBwAzH93ymSXS45iQmZKidcu3IFk0=
    /// Index of the entry of `pos_a` containing byte `pos`, preferring
    /// the first entry among those sharing the same byte offset.
    pub fn first_vertex_containing(&self, pos: usize) -> usize {
        match self.pos_a.binary_search_by(|l| l.pos.cmp(&pos)) {
            Ok(mut i) => {
                // Move back to the first entry at this byte offset.
                while i > 0 && self.pos_a[i - 1].pos == self.pos_a[i].pos {
                    i -= 1
                }
                i
            }
            Err(i) => {
                assert!(i > 0);
                i - 1
            }
        }
    }
}
// org id rBOk5JvKonh9vfG67HYNYVjMj8JBPVzrm1uwlszSVrU=
use std::collections::HashSet;
/// An iterator over the lines of a byte buffer, optionally stripping
/// end-of-line bytes at the offsets recorded in `missing_eol`.
pub struct LineSplit<'a> {
    buf: &'a [u8],
    missing_eol: Option<&'a HashSet<usize>>,
    // Byte offset of the start of the next line to yield.
    current: usize,
}
impl super::vertex_buffer::Diff {
    /// Splits `contents_a` into lines, using `missing_eol` to strip
    /// the end-of-line bytes that were only synthesized.
    pub fn lines(&self) -> LineSplit {
        LineSplit {
            current: 0,
            missing_eol: Some(&self.missing_eol),
            buf: &self.contents_a,
        }
    }
}
impl<'a> std::convert::From<&'a [u8]> for LineSplit<'a> {
    /// Builds a line splitter over a plain byte buffer, with no
    /// missing-end-of-line information.
    fn from(buf: &'a [u8]) -> LineSplit<'a> {
        LineSplit {
            current: 0,
            buf,
            missing_eol: None,
        }
    }
}
impl<'a> Iterator for LineSplit<'a> {
    type Item = &'a [u8];
    /// Yields the next line including its '\n' terminator, except that
    /// a terminator at an offset recorded in `missing_eol` is stripped.
    fn next(&mut self) -> Option<Self::Item> {
        if self.current >= self.buf.len() {
            return None;
        }
        let start = self.current;
        // Advance to the next '\n' (or the end of the buffer)...
        while self.current < self.buf.len() && self.buf[self.current] != b'\n' {
            self.current += 1
        }
        // ...and consume the '\n' itself if there is one.
        if self.current < self.buf.len() {
            self.current += 1
        }
        let mut end = self.current;
        // If the byte just before the new position was flagged as a
        // missing end-of-line, leave it out of the yielded slice.
        if let Some(missing) = self.missing_eol {
            if missing.contains(&(self.current - 1)) {
                end -= 1
            }
        }
        Some(&self.buf[start..end])
    }
}
// org id i9zs1FuoTKihEV4e7Hjhwp+Ig1VWaUaVLdsXA5wWR60=
use super::diff::*;
use super::vertex_buffer::{ConflictMarker, Diff};
use super::{bytes_len, bytes_pos, Line};
use crate::change::{Atom, Local, NewVertex, Record};
use crate::pristine::{ChangeId, ChangePosition, EdgeFlags, Position};
use crate::record::Builder;
use std::collections::{HashMap, HashSet};
// org id yQkpDXRjJGQ6G7F+gzy9YVtKnPW8ZuaUzipUfB+xtEc=
/// State shared between the hunks of one diff, relating conflicts in
/// the old contents to positions in the change being built.
pub struct ConflictContexts {
    // Up-context position recorded per conflict number.
    pub up: HashMap<usize, ChangePosition>,
    // For each index into `Diff::pos_a`, positions of new vertices
    // ending there (used as extra up contexts by later hunks).
    pub side_ends: HashMap<usize, Vec<ChangePosition>>,
    // Conflicts relevant to the current marker scan.
    pub active: HashSet<usize>,
    // Positions recorded for reordered lines, keyed by old line number
    // (presumably filled by the deletion pass — TODO confirm).
    pub reorderings: HashMap<usize, ChangePosition>,
}
impl ConflictContexts {
    /// Creates an empty set of conflict contexts.
    pub fn new() -> Self {
        ConflictContexts {
            up: HashMap::new(),
            side_ends: HashMap::new(),
            reorderings: HashMap::new(),
            active: HashSet::new(),
        }
    }
}
// org id lpPd5PgXTaP14efELbl1ZpntaiMAD1GQyzeEiC3pLeU=
impl Builder {
    /// Records hunk `r` of `dd` as an insertion: replaces `old_len`
    /// lines of `lines_a` (starting at `old`) with `new_len` lines of
    /// `lines_b` (starting at `from_new`), creating a `NewVertex` atom
    /// with the appropriate up and down contexts.
    pub(super) fn replace(
        &mut self,
        diff: &Diff,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        dd: &D,
        r: usize,
    ) -> Result<(), anyhow::Error> {
        let old = dd[r].old;
        let old_len = dd[r].old_len;
        let from_new = dd[r].new;
        let len = dd[r].new_len;
        let up_context = get_up_context(diff, conflict_contexts, lines_a, old);
        // The down context must be computed before the new lines are
        // appended, since it uses the current contents length.
        let down_context = get_down_context(
            diff,
            conflict_contexts,
            dd,
            lines_a,
            lines_b,
            old,
            old_len,
            from_new,
            len,
            self.rec.contents.len(),
        );
        // Copy the inserted lines into the change's contents, followed
        // by a 0 separator byte.
        let start = self.rec.contents.len();
        for &line in &lines_b[from_new..(from_new + len)] {
            self.rec.contents.extend(line.l);
        }
        let end = self.rec.contents.len();
        self.rec.contents.push(0);
        let change = Atom::NewVertex(NewVertex {
            up_context,
            down_context,
            flag: EdgeFlags::empty(),
            start: ChangePosition(start as u64),
            end: ChangePosition(end as u64),
            inode: diff.inode,
        });
        // org id +YBMbZ/a+Rur9Ym45CDXCBvMPIg1i8caARuyPsF0VSU=
        // If this hunk also deleted lines and the previous action is
        // the corresponding deletion (same local line), merge the two
        // into a single `Replacement` record.
        if old_len > 0 {
            match self.rec.actions.pop() {
                Some(Record::Edit { change: c, local }) => {
                    if local.line == from_new {
                        self.rec.actions.push(Record::Replacement {
                            change: c,
                            local,
                            replacement: change,
                        });
                        return Ok(());
                    } else {
                        // Not the matching deletion: put it back.
                        self.rec.actions.push(Record::Edit { change: c, local })
                    }
                }
                Some(c) => self.rec.actions.push(c),
                _ => {}
            }
        }
        self.rec.actions.push(Record::Edit {
            local: Local {
                line: from_new,
                path: diff.path.clone(),
            },
            change,
        });
        Ok(())
    }
}
// org id Obu6fZedTr+KH2T/j/6hedbo5xCjNezHvw09Parbyp0=
/// Computes the up context (the positions the new vertex is inserted
/// after) for an insertion at old line `old`.
pub(super) fn get_up_context(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    lines_a: &[Line],
    old: usize,
) -> Vec<Position<Option<ChangeId>>> {
    // org id MwsFBtXGGgFFkme/wF5mxpM0NvpA+tcMkOZlIpMmO24=
    // If a position was recorded for line `old` (presumably by an
    // earlier reordering hunk), use it; `change: None` means "in this
    // change".
    if let Some(&pos) = conflict_contexts.reorderings.get(&old) {
        return vec![Position { change: None, pos }];
    }
    // org id 74H9BVbOYJe7wQxbjLfNVVz1M3s5HpCHidKw+JnqorQ=
    // Byte offset in `diff.contents_a` of the start of line `old`;
    // line 0 means inserting right after the first vertex.
    let old_bytes = if old == 0 {
        return vec![diff.pos_a[0].vertex.end_pos().to_option()];
    } else if old < lines_a.len() {
        bytes_pos(lines_a, old)
    } else {
        diff.contents_a.len()
    };
    debug!("old_bytes {:?}", old_bytes);
    let mut up_context_idx = diff.last_vertex_containing(old_bytes - 1);
    // org id 7fAui6Yd4tbCGv7m0n83w2oYiZIgobIz0JX9CiRpEJk=
    // Walk backwards over conflict markers until a real vertex (or a
    // conflict whose up context is already known) is found.
    let mut seen_conflict_markers = false;
    loop {
        debug!("up_context_idx = {:?}", up_context_idx);
        debug!("{:?}", diff.marker.get(&diff.pos_a[up_context_idx].pos));
        match diff.marker.get(&diff.pos_a[up_context_idx].pos) {
            None if seen_conflict_markers => {
                // We skipped over a conflict: use the end of this
                // vertex.
                return vec![diff.pos_a[up_context_idx].vertex.end_pos().to_option()]
            }
            None => {
                // Regular vertex: the up context is the exact position
                // of `old_bytes` inside it.
                let change = diff.pos_a[up_context_idx].vertex.change;
                let pos = diff.pos_a[up_context_idx].vertex.start;
                let offset = old_bytes - diff.pos_a[up_context_idx].pos;
                debug!("offset {:?} {:?}", pos.0, offset);
                return vec![Position {
                    change: Some(change),
                    pos: ChangePosition(pos.0 + offset as u64),
                }];
            }
            // org id 6tOYtor9USyZl/m9YLz19gVQaEqheJF1dDjWvvJvpAs=
            Some(ConflictMarker::End) => {
                // Inserting right after a conflict: one up context per
                // side of it.
                debug!("get_up_context_conflict");
                return get_up_context_conflict(diff, conflict_contexts, up_context_idx);
            }
            // org id NFZgWc05rTTGrohnEcMsNeFMtreI03vwwhQTezH8p5A=
            _ => {
                // `Begin` or `Next` marker: jump to just before the
                // conflict's start, unless an up context was already
                // recorded for this conflict.
                let conflict = diff.pos_a[up_context_idx].conflict;
                debug!(
                    "conflict = {:?} {:?}",
                    conflict, diff.conflict_ends[conflict]
                );
                if let Some(&pos) = conflict_contexts.up.get(&conflict) {
                    return vec![Position { change: None, pos }];
                }
                seen_conflict_markers = true;
                if diff.conflict_ends[conflict].start > 0 {
                    up_context_idx = diff.conflict_ends[conflict].start - 1
                } else {
                    return vec![diff.pos_a[0].vertex.end_pos().to_option()];
                }
            }
        }
    }
}
// org id iZ2idzfUTJBfGFSVixuhZjMUMGHKnxxpBag6PJ0K2ZE=
/// `up_context_idx` points at a conflict `End` marker: collects one
/// up-context position for the last vertex of each side of that
/// conflict (and of conflicts nested in it).
fn get_up_context_conflict(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    mut up_context_idx: usize,
) -> Vec<Position<Option<ChangeId>>> {
    let conflict = diff.pos_a[up_context_idx].conflict;
    let conflict_start = diff.conflict_ends[conflict].start;
    let mut up_context = Vec::new();
    // org id rq9KS5r8ewdtArAm5XPZ2oHi7IQ15BWwNxoHHxSl2Nk=
    // Positions of new vertices (from earlier hunks) that end at this
    // index also belong to the up context.
    if let Some(ref up) = conflict_contexts.side_ends.get(&up_context_idx) {
        up_context.extend(up.iter().map(|&pos| Position { change: None, pos }));
    }
    // org id +L0C6tvDi9HguptfaxX1Tc0UfPHxGsY1ppC8XSL2Hhs=
    // Scan backwards; `on` is true while the current side's last
    // vertex has not been collected yet.
    let mut on = true;
    conflict_contexts.active.clear();
    conflict_contexts.active.insert(conflict);
    while up_context_idx > conflict_start {
        match diff.marker.get(&diff.pos_a[up_context_idx].pos) {
            None if on => {
                // Last real vertex of the current side.
                let change = diff.pos_a[up_context_idx].vertex.change;
                let pos = diff.pos_a[up_context_idx].vertex.end;
                up_context.push(Position {
                    change: Some(change),
                    pos,
                });
                on = false
            }
            Some(ConflictMarker::End) if on => {
                // Nested conflict: its sides also end here.
                conflict_contexts
                    .active
                    .insert(diff.pos_a[up_context_idx].conflict);
            }
            Some(ConflictMarker::Next)
                if conflict_contexts
                    .active
                    .contains(&diff.pos_a[up_context_idx].conflict) =>
            {
                // Side separator of an active conflict: look for the
                // previous side's last vertex.
                on = true
            }
            _ => {}
        }
        up_context_idx -= 1;
    }
    assert!(!up_context.is_empty());
    up_context
}
// org id jxZo7ucNoQz0mtmEAIlebg6CHgQsFmYbCPUJWueSMy8=
/// Computes the down context (the positions the new vertex is
/// inserted before) for an insertion replacing `old_len` lines at old
/// line `old`.
pub(super) fn get_down_context(
    diff: &Diff,
    conflict_contexts: &mut ConflictContexts,
    dd: &D,
    lines_a: &[Line],
    lines_b: &[Line],
    old: usize,
    old_len: usize,
    from_new: usize,
    new_len: usize,
    contents_len: usize,
) -> Vec<Position<Option<ChangeId>>> {
    // org id vBcibNha9owu+bqUg9WwvqZ4IaGL80/pszPgJO/kBi8=
    // The replaced range extends to the end of the file: no down
    // context.
    if old + old_len >= lines_a.len() {
        return Vec::new();
    }
    let mut down_context_idx = 1;
    let mut pos_bytes = if old + old_len == 0 {
        0
    } else {
        // Byte offset of the first line after the replaced range.
        let pos_bytes = bytes_pos(lines_a, old) + bytes_len(lines_a, old, old_len);
        down_context_idx = diff.first_vertex_containing(pos_bytes);
        pos_bytes
    };
    // org id 0RMRK9MdsifnP+vtTYtOAL6A94lwcKLc+3pcZ1ggTUQ=
    while down_context_idx < diff.pos_a.len() {
        match diff.marker.get(&(diff.pos_a[down_context_idx].pos)) {
            // org id uqVV5rYxT/MZR4HYn/0O5W//GCNlsSEFYQfJQ/c90os=
            Some(ConflictMarker::Begin) => {
                // Inserting right before a conflict: one down context
                // per side of it.
                return get_down_context_conflict(
                    diff,
                    dd,
                    conflict_contexts,
                    lines_a,
                    lines_b,
                    from_new,
                    new_len,
                    down_context_idx,
                )
            }
            // org id 7Opl+ycZH8ls7qsjk63GJcPCOOahQvFKN8qvHAAvaL8=
            Some(marker) => {
                // A `Next` marker skips to the end of its conflict; an
                // `End` marker falls through. Either way, record that
                // the new vertex ends just before this index, so later
                // hunks can use it as an up context.
                if let ConflictMarker::Next = marker {
                    let conflict = diff.pos_a[down_context_idx].conflict;
                    down_context_idx = diff.conflict_ends[conflict].end;
                }
                // `or_default()` is the idiomatic form of
                // `or_insert(Vec::new())`.
                let e = conflict_contexts
                    .side_ends
                    .entry(down_context_idx)
                    .or_default();
                let b_len_bytes = bytes_len(lines_b, from_new, new_len);
                e.push(ChangePosition((contents_len + b_len_bytes) as u64));
                down_context_idx += 1
            }
            // org id +zMt4XAIXBWBGrnc2lR5l7Bax61VwTq7HLcMAWfpJys=
            None => {
                // Regular vertex: find its first byte that this diff
                // does not delete, and use that position.
                pos_bytes = pos_bytes.max(diff.pos_a[down_context_idx].pos);
                let next_vertex_pos = if down_context_idx + 1 >= diff.pos_a.len() {
                    diff.contents_a.len()
                } else {
                    diff.pos_a[down_context_idx + 1].pos
                };
                while pos_bytes < next_vertex_pos {
                    match dd.is_deleted(lines_a, pos_bytes) {
                        Some(Deleted { replaced: true, .. }) => return Vec::new(),
                        Some(Deleted {
                            replaced: false,
                            next,
                        }) => pos_bytes = next,
                        None => {
                            return vec![diff.position(down_context_idx, pos_bytes).to_option()]
                        }
                    }
                }
                down_context_idx += 1;
            }
        }
    }
    Vec::new()
}
// org id xS2xtB9b85dSnrkf1NO6wP4M7RkobbUuamR3dxwnog0=
/// `down_context_idx` points at a conflict `Begin` marker: collects
/// one down-context position for the first non-deleted vertex of each
/// side of that conflict (and of conflicts nested in it).
fn get_down_context_conflict(
    diff: &Diff,
    dd: &D,
    conflict_contexts: &mut ConflictContexts,
    lines_a: &[Line],
    lines_b: &[Line],
    from_new: usize,
    new_len: usize,
    mut down_context_idx: usize,
) -> Vec<Position<Option<ChangeId>>> {
    let conflict = diff.pos_a[down_context_idx].conflict;
    let len_bytes = bytes_len(lines_b, from_new, new_len);
    // Record the end of the new vertex as this conflict's up context
    // (used by `get_up_context` for later hunks).
    conflict_contexts
        .up
        .insert(conflict, ChangePosition(len_bytes as u64));
    conflict_contexts.active.clear();
    conflict_contexts.active.insert(conflict);
    assert!(!diff.pos_a.is_empty());
    let conflict_end = diff.conflict_ends[conflict].end.min(diff.pos_a.len() - 1);
    let mut down_context = Vec::new();
    // `on` is true while the current side's first surviving vertex has
    // not been found yet.
    let mut on = true;
    let mut pos = diff.pos_a[down_context_idx].pos;
    loop {
        match diff.marker.get(&pos) {
            // org id lWg5gtAF+8f9aI71EwI0KMc/MWvbtUiWvxsFoOVoe6c=
            None if on => match dd.is_deleted(lines_a, pos) {
                Some(Deleted { replaced: true, .. }) => on = false,
                Some(Deleted { next, .. }) => {
                    // Deleted but not replaced: try the next line, if
                    // it is still inside the same vertex.
                    pos = next;
                    let next_pos = if down_context_idx + 1 < diff.pos_a.len() {
                        diff.pos_a[down_context_idx + 1].pos
                    } else {
                        diff.contents_a.len()
                    };
                    if pos < next_pos {
                        continue;
                    }
                }
                None => {
                    // First surviving vertex of this side.
                    down_context.push(diff.position(down_context_idx, pos).to_option());
                    on = false;
                }
            },
            // org id 8sx3D55StGNwIM6s5ed/OxIzl1T91oPBHjVam3z5r1I=
            Some(ConflictMarker::Begin) if on => {
                // Nested conflict: its sides are also candidates.
                conflict_contexts
                    .active
                    .insert(diff.pos_a[down_context_idx].conflict);
            }
            // org id oRsCOS5fTyddCz16FTkFk28kx81IY5VpUE2woTXD3K4=
            Some(ConflictMarker::Next)
                if conflict_contexts
                    .active
                    .contains(&diff.pos_a[down_context_idx].conflict) =>
            {
                // Side separator of an active conflict: look for the
                // next side's first vertex.
                on = true
            }
            _ => {}
        }
        // org id VMoaOu2rVxXKnhIvGDgJPPg2aZJxxbeYcL8Ni67JFhE=
        down_context_idx += 1;
        if down_context_idx > conflict_end {
            break;
        } else {
            pos = diff.pos_a[down_context_idx].pos
        }
    }
    down_context
}
use crate::alive::{output_graph, Graph};
use crate::changestore::*;
use crate::pristine::*;
use crate::record::Builder;
mod diff;
mod split;
mod vertex_buffer;
pub use diff::Algorithm;
mod delete;
mod replace;
/// A line of one side of a diff, with the flags needed for
/// conflict-aware comparison (see the `PartialEq` impl below).
#[derive(Debug, Hash, Clone, Copy)]
struct Line<'a> {
    // The line's bytes, normally including the trailing '\n'.
    l: &'a [u8],
    // True if this line lies inside a cyclic conflict.
    cyclic: bool,
    // True if the next marker in the contents is a conflict end; the
    // trailing '\n' is then optional when comparing.
    before_end_marker: bool,
    // True if this is the last line of its buffer.
    last: bool,
}
impl<'a> PartialEq for Line<'a> {
    /// Compares two lines, treating the trailing '\n' as optional on
    /// the side that sits just before a conflict end marker (and is
    /// compared against a non-final line ending in '\n').
    fn eq(&self, b: &Self) -> bool {
        if self.before_end_marker && !b.last && b.l.last() == Some(&b'\n') {
            return &b.l[..b.l.len() - 1] == self.l;
        }
        if b.before_end_marker && !self.last && self.l.last() == Some(&b'\n') {
            return &self.l[..self.l.len() - 1] == b.l;
        }
        // Ordinary case: identical bytes, and the same cyclic status.
        self.l == b.l && self.cyclic == b.cyclic
    }
}
impl<'a> Eq for Line<'a> {}
impl Builder {
    /// Diffs the pristine contents of graph `a` (flattened, with
    /// conflict markers) against the working-copy contents `b`,
    /// recording the resulting deletions and insertions.
    pub(crate) fn diff<T: TxnT, P: ChangeStore>(
        &mut self,
        changes: &P,
        txn: &T,
        channel: &Channel<T>,
        algorithm: Algorithm,
        path: String,
        inode: Position<Option<ChangeId>>,
        a: &mut Graph,
        b: &[u8],
    ) -> Result<(), anyhow::Error> {
        self.rec.largest_file = self.rec.largest_file.max(b.len() as u64);
        // Flatten the graph into `d.contents_a`, with conflict
        // markers and per-vertex positions.
        let mut d = vertex_buffer::Diff::new(inode, path.clone(), a);
        output_graph(changes, txn, channel, &mut d, a, &mut self.redundant)?;
        // If either side is not valid UTF-8, treat the file as binary:
        // replace the whole contents in one atom.
        if std::str::from_utf8(&d.contents_a).is_err() || std::str::from_utf8(&b).is_err() {
            if d.contents_a != b {
                self.diff_binary(changes, txn, channel, path, inode, a, &b);
                return Ok(());
            }
        }
        let lines_a: Vec<Line> = d
            .lines()
            .map(|l| {
                // Byte offset of this line inside `d.contents_a`.
                let old_bytes = l.as_ptr() as usize - d.contents_a.as_ptr() as usize;
                // Does this line fall inside a cyclic conflict range?
                let cyclic = if let Err(n) = d
                    .cyclic_conflict_bytes
                    .binary_search(&(old_bytes, std::usize::MAX))
                {
                    n > 0 && {
                        let (a, b) = d.cyclic_conflict_bytes[n - 1];
                        a <= old_bytes && old_bytes < b
                    }
                } else {
                    false
                };
                // A line without a trailing '\n' just before an end
                // marker compares equal with or without one (see
                // `Line::eq`).
                let before_end_marker = if l.last() != Some(&b'\n') {
                    let next_index =
                        l.as_ptr() as usize + l.len() - d.contents_a.as_ptr() as usize + 1;
                    d.marker.get(&next_index) == Some(&vertex_buffer::ConflictMarker::End)
                } else {
                    false
                };
                Line {
                    l,
                    cyclic,
                    before_end_marker,
                    last: l.as_ptr() as usize + l.len() - d.contents_a.as_ptr() as usize
                        >= d.contents_a.len(),
                }
            })
            .collect();
        let lines_b: Vec<Line> = split::LineSplit::from(&b[..])
            .map(|l| {
                let next_index = l.as_ptr() as usize + l.len() - b.as_ptr() as usize;
                Line {
                    l,
                    cyclic: false,
                    before_end_marker: false,
                    last: next_index >= b.len(),
                }
            })
            .collect();
        debug!("pos = {:?}", d.pos_a);
        debug!("{:?} {:?}", lines_a, lines_b);
        // Line-based diff, then translate each hunk into deletions
        // and/or insertions.
        let dd = diff::diff(&lines_a, &lines_b, algorithm);
        let mut conflict_contexts = replace::ConflictContexts::new();
        for r in 0..dd.len() {
            if dd[r].old_len > 0 {
                self.delete(
                    txn,
                    channel,
                    &d,
                    &dd,
                    &mut conflict_contexts,
                    &lines_a,
                    &lines_b,
                    r,
                )?
            }
            if dd[r].new_len > 0 {
                self.replace(&d, &mut conflict_contexts, &lines_a, &lines_b, &dd, r)?
            }
        }
        debug!("Diff ended");
        Ok(())
    }
fn diff_binary<T: TxnT, C: ChangeStore>(
&mut self,
changes: &C,
txn: &T,
channel: &Channel<T>,
path: String,
inode: Position<Option<ChangeId>>,
ret: &crate::alive::Graph,
b: &[u8],
) {
self.rec.has_binary_files = true;
use crate::change::{Atom, EdgeMap, Local, NewEdge, NewVertex, Record};
let pos = self.rec.contents.len();
self.rec.contents.extend_from_slice(&b[..]);
let pos_end = self.rec.contents.len();
self.rec.contents.push(0);
let mut edges = Vec::new();
let mut deleted = Vec::new();
for v in ret.lines.iter() {
debug!("v.vertex = {:?}, inode = {:?}", v.vertex, inode);
if Some(v.vertex.change) == inode.change && v.vertex.end == inode.pos {
continue;
}
for e in txn.iter_adjacent(channel, v.vertex, EdgeFlags::PARENT, EdgeFlags::all()) {
if e.flag.contains(EdgeFlags::PSEUDO) {
continue;
}
if e.flag.contains(EdgeFlags::FOLDER) {
if log_enabled!(log::Level::Debug) {
let f = std::fs::File::create("debug_diff_binary").unwrap();
ret.debug(changes, txn, channel, false, true, f).unwrap();
}
panic!("e.flag.contains(EdgeFlags::FOLDER)");
}
if e.flag.contains(EdgeFlags::PARENT) {
if e.flag.contains(EdgeFlags::DELETED) {
deleted.push(NewEdge {
previous: e.flag - EdgeFlags::PARENT,
flag: e.flag - EdgeFlags::PARENT,
from: e.dest.to_option(),
to: v.vertex.to_option(),
introduced_by: Some(e.introduced_by),
})
} else {
let previous = e.flag - EdgeFlags::PARENT;
edges.push(NewEdge {
previous,
flag: previous | EdgeFlags::DELETED,
from: e.dest.to_option(),
to: v.vertex.to_option(),
introduced_by: Some(e.introduced_by),
})
}
}
}
}
// Kill all of `ret`, add `b` instead.
if !deleted.is_empty() {
self.rec.actions.push(Record::Edit {
local: Local {
line: 0,
path: path.clone(),
},
change: Atom::EdgeMap(EdgeMap {
edges: deleted,
inode,
}),
})
}
self.rec.actions.push(Record::Replacement {
local: Local {
line: 0,
path: path.clone(),
},
change: Atom::EdgeMap(EdgeMap { edges, inode }),
replacement: Atom::NewVertex(NewVertex {
up_context: vec![inode],
down_context: Vec::new(),
flag: EdgeFlags::empty(),
start: ChangePosition(pos as u64),
end: ChangePosition(pos_end as u64),
inode,
}),
})
}
}
// org id TWRPjsr8ioUr6BtLpJMREEOcKhvaKMY++GVJXKbukTk=
/// Byte offset of line `old` relative to the start of the rendered file
/// (i.e. relative to the first line's data pointer).
fn bytes_pos(chunks: &[Line], old: usize) -> usize {
    debug!("bytes pos {:?} {:?}", old, chunks[old]);
    let base = chunks[0].l.as_ptr() as usize;
    chunks[old].l.as_ptr() as usize - base
}
// org id wEqdtyTU2z0vm6N/SgMMCD8XmQsVkHcThIW/C7zL2xA=
/// Length in bytes of the `len` lines starting at line `old`.
///
/// When the range ends exactly at the last line, the length is measured to
/// the end of that line's bytes instead of to a (nonexistent) next line.
fn bytes_len(chunks: &[Line], old: usize, len: usize) -> usize {
    match chunks.get(old + len) {
        // A line follows the range: measure pointer-to-pointer.
        Some(next) => next.l.as_ptr() as usize - chunks[old].l.as_ptr() as usize,
        // Range ends at the last line: measure to its final byte.
        None if old + len > 0 => {
            let tail = &chunks[old + len - 1];
            tail.l.as_ptr() as usize + tail.l.len() - chunks[old].l.as_ptr() as usize
        }
        // old == len == 0 on an empty slice (panics, as before).
        None => chunks[old + len].l.as_ptr() as usize - chunks[old].l.as_ptr() as usize,
    }
}
// org id 4bIFPrgi6R3X166/tO8dsuyrx3B/kp3AB7fmGV3VcYU=
use super::Line;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
/// Algorithm used to compute the diff.
pub enum Algorithm {
    /// The classic Myers diff algorithm.
    Myers,
    /// Patience diff, which aligns unique common lines first.
    Patience,
}
impl Default for Algorithm {
    /// Myers is the default diff algorithm.
    fn default() -> Self {
        Algorithm::Myers
    }
}
/// Run the selected diff algorithm over the two line vectors, returning the
/// collected hunks. The `diffs::Replace` wrapper merges adjacent
/// delete+insert callbacks into single replacements.
pub(super) fn diff(lines_a: &[Line], lines_b: &[Line], algorithm: Algorithm) -> D {
    let capacity = lines_a.len() + lines_b.len();
    let mut dd = diffs::Replace::new(D(Vec::with_capacity(capacity)));
    let outcome = match algorithm {
        Algorithm::Patience => {
            diffs::patience::diff(&mut dd, lines_a, 0, lines_a.len(), lines_b, 0, lines_b.len())
        }
        Algorithm::Myers => {
            diffs::myers::diff(&mut dd, lines_a, 0, lines_a.len(), lines_b, 0, lines_b.len())
        }
    };
    // Our `diffs::Diff` impl never fails, so this cannot panic.
    outcome.unwrap();
    dd.into_inner()
}
// org id Dr0sBW4RAvp+0weo7WZIApJbiIk0NZ4yAOng6sR0sN0=
#[derive(Debug)]
/// A diff as a flat list of hunks. `is_deleted` binary-searches this vector
/// by the `old` field, so hunks are kept sorted by position in the old file
/// (the order the `diffs` callbacks produce them).
pub struct D(pub Vec<Replacement>);
impl D {
    /// Number of hunks in the diff.
    pub fn len(&self) -> usize {
        self.0.len()
    }
    /// True when the diff contains no hunks (companion to `len`,
    /// per clippy's `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
impl std::ops::Index<usize> for D {
    type Output = Replacement;
    /// Hunk access delegates straight to the underlying vector.
    fn index(&self, i: usize) -> &Replacement {
        &self.0[i]
    }
}
impl std::ops::IndexMut<usize> for D {
    /// Mutable hunk access delegates straight to the underlying vector.
    fn index_mut(&mut self, i: usize) -> &mut Replacement {
        &mut self.0[i]
    }
}
#[derive(Debug)]
/// One diff hunk: `old_len` lines at `old` (in the old file) are replaced by
/// `new_len` lines at `new` (in the new file). A pure deletion has
/// `new_len == 0`; a pure insertion has `old_len == 0`.
pub struct Replacement {
    /// First affected line in the old file.
    pub old: usize,
    /// Number of old lines removed.
    pub old_len: usize,
    /// First affected line in the new file.
    pub new: usize,
    /// Number of new lines inserted.
    pub new_len: usize,
    /// Always false when pushed by the `diffs::Diff` callbacks below;
    /// presumably set later by the conflict-handling code — confirm.
    pub is_cyclic: bool,
}
impl diffs::Diff for D {
    type Error = ();
    /// Record a pure deletion of `old_len` lines starting at `old`.
    fn delete(&mut self, old: usize, old_len: usize, new: usize) -> std::result::Result<(), ()> {
        debug!("Diff::delete {:?} {:?} {:?}", old, old_len, new);
        let hunk = Replacement {
            old,
            old_len,
            new,
            new_len: 0,
            is_cyclic: false,
        };
        self.0.push(hunk);
        Ok(())
    }
    /// Record a pure insertion of `new_len` lines at `new`.
    fn insert(&mut self, old: usize, new: usize, new_len: usize) -> std::result::Result<(), ()> {
        debug!("Diff::insert {:?} {:?} {:?}", old, new, new_len);
        let hunk = Replacement {
            old,
            old_len: 0,
            new,
            new_len,
            is_cyclic: false,
        };
        self.0.push(hunk);
        Ok(())
    }
    /// Record `old_len` old lines replaced by `new_len` new lines.
    fn replace(
        &mut self,
        old: usize,
        old_len: usize,
        new: usize,
        new_len: usize,
    ) -> std::result::Result<(), ()> {
        debug!(
            "Diff::replace {:?} {:?} {:?} {:?}",
            old, old_len, new, new_len
        );
        let hunk = Replacement {
            old,
            old_len,
            new,
            new_len,
            is_cyclic: false,
        };
        self.0.push(hunk);
        Ok(())
    }
}
// org id MikwmAUmXz25O9jC0dE+uyJkpmnvLFuoHqPY1nMRX64=
/// Index of the line that starts exactly at byte offset `pos_bytes`
/// (relative to the first line). Panics if no line starts there.
fn line_index(lines_a: &[Line], pos_bytes: usize) -> usize {
    lines_a
        .binary_search_by(|line| {
            let offset = line.l.as_ptr() as usize - lines_a[0].l.as_ptr() as usize;
            offset.cmp(&pos_bytes)
        })
        .unwrap()
}
// org id KNeDba2yJOySMLCf/W9mFftnonk8nlXpmLLPYVdGm74=
/// Result of `D::is_deleted`: how a line at some byte position is deleted,
/// and where the following line starts.
pub struct Deleted {
    /// True if the deleting hunk also inserts replacement lines.
    pub replaced: bool,
    /// Byte position just past the deleted line.
    pub next: usize,
}
impl D {
    /// If the line of `lines_a` starting at byte `pos` is deleted by one of
    /// the hunks, returns whether it is replaced and the byte position of
    /// the next line; returns `None` if the line survives the diff.
    pub(super) fn is_deleted(&self, lines_a: &[Line], pos: usize) -> Option<Deleted> {
        let line = line_index(lines_a, pos);
        // Hunks are sorted by `old`, so binary-search for the line index.
        match self.0.binary_search_by(|repl| repl.old.cmp(&line)) {
            // Exact hit: a hunk starts at this very line.
            Ok(i) if self.0[i].old_len > 0 => Some(Deleted {
                replaced: self.0[i].new_len > 0,
                next: pos + lines_a[line].l.len(),
            }),
            // Before the first hunk: nothing deletes this line.
            Err(i) if i == 0 => None,
            // Between hunks: deleted iff the previous hunk's span covers it.
            Err(i) if line < self.0[i - 1].old + self.0[i - 1].old_len => Some(Deleted {
                replaced: self.0[i - 1].new_len > 0,
                next: pos + lines_a[line].l.len(),
            }),
            _ => None,
        }
    }
}
// org id VnAE9Yu4i4zOovZPrKRYRDk1jGBB+em9HeaWxWkG3/g=
use super::diff::*;
use super::replace::ConflictContexts;
use super::vertex_buffer::{ConflictMarker, ConflictType, Diff};
use super::{bytes_len, bytes_pos, Line};
use crate::change;
use crate::change::{Atom, EdgeMap, Local, NewVertex, Record};
use crate::pristine::*;
use crate::record::Builder;
impl Builder {
    /// Record the deletion described by hunk `r` of diff `d`.
    ///
    /// Emits the edge edits for the deleted lines, then — if the deletion
    /// reorders the sides of an order conflict — a resolution vertex.
    pub(super) fn delete<T: TxnT>(
        &mut self,
        txn: &T,
        channel: &Channel<T>,
        diff: &Diff,
        d: &super::diff::D,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        r: usize,
    ) -> Result<(), anyhow::Error> {
        debug!("delete {:?}", r);
        self.delete_lines(txn, channel, diff, d, lines_a, r);
        let replaced = d[r].new_len > 0;
        self.order_conflict_sides(
            diff,
            d,
            conflict_contexts,
            lines_a,
            lines_b,
            d[r].old,
            d[r].old_len,
            d[r].new,
            replaced,
        );
        Ok(())
    }
}
// org id lPDV4YIiVBiingRq95QDZ6IYEG1JyKt+hIFtgUaxi1s=
/// Edge edits produced while recording a deletion.
struct Deletion {
    /// Alive edges to mark as deleted.
    edges: Vec<crate::change::NewEdge<Option<ChangeId>>>,
    /// Edges that were already deleted in the pristine (re-recorded by
    /// `delete_parents`, e.g. around vertex splits).
    edges_del: Vec<crate::change::NewEdge<Option<ChangeId>>>,
    /// Edges to un-delete when this deletion solves a zombie conflict.
    resurrect: Vec<crate::change::NewEdge<Option<ChangeId>>>,
}
impl Builder {
fn delete_lines<T: TxnT>(
&mut self,
txn: &T,
channel: &Channel<T>,
diff: &Diff,
d: &super::diff::D,
lines_a: &[Line],
r: usize,
) {
let deletion = delete_lines(txn, channel, diff, d, lines_a, r);
if !deletion.edges.is_empty() {
self.rec.actions.push(Record::Edit {
change: Atom::EdgeMap(EdgeMap {
edges: deletion.edges,
inode: diff.inode,
}),
local: Local {
// old: d[r].old,
line: d[r].new,
path: diff.path.clone(),
},
})
}
if !deletion.edges_del.is_empty() {
self.rec.actions.push(Record::Edit {
change: Atom::EdgeMap(EdgeMap {
edges: deletion.edges_del,
inode: diff.inode,
}),
local: Local {
// old: d[r].old,
line: d[r].new,
path: diff.path.clone(),
},
})
}
if !deletion.resurrect.is_empty() {
self.rec.actions.push(Record::ResurrectZombies {
change: Atom::EdgeMap(EdgeMap {
edges: deletion.resurrect,
inode: diff.inode,
}),
local: Local {
// old: d[r].old,
line: d[r].new,
path: diff.path.clone(),
},
})
}
}
}
// org id 3IBxNOy8oJr1ZUaJ80WRO/3lPXiYz8ywu43VF3bSJpc=
/// Compute the edge edits (deletions, re-deletions, resurrections) for the
/// deletion described by hunk `r`.
///
/// Walks the graph vertices covering the deleted byte range. Deleting the
/// beginning of a zombie conflict solves it; the remaining vertices of that
/// conflict are then resurrected instead of deleted.
fn delete_lines<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    diff: &Diff,
    d: &super::diff::D,
    lines_a: &[Line],
    r: usize,
) -> Deletion {
    let old = d[r].old;
    let len = d[r].old_len;
    let mut deletion = Deletion {
        edges: Vec::new(),
        edges_del: Vec::new(),
        resurrect: Vec::new(),
    };
    // Byte range of the deleted lines in the rendered file.
    let mut pos = bytes_pos(lines_a, old);
    let end_pos = pos + bytes_len(lines_a, old, len);
    let first_vertex = diff.last_vertex_containing(pos);
    // org id M4iVAW1I+Ie9+tzQ1i+i5rOAwbHzfevLSwkJvGWLcL8=
    // End of the zombie conflict solved by this deletion (0 = none).
    let mut solved_conflict_end = 0;
    let mut i = first_vertex;
    while pos < end_pos {
        match diff.marker.get(&diff.pos_a[i].pos) {
            Some(ConflictMarker::Begin) => {
                // Deleting a zombie conflict's opening marker solves it:
                // remember how far it extends so survivors can be
                // resurrected below.
                match diff.conflict_ends[diff.pos_a[i].conflict].conflict_type {
                    ConflictType::Zombie => {
                        solved_conflict_end = solved_conflict_end
                            .max(diff.conflict_ends[diff.pos_a[i].conflict].end_pos)
                    }
                    _ => {}
                }
            }
            // Ordinary content vertex: record deletion edges from its parents.
            None => delete_parents(
                txn,
                channel,
                diff.pos_a[i].vertex,
                diff.vertex(i, pos, end_pos),
                &mut deletion,
            ),
            // Other conflict markers are not deletable content.
            _ => {}
        }
        i += 1;
        if i < diff.pos_a.len() {
            pos = diff.pos_a[i].pos
        } else {
            break;
        }
    }
    if solved_conflict_end > 0 && i < diff.pos_a.len() {
        resurrect_zombies(
            txn,
            channel,
            diff,
            d,
            lines_a,
            r,
            i,
            end_pos,
            solved_conflict_end,
            &mut deletion,
        )
    }
    deletion
}
// org id b67uTPKfiYo9TYQBrOrAZD3cUgPMNdMRnuVQ4/wGPYE=
/// Record the deletion of `del_key` (a sub-range of the graph vertex
/// `graph_key`) by re-emitting each of `graph_key`'s parent edges with the
/// `DELETED` flag set.
fn delete_parents<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    graph_key: Vertex<ChangeId>,
    del_key: Vertex<ChangeId>,
    deletion: &mut Deletion,
) {
    // Whether some parent edge was introduced by `del_key`'s own change
    // (only meaningful when `del_key` is a proper suffix of `graph_key`).
    let mut has_original_edge = false;
    for v in txn
        .iter_adjacent(&channel, graph_key, EdgeFlags::PARENT, EdgeFlags::all())
        .filter(|e| e.flag.contains(EdgeFlags::PARENT) && !e.flag.contains(EdgeFlags::PSEUDO))
    {
        let previous = v.flag - EdgeFlags::PARENT;
        if !previous.contains(EdgeFlags::BLOCK) && graph_key.start != del_key.start {
            // non-block edges don't propagate inside blocks, so we can't delete them.
            continue;
        }
        let edge = change::NewEdge {
            previous,
            flag: previous | EdgeFlags::DELETED,
            from: if graph_key.start == del_key.start {
                // Deleting from the vertex's start: keep the edge's source.
                v.dest.to_option()
            } else {
                // Deleting a suffix: the new edge starts at the split point.
                has_original_edge |= del_key.change == v.introduced_by;
                del_key.start_pos().to_option()
            },
            to: del_key.to_option(),
            introduced_by: Some(v.introduced_by),
        };
        if v.flag.contains(EdgeFlags::DELETED) {
            deletion.edges_del.push(edge)
        } else {
            deletion.edges.push(edge)
        }
    }
    // A split suffix with no parent edge introduced by its own change still
    // needs an explicit deleted edge from the split point.
    if graph_key.start != del_key.start && !has_original_edge {
        deletion.edges_del.push(change::NewEdge {
            previous: EdgeFlags::BLOCK,
            flag: EdgeFlags::DELETED | EdgeFlags::BLOCK,
            from: del_key.start_pos().to_option(),
            to: del_key.to_option(),
            introduced_by: Some(del_key.change),
        })
    }
}
// org id jdtaVKceDlq3ry0DaBgl4s1Mx9unkP4iGWVMtpAH1dQ=
/// True when the deleted byte range reorders the sides of an order conflict:
/// it must contain at least one "next side" marker and no marker of any
/// other kind.
fn is_conflict_reordering(diff: &Diff, old_bytes: usize, len_bytes: usize) -> bool {
    debug!("conflict reordering {:?} {:?}", old_bytes, len_bytes);
    trace!("markers: {:#?}", diff.marker);
    let mut saw_next = false;
    for byte in old_bytes..old_bytes + len_bytes {
        match diff.marker.get(&byte) {
            Some(&ConflictMarker::Next) => saw_next = true,
            // Any other marker disqualifies the range immediately.
            Some(_) => return false,
            None => {}
        }
    }
    debug!("is_conflict_reordering: {:?}", saw_next);
    saw_next
}
impl Builder {
    /// When deleting lines `old..old+len` crosses a "next side" conflict
    /// marker, the user has reordered the sides of an order conflict.
    /// Record an empty vertex between the sides to pin the chosen order.
    fn order_conflict_sides(
        &mut self,
        diff: &Diff,
        dd: &D,
        conflict_contexts: &mut ConflictContexts,
        lines_a: &[Line],
        lines_b: &[Line],
        old: usize,
        len: usize,
        new: usize,
        is_replaced: bool,
    ) {
        let old_bytes = bytes_pos(lines_a, old);
        let len_bytes = bytes_len(lines_a, old, len);
        if !is_conflict_reordering(diff, old_bytes, len_bytes) {
            return;
        }
        // org id YAADPxanxOxEOX6q2iJepCkCQrV6SceHbd/fqSzKzrw=
        let up_context = super::replace::get_up_context(diff, conflict_contexts, lines_a, old);
        // org id VMVYEtYydP6W9ybPW6P7dEEOd5etlX++6XN3ikoRr4A=
        // Reserve a zero-length span in the contents for the new vertex.
        // NOTE(review): the 0 bytes pushed around `pos` appear to act as
        // separators in the contents format — confirm.
        self.rec.contents.push(0);
        let pos = ChangePosition(self.rec.contents.len() as u64);
        self.rec.contents.push(0);
        // org id LrbVIzoeEbriE6CQeSz53sEJEg3zerWT+Qhj7yrpUNQ=
        let down_context = if is_replaced {
            // The replacement lines will attach below this vertex later;
            // remember its position for them.
            conflict_contexts.reorderings.insert(old, pos);
            Vec::new()
        } else {
            super::replace::get_down_context(
                diff,
                conflict_contexts,
                dd,
                lines_a,
                lines_b,
                old,
                len,
                0,
                0,
                self.rec.contents.len(),
            )
        };
        // org id 4lWzAIoFc4bfLIp3eU6ZJT3mGYrbewhP+ZwP3t4HxVI=
        debug!("Conflict reordering {:?} {:?}", up_context, down_context);
        self.rec.actions.push(Record::SolveOrderConflict {
            change: Atom::NewVertex(NewVertex {
                up_context,
                down_context,
                flag: EdgeFlags::empty(),
                start: pos,
                end: pos,
                inode: diff.inode,
            }),
            local: Local {
                // old: old,
                line: new,
                path: diff.path.clone(),
            },
        });
    }
}
// org id gp0ql/wh1QgKCmbwEfDafpVFKaECHxS7WudiACH1hCQ=
/// After a deletion solved a zombie conflict extending to
/// `solved_conflict_end`, walk the rest of the conflict (from vertex `i`,
/// byte `end_pos`) and un-delete every vertex that the remaining hunks do
/// not themselves delete.
fn resurrect_zombies<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    diff: &Diff,
    d: &super::diff::D,
    lines_a: &[Line],
    mut r: usize,
    mut i: usize,
    end_pos: usize,
    solved_conflict_end: usize,
    deletion: &mut Deletion,
) {
    // org id oqcc+7DlI1Ck9ZACzf+xD+jtGyN/bekjVL30jxGhw/k=
    let mut pos = end_pos;
    // Step back if `i` already points past `pos`.
    if diff.pos_a[i].pos > pos {
        i -= 1;
    }
    // org id Mm/Ry13zKcpDNDiLz+nKKueg1nU+NUCOkqa8u7GpI0k=
    while pos < solved_conflict_end {
        // org id ar3vtE/7LtA1cZ0gaui3HDHLqUAEJLMb9F7ZqseKFoU=
        // Skip pure insertions; stop at the next hunk deleting lines inside
        // the conflict (or run off the end of the diff).
        r += 1;
        while r < d.len() && d[r].old_len == 0 && bytes_pos(lines_a, d[r].old) < solved_conflict_end
        {
            r += 1
        }
        // Resurrect up to that hunk, or up to the end of the conflict.
        let next_pos = if r >= d.len() {
            solved_conflict_end
        } else {
            bytes_pos(lines_a, d[r].old).min(solved_conflict_end)
        };
        // org id jmcOZQ5XmFYGoRUnfVHWAA1oawNU4RePnjyAOjPSgzw=
        // Un-delete every non-root vertex between `pos` and `next_pos`.
        while i < diff.pos_a.len() {
            if diff.pos_a[i].pos > next_pos {
                break;
            }
            if diff.pos_a[i].vertex.is_root()
                || (i + 1 < diff.pos_a.len() && diff.pos_a[i + 1].pos <= pos)
            {
                i += 1;
                continue;
            }
            resurrect_zombie(
                txn,
                channel,
                diff.pos_a[i].vertex,
                diff.vertex(i, pos, next_pos),
                deletion,
            );
            i += 1
        }
        // org id BR6tLhX5u8/ePYba+R0cyvC0eEwvd6SgW3P0pvBFmIc=
        if r >= d.len() {
            break;
        } else {
            // Continue after the byte range hunk `r` deletes.
            pos = bytes_pos(lines_a, d[r].old) + bytes_len(lines_a, d[r].old, d[r].old_len)
        }
    }
}
// org id
// org id a6l+av327wOUrblVGZi9ccUrs0G07arCInmTgOveaNs=
/// Un-delete `target` (a sub-range of vertex `v`): for each deleted parent
/// edge of `v`, record the same edge with the `DELETED` flag removed.
fn resurrect_zombie<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    v: Vertex<ChangeId>,
    target: Vertex<ChangeId>,
    deletion: &mut Deletion,
) {
    for e in txn.iter_adjacent(
        &channel,
        v,
        EdgeFlags::PARENT,
        EdgeFlags::PARENT | EdgeFlags::DELETED | EdgeFlags::BLOCK,
    ) {
        if e.flag.contains(EdgeFlags::PSEUDO) || !e.flag.contains(EdgeFlags::PARENT) {
            continue;
        }
        let previous = e.flag - EdgeFlags::PARENT;
        let newedge = change::NewEdge {
            previous,
            // The same edge, minus its deletion.
            flag: previous - EdgeFlags::DELETED,
            from: if target.start_pos() == v.start_pos() {
                e.dest.to_option()
            } else {
                // `target` is a split suffix: the edge starts at the split.
                target.start_pos().to_option()
            },
            to: target.to_option(),
            introduced_by: Some(e.introduced_by),
        };
        deletion.resurrect.push(newedge)
    }
}
// org id WWSnsySeK8oZK76E3A0xmGSdUJO8+xV/QhQCkBQOgGc=
//! A change store is a trait for change storage facilities. Even though
//! changes are normally stored on disk, there are situations (such as
//! an embedded Pijul) where one might want changes in-memory, in a
//! database, or something else.
use crate::change::{Change, ChangeHeader};
use crate::pristine::{ChangeId, Hash, InodeMetadata, Position, Vertex};
use std::collections::BTreeSet;
#[cfg(feature = "ondisk-repos")]
/// If this crate is compiled with the `ondisk-repos` feature (the
/// default), this module stores changes on the file system, under
/// `.pijul/changes`.
pub mod filesystem;
/// A change store entirely in memory.
pub mod memory;
/// A trait for storing changes and reading from them.
pub trait ChangeStore {
    /// True if the change `hash` carries non-empty contents.
    fn has_contents(&self, hash: Hash, change_id: Option<ChangeId>) -> bool;
    /// Read the bytes of vertex `key` into `buf`, returning the number of
    /// bytes read. `hash` translates internal change ids to hashes.
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error>;
    /// Header (metadata) of change `h`.
    fn get_header(&self, h: &Hash) -> Result<ChangeHeader, anyhow::Error> {
        Ok(self.get_change(h)?.hashed.header)
    }
    /// Like `get_contents`, for a vertex addressed by hash.
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error>;
    /// Direct dependencies of change `hash`.
    fn get_dependencies(&self, hash: &Hash) -> Result<BTreeSet<Hash>, anyhow::Error> {
        Ok(self.get_change(hash)?.hashed.dependencies)
    }
    /// Extra known "context" changes of `hash`, used to recover from
    /// deleted contexts.
    fn get_extra_known(&self, hash: &Hash) -> Result<BTreeSet<Hash>, anyhow::Error> {
        Ok(self.get_change(hash)?.hashed.extra_known)
    }
    /// True if change `hash0` knows about `hash1`.
    fn knows(&self, hash0: &Hash, hash1: &Hash) -> Result<bool, anyhow::Error> {
        Ok(self.get_change(hash0)?.knows(hash1))
    }
    /// True if `change` contains an edge `from -> to` with flags `flags`.
    fn has_edge(
        &self,
        change: Hash,
        from: Position<Option<Hash>>,
        to: Position<Option<Hash>>,
        flags: crate::pristine::EdgeFlags,
    ) -> Result<bool, anyhow::Error> {
        let change_ = self.get_change(&change)?;
        Ok(change_.has_edge(change, from, to, flags))
    }
    /// Hashes of the changes within `change` that delete the byte at `pos`.
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, anyhow::Error>;
    /// Store change `p`, returning its hash.
    fn save_change(&self, p: &Change) -> Result<Hash, anyhow::Error>;
    /// Delete change `h`; returns whether it was present.
    fn del_change(&self, h: &Hash) -> Result<bool, anyhow::Error>;
    /// Load the full change `h`.
    fn get_change(&self, h: &Hash) -> Result<Change, anyhow::Error>;
    /// Decode a file-name vertex: the first two bytes are the inode
    /// metadata, the rest is the UTF-8 basename.
    fn get_file_name<'a, F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        vertex: Vertex<ChangeId>,
        buf: &'a mut Vec<u8>,
    ) -> Result<(InodeMetadata, &'a str), anyhow::Error> {
        buf.clear();
        self.get_contents(hash, vertex, buf)?;
        assert!(buf.len() >= 2);
        let (a, b) = buf.as_slice().split_at(2);
        Ok((InodeMetadata::from_basename(a), std::str::from_utf8(b)?))
    }
}
impl crate::change::Atom<Option<Hash>> {
    /// Hashes of the changes whose edges, within this atom, delete the byte
    /// at `pos`. A `NewVertex` atom deletes nothing.
    pub(crate) fn deletes_pos(&self, pos: Position<Option<Hash>>) -> Vec<Hash> {
        let mut deleters = Vec::new();
        if let crate::change::Atom::EdgeMap(ref em) = self {
            for edge in em.edges.iter() {
                // The edge deletes `pos` iff its target vertex covers it.
                let covers = edge.to.change == pos.change
                    && edge.to.start <= pos.pos
                    && pos.pos < edge.to.end;
                if !covers {
                    continue;
                }
                if let Some(c) = edge.introduced_by {
                    deleters.push(c)
                }
            }
        }
        deleters
    }
}
// org id IrkyldFdSUdlWvkOaCjruxYFb6enw2iUYulbsAuXais=
use super::*;
use crate::change::Change;
use crate::pristine::{ChangeId, Hash, Vertex};
use crate::Error;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
#[derive(Clone)]
/// A change store in memory, i.e. basically a hash table.
pub struct Memory {
    // Hash -> change map, shared between clones and guarded for concurrent
    // readers/writers.
    changes: Arc<RwLock<HashMap<Hash, Change>>>,
}
impl Memory {
    /// Create an empty in-memory change store.
    pub fn new() -> Self {
        let table = HashMap::new();
        Memory {
            changes: Arc::new(RwLock::new(table)),
        }
    }
}
impl ChangeStore for Memory {
    /// NOTE(review): panics if `hash` is unknown — callers must only ask
    /// about changes already stored here.
    fn has_contents(&self, hash: Hash, _: Option<ChangeId>) -> bool {
        let changes = self.changes.read().unwrap();
        let p = changes.get(&hash).unwrap();
        !p.contents.is_empty()
    }
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error> {
        // Guard before any span arithmetic: the previous version computed
        // `key.end.0 - key.start.0` (for a now-removed redundant resize)
        // before checking `end <= start`, underflowing when `end < start`.
        buf.clear();
        if key.end <= key.start {
            return Ok(0);
        }
        let changes = self.changes.read().unwrap();
        let p = changes.get(&hash(key.change).unwrap()).unwrap();
        let start = key.start.0 as usize;
        let end = key.end.0 as usize;
        buf.extend_from_slice(&p.contents[start..end]);
        Ok(end - start)
    }
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error> {
        if let Some(change) = key.change {
            // Same underflow fix as in `get_contents` above.
            buf.clear();
            if key.end <= key.start {
                return Ok(0);
            }
            let changes = self.changes.read().unwrap();
            let p = changes.get(&change).unwrap();
            let start = key.start.0 as usize;
            let end = key.end.0 as usize;
            buf.extend_from_slice(&p.contents[start..end]);
            Ok(end - start)
        } else {
            // Root vertex: no contents.
            Ok(0)
        }
    }
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, anyhow::Error> {
        let changes = self.changes.read().unwrap();
        let change = changes.get(&hash(change).unwrap()).unwrap();
        let mut v = Vec::new();
        // Collect the deleters from every atom of every record.
        for c in change.changes.iter() {
            for c in c.iter() {
                v.extend(c.deletes_pos(pos).into_iter())
            }
        }
        Ok(v)
    }
    fn save_change(&self, p: &Change) -> Result<Hash, anyhow::Error> {
        let mut w = self.changes.write().unwrap();
        let hash = p.hash()?;
        w.insert(hash, p.clone());
        Ok(hash)
    }
    fn del_change(&self, h: &Hash) -> Result<bool, anyhow::Error> {
        let mut w = self.changes.write().unwrap();
        Ok(w.remove(h).is_some())
    }
    fn get_change(&self, h: &Hash) -> Result<Change, anyhow::Error> {
        let w = self.changes.read().unwrap();
        if let Some(p) = w.get(h) {
            Ok(p.clone())
        } else {
            use crate::pristine::Base32;
            Err((Error::ChangeNotFound {
                hash: h.to_base32(),
            })
            .into())
        }
    }
}
// org id kW/IHvqMv1tGUnZ3hsw93aiFlYKijskZMV1/i8mZ+bc=
use super::*;
use crate::change::{Change, ChangeFile};
use crate::pristine::{Base32, ChangeId, Hash, Vertex};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, MutexGuard};
const CHANGE_CACHE_SIZE: usize = 100;
/// A file system change store.
#[derive(Clone)]
pub struct FileSystem(Arc<FileSystem_>);
/// Shared state behind cloned `FileSystem` handles.
struct FileSystem_ {
    /// LRU cache of open change files, keyed by internal change id.
    change_cache: Mutex<lru_cache::LruCache<ChangeId, Arc<Mutex<ChangeFile<'static>>>>>,
    /// Directory holding the change files (`.pijul/changes`).
    changes_dir: PathBuf,
}
/// Append the relative path of `hash`'s change file to `changes_dir`: a
/// two-character base32 prefix directory, then the remaining characters as
/// the file name, with a `.change` extension.
pub fn push_filename(changes_dir: &mut PathBuf, hash: &Hash) {
    let encoded = hash.to_base32();
    let (prefix, rest) = encoded.split_at(2);
    changes_dir.push(prefix);
    changes_dir.push(rest);
    changes_dir.set_extension("change");
}
/// Inverse of `push_filename`: drop the change file name and its two-letter
/// prefix directory from `changes_dir`.
pub fn pop_filename(changes_dir: &mut PathBuf) {
    for _ in 0..2 {
        changes_dir.pop();
    }
}
impl FileSystem {
    /// On-disk path of the change file for `hash`.
    pub fn filename(&self, hash: &Hash) -> PathBuf {
        let mut path = self.0.changes_dir.clone();
        push_filename(&mut path, hash);
        path
    }
    /// True if the change file for `hash` exists on disk.
    pub fn has_change(&self, hash: &Hash) -> bool {
        std::fs::metadata(&self.filename(hash)).is_ok()
    }
    /// Construct a `FileSystem`, starting from the root of the
    /// repository (i.e. the parent of the `.pijul` directory).
    pub fn from_root<P: AsRef<Path>>(root: P) -> Self {
        let dot_pijul = root.as_ref().join(crate::DOT_DIR);
        let changes_dir = dot_pijul.join("changes");
        Self::from_changes(changes_dir)
    }
    /// Construct a `FileSystem` directly from the changes directory,
    /// creating that directory if needed.
    pub fn from_changes(changes_dir: PathBuf) -> Self {
        std::fs::create_dir_all(&changes_dir).unwrap();
        FileSystem(Arc::new(FileSystem_ {
            changes_dir,
            change_cache: Mutex::new(lru_cache::LruCache::new(CHANGE_CACHE_SIZE)),
        }))
    }
    /// Ensure `change` is present in the LRU cache (opening its file on
    /// disk if necessary) and return the locked cache.
    fn load<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
    ) -> Result<
        MutexGuard<lru_cache::LruCache<ChangeId, Arc<Mutex<ChangeFile<'static>>>>>,
        anyhow::Error,
    > {
        let mut cache = self.0.change_cache.lock().unwrap();
        if !cache.contains_key(&change) {
            let h = hash(change).unwrap();
            let path = self.filename(&h);
            let p = crate::change::ChangeFile::open(h, &path.to_str().unwrap())?;
            cache.insert(change, Arc::new(Mutex::new(p)));
        }
        Ok(cache)
    }
    /// Verify `buf` against `hash`, then store it.
    pub fn save_from_buf(
        &self,
        buf: &[u8],
        hash: &Hash,
        change_id: Option<ChangeId>,
    ) -> Result<(), anyhow::Error> {
        Change::check_from_buffer(buf, hash)?;
        self.save_from_buf_unchecked(buf, hash, change_id)
    }
    /// Store `buf` as the change file for `hash` without verifying it:
    /// write to a temp file, atomically persist it under the final name,
    /// and drop any stale cache entry for `change_id`.
    pub fn save_from_buf_unchecked(
        &self,
        buf: &[u8],
        hash: &Hash,
        change_id: Option<ChangeId>,
    ) -> Result<(), anyhow::Error> {
        let mut f = tempfile::NamedTempFile::new_in(&self.0.changes_dir)?;
        let file_name = self.filename(hash);
        use std::io::Write;
        f.write_all(buf)?;
        debug!("file_name = {:?}", file_name);
        std::fs::create_dir_all(file_name.parent().unwrap())?;
        f.persist(file_name)?;
        if let Some(ref change_id) = change_id {
            let mut cache = self.0.change_cache.lock().unwrap();
            cache.remove(change_id);
        }
        Ok(())
    }
}
impl ChangeStore for FileSystem {
    /// Try the cache first; fall back to opening the change file directly.
    fn has_contents(&self, hash: Hash, change_id: Option<ChangeId>) -> bool {
        if let Some(ref change_id) = change_id {
            let mut cache = self.0.change_cache.lock().unwrap();
            let mut poisoned = false;
            if let Some(c) = cache.get_mut(change_id) {
                if let Ok(l) = c.lock() {
                    return l.has_contents();
                } else {
                    // A thread panicked while holding this entry's lock.
                    poisoned = true
                }
            }
            if poisoned {
                // Evict the poisoned entry so it gets reopened cleanly.
                cache.remove(change_id);
            }
        }
        let path = self.filename(&hash);
        if let Ok(p) = crate::change::ChangeFile::open(hash, &path.to_str().unwrap()) {
            p.has_contents()
        } else {
            false
        }
    }
    /// Read only the header, without loading the whole change.
    fn get_header(&self, h: &Hash) -> Result<ChangeHeader, anyhow::Error> {
        let path = self.filename(h);
        let p = crate::change::ChangeFile::open(*h, &path.to_str().unwrap())?;
        Ok(p.hashed().header.clone())
    }
    fn get_contents<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        key: Vertex<ChangeId>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error> {
        // `saturating_sub` fixes an underflow in the previous version, which
        // computed `key.end.0 - key.start.0` before checking `end <= start`.
        // The resize is load-bearing: `read_contents` fills the sized buffer.
        buf.resize(key.end.0.saturating_sub(key.start.0) as usize, 0);
        if key.end <= key.start || key.is_root() {
            return Ok(0);
        }
        let mut cache = self.load(hash, key.change)?;
        let p = cache.get_mut(&key.change).unwrap();
        let mut p = p.lock().unwrap();
        let n = p.read_contents(key.start.0, buf)?;
        Ok(n)
    }
    fn get_contents_ext(
        &self,
        key: Vertex<Option<Hash>>,
        buf: &mut Vec<u8>,
    ) -> Result<usize, anyhow::Error> {
        if let Some(change) = key.change {
            // Same underflow fix as `get_contents` above.
            buf.resize(key.end.0.saturating_sub(key.start.0) as usize, 0);
            if key.end <= key.start {
                return Ok(0);
            }
            let path = self.filename(&change);
            let mut p = crate::change::ChangeFile::open(change, &path.to_str().unwrap())?;
            let n = p.read_contents(key.start.0, buf)?;
            Ok(n)
        } else {
            // Root vertex: no contents.
            Ok(0)
        }
    }
    fn change_deletes_position<F: Fn(ChangeId) -> Option<Hash>>(
        &self,
        hash: F,
        change: ChangeId,
        pos: Position<Option<Hash>>,
    ) -> Result<Vec<Hash>, anyhow::Error> {
        let mut cache = self.load(hash, change)?;
        let p = cache.get_mut(&change).unwrap();
        let p = p.lock().unwrap();
        let mut v = Vec::new();
        // Collect the deleters from every atom of every record.
        for c in p.hashed().changes.iter() {
            for c in c.iter() {
                v.extend(c.deletes_pos(pos).into_iter())
            }
        }
        Ok(v)
    }
    /// Serialize to a temp file, then atomically persist under the name
    /// derived from the resulting hash.
    fn save_change(&self, p: &Change) -> Result<Hash, anyhow::Error> {
        let mut f = tempfile::NamedTempFile::new_in(&self.0.changes_dir)?;
        let hash = {
            let w = std::io::BufWriter::new(&mut f);
            p.serialize(w)?
        };
        let file_name = self.filename(&hash);
        std::fs::create_dir_all(file_name.parent().unwrap())?;
        debug!("file_name = {:?}", file_name);
        f.persist(file_name)?;
        Ok(hash)
    }
    fn del_change(&self, hash: &Hash) -> Result<bool, anyhow::Error> {
        let file_name = self.filename(hash);
        debug!("file_name = {:?}", file_name);
        let result = std::fs::remove_file(&file_name).is_ok();
        std::fs::remove_dir(file_name.parent().unwrap()).unwrap_or(()); // fails silently if there are still changes with the same 2-letter prefix.
        Ok(result)
    }
    fn get_change(&self, h: &Hash) -> Result<Change, anyhow::Error> {
        let file_name = self.filename(h);
        let file_name = file_name.to_str().unwrap();
        debug!("file_name = {:?}", file_name);
        Ok(Change::deserialize(&file_name, Some(h))?)
    }
}
// org id 4+w2sTG9zBScwyjzB8qqUXj0dCYZg3xGYPeKsGKua2k=
use super::Error;
use crate::pristine::*;
use chrono::{DateTime, Utc};
use std::collections::{BTreeSet, HashSet};
#[cfg(feature = "zstd")]
pub mod v3;
#[cfg(feature = "zstd")]
use std::io::Write;
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
/// An atomic graph operation: either the insertion of a new vertex (fresh
/// content from this change) or a batch of edge additions/replacements.
pub enum Atom<Change> {
    /// Introduce a new vertex covering a span of this change's contents.
    NewVertex(NewVertex<Change>),
    /// Add or replace a set of edges.
    EdgeMap(EdgeMap<Change>),
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
/// Insertion of a new vertex into the graph.
pub struct NewVertex<Change> {
    /// Positions the new vertex is attached below (its parents).
    pub up_context: Vec<Position<Change>>,
    /// Positions the new vertex is attached above (its children).
    pub down_context: Vec<Position<Change>>,
    /// Flags of the edges connecting the contexts to the new vertex.
    pub flag: EdgeFlags,
    /// Start of the vertex's byte range in the change's contents.
    pub start: ChangePosition,
    /// End (exclusive) of the vertex's byte range in the change's contents.
    pub end: ChangePosition,
    /// Position of the file (inode) this vertex belongs to.
    pub inode: Position<Change>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
/// A batch of edge additions/replacements, all within the same file.
pub struct EdgeMap<Change> {
    /// The edges to add or replace.
    pub edges: Vec<NewEdge<Change>>,
    /// Position of the file (inode) these edges belong to.
    pub inode: Position<Change>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
/// Addition or replacement of a single edge.
pub struct NewEdge<Change> {
    /// Flags of the edge being replaced.
    pub previous: EdgeFlags,
    /// Flags of the edge after this change is applied.
    pub flag: EdgeFlags,
    /// The origin of the edge, i.e. if a vertex split is needed, the
    /// left-hand side of the split will include `from.pos`. This
    /// means that splitting vertex `[a, b[` to apply this edge
    /// modification will yield vertices `[a, from.pos+1[` and
    /// `[from.pos+1, b[`.
    pub from: Position<Change>,
    /// The destination of the edge, i.e. the last byte affected by
    /// this change.
    pub to: Vertex<Change>,
    /// The change that introduced the previous version of the edge
    /// (the one being replaced by this `NewEdge`).
    pub introduced_by: Change,
}
/// The header of a change contains all the metadata about a change
/// (but not the actual contents of a change).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct ChangeHeader {
    /// One-line change message.
    pub message: String,
    /// Optional longer description.
    pub description: Option<String>,
    /// When the change was recorded.
    pub timestamp: DateTime<Utc>,
    /// Authors of the change.
    pub authors: Vec<Author>,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
/// An author entry in a change header. Only `name` is mandatory; the other
/// fields default to `None` when absent from the serialized form.
pub struct Author {
    pub name: String,
    /// The author's full name, if known.
    #[serde(default = "default_none")]
    pub full_name: Option<String>,
    /// The author's email address, if known.
    #[serde(default = "default_none")]
    pub email: Option<String>,
}
/// Serde default for optional `Author` fields missing from the input.
fn default_none() -> Option<String> {
    None
}
impl std::fmt::Display for Author {
    /// A bare quoted name when that is all we have; otherwise a TOML-like
    /// inline table with the optional fields that are present.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        if self.full_name.is_none() && self.email.is_none() {
            return write!(fmt, "{:?}", self.name);
        }
        write!(fmt, "{{ name = {:?}", self.name)?;
        if let Some(ref full_name) = self.full_name {
            write!(fmt, ", full_name = {:?}", full_name)?;
        }
        if let Some(ref email) = self.email {
            write!(fmt, ", email = {:?}", email)?;
        }
        write!(fmt, " }}")
    }
}
impl Default for ChangeHeader {
    /// An empty header stamped with the current time.
    fn default() -> Self {
        let timestamp = Utc::now();
        ChangeHeader {
            message: String::new(),
            description: None,
            authors: Vec::new(),
            timestamp,
        }
    }
}
// org id sKanuyTdfBvORra8t03TId+vXgcf/4TLpoyTdbWlSCw=
#[derive(Clone, Debug, PartialEq)]
/// A change as manipulated locally: the hashed payload plus unhashed extras.
pub struct LocalChange<Local> {
    /// Offsets into the serialized change file (see `Offsets`).
    pub offsets: Offsets,
    /// The hashed part: header, dependencies, records, contents hash.
    pub hashed: Hashed<Local>,
    /// unhashed TOML extra contents.
    pub unhashed: Option<toml::Value>,
    /// The contents.
    pub contents: Vec<u8>,
}
impl std::ops::Deref for LocalChange<Local> {
    type Target = Hashed<Local>;
    /// Let the hashed fields be accessed directly on the change.
    fn deref(&self) -> &Self::Target {
        &self.hashed
    }
}
impl std::ops::DerefMut for LocalChange<Local> {
    /// Mutable counterpart of the `Deref` impl above.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.hashed
    }
}
pub const VERSION: u64 = 4;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// The part of a change covered by its hash: everything except the unhashed
/// TOML section and the raw contents (which are covered indirectly through
/// `contents_hash`).
pub struct Hashed<Local> {
    /// Version, again (in order to hash it).
    pub version: u64,
    /// Header part, containing the metadata.
    pub header: ChangeHeader,
    /// The dependencies of this change.
    pub dependencies: BTreeSet<Hash>,
    /// Extra known "context" changes to recover from deleted contexts.
    pub extra_known: BTreeSet<Hash>,
    /// Some space to write application-specific data.
    pub metadata: Vec<u8>,
    /// The changes, without the contents.
    pub changes: Vec<Record<Option<Hash>, Local>>,
    /// Hash of the contents, so that the "contents" field is
    /// verifiable independently from the actions in this change.
    pub contents_hash: Hash,
}
pub type Change = LocalChange<Local>;
// org id R0jj9ZeU5rpjhihFuVm1UU/KVUBo4gUbxbm7vzj590w=
/// Compute the dependencies of a set of records: the minimized set of
/// changes they reference directly, plus the extra "zombie" dependencies
/// needed to reconstruct deleted contexts.
pub fn dependencies<'a, Local: 'a, I: Iterator<Item = &'a Record<Option<Hash>, Local>>, T: TxnT>(
    txn: &T,
    channel: &ChannelRef<T>,
    changes: I,
) -> (BTreeSet<Hash>, BTreeSet<Hash>) {
    let mut deps = BTreeSet::new();
    let mut zombie_deps = BTreeSet::new();
    let channel = channel.r.borrow();
    for ch in changes.flat_map(|r| r.iter()) {
        match *ch {
            // org id 4EY6ZudPX57Bxi4lFXkpWSZ/EqG1/XKTImiLkq398H0=
            Atom::NewVertex(NewVertex {
                ref up_context,
                ref down_context,
                ..
            }) => {
                // A new vertex depends on every change its contexts touch.
                for up in up_context.iter().chain(down_context.iter()) {
                    match up.change {
                        None | Some(Hash::None) => {}
                        Some(ref dep) => {
                            deps.insert(dep.clone());
                        }
                    }
                }
            }
            // org id cAErZBr9shDetd4zM5eUya3MMYx6M4pfKFzj37lCbLc=
            Atom::EdgeMap(EdgeMap { ref edges, .. }) => {
                for e in edges {
                    assert!(!e.flag.contains(EdgeFlags::PARENT));
                    assert!(e.introduced_by != Some(Hash::None));
                    // Depend on everything the edge names explicitly...
                    if let Some(p) = e.from.change {
                        deps.insert(p);
                    }
                    if let Some(p) = e.introduced_by {
                        deps.insert(p);
                    }
                    if let Some(p) = e.to.change {
                        deps.insert(p);
                    }
                    // ...plus the changes adjacent to its endpoints, which
                    // may be needed to resurrect deleted contexts.
                    add_zombie_deps_from(txn, &channel, &mut zombie_deps, e.from);
                    add_zombie_deps_to(txn, &channel, &mut zombie_deps, e.to)
                }
            } // org id yDRN5gnQ/9AMHzgC/x7VAZGzAcrIlrEb6CBiUAKRm8U=
        }
    }
    // Drop transitively-implied dependencies, and never list a change both
    // as a regular and as a zombie dependency.
    let deps = minimize_deps(txn, &channel, &deps);
    for d in deps.iter() {
        zombie_deps.remove(d);
    }
    (deps, zombie_deps)
}
/// Minimized dependencies covering every change on the channel, with no
/// zombie dependencies (the second component is always empty).
pub fn full_dependencies<T: TxnT>(
    txn: &T,
    channel: &ChannelRef<T>,
) -> (BTreeSet<Hash>, BTreeSet<Hash>) {
    let channel = channel.borrow();
    let mut deps = BTreeSet::new();
    for (_, (change_id, _)) in txn.changeid_log(&channel, 0) {
        deps.insert(txn.get_external(change_id).unwrap());
    }
    (minimize_deps(txn, &channel, &deps), BTreeSet::new())
}
// org id RhbpWA5HDGKpilANJFPz8t1Ips9PJW+pBFkwmXNl21E=
/// Add to `zombie_deps` the changes adjacent to the vertex containing the
/// edge source `e_from` (they may be needed to rebuild a deleted context).
fn add_zombie_deps_from<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    zombie_deps: &mut BTreeSet<Hash>,
    e_from: Position<Option<Hash>>,
) {
    // Root positions carry no concrete change: nothing to add.
    let change = match e_from.change {
        Some(p) => txn.get_internal(p).unwrap(),
        None => return,
    };
    let e_from = Position {
        change,
        pos: e_from.pos,
    };
    let from = txn.find_block_end(channel, e_from).unwrap();
    for edge in txn.iter_adjacent(channel, from, EdgeFlags::empty(), EdgeFlags::all()) {
        // Record both the change that introduced the edge and the change at
        // its other end, skipping the dummy `Hash::None`.
        if let Some(ext) = txn.get_external(edge.introduced_by) {
            match ext {
                Hash::None => {}
                _ => {
                    zombie_deps.insert(ext.to_owned());
                }
            }
        }
        if let Some(ext) = txn.get_external(edge.dest.change) {
            match ext {
                Hash::None => {}
                _ => {
                    zombie_deps.insert(ext.to_owned());
                }
            }
        }
    }
}
/// Inserts into `zombie_deps` the external hashes of all changes that
/// introduced, or are the destination of, edges adjacent to any block
/// covering the vertex `e_to`. Walks consecutive blocks until `e_to.end`
/// is reached. Does nothing when `e_to` has no change.
fn add_zombie_deps_to<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    zombie_deps: &mut BTreeSet<Hash>,
    e_to: Vertex<Option<Hash>>,
) {
    // Resolve the external hash to an internal id, or bail out.
    let change = match e_to.change {
        Some(p) => txn.get_internal(p).unwrap(),
        None => return,
    };
    let start_pos = Position {
        change,
        pos: e_to.start,
    };
    let mut to = txn.find_block(channel, start_pos).unwrap();
    loop {
        for edge in txn.iter_adjacent(channel, to, EdgeFlags::empty(), EdgeFlags::all()) {
            // Both the introducing change and the destination's change
            // are zombie dependencies.
            for ext in txn
                .get_external(edge.introduced_by)
                .into_iter()
                .chain(txn.get_external(edge.dest.change))
            {
                if !matches!(ext, Hash::None) {
                    zombie_deps.insert(ext);
                }
            }
        }
        if to.end >= e_to.end {
            return;
        }
        // The vertex spans several blocks: move on to the next one.
        to = txn.find_block(channel, to.end_pos()).unwrap();
    }
}
// org id 11du9AL8hevTwTE48smNaTtU/x2O3eh4LYm6FPayeTw=
/// Returns the minimal subset of `deps`: every element of `deps` that is
/// transitively implied by another element is removed.
///
/// Operates on internal change ids. Each initial dependency is pushed as
/// a "root" (`true`); the dependency graph is then walked, and any root
/// reached transitively from another entry is dropped from the result.
/// Only changes whose timestamp in the channel is at least `min_time`
/// (the smallest timestamp among the roots) are explored, since older
/// changes cannot lead back to a root.
fn minimize_deps<T: TxnT>(txn: &T, channel: &Channel<T>, deps: &BTreeSet<Hash>) -> BTreeSet<Hash> {
    let mut min_time = std::u64::MAX;
    // Work stack of (internal id, is_root).
    let mut internal_deps = Vec::new();
    // Candidate result set; roots are removed as they're proven redundant.
    let mut internal_deps_ = HashSet::new();
    for h in deps.iter() {
        // The root pseudo-change is never a real dependency.
        if let Hash::None = h {
            continue;
        }
        debug!("h = {:?}", h);
        let id = txn.get_internal(*h).unwrap();
        debug!("id = {:?}", id);
        let time = txn.get_changeset(&channel.changes, id, None).unwrap();
        debug!("time = {:?}", time);
        min_time = min_time.min(time);
        internal_deps.push((id, true));
        internal_deps_.insert(id);
    }
    // NOTE(review): at this point every element's second component is
    // `true`, so this sort looks like a no-op — confirm intent.
    internal_deps.sort_by(|a, b| a.1.cmp(&b.1));
    let mut visited = HashSet::new();
    while let Some((id, is_root)) = internal_deps.pop() {
        if is_root {
            // A root already removed from the result set needs no exploration.
            if !internal_deps_.contains(&id) {
                continue;
            }
        } else {
            // Reached transitively: this dependency is implied, drop it.
            if internal_deps_.remove(&id) {
                debug!("removing dep {:?}", id);
            }
        }
        if !visited.insert(id) {
            continue;
        }
        // Walk the direct dependencies of `id` (dep table is keyed by id).
        let mut cursor = txn.iter_dep(id);
        while let Some((id0, dep)) = txn.cursor_dep_next(&mut cursor.cursor) {
            trace!("minimize loop = {:?} {:?}", id0, dep);
            if id0 < id {
                continue;
            } else if id0 > id {
                break;
            }
            let time = if let Some(time) = txn.get_changeset(&channel.changes, dep, None) {
                time
            } else {
                panic!(
                    "not found in channel {:?}: id = {:?} depends on {:?}",
                    channel.name(),
                    id,
                    dep
                );
            };
            trace!("time = {:?}", time);
            // Only changes at least as recent as the oldest root can
            // possibly lead back to a root.
            if time >= min_time {
                internal_deps.push((dep, false))
            }
        }
    }
    internal_deps_
        .into_iter()
        .map(|id| txn.get_external(id).unwrap())
        .collect()
}
// org id +Tmh0+vnf5AKRwS2S9/W5J9KUc2umjSBiLGrBg2vqXk=
impl Change {
    /// Returns `true` if this change knows about `hash`: either as a
    /// direct dependency or via `extra_known`.
    pub fn knows(&self, hash: &Hash) -> bool {
        self.extra_known.contains(hash) || self.dependencies.contains(&hash)
    }
    /// Does this change (whose own hash is `hash`) introduce the edge
    /// `from -> to` with exactly `flags`?
    ///
    /// For a `NewVertex` the edge may be internal to the new vertex, or
    /// connect it to its up/down context; for an `EdgeMap` the edge list
    /// is searched directly.
    pub fn has_edge(
        &self,
        hash: Hash,
        from: Position<Option<Hash>>,
        to: Position<Option<Hash>>,
        flags: crate::pristine::EdgeFlags,
    ) -> bool {
        debug!("has_edge: {:?} {:?} {:?} {:?}", hash, from, to, flags);
        for change_ in self.changes.iter() {
            for change_ in change_.iter() {
                match change_ {
                    Atom::NewVertex(n) => {
                        debug!("has_edge: {:?}", n);
                        if from.change == Some(hash) && from.pos >= n.start && from.pos <= n.end {
                            if to.change == Some(hash) {
                                // internal
                                // `flags` must equal BLOCK, ignoring the FOLDER bit.
                                return flags | EdgeFlags::FOLDER
                                    == EdgeFlags::BLOCK | EdgeFlags::FOLDER;
                            } else {
                                // down context
                                if n.down_context.iter().any(|d| *d == to) {
                                    return flags.is_empty();
                                } else {
                                    return false;
                                }
                            }
                        } else if to.change == Some(hash) && to.pos >= n.start && to.pos <= n.end {
                            // up context
                            if n.up_context.iter().any(|d| *d == from) {
                                return flags | EdgeFlags::FOLDER
                                    == EdgeFlags::BLOCK | EdgeFlags::FOLDER;
                            } else {
                                return false;
                            }
                        }
                    }
                    Atom::EdgeMap(e) => {
                        debug!("has_edge: {:?}", e);
                        if e.edges
                            .iter()
                            .any(|e| e.from == from && e.to.start_pos() == to && e.flag == flags)
                        {
                            return true;
                        }
                    }
                }
            }
        }
        debug!("not found");
        false
    }
}
// org id t0CzNo3X9SFATJMMzCCew7o4s6KP6ZTxxBvpbZKrKe0=
impl Atom<Option<Hash>> {
    /// The position of the inode (file) this atom applies to.
    pub fn inode(&self) -> Position<Option<Hash>> {
        match self {
            Atom::NewVertex(ref n) => n.inode,
            Atom::EdgeMap(ref n) => n.inode,
        }
    }
    /// Returns the atom undoing `self`, where `hash` is the hash of the
    /// change containing `self`. A `None` change in any position refers
    /// to "this change" and is resolved to `hash` in the result.
    pub fn inverse(&self, hash: &Hash) -> Self {
        match *self {
            // Undoing a new vertex: emit DELETED edges from each
            // up-context vertex to the new vertex.
            Atom::NewVertex(NewVertex {
                ref up_context,
                flag,
                start,
                end,
                ref inode,
                ..
            }) => {
                let mut edges = Vec::new();
                for up in up_context {
                    let previous = flag | EdgeFlags::BLOCK;
                    edges.push(NewEdge {
                        previous,
                        flag: previous | EdgeFlags::DELETED,
                        from: Position {
                            change: Some(if let Some(ref h) = up.change {
                                *h
                            } else {
                                *hash
                            }),
                            pos: up.pos,
                        },
                        to: Vertex {
                            change: Some(*hash),
                            start,
                            end,
                        },
                        introduced_by: Some(*hash),
                    })
                }
                Atom::EdgeMap(EdgeMap {
                    edges,
                    inode: Position {
                        change: Some(if let Some(p) = inode.change { p } else { *hash }),
                        pos: inode.pos,
                    },
                })
            }
            // Undoing an edge map: swap each edge's old (`previous`) and
            // new (`flag`) flags, re-introduced by the inverse change.
            Atom::EdgeMap(EdgeMap {
                ref edges,
                ref inode,
            }) => Atom::EdgeMap(EdgeMap {
                inode: Position {
                    change: Some(if let Some(p) = inode.change { p } else { *hash }),
                    pos: inode.pos,
                },
                edges: edges
                    .iter()
                    .map(|e| {
                        let mut e = e.clone();
                        e.introduced_by = Some(*hash);
                        std::mem::swap(&mut e.flag, &mut e.previous);
                        e
                    })
                    .collect(),
            }),
        }
    }
}
impl EdgeMap<Option<Hash>> {
    /// Merges the edges of `e` into `self`. Both maps must refer to the
    /// same inode.
    fn concat(mut self, e: EdgeMap<Option<Hash>>) -> Self {
        assert_eq!(self.inode, e.inode);
        self.edges.extend(e.edges);
        self
    }
}
impl<L: Clone> Record<Option<Hash>, L> {
    /// Returns the record undoing `self`, where `hash` is the hash of
    /// the change `self` belongs to.
    ///
    /// Each variant maps to its opposite (`FileDel` <-> `FileUndel`,
    /// `SolveOrderConflict` <-> `UnsolveOrderConflict`, move swaps
    /// del/add, ...). A `FileAdd` becomes a `FileDel` whose deletion
    /// atom concatenates the inverted name and inode atoms, and
    /// `ResurrectZombies` is undone by a plain `Edit`.
    pub fn inverse(&self, hash: &Hash) -> Self {
        match self {
            Record::FileMove { del, add, path } => Record::FileMove {
                // Undoing a move swaps the deletion and the addition.
                del: add.inverse(hash),
                add: del.inverse(hash),
                path: path.clone(),
            },
            Record::FileDel {
                del,
                contents,
                path,
            } => Record::FileUndel {
                undel: del.inverse(hash),
                contents: contents.as_ref().map(|c| c.inverse(hash)),
                path: path.clone(),
            },
            Record::FileUndel {
                undel,
                contents,
                path,
            } => Record::FileDel {
                del: undel.inverse(hash),
                contents: contents.as_ref().map(|c| c.inverse(hash)),
                path: path.clone(),
            },
            Record::FileAdd {
                add_name,
                add_inode,
                contents,
                path,
            } => {
                // Inverting both atoms yields two edge maps on the same
                // inode; merge them into a single deletion atom.
                let del = match (add_name.inverse(hash), add_inode.inverse(hash)) {
                    (Atom::EdgeMap(e0), Atom::EdgeMap(e1)) => Atom::EdgeMap(e0.concat(e1)),
                    _ => unreachable!(),
                };
                Record::FileDel {
                    del,
                    contents: contents.as_ref().map(|c| c.inverse(hash)),
                    path: path.clone(),
                }
            }
            Record::SolveNameConflict { name, path } => Record::UnsolveNameConflict {
                name: name.inverse(hash),
                path: path.clone(),
            },
            Record::UnsolveNameConflict { name, path } => Record::SolveNameConflict {
                name: name.inverse(hash),
                path: path.clone(),
            },
            Record::Edit { change, local } => Record::Edit {
                change: change.inverse(hash),
                local: local.clone(),
            },
            Record::Replacement {
                change,
                replacement,
                local,
            } => Record::Replacement {
                // The replacement text becomes the edit to undo, and vice versa.
                change: replacement.inverse(hash),
                replacement: change.inverse(hash),
                local: local.clone(),
            },
            Record::SolveOrderConflict { change, local } => Record::UnsolveOrderConflict {
                change: change.inverse(hash),
                local: local.clone(),
            },
            Record::UnsolveOrderConflict { change, local } => Record::SolveOrderConflict {
                change: change.inverse(hash),
                local: local.clone(),
            },
            Record::ResurrectZombies { change, local } => Record::Edit {
                change: change.inverse(hash),
                local: local.clone(),
            },
        }
    }
}
impl Change {
    /// Builds the change undoing `self`, where `hash` is the hash of
    /// `self`. The inverse depends exactly on the change it undoes,
    /// keeps the same `extra_known` set, and carries no contents (its
    /// contents hash is that of the empty byte string).
    pub fn inverse(&self, hash: &Hash, header: ChangeHeader, metadata: Vec<u8>) -> Self {
        let dependencies: BTreeSet<Hash> = std::iter::once(*hash).collect();
        let inverted_changes: Vec<_> = self.changes.iter().map(|r| r.inverse(hash)).collect();
        Change {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header,
                dependencies,
                extra_known: self.extra_known.clone(),
                metadata,
                changes: inverted_changes,
                // Empty contents: hash of nothing.
                contents_hash: Hasher::default().finish(),
            },
            contents: Vec::new(),
            unhashed: None,
        }
    }
}
/// Location of a line-level record inside the working copy.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Local {
    // Path of the file the record applies to.
    pub path: String,
    // Line number within that file.
    pub line: usize,
}
/// A single operation of a change, made of one or more [`Atom`]s,
/// generic over the hash type (external `Hash` or internal `ChangeId`)
/// and the local-position metadata.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Record<Hash, Local> {
    /// A file was renamed/moved: `del` removes the old name, `add`
    /// introduces the new one.
    FileMove {
        del: Atom<Hash>,
        add: Atom<Hash>,
        path: String,
    },
    /// A file was deleted, optionally together with its contents.
    FileDel {
        del: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// A file deletion was undone.
    FileUndel {
        undel: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// A file was added: its name, its inode, and optionally contents.
    FileAdd {
        add_name: Atom<Hash>,
        add_inode: Atom<Hash>,
        contents: Option<Atom<Hash>>,
        path: String,
    },
    /// A name conflict was resolved.
    SolveNameConflict {
        name: Atom<Hash>,
        path: String,
    },
    /// A name conflict resolution was undone.
    UnsolveNameConflict {
        name: Atom<Hash>,
        path: String,
    },
    /// An in-file edit at a specific line.
    Edit {
        change: Atom<Hash>,
        local: Local,
    },
    /// An edit replacing `change` with `replacement`.
    Replacement {
        change: Atom<Hash>,
        replacement: Atom<Hash>,
        local: Local,
    },
    /// An order conflict was resolved.
    SolveOrderConflict {
        change: Atom<Hash>,
        local: Local,
    },
    /// An order conflict resolution was undone.
    UnsolveOrderConflict {
        change: Atom<Hash>,
        local: Local,
    },
    /// Dead (zombie) vertices were brought back to life.
    ResurrectZombies {
        change: Atom<Hash>,
        local: Local,
    },
}
// org id 4YhjVHcMd4xMYTDXLWLTMaiyiDHg8H/Hj/lt3t4f2eU=
/// Iterator over the atoms of a record, in forward order.
#[doc(hidden)]
pub struct RecordIter<R, C> {
    // The record still to be decomposed, if any.
    rec: Option<R>,
    // Atoms stashed by a previous `next` call, returned before `rec`.
    extra: Option<C>,
    extra2: Option<C>,
}
impl<Context, Local> IntoIterator for Record<Context, Local> {
    type IntoIter = RecordIter<Record<Context, Local>, Atom<Context>>;
    type Item = Atom<Context>;
    /// Consumes the record, yielding its atoms in forward order.
    fn into_iter(self) -> Self::IntoIter {
        RecordIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
}
impl<Context, Local> Record<Context, Local> {
    /// Borrowing iterator over this record's atoms, in forward order.
    pub fn iter(&self) -> RecordIter<&Record<Context, Local>, &Atom<Context>> {
        RecordIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
    /// Borrowing iterator over this record's atoms, in reverse order.
    pub fn rev_iter(&self) -> RevRecordIter<&Record<Context, Local>, &Atom<Context>> {
        RevRecordIter {
            rec: Some(self),
            extra: None,
            extra2: None,
        }
    }
}
impl<Context, Local> Iterator for RecordIter<Record<Context, Local>, Atom<Context>> {
    type Item = Atom<Context>;
    /// Yields the record's atoms in forward order: primary atom first,
    /// then any stashed secondary atoms (inode, contents, replacement).
    fn next(&mut self) -> Option<Self::Item> {
        // Atoms stashed by a previous call come out first.
        if let Some(pending) = self.extra.take().or_else(|| self.extra2.take()) {
            return Some(pending);
        }
        let rec = self.rec.take()?;
        Some(match rec {
            Record::FileMove { del, add, .. } => {
                self.extra = Some(add);
                del
            }
            Record::FileDel { del, contents, .. } => {
                self.extra = contents;
                del
            }
            Record::FileUndel {
                undel, contents, ..
            } => {
                self.extra = contents;
                undel
            }
            Record::FileAdd {
                add_name,
                add_inode,
                contents,
                ..
            } => {
                self.extra = Some(add_inode);
                self.extra2 = contents;
                add_name
            }
            Record::Replacement {
                change,
                replacement,
                ..
            } => {
                self.extra = Some(replacement);
                change
            }
            Record::SolveNameConflict { name, .. }
            | Record::UnsolveNameConflict { name, .. } => name,
            Record::Edit { change, .. }
            | Record::SolveOrderConflict { change, .. }
            | Record::UnsolveOrderConflict { change, .. }
            | Record::ResurrectZombies { change, .. } => change,
        })
    }
}
impl<'a, Context, Local> Iterator for RecordIter<&'a Record<Context, Local>, &'a Atom<Context>> {
    type Item = &'a Atom<Context>;
    /// Yields references to the record's atoms in forward order:
    /// primary atom first, then any stashed secondary atoms.
    fn next(&mut self) -> Option<Self::Item> {
        // Atoms stashed by a previous call come out first.
        if let Some(pending) = self.extra.take().or_else(|| self.extra2.take()) {
            return Some(pending);
        }
        let rec = self.rec.take()?;
        Some(match rec {
            Record::FileMove { del, add, .. } => {
                self.extra = Some(add);
                del
            }
            Record::FileDel { del, contents, .. } => {
                self.extra = contents.as_ref();
                del
            }
            Record::FileUndel {
                undel, contents, ..
            } => {
                self.extra = contents.as_ref();
                undel
            }
            Record::FileAdd {
                add_name,
                add_inode,
                contents,
                ..
            } => {
                self.extra = Some(add_inode);
                self.extra2 = contents.as_ref();
                add_name
            }
            Record::Replacement {
                change,
                replacement,
                ..
            } => {
                self.extra = Some(replacement);
                change
            }
            Record::SolveNameConflict { name, .. }
            | Record::UnsolveNameConflict { name, .. } => name,
            Record::Edit { change, .. }
            | Record::SolveOrderConflict { change, .. }
            | Record::UnsolveOrderConflict { change, .. }
            | Record::ResurrectZombies { change, .. } => change,
        })
    }
}
/// Iterator over the atoms of a record, in reverse order.
pub struct RevRecordIter<R, C> {
    // The record still to be decomposed, if any.
    rec: Option<R>,
    // Atoms stashed by a previous `next` call, returned before `rec`.
    extra: Option<C>,
    extra2: Option<C>,
}
impl<'a, Context, Local> Iterator for RevRecordIter<&'a Record<Context, Local>, &'a Atom<Context>> {
    type Item = &'a Atom<Context>;
    /// Yields the record's atoms in the reverse of the forward
    /// iterator's order: contents/secondary atoms first, primary atom
    /// last.
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(extra) = self.extra.take() {
            Some(extra)
        } else if let Some(extra) = self.extra2.take() {
            Some(extra)
        } else if let Some(rec) = self.rec.take() {
            match *rec {
                Record::FileMove {
                    ref del, ref add, ..
                } => {
                    self.extra = Some(del);
                    Some(add)
                }
                Record::FileDel {
                    ref del,
                    ref contents,
                    ..
                } => {
                    // Contents (when present) come before the deletion atom.
                    if let Some(ref c) = contents {
                        self.extra = Some(del);
                        Some(c)
                    } else {
                        Some(del)
                    }
                }
                Record::FileUndel {
                    ref undel,
                    ref contents,
                    ..
                } => {
                    if let Some(ref c) = contents {
                        self.extra = Some(undel);
                        Some(c)
                    } else {
                        Some(undel)
                    }
                }
                Record::FileAdd {
                    ref add_name,
                    ref add_inode,
                    ref contents,
                    ..
                } => {
                    // Reverse of forward order: contents, inode, then name.
                    if let Some(ref c) = contents {
                        self.extra = Some(add_inode);
                        self.extra2 = Some(add_name);
                        Some(c)
                    } else {
                        self.extra = Some(add_name);
                        Some(add_inode)
                    }
                }
                Record::SolveNameConflict { ref name, .. } => Some(&name),
                Record::UnsolveNameConflict { ref name, .. } => Some(&name),
                Record::Edit { change: ref c, .. } => Some(c),
                Record::Replacement {
                    replacement: ref r,
                    change: ref c,
                    ..
                } => {
                    // Replacement first, then the replaced change.
                    self.extra = Some(c);
                    Some(r)
                }
                Record::SolveOrderConflict { ref change, .. } => Some(change),
                Record::UnsolveOrderConflict { ref change, .. } => Some(change),
                Record::ResurrectZombies { ref change, .. } => Some(change),
            }
        } else {
            None
        }
    }
}
// org id UyRvnSNfnjujP0ygPCmcfcQcIqtp5EjD3NGO3LXlt/0=
impl Atom<Option<ChangeId>> {
    /// Translates every internal `ChangeId` in this atom into its
    /// external `Hash` via `txn.get_external`. Ids without an external
    /// hash become `None`, except `introduced_by`, which must resolve
    /// (panics otherwise).
    fn globalize<T: TxnT>(&self, txn: &T) -> Atom<Option<Hash>> {
        match self {
            Atom::NewVertex(NewVertex {
                up_context,
                down_context,
                start,
                end,
                flag,
                inode,
            }) => Atom::NewVertex(NewVertex {
                up_context: up_context
                    .iter()
                    .map(|&up| Position {
                        change: up.change.and_then(|a| txn.get_external(a)),
                        pos: up.pos,
                    })
                    .collect(),
                down_context: down_context
                    .iter()
                    .map(|&down| Position {
                        change: down.change.and_then(|a| txn.get_external(a)),
                        pos: down.pos,
                    })
                    .collect(),
                start: *start,
                end: *end,
                flag: *flag,
                inode: Position {
                    change: inode.change.and_then(|a| txn.get_external(a)),
                    pos: inode.pos,
                },
            }),
            Atom::EdgeMap(EdgeMap { edges, inode }) => Atom::EdgeMap(EdgeMap {
                edges: edges
                    .iter()
                    .map(|edge| NewEdge {
                        previous: edge.previous,
                        flag: edge.flag,
                        from: Position {
                            change: edge.from.change.and_then(|a| txn.get_external(a)),
                            pos: edge.from.pos,
                        },
                        to: Vertex {
                            change: edge.to.change.and_then(|a| txn.get_external(a)),
                            start: edge.to.start,
                            end: edge.to.end,
                        },
                        // The change that introduced an edge must be known.
                        introduced_by: edge.introduced_by.map(|a| {
                            if let Some(a) = txn.get_external(a) {
                                a
                            } else {
                                panic!("introduced by {:?}", a);
                            }
                        }),
                    })
                    .collect(),
                inode: Position {
                    change: inode.change.and_then(|a| txn.get_external(a)),
                    pos: inode.pos,
                },
            }),
        }
    }
}
impl<H> Record<H, Local> {
    /// The `Local` metadata of this record, when it is a line-level
    /// record (edits, replacements, order-conflict records).
    fn local(&self) -> Option<&Local> {
        match self {
            Record::Edit { local, .. }
            | Record::Replacement { local, .. }
            | Record::SolveOrderConflict { local, .. }
            | Record::UnsolveOrderConflict { local, .. }
            | Record::ResurrectZombies { local, .. } => Some(local),
            _ => None,
        }
    }
    /// The repository path this record applies to.
    pub fn path(&self) -> &str {
        match self {
            Record::FileMove { path, .. }
            | Record::FileDel { path, .. }
            | Record::FileUndel { path, .. }
            | Record::SolveNameConflict { path, .. }
            | Record::UnsolveNameConflict { path, .. }
            | Record::FileAdd { path, .. } => path,
            // Line-level records carry their path inside `Local`.
            _ => &self.local().unwrap().path,
        }
    }
    /// The line number of a line-level record, `None` for file-level
    /// records.
    pub fn line(&self) -> Option<usize> {
        self.local().map(|l| l.line)
    }
}
impl<Local> Record<Option<ChangeId>, Local> {
    /// Translates every internal `ChangeId` in this record's atoms into
    /// its external `Hash` (see [`Atom::globalize`]), preserving the
    /// record's variant, path and local metadata.
    pub fn globalize<T: TxnT>(self, txn: &T) -> Record<Option<Hash>, Local> {
        match self {
            Record::FileMove { del, add, path } => Record::FileMove {
                del: del.globalize(txn),
                add: add.globalize(txn),
                path,
            },
            Record::FileDel {
                del,
                contents,
                path,
            } => Record::FileDel {
                del: del.globalize(txn),
                contents: contents.as_ref().map(|del| del.globalize(txn)),
                path,
            },
            Record::FileUndel {
                undel,
                contents,
                path,
            } => Record::FileUndel {
                undel: undel.globalize(txn),
                contents: contents.as_ref().map(|del| del.globalize(txn)),
                path,
            },
            Record::SolveNameConflict { name, path } => Record::SolveNameConflict {
                name: name.globalize(txn),
                path,
            },
            Record::UnsolveNameConflict { name, path } => Record::UnsolveNameConflict {
                name: name.globalize(txn),
                path,
            },
            Record::FileAdd {
                add_inode,
                add_name,
                contents,
                path,
            } => Record::FileAdd {
                add_name: add_name.globalize(txn),
                add_inode: add_inode.globalize(txn),
                contents: contents.as_ref().map(|add| add.globalize(txn)),
                path,
            },
            Record::Edit { change, local } => Record::Edit {
                change: change.globalize(txn),
                local,
            },
            Record::Replacement {
                change,
                replacement,
                local,
            } => Record::Replacement {
                change: change.globalize(txn),
                replacement: replacement.globalize(txn),
                local,
            },
            Record::SolveOrderConflict { change, local } => Record::SolveOrderConflict {
                change: change.globalize(txn),
                local,
            },
            Record::UnsolveOrderConflict { change, local } => Record::UnsolveOrderConflict {
                change: change.globalize(txn),
                local,
            },
            Record::ResurrectZombies { change, local } => Record::ResurrectZombies {
                change: change.globalize(txn),
                local,
            },
        }
    }
}
// org id j+CGohqSoJFz2ZtLqz9M7oC9UpfOWmyP7YWCbYjb+fI=
/// A table of contents of a change, indicating where each section is,
/// to allow seeking inside a change file.
#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq)]
pub struct Offsets {
    // Serialisation format version; must match VERSION when reading.
    version: u64,
    hashed_len: u64, // length of the hashed contents
    // Byte offset of the (compressed) unhashed section in the file.
    unhashed_off: u64,
    unhashed_len: u64, // length of the unhashed contents
    // Byte offset of the (compressed) contents section in the file.
    contents_off: u64,
    // Uncompressed length of the contents section.
    contents_len: u64,
    // Total size of the serialised change file.
    total: u64,
}
impl<L> LocalChange<L> {
    /// Serialised size of `Offsets`: seven `u64` fields, 56 bytes.
    #[cfg(feature = "zstd")]
    const OFFSETS_SIZE: u64 = 56;
    /// Assembles a change from recorded records and contents, computing
    /// its dependencies against `channel` and the hash of `contents`.
    pub fn make_change<T: TxnT>(
        txn: &T,
        channel: &ChannelRef<T>,
        changes: Vec<Record<Option<Hash>, L>>,
        contents: Vec<u8>,
        header: ChangeHeader,
        metadata: Vec<u8>,
    ) -> Self {
        let (dependencies, extra_known) = dependencies(txn, channel, changes.iter());
        trace!("make_change, contents = {:?}", contents);
        let mut hasher = Hasher::default();
        hasher.update(&contents);
        let contents_hash = hasher.finish();
        debug!("make_change, contents_hash = {:?}", contents_hash);
        LocalChange {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header,
                changes,
                contents_hash,
                metadata,
                dependencies,
                extra_known,
            },
            contents,
            unhashed: None,
        }
    }
    /// An empty change with a default header and no dependencies.
    pub fn new() -> Self {
        LocalChange {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header: ChangeHeader::default(),
                changes: Vec::new(),
                // Hash of the empty byte string.
                contents_hash: Hasher::default().finish(),
                metadata: Vec::new(),
                dependencies: BTreeSet::new(),
                extra_known: BTreeSet::new(),
            },
            unhashed: None,
            contents: Vec::new(),
        }
    }
}
// Zstd compression level used when writing change files.
#[cfg(feature = "zstd")]
const LEVEL: usize = 1;
// Frame size of the seekable zstd stream, enabling random access when
// reading back a change.
#[cfg(feature = "zstd")]
const FRAME_SIZE: usize = 256;
/// Compresses `input` into `w` as a seekable zstd stream
/// (`LEVEL`/`FRAME_SIZE`).
///
/// Fix: the original ended the stream with
/// `while let Ok(n) = cstream.end_stream(...)`, which silently dropped
/// any error from `end_stream` and returned `Ok(())` on a truncated,
/// unterminated stream. Errors are now propagated with `?`, consistent
/// with the compression loop above.
#[cfg(feature = "zstd")]
fn compress<W: Write>(input: &[u8], mut w: W) -> Result<(), anyhow::Error> {
    let mut cstream = zstd_seekable::SeekableCStream::new(LEVEL, FRAME_SIZE).unwrap();
    let mut output = [0; 4096];
    let mut input_pos = 0;
    // Feed the input through the compressor, flushing each chunk.
    while input_pos < input.len() {
        let (out_pos, inp_pos) = cstream.compress(&mut output, &input[input_pos..])?;
        w.write_all(&output[..out_pos])?;
        input_pos += inp_pos;
    }
    // Drain the stream's trailing frames until the encoder reports
    // nothing left to write.
    loop {
        let n = cstream.end_stream(&mut output)?;
        if n == 0 {
            break;
        }
        w.write_all(&output[..n])?;
    }
    Ok(())
}
impl Change {
    /// Reads only the `Offsets` header of a serialised change and
    /// returns the offset at which the contents section starts — i.e.
    /// the size of the change without its contents. The reader's
    /// position is restored before returning.
    pub fn size_no_contents<R: std::io::Read + std::io::Seek>(
        r: &mut R,
    ) -> Result<u64, anyhow::Error> {
        let pos = r.seek(std::io::SeekFrom::Current(0))?;
        let mut off = [0u8; Self::OFFSETS_SIZE as usize];
        r.read_exact(&mut off)?;
        let off: Offsets = bincode::deserialize(&off)?;
        if off.version != VERSION {
            return Err(Error::VersionMismatch.into());
        }
        r.seek(std::io::SeekFrom::Start(pos))?;
        Ok(off.contents_off)
    }
    /// Serialise the change as a file named "<hash>.change" in
    /// directory `dir`, where "<hash>" is the actual hash of the
    /// change.
    ///
    /// Layout: a bincode `Offsets` header, followed by the
    /// zstd-compressed hashed section, unhashed section, and contents.
    /// Returns the hash of the (uncompressed) hashed section.
    #[cfg(feature = "zstd")]
    pub fn serialize<W: Write>(&self, mut w: W) -> Result<Hash, anyhow::Error> {
        let mut offsets = Offsets::default();
        offsets.version = VERSION;
        // Hashed part.
        let mut hashed = Vec::new();
        bincode::serialize_into(&mut hashed, &self.hashed)?;
        offsets.hashed_len = hashed.len() as u64;
        trace!("hashed = {:?}", hashed);
        // The change's hash is computed over the uncompressed hashed section.
        let mut hasher = Hasher::default();
        hasher.update(&hashed);
        let hash = hasher.finish();
        debug!("{:?}", hash);
        // Unhashed part.
        let unhashed = if let Some(ref un) = self.unhashed {
            let s = toml::ser::to_string(un).unwrap();
            s.into()
        } else {
            Vec::new()
        };
        offsets.unhashed_len = unhashed.len() as u64;
        // Compress the change.
        let mut hashed_comp = Vec::new();
        compress(&hashed, &mut hashed_comp)?;
        offsets.unhashed_off = Self::OFFSETS_SIZE + hashed_comp.len() as u64;
        let mut unhashed_comp = Vec::new();
        compress(&unhashed, &mut unhashed_comp)?;
        offsets.contents_off = offsets.unhashed_off + unhashed_comp.len() as u64;
        let mut contents_comp = Vec::new();
        compress(&self.contents, &mut contents_comp)?;
        // `contents_len` is the *uncompressed* length; `total` is the file size.
        offsets.contents_len = self.contents.len() as u64;
        offsets.total = offsets.contents_off + contents_comp.len() as u64;
        bincode::serialize_into(&mut w, &offsets)?;
        w.write_all(&hashed_comp)?;
        w.write_all(&unhashed_comp)?;
        w.write_all(&contents_comp)?;
        Ok(hash)
    }
    /// Checks that `buf` holds a well-formed serialised change whose
    /// hashed section hashes to `hash` and whose contents match the
    /// stored `contents_hash`. Does not return the deserialised change.
    #[cfg(feature = "zstd")]
    pub fn check_from_buffer(buf: &[u8], hash: &Hash) -> Result<(), anyhow::Error> {
        let offsets: Offsets = bincode::deserialize_from(&buf[..Self::OFFSETS_SIZE as usize])?;
        if offsets.version != VERSION {
            return Err(Error::VersionMismatch.into());
        }
        debug!("check_from_buffer, offsets = {:?}", offsets);
        // Decompress and re-hash the hashed section.
        let mut s = zstd_seekable::Seekable::init_buf(
            &buf[Self::OFFSETS_SIZE as usize..offsets.unhashed_off as usize],
        )?;
        let mut buf_ = Vec::new();
        buf_.resize(offsets.hashed_len as usize, 0);
        s.decompress(&mut buf_[..], 0)?;
        debug!("check_from_buffer, buf_ = {:?}", buf_);
        let mut hasher = Hasher::default();
        hasher.update(&buf_);
        let computed_hash = hasher.finish();
        debug!("{:?} {:?}", computed_hash, hash);
        if &computed_hash != hash {
            return Err((Error::ChangeHashMismatch {
                claimed: *hash,
                computed: computed_hash,
            })
            .into());
        }
        let hashed: Hashed<Local> = bincode::deserialize(&buf_)?;
        // Decompress the contents and check them against `contents_hash`.
        buf_.clear();
        buf_.resize(offsets.contents_len as usize, 0);
        let mut s = zstd_seekable::Seekable::init_buf(&buf[offsets.contents_off as usize..])?;
        // NOTE(review): this resize repeats the one two lines above;
        // harmless but redundant.
        buf_.resize(offsets.contents_len as usize, 0);
        s.decompress(&mut buf_[..], 0)?;
        let mut hasher = Hasher::default();
        debug!("contents = {:?}", buf_);
        hasher.update(&buf_);
        let computed_hash = hasher.finish();
        debug!(
            "contents hash: {:?}, computed: {:?}",
            hashed.contents_hash, computed_hash
        );
        if computed_hash != hashed.contents_hash {
            return Err((Error::ContentsHashMismatch {
                claimed: hashed.contents_hash,
                computed: computed_hash,
            })
            .into());
        }
        Ok(())
    }
    /// Deserialise a change from the file given as input `file`.
    #[cfg(feature = "zstd")]
    pub fn deserialize(file: &str, hash: Option<&Hash>) -> Result<Self, anyhow::Error> {
        use std::io::Read;
        let mut r = std::fs::File::open(file)?;
        let mut buf = vec![0u8; Self::OFFSETS_SIZE as usize];
        r.read_exact(&mut buf)?;
        let offsets: Offsets = bincode::deserialize(&buf)?;
        if offsets.version != VERSION {
            return Err(Error::VersionMismatch.into());
        }
        debug!("offsets = {:?}", offsets);
        // Hashed section: decompress, optionally verify against `hash`,
        // then deserialise.
        buf.clear();
        buf.resize((offsets.unhashed_off - Self::OFFSETS_SIZE) as usize, 0);
        r.read_exact(&mut buf)?;
        let hashed: Hashed<Local> = {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.hashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            let mut hasher = Hasher::default();
            hasher.update(&out);
            let computed_hash = hasher.finish();
            if let Some(hash) = hash {
                if &computed_hash != hash {
                    return Err((Error::ChangeHashMismatch {
                        claimed: *hash,
                        computed: computed_hash,
                    })
                    .into());
                }
            }
            bincode::deserialize_from(&out[..])?
        };
        // Unhashed section (optional TOML value).
        buf.clear();
        buf.resize((offsets.contents_off - offsets.unhashed_off) as usize, 0);
        let unhashed = if buf.is_empty() {
            None
        } else {
            r.read_exact(&mut buf)?;
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.unhashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            Some(toml::de::from_slice(&out)?)
        };
        debug!("unhashed = {:?}", unhashed);
        // Contents section; a change file may legitimately be truncated
        // here (contents not present locally), giving empty contents.
        buf.clear();
        buf.resize((offsets.total - offsets.contents_off) as usize, 0);
        let contents = if r.read_exact(&mut buf).is_ok() {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut contents = vec![0u8; offsets.contents_len as usize];
            s.decompress(&mut contents[..], 0)?;
            contents
        } else {
            Vec::new()
        };
        debug!("contents = {:?}", contents);
        Ok(LocalChange {
            offsets,
            hashed,
            unhashed,
            contents,
        })
    }
    /// Compute the hash of this change. If the `zstd` feature is
    /// enabled, it is probably more efficient to serialise the change
    /// (using the `serialize` method) at the same time, which also
    /// returns the hash.
    pub fn hash(&self) -> Result<Hash, anyhow::Error> {
        let input = bincode::serialize(&self.hashed)?;
        let mut hasher = Hasher::default();
        hasher.update(&input);
        Ok(hasher.finish())
    }
}
// org id SY0PPKJcw1SQ8DEsOkicihzeXjmhMRxFEiBeypeSfTY=
use crate::changestore::*;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::io::BufRead;
#[cfg(feature = "text-changes")]
impl LocalChange<Local> {
    // Section marker introducing the dependency list in the text format.
    const DEPS_LINE: &'static str = "# Dependencies\n";
    // Section marker introducing the record list in the text format.
    const CHANGES_LINE: &'static str = "# Changes\n";
    /// Calls `f` on every change hash referenced by this change's atoms
    /// (up/down contexts, edges, inodes), skipping `Hash::None`.
    pub fn write_all_deps<F: FnMut(Hash) -> Result<(), anyhow::Error>>(
        &self,
        mut f: F,
    ) -> Result<(), anyhow::Error> {
        for c in self.changes.iter() {
            for c in c.iter() {
                match *c {
                    Atom::NewVertex(ref n) => {
                        for change in n
                            .up_context
                            .iter()
                            .chain(n.down_context.iter())
                            .map(|c| c.change)
                            .chain(std::iter::once(n.inode.change))
                        {
                            if let Some(change) = change {
                                if let Hash::None = change {
                                    continue;
                                }
                                f(change)?
                            }
                        }
                    }
                    Atom::EdgeMap(ref e) => {
                        for edge in e.edges.iter() {
                            for change in &[
                                edge.from.change,
                                edge.to.change,
                                edge.introduced_by,
                                e.inode.change,
                            ] {
                                if let Some(change) = *change {
                                    if let Hash::None = change {
                                        continue;
                                    }
                                    f(change)?
                                }
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }
    /// Writes the textual representation of this change to `w`.
    ///
    /// If `hash` is given, first checks that the in-memory contents
    /// match `contents_hash`. `file_name` renders the location header
    /// of line-level records.
    pub fn write<W: Write, C: ChangeStore, F: FnMut(&Local, Position<Option<Hash>>) -> String>(
        &self,
        changes: &C,
        hash: Option<Hash>,
        mut file_name: F,
        write_header: bool,
        mut w: W,
    ) -> Result<(), anyhow::Error> {
        if let Some(h) = hash {
            // Check if we have the full contents
            let mut hasher = Hasher::default();
            hasher.update(&self.contents);
            let hash = hasher.finish();
            if hash != self.contents_hash {
                return Err((Error::MissingContents {
                    hash: h.to_base32(),
                })
                .into());
            }
        }
        if write_header {
            w.write_all(toml::ser::to_string_pretty(&self.header)?.as_bytes())?;
            w.write_all(b"\n")?;
        }
        // Hash numbering table used by the records below; numbering
        // starts at 2 (cf. the text format parsed by `read_`).
        let mut hashes = HashMap::new();
        let mut i = 2;
        let mut needs_newline = false;
        if !self.dependencies.is_empty() {
            w.write_all(Self::DEPS_LINE.as_bytes())?;
            needs_newline = true;
            // Direct dependencies are written as "[n] hash".
            for dep in self.dependencies.iter() {
                hashes.insert(*dep, i);
                writeln!(w, "[{}] {}", i, dep.to_base32())?;
                i += 1;
            }
        }
        // Hashes referenced by atoms but not direct dependencies are
        // written as "[n]+hash".
        self.write_all_deps(|change| {
            if let Entry::Vacant(e) = hashes.entry(change) {
                e.insert(i);
                if !needs_newline {
                    w.write_all(Self::DEPS_LINE.as_bytes())?;
                    needs_newline = true;
                }
                writeln!(w, "[{}]+{}", i, change.to_base32())?;
                i += 1;
            }
            Ok(())
        })?;
        if !self.extra_known.is_empty() {
            needs_newline = true;
            // Extra-known hashes are written as "[*] hash".
            for dep in self.extra_known.iter() {
                writeln!(w, "[*] {}", dep.to_base32())?;
                // NOTE(review): `i` is incremented here but not used for
                // "[*]" entries — confirm intent.
                i += 1;
            }
        }
        if !self.changes.is_empty() {
            if needs_newline {
                w.write_all(b"\n")?
            }
            w.write_all(Self::CHANGES_LINE.as_bytes())?;
            // Records are numbered from 1 in the text format.
            for (n, rec) in self.changes.iter().enumerate() {
                write!(w, "\n{}. ", n + 1)?;
                rec.write(changes, &mut file_name, &hashes, &self.contents, &mut w)?
            }
        }
        Ok(())
    }
}
impl Change {
    /// Parses a change from its textual representation and recomputes
    /// its dependencies against `channel`, merging in the explicit
    /// extra dependencies found in the text.
    pub fn read_and_deps<R: BufRead, T: TxnT>(
        r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
        txn: &T,
        channel: &ChannelRef<T>,
    ) -> Result<Self, anyhow::Error> {
        let (mut change, extra_dependencies) = Self::read_(r, updatables)?;
        let (mut deps, extra) = dependencies(txn, channel, change.hashed.changes.iter());
        deps.extend(extra_dependencies.into_iter());
        change.hashed.dependencies = deps;
        change.hashed.extra_known = extra;
        Ok(change)
    }
    /// Parses a change from its textual representation, without
    /// recomputing dependencies.
    pub fn read<R: BufRead>(
        r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
    ) -> Result<Self, anyhow::Error> {
        Ok(Self::read_(r, updatables)?.0)
    }
    /// Shared parser: reads the TOML header, then the "# Dependencies"
    /// and "# Changes" sections line by line. Returns the change
    /// together with the set of dependency hashes that were listed in
    /// the text but not numbered (see the `DEPS` regex below).
    fn read_<R: BufRead>(
        mut r: R,
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
    ) -> Result<(Self, HashSet<Hash>), anyhow::Error> {
        use self::text_changes::*;
        let mut section = Section::Header(String::new());
        let mut change = Change {
            offsets: Offsets::default(),
            hashed: Hashed {
                version: VERSION,
                header: ChangeHeader {
                    authors: Vec::new(),
                    message: String::new(),
                    description: None,
                    timestamp: chrono::Utc::now(),
                },
                dependencies: BTreeSet::new(),
                extra_known: BTreeSet::new(),
                metadata: Vec::new(),
                changes: Vec::new(),
                contents_hash: Hasher::default().finish(),
            },
            unhashed: None,
            contents: Vec::new(),
        };
        // Finalises the section we were in when a section marker (or
        // end of input) is reached.
        let conclude_section = |change: &mut Change,
                                section: Section,
                                contents: &mut Vec<u8>|
         -> Result<(), anyhow::Error> {
            match section {
                Section::Header(ref s) => {
                    debug!("header = {:?}", s);
                    change.header = toml::de::from_str(&s)?;
                    Ok(())
                }
                Section::Deps => Ok(()),
                Section::Changes {
                    mut changes,
                    current,
                    ..
                } => {
                    // Fix: the original read `has_newvertices(¤t)` — a
                    // mojibake corruption of `&current` — which does not
                    // compile.
                    if has_newvertices(&current) {
                        contents.push(0)
                    }
                    if let Some(c) = current {
                        debug!("next action = {:?}", c);
                        changes.push(c)
                    }
                    change.changes = changes;
                    Ok(())
                }
            }
        };
        let mut h = String::new();
        let mut contents = Vec::new();
        let mut deps = HashMap::new();
        let mut extra_dependencies = HashSet::new();
        while r.read_line(&mut h)? > 0 {
            debug!("h = {:?}", h);
            if h == Self::DEPS_LINE {
                let section = std::mem::replace(&mut section, Section::Deps);
                conclude_section(&mut change, section, &mut contents)?;
            } else if h == Self::CHANGES_LINE {
                let section = std::mem::replace(
                    &mut section,
                    Section::Changes {
                        changes: Vec::new(),
                        current: None,
                        offsets: HashMap::new(),
                    },
                );
                conclude_section(&mut change, section, &mut contents)?;
            } else {
                use regex::Regex;
                lazy_static! {
                    static ref DEPS: Regex = Regex::new(r#"\[(\d*|\*)\](\+| ) *(\S*)"#).unwrap();
                    static ref KNOWN: Regex = Regex::new(r#"(\S*)"#).unwrap();
                }
                match section {
                    Section::Header(ref mut s) => s.push_str(&h),
                    Section::Deps => {
                        if let Some(d) = DEPS.captures(&h) {
                            let hash = Hash::from_base32(d[3].as_bytes()).unwrap();
                            if let Ok(n) = d[1].parse() {
                                // "[n] hash" is a direct dependency;
                                // "[n]+hash" only numbers a hash used by
                                // the atoms (cf. `write`).
                                if &d[2] == " " {
                                    change.hashed.dependencies.insert(hash);
                                }
                                deps.insert(n, hash);
                            } else if &d[1] == "*" {
                                change.hashed.extra_known.insert(hash);
                            } else {
                                extra_dependencies.insert(hash);
                            }
                        }
                    }
                    Section::Changes {
                        ref mut current,
                        ref mut changes,
                        ref mut offsets,
                    } => {
                        if let Some(next) =
                            Record::read(updatables, current, &mut contents, &deps, offsets, &h)?
                        {
                            debug!("next action = {:?}", next);
                            changes.push(next)
                        }
                    }
                }
            }
            h.clear();
        }
        conclude_section(&mut change, section, &mut contents)?;
        change.contents = contents;
        // Recompute the contents hash from what was actually parsed.
        change.contents_hash = {
            let mut hasher = Hasher::default();
            hasher.update(&change.contents);
            hasher.finish()
        };
        Ok((change, extra_dependencies))
    }
}
#[cfg(feature = "text-changes")]
impl Record<Option<Hash>, Local> {
    /// Serialize this record as one hunk of the textual change format
    /// (the format parsed back by `read` below).
    ///
    /// * `changes` — change store, used to fetch the text of deleted
    ///   vertices (the "- " lines).
    /// * `file_name` — renders a `Local` + inode position as the
    ///   "path:line" label used in hunk headers.
    /// * `hashes` — maps each hash to the small integer that stands
    ///   for it in the text format (see `write_pos`).
    /// * `change_contents` — this change's "contents" section, from
    ///   which file names and added lines are extracted.
    fn write<
        W: std::io::Write,
        C: ChangeStore,
        F: FnMut(&Local, Position<Option<Hash>>) -> String,
    >(
        &self,
        changes: &C,
        mut file_name: F,
        hashes: &HashMap<Hash, usize>,
        change_contents: &[u8],
        mut w: W,
    ) -> Result<(), anyhow::Error> {
        use self::text_changes::*;
        match self {
            Record::FileMove { del, add, path } => match add {
                // A rename: the new name is a fresh vertex whose bytes in
                // `change_contents` are 2 bytes of permissions followed by
                // the basename.
                Atom::NewVertex(ref add) => {
                    let name = std::str::from_utf8(
                        &change_contents[add.start.0 as usize + 2..add.end.0 as usize],
                    )
                    .unwrap();
                    let perms = crate::pristine::InodeMetadata::from_basename(
                        &change_contents[add.start.0 as usize..add.start.0 as usize + 2],
                    );
                    write!(w, "Moved: {:?} {:?} {:o} ", path, name, perms.0)?;
                    write_pos(&mut w, hashes, del.inode())?;
                    write!(w, "\n")?;
                    write_atom(&mut w, hashes, &del)?;
                    // Contexts of the new name vertex, on their own line
                    // (matched by the CONTEXT regex in `read`).
                    write!(w, "up")?;
                    for c in add.up_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    write!(w, ", down")?;
                    for c in add.down_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    w.write_all(b"\n")?;
                }
                // A move expressed purely as edge remappings (no new
                // name vertex).
                Atom::EdgeMap(_) => {
                    write!(w, "Moved: {:?} ", path)?;
                    write_pos(&mut w, hashes, del.inode())?;
                    write!(w, "\n")?;
                    write_atom(&mut w, hashes, &add)?;
                    write_atom(&mut w, hashes, &del)?;
                }
            },
            Record::FileDel {
                del,
                contents,
                path,
            } => {
                write!(w, "File deletion: {:?} ", path)?;
                write_pos(&mut w, hashes, del.inode())?;
                write!(w, "\n")?;
                write_atom(&mut w, hashes, &del)?;
                if let Some(ref contents) = contents {
                    // Also show the deleted text as "- " lines.
                    write_atom(&mut w, hashes, &contents)?;
                    write!(w, "\n")?;
                    print_change_contents(&mut w, changes, contents, change_contents)?;
                } else {
                    write!(w, "\n")?;
                }
            }
            Record::FileUndel {
                undel,
                contents,
                path,
            } => {
                write!(w, "File un-deletion: {:?} ", path)?;
                write_pos(&mut w, hashes, undel.inode())?;
                write!(w, "\n")?;
                write_atom(&mut w, hashes, &undel)?;
                if let Some(ref contents) = contents {
                    write_atom(&mut w, hashes, &contents)?;
                    print_change_contents(&mut w, changes, contents, change_contents)?;
                } else {
                    write!(w, "\n")?;
                }
            }
            Record::FileAdd {
                add_name,
                contents,
                path,
                ..
            } => {
                if let Atom::NewVertex(ref n) = add_name {
                    // Name vertex layout: 2 bytes of permissions, then
                    // the basename.
                    let name = std::str::from_utf8(
                        &change_contents[n.start.0 as usize + 2..n.end.0 as usize],
                    )
                    .unwrap();
                    let perms = crate::pristine::InodeMetadata::from_basename(
                        &change_contents[n.start.0 as usize..n.start.0 as usize + 2],
                    );
                    let parent = if let Some(p) = crate::path::parent(&path) {
                        if p.is_empty() {
                            "/"
                        } else {
                            p
                        }
                    } else {
                        "/"
                    };
                    write!(
                        w,
                        "File addition: {:?} in {:?} {:o}\n up",
                        name, parent, perms.0
                    )?;
                    // A newly added name has no down context.
                    assert!(n.down_context.is_empty());
                    for c in n.up_context.iter() {
                        write!(w, " ")?;
                        write_pos(&mut w, hashes, *c)?
                    }
                    writeln!(w, ", new {}:{}", n.start.0, n.end.0)?;
                }
                if let Some(Atom::NewVertex(ref n)) = contents {
                    let c = &change_contents[n.start.0 as usize..n.end.0 as usize];
                    print_contents(&mut w, "+", c)?;
                    // "\" marks a final line without a trailing newline.
                    if !c.ends_with(b"\n") {
                        writeln!(w, "\\")?
                    }
                }
            }
            Record::Edit { change, local } => {
                write!(w, "Edit in {} ", file_name(&local, change.inode()))?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w, "")?;
                write_atom(&mut w, hashes, &change)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
            }
            Record::Replacement {
                change,
                replacement,
                local,
            } => {
                write!(w, "Replacement in {} ", file_name(&local, change.inode()))?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w, "")?;
                write_atom(&mut w, hashes, &change)?;
                write_atom(&mut w, hashes, &replacement)?;
                print_change_contents(&mut w, changes, change, change_contents)?;
                print_change_contents(&mut w, changes, replacement, change_contents)?;
            }
            Record::SolveNameConflict { name, path } => {
                write!(w, "Solving a name conflict in {:?} ", path)?;
                write_pos(&mut w, hashes, name.inode())?;
                write!(w, ": ")?;
                write_deleted_names(&mut w, changes, name)?;
                write!(w, "\n")?;
                write_atom(&mut w, hashes, &name)?;
            }
            Record::UnsolveNameConflict { name, path } => {
                write!(w, "Un-solving a name conflict in {:?} ", path)?;
                write_pos(&mut w, hashes, name.inode())?;
                write!(w, ": ")?;
                write_deleted_names(&mut w, changes, name)?;
                write!(w, "\n")?;
                write_atom(&mut w, hashes, &name)?;
            }
            Record::SolveOrderConflict { change, local } => {
                write!(
                    w,
                    "Solving an order conflict in {} ",
                    file_name(&local, change.inode())
                )?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w, "")?;
                write_atom(&mut w, hashes, &change)?;
            }
            Record::UnsolveOrderConflict { change, local } => {
                write!(
                    w,
                    "Un-solving an order conflict in {} ",
                    file_name(&local, change.inode())
                )?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w, "")?;
                write_atom(&mut w, hashes, &change)?;
            }
            // NOTE(review): this header does not match the ZOMBIE regex
            // in `read` (quoted path + " at line N" suffix vs.
            // "path:line pos") — TODO confirm zombie hunks round-trip.
            Record::ResurrectZombies { change, local } => {
                write!(w, "Resurrecting zombie lines in {:?} ", local.path)?;
                write_pos(&mut w, hashes, change.inode())?;
                writeln!(w, " at line {}", local.line)?;
                write_atom(&mut w, hashes, &change)?;
            }
        }
        Ok(())
    }
}
#[cfg(feature = "text-changes")]
impl Record<Option<Hash>, Local> {
    /// Parse one line `h` of the textual change format; inverse of
    /// `write` above.
    ///
    /// When `h` starts a new record (it matches one of the header
    /// regexes below), the previously accumulated `current` record is
    /// returned and replaced by the new one. Otherwise `h` is folded
    /// into `current` ("+" content lines, "\" no-trailing-newline
    /// markers, context lines, edge lists) and `Ok(None)` is returned.
    ///
    /// * `updatables` — pending inode updates whose positions are
    ///   patched once the matching record is parsed (FILE_ADDITION).
    /// * `current` — the record whose header was seen last.
    /// * `contents_` — the change's "contents" section being rebuilt;
    ///   file names and added text are appended here.
    /// * `changes` — maps the numbers used in the text format back to
    ///   hashes.
    /// * `offsets` — maps "new start:end" offsets of the text format
    ///   to positions in `contents_`.
    fn read(
        updatables: &mut HashMap<usize, crate::InodeUpdate>,
        current: &mut Option<Self>,
        mut contents_: &mut Vec<u8>,
        changes: &HashMap<usize, Hash>,
        offsets: &mut HashMap<u64, ChangePosition>,
        h: &str,
    ) -> Result<Option<Self>, Error> {
        use self::text_changes::*;
        use regex::Regex;
        // One regex per record header emitted by `write`, plus CONTEXT
        // for the "up …, new …, down …" context lines.
        lazy_static! {
            static ref FILE_ADDITION: Regex =
                Regex::new(r#"(?P<n>\d+)\. File addition: "(?P<name>[^"]*)" in "(?P<parent>[^"]*)" (?P<perm>\d+)"#).unwrap();
            static ref EDIT: Regex =
                Regex::new(r#"(\d+)\. Edit in ([^:]+):(\d+) (\d+\.\d+)"#).unwrap();
            static ref REPLACEMENT: Regex =
                Regex::new(r#"(\d+)\. Replacement in ([^:]+):(\d+) (\d+\.\d+)"#).unwrap();
            static ref FILE_DELETION: Regex =
                Regex::new(r#"(\d+)\. File deletion: "([^"]*)" (\d+\.\d+)"#).unwrap();
            static ref FILE_UNDELETION: Regex =
                Regex::new(r#"(\d+)\. File un-deletion: "([^"]*)" (\d+\.\d+)"#).unwrap();
            // MOVE is a rename (former + new name + permissions);
            // MOVE_ is a move expressed purely as edges.
            static ref MOVE: Regex =
                Regex::new(r#"(\d+)\. Moved: "(?P<former>[^"]*)" "(?P<new>[^"]*)" (?P<perm>\d+) (?P<inode>.*)"#).unwrap();
            static ref MOVE_: Regex = Regex::new(r#"(\d+)\. Moved: "([^"]*)" (.*)"#).unwrap();
            static ref NAME_CONFLICT: Regex = Regex::new(
                r#"(\d+)\. ((Solving)|(Un-solving)) a name conflict in "([^"]*)" (.*): .*"#
            )
            .unwrap();
            static ref ORDER_CONFLICT: Regex = Regex::new(
                r#"(\d+)\. ((Solving)|(Un-solving)) an order conflict in (.*):(\d+) (\d+\.\d+)"#
            )
            .unwrap();
            // NOTE(review): ZOMBIE is declared but never used below —
            // there is no branch producing `Record::ResurrectZombies`,
            // and this pattern does not match what `write` emits for
            // zombie hunks. TODO confirm whether zombie hunks are
            // expected to survive a text round-trip.
            static ref ZOMBIE: Regex =
                Regex::new(r#"(\d+)\. Resurrecting zombie lines in ([^:]+):(\d+) (\d+\.\d+)"#)
                    .unwrap();
            static ref CONTEXT: Regex = Regex::new(
                r#"up ((\d+\.\d+ )*\d+\.\d+)(, new (\d+):(\d+))?(, down ((\d+\.\d+ )*\d+\.\d+))?"#
            )
            .unwrap();
        }
        if let Some(cap) = FILE_ADDITION.captures(h) {
            // Terminate the previous record's vertex text with a 0 byte.
            if has_newvertices(current) {
                contents_.push(0)
            }
            // Name vertex: 2 bytes of permissions + the basename.
            let mut add_name = default_newvertex();
            add_name.start = ChangePosition(contents_.len() as u64);
            add_name.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
            let name = &cap.name("name").unwrap().as_str();
            let path = {
                let parent = cap.name("parent").unwrap().as_str();
                (if parent == "/" {
                    String::new()
                } else {
                    parent.to_string()
                }) + name
            };
            // Permissions are printed in octal by `write`.
            let meta = cap
                .name("perm")
                .unwrap()
                .as_str()
                .chars()
                .fold(0, |x, c| x * 8 + (c as u16 - b'0' as u16));
            let meta = InodeMetadata(meta);
            meta.write(&mut contents_).unwrap();
            contents_.extend(name.as_bytes());
            add_name.end = ChangePosition(contents_.len() as u64);
            // Inode vertex: an empty vertex hanging under the name.
            let mut add_inode = default_newvertex();
            add_inode.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
            add_inode.up_context.push(Position {
                change: None,
                pos: ChangePosition(contents_.len() as u64),
            });
            contents_.push(0);
            add_inode.start = ChangePosition(contents_.len() as u64);
            add_inode.end = ChangePosition(contents_.len() as u64);
            contents_.push(0);
            // Patch the pending inode update for record number `n`
            // with the inode vertex's final position.
            let n = cap.name("n").unwrap().as_str().parse().unwrap();
            if let Entry::Occupied(mut e) = updatables.entry(n) {
                if let crate::InodeUpdate::Add { ref mut pos, .. } = e.get_mut() {
                    *pos = add_inode.start
                }
            }
            Ok(std::mem::replace(
                current,
                Some(Record::FileAdd {
                    add_name: Atom::NewVertex(add_name),
                    add_inode: Atom::NewVertex(add_inode),
                    contents: None,
                    path,
                }),
            ))
        } else if let Some(cap) = EDIT.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            let mut v = default_newvertex();
            v.inode = parse_pos(changes, &cap[4]);
            Ok(std::mem::replace(
                current,
                Some(Record::Edit {
                    change: Atom::NewVertex(v),
                    local: Local {
                        path: cap[2].to_string(),
                        line: cap[3].parse().unwrap(),
                    },
                }),
            ))
        } else if let Some(cap) = REPLACEMENT.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            let mut v = default_newvertex();
            v.inode = parse_pos(changes, &cap[4]);
            Ok(std::mem::replace(
                current,
                Some(Record::Replacement {
                    change: Atom::NewVertex(v.clone()),
                    replacement: Atom::NewVertex(v),
                    local: Local {
                        path: cap[2].to_string(),
                        line: cap[3].parse().unwrap(),
                    },
                }),
            ))
        } else if let Some(cap) = FILE_DELETION.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            let mut del = default_edgemap();
            del.inode = parse_pos(changes, &cap[3]);
            Ok(std::mem::replace(
                current,
                Some(Record::FileDel {
                    del: Atom::EdgeMap(del),
                    contents: None,
                    path: cap[2].to_string(),
                }),
            ))
        } else if let Some(cap) = FILE_UNDELETION.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            let mut undel = default_edgemap();
            undel.inode = parse_pos(changes, &cap[3]);
            Ok(std::mem::replace(
                current,
                Some(Record::FileUndel {
                    undel: Atom::EdgeMap(undel),
                    contents: None,
                    path: cap[2].to_string(),
                }),
            ))
        } else if let Some(cap) = NAME_CONFLICT.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            let mut name = default_edgemap();
            debug!("cap = {:?}", cap);
            name.inode = parse_pos(changes, &cap[6]);
            Ok(std::mem::replace(
                current,
                // cap[2] is "Solving" or "Un-solving".
                if &cap[2] == "Solving" {
                    Some(Record::SolveNameConflict {
                        name: Atom::EdgeMap(name),
                        path: cap[5].to_string(),
                    })
                } else {
                    Some(Record::UnsolveNameConflict {
                        name: Atom::EdgeMap(name),
                        path: cap[5].to_string(),
                    })
                },
            ))
        } else if let Some(cap) = MOVE.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            // Rename: rebuild the new name vertex (permissions + name)
            // in `contents_`, like FILE_ADDITION above.
            let mut add = default_newvertex();
            add.start = ChangePosition(contents_.len() as u64);
            add.flag = EdgeFlags::FOLDER | EdgeFlags::BLOCK;
            let name = cap.name("new").unwrap().as_str();
            let meta = cap
                .name("perm")
                .unwrap()
                .as_str()
                .chars()
                .fold(0, |x, c| x * 8 + (c as u16 - b'0' as u16));
            let meta = InodeMetadata(meta);
            meta.write(&mut contents_).unwrap();
            contents_.extend(name.as_bytes());
            add.end = ChangePosition(contents_.len() as u64);
            let mut del = default_edgemap();
            del.inode = parse_pos(changes, cap.name("inode").unwrap().as_str());
            Ok(std::mem::replace(
                current,
                Some(Record::FileMove {
                    del: Atom::EdgeMap(del),
                    add: Atom::NewVertex(add),
                    path: cap[2].to_string(),
                }),
            ))
        } else if let Some(cap) = MOVE_.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            // Pure edge move: both atoms are edge maps on the same inode.
            let mut add = default_edgemap();
            let mut del = default_edgemap();
            add.inode = parse_pos(changes, &cap[3]);
            del.inode = add.inode;
            Ok(std::mem::replace(
                current,
                Some(Record::FileMove {
                    del: Atom::EdgeMap(del),
                    add: Atom::EdgeMap(add),
                    path: cap[2].to_string(),
                }),
            ))
        } else if let Some(cap) = ORDER_CONFLICT.captures(h) {
            if has_newvertices(current) {
                contents_.push(0)
            }
            Ok(std::mem::replace(
                current,
                // Solving introduces a new vertex; un-solving remaps edges.
                Some(if &cap[2] == "Solving" {
                    let mut v = default_newvertex();
                    v.inode = parse_pos(changes, &cap[7]);
                    Record::SolveOrderConflict {
                        change: Atom::NewVertex(v),
                        local: Local {
                            path: cap[5].to_string(),
                            line: cap[6].parse().unwrap(),
                        },
                    }
                } else {
                    let mut v = default_edgemap();
                    v.inode = parse_pos(changes, &cap[7]);
                    Record::UnsolveOrderConflict {
                        change: Atom::EdgeMap(v),
                        local: Local {
                            path: cap[5].to_string(),
                            line: cap[6].parse().unwrap(),
                        },
                    }
                }),
            ))
        } else {
            // Not a header: fold the line into the current record.
            match current {
                Some(Record::FileAdd {
                    ref mut contents,
                    ref mut add_name,
                    ..
                }) => {
                    if h.starts_with("+") {
                        // First "+" line: open the contents vertex just
                        // after the inode vertex's trailing 0 byte.
                        if contents.is_none() {
                            let mut v = default_newvertex();
                            let inode = Position {
                                change: None,
                                pos: ChangePosition(contents_.len() as u64 - 1),
                            };
                            v.up_context.push(inode);
                            v.inode = inode;
                            v.start = ChangePosition(contents_.len() as u64);
                            *contents = Some(Atom::NewVertex(v));
                        }
                        if let Some(Atom::NewVertex(ref mut contents)) = contents {
                            // (this inner check is always true here)
                            if h.starts_with("+") {
                                text_changes::parse_line_add(h, contents, contents_)
                            }
                        }
                    } else if h.starts_with("\\") {
                        // "\": the last added line has no trailing newline.
                        if let Some(Atom::NewVertex(ref mut contents)) = contents {
                            if contents_[contents.end.0 as usize - 1] == b'\n' {
                                assert_eq!(contents.end.0 as usize, contents_.len());
                                contents_.pop();
                                contents.end.0 -= 1;
                            }
                        }
                    } else if let Some(cap) = CONTEXT.captures(h) {
                        // Context line for the name vertex; also records
                        // the "new start:end" offsets for later look-ups.
                        if let Atom::NewVertex(ref mut name) = add_name {
                            name.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
                            if let (Some(new_start), Some(new_end)) = (cap.get(4), cap.get(5)) {
                                offsets.insert(new_start.as_str().parse().unwrap(), name.start);
                                offsets.insert(new_end.as_str().parse().unwrap(), name.end);
                                offsets.insert(
                                    new_end.as_str().parse::<u64>().unwrap() + 1,
                                    name.end + 1,
                                );
                            }
                        }
                    }
                    Ok(None)
                }
                Some(Record::FileDel {
                    ref mut del,
                    ref mut contents,
                    ..
                }) => {
                    // FOLDER edges describe the deletion itself; other
                    // edges delete the file's contents.
                    if let Some(edges) = parse_edges(changes, h) {
                        if let Atom::EdgeMap(ref mut e) = del {
                            if edges[0].flag.contains(EdgeFlags::FOLDER) {
                                *e = EdgeMap {
                                    inode: e.inode,
                                    edges,
                                }
                            } else {
                                *contents = Some(Atom::EdgeMap(EdgeMap {
                                    inode: e.inode,
                                    edges,
                                }))
                            }
                        }
                    }
                    Ok(None)
                }
                Some(Record::FileUndel {
                    ref mut undel,
                    ref mut contents,
                    ..
                }) => {
                    if let Some(edges) = parse_edges(changes, h) {
                        if let Atom::EdgeMap(ref mut e) = undel {
                            if edges[0].flag.contains(EdgeFlags::FOLDER) {
                                *e = EdgeMap {
                                    inode: e.inode,
                                    edges,
                                }
                            } else {
                                *contents = Some(Atom::EdgeMap(EdgeMap {
                                    inode: e.inode,
                                    edges,
                                }))
                            }
                        }
                    }
                    Ok(None)
                }
                Some(Record::FileMove {
                    ref mut del,
                    ref mut add,
                    ..
                }) => {
                    if let Some(edges) = parse_edges(changes, h) {
                        // DELETED edges belong to the `del` side; the
                        // first non-deleted edge list fills `add`.
                        if edges[0].flag.contains(EdgeFlags::DELETED) {
                            *del = Atom::EdgeMap(EdgeMap {
                                inode: del.inode(),
                                edges,
                            });
                            return Ok(None);
                        } else if let Atom::EdgeMap(ref mut add) = add {
                            if add.edges.is_empty() {
                                *add = EdgeMap {
                                    inode: add.inode,
                                    edges,
                                };
                                return Ok(None);
                            }
                        }
                    } else if let Some(cap) = CONTEXT.captures(h) {
                        // Context line for the new name vertex (renames).
                        if let Atom::NewVertex(ref mut c) = add {
                            debug!("cap = {:?}", cap);
                            c.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
                            if let Some(cap) = cap.get(7) {
                                c.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
                            }
                        }
                    }
                    Ok(None)
                }
                Some(Record::Edit { ref mut change, .. }) => {
                    if h.starts_with("+ ") {
                        if let Atom::NewVertex(ref mut change) = change {
                            // Open the vertex on the first added line.
                            if change.start == change.end {
                                change.start = ChangePosition(contents_.len() as u64);
                            }
                            text_changes::parse_line_add(h, change, contents_)
                        }
                    } else if h.starts_with("\\") {
                        if let Atom::NewVertex(ref mut change) = change {
                            if contents_[change.end.0 as usize - 1] == b'\n' {
                                assert_eq!(change.end.0 as usize, contents_.len());
                                contents_.pop();
                                change.end.0 -= 1;
                            }
                        }
                    } else if let Some(cap) = CONTEXT.captures(h) {
                        if let Atom::NewVertex(ref mut c) = change {
                            debug!("cap = {:?}", cap);
                            c.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
                            if let Some(cap) = cap.get(7) {
                                c.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
                            }
                        }
                    } else if let Some(edges) = parse_edges(changes, h) {
                        // An edge list turns this edit into a deletion.
                        *change = Atom::EdgeMap(EdgeMap {
                            inode: change.inode(),
                            edges,
                        });
                    }
                    Ok(None)
                }
                Some(Record::Replacement {
                    ref mut change,
                    ref mut replacement,
                    ..
                }) => {
                    if h.starts_with("+ ") {
                        // "+" lines fill the replacement vertex; edge
                        // lists fill the deleted `change` side.
                        if let Atom::NewVertex(ref mut repl) = replacement {
                            if repl.start == repl.end {
                                repl.start = ChangePosition(contents_.len() as u64);
                            }
                            // (this inner check is always true here)
                            if h.starts_with("+") {
                                text_changes::parse_line_add(h, repl, contents_)
                            }
                        }
                    } else if h.starts_with("\\") {
                        if let Atom::NewVertex(ref mut repl) = replacement {
                            if contents_[repl.end.0 as usize - 1] == b'\n' {
                                assert_eq!(repl.end.0 as usize, contents_.len());
                                contents_.pop();
                                repl.end.0 -= 1;
                            }
                        }
                    } else if let Some(cap) = CONTEXT.captures(h) {
                        debug!("cap = {:?}", cap);
                        if let Atom::NewVertex(ref mut repl) = replacement {
                            repl.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
                            if let Some(cap) = cap.get(7) {
                                repl.down_context = parse_pos_vec(changes, offsets, cap.as_str())?;
                            }
                        }
                    } else if let Some(edges) = parse_edges(changes, h) {
                        *change = Atom::EdgeMap(EdgeMap {
                            inode: change.inode(),
                            edges,
                        });
                    }
                    Ok(None)
                }
                Some(Record::SolveNameConflict { ref mut name, .. })
                | Some(Record::UnsolveNameConflict { ref mut name, .. }) => {
                    if let Some(edges) = parse_edges(changes, h) {
                        *name = Atom::EdgeMap(EdgeMap {
                            edges,
                            inode: name.inode(),
                        })
                    }
                    Ok(None)
                }
                Some(Record::SolveOrderConflict { ref mut change, .. }) => {
                    if let Some(cap) = CONTEXT.captures(h) {
                        debug!("cap = {:?}", cap);
                        if let Atom::NewVertex(ref mut change) = change {
                            change.up_context = parse_pos_vec(changes, offsets, &cap[1])?;
                            if let Some(cap) = cap.get(7) {
                                change.down_context =
                                    parse_pos_vec(changes, offsets, cap.as_str())?;
                            }
                            // The conflict-solving vertex is empty.
                            change.start = ChangePosition(contents_.len() as u64);
                            change.end = ChangePosition(contents_.len() as u64);
                        }
                    }
                    Ok(None)
                }
                Some(Record::UnsolveOrderConflict { ref mut change, .. }) => {
                    if let Some(edges) = parse_edges(changes, h) {
                        if let Atom::EdgeMap(ref mut change) = change {
                            change.edges = edges
                        }
                    }
                    Ok(None)
                }
                // NOTE(review): unreachable in practice — no branch above
                // ever sets `current` to `ResurrectZombies` (see the
                // unused ZOMBIE regex). TODO confirm.
                Some(Record::ResurrectZombies { ref mut change, .. }) => {
                    if let Some(edges) = parse_edges(changes, h) {
                        if let Atom::EdgeMap(ref mut change) = change {
                            change.edges = edges
                        }
                    }
                    Ok(None)
                }
                None => {
                    debug!("current = {:#?}", current);
                    debug!("h = {:?}", h);
                    Ok(None)
                }
            }
        }
    }
}
#[cfg(feature = "text-changes")]
mod text_changes {
    use super::*;
    lazy_static! {
        // A "change.position" pair, e.g. "2.15".
        static ref POS: regex::Regex = regex::Regex::new(r#"(\d+)\.(\d+)"#).unwrap();
        // One textual edge, as written by `write_edgemap`:
        // "PREV:FLAGS from_c.from_p -> to_c.start:end/introduced_by".
        static ref EDGE: regex::Regex =
            regex::Regex::new(r#"\s*(?P<prev>[BFD]*):(?P<flag>[BFD]*)\s+(?P<up_c>\d+)\.(?P<up_l>\d+)\s*->\s*(?P<c>\d+)\.(?P<l0>\d+):(?P<l1>\d+)/(?P<intro>\d+)\s*"#).unwrap();
    }
    /// A `NewVertex` with empty contexts, empty flags and a zero-length
    /// span, rooted at the `Hash::None` inode; callers fill in the
    /// fields they need.
    pub fn default_newvertex() -> NewVertex<Option<Hash>> {
        NewVertex {
            start: ChangePosition(0),
            end: ChangePosition(0),
            flag: EdgeFlags::empty(),
            up_context: Vec::new(),
            down_context: Vec::new(),
            inode: Position {
                change: Some(Hash::None),
                pos: ChangePosition(0),
            },
        }
    }
    /// An `EdgeMap` with no edges, rooted at the `Hash::None` inode.
    pub fn default_edgemap() -> EdgeMap<Option<Hash>> {
        EdgeMap {
            edges: Vec::new(),
            inode: Position {
                change: Some(Hash::None),
                pos: ChangePosition(0),
            },
        }
    }
    /// Whether the record currently being parsed contains any
    /// `NewVertex` atom (i.e. has appended text to the contents
    /// buffer). A `FileAdd` without contents counts as `false`, even
    /// though its name vertex is a `NewVertex`.
    pub fn has_newvertices<L>(current: &Option<Record<Option<Hash>, L>>) -> bool {
        match current {
            Some(Record::FileAdd { contents: None, .. }) | None => false,
            Some(rec) => rec.iter().any(|e| {
                if let Atom::NewVertex(_) = e {
                    true
                } else {
                    false
                }
            }),
        }
    }
    /// Parse a whitespace-separated list of "change.pos" pairs.
    ///
    /// A change number of 0 means "this change"; the position is then a
    /// text-format offset, resolved through `offsets` (an unknown
    /// offset is an `InconsistentChange` error).
    pub fn parse_pos_vec(
        changes: &HashMap<usize, Hash>,
        offsets: &HashMap<u64, ChangePosition>,
        s: &str,
    ) -> Result<Vec<Position<Option<Hash>>>, Error> {
        let mut v = Vec::new();
        for pos in POS.captures_iter(s) {
            let change: usize = (&pos[1]).parse().unwrap();
            let pos: u64 = (&pos[2]).parse().unwrap();
            let pos = if change == 0 {
                if let Some(&pos) = offsets.get(&pos) {
                    pos
                } else {
                    return Err(Error::InconsistentChange);
                }
            } else {
                ChangePosition(pos)
            };
            v.push(Position {
                change: change_ref(changes, change),
                pos,
            })
        }
        Ok(v)
    }
    /// Decode a change number of the text format: 0 is "this change"
    /// (`None`), 1 is `Hash::None`, anything else is looked up in
    /// `changes` (panics if absent). Inverse of the numbering used by
    /// `write_pos`.
    fn change_ref(changes: &HashMap<usize, Hash>, change: usize) -> Option<Hash> {
        debug!("change_ref {:?} {:?}", changes, change);
        if change == 0 {
            None
        } else if change == 1 {
            Some(Hash::None)
        } else {
            Some(*changes.get(&change).unwrap())
        }
    }
    /// Parse a single "change.pos" pair (panics if `s` contains none).
    pub fn parse_pos(changes: &HashMap<usize, Hash>, s: &str) -> Position<Option<Hash>> {
        let pos = POS.captures(s).unwrap();
        let change: usize = (&pos[1]).parse().unwrap();
        let pos: u64 = (&pos[2]).parse().unwrap();
        Position {
            change: change_ref(changes, change),
            pos: ChangePosition(pos),
        }
    }
    /// Parse a comma-separated edge list (inverse of `write_edgemap`);
    /// returns `None` as soon as one element fails to match `EDGE`.
    pub fn parse_edges(
        changes: &HashMap<usize, Hash>,
        s: &str,
    ) -> Option<Vec<NewEdge<Option<Hash>>>> {
        debug!("parse_edges {:?}", s);
        let mut result = Vec::new();
        for edge in s.split(",") {
            debug!("parse edge {:?}", edge);
            if let Some(cap) = EDGE.captures(edge) {
                let previous = read_flag(cap.name("prev").unwrap().as_str());
                let flag = read_flag(cap.name("flag").unwrap().as_str());
                let change0: usize = cap.name("up_c").unwrap().as_str().parse().unwrap();
                let pos0: u64 = cap.name("up_l").unwrap().as_str().parse().unwrap();
                let change1: usize = cap.name("c").unwrap().as_str().parse().unwrap();
                let start1: u64 = cap.name("l0").unwrap().as_str().parse().unwrap();
                let end1: u64 = cap.name("l1").unwrap().as_str().parse().unwrap();
                let introduced_by: usize = cap.name("intro").unwrap().as_str().parse().unwrap();
                result.push(NewEdge {
                    previous,
                    flag,
                    from: Position {
                        change: change_ref(changes, change0),
                        pos: ChangePosition(pos0),
                    },
                    to: Vertex {
                        change: change_ref(changes, change1),
                        start: ChangePosition(start1),
                        end: ChangePosition(end1),
                    },
                    introduced_by: change_ref(changes, introduced_by),
                })
            } else {
                debug!("not parsed");
                return None;
            }
        }
        Some(result)
    }
    /// Append the text of one "+ …" line to the contents buffer and
    /// extend `change.end` accordingly. The 2-byte "+ " prefix is
    /// skipped; a bare "+" followed by a newline encodes an empty
    /// added line.
    pub fn parse_line_add(h: &str, change: &mut NewVertex<Option<Hash>>, contents_: &mut Vec<u8>) {
        let h = h.as_bytes();
        debug!("parse_line_add {:?} {:?}", change.end, change.start);
        debug!("parse_line_add {:?}", h);
        if h.len() > 2 {
            let h = &h[2..h.len()];
            contents_.extend(h);
        } else if h.len() > 1 {
            contents_.push(b'\n');
        }
        debug!("contents_.len() = {:?}", contents_.len());
        trace!("contents_ = {:?}", contents_);
        change.end = ChangePosition(contents_.len() as u64);
    }
    /// Print `contents` line by line, each prefixed with `pref` and a
    /// space; non-UTF-8 contents are printed as a single base64 line
    /// prefixed with "b".
    pub fn print_contents<W: std::io::Write>(
        w: &mut W,
        pref: &str,
        contents: &[u8],
    ) -> Result<(), anyhow::Error> {
        if let Ok(contents) = std::str::from_utf8(&contents) {
            for l in contents.lines() {
                writeln!(w, "{} {}", pref, l)?;
            }
        } else {
            writeln!(w, "{}b{}", pref, data_encoding::BASE64.encode(contents))?
        }
        Ok(())
    }
    /// Print the text affected by `change`: "+" lines for a new vertex
    /// (taken from this change's contents, with a trailing "\" if the
    /// last line has no newline), "-" lines for deleted edges (fetched
    /// from the change store).
    pub fn print_change_contents<W: std::io::Write, C: ChangeStore>(
        w: &mut W,
        changes: &C,
        change: &Atom<Option<Hash>>,
        change_contents: &[u8],
    ) -> Result<(), anyhow::Error> {
        match change {
            Atom::NewVertex(ref n) => {
                let c = &change_contents[n.start.0 as usize..n.end.0 as usize];
                print_contents(w, "+", c)?;
                if !c.ends_with(b"\n") {
                    writeln!(w, "\\")?
                }
                Ok(())
            }
            Atom::EdgeMap(ref n) if n.edges[0].flag.contains(EdgeFlags::DELETED) => {
                let mut buf = Vec::new();
                let mut current = None;
                for e in n.edges.iter() {
                    // Consecutive edges to the same vertex print once.
                    if Some(e.to) == current {
                        continue;
                    }
                    buf.clear();
                    changes.get_contents_ext(e.to, &mut buf)?;
                    print_contents(w, "-", &buf[..])?;
                    current = Some(e.to)
                }
                Ok(())
            }
            _ => Ok(()),
        }
    }
    /// Write the basenames deleted by `del`'s edges as a
    /// comma-separated list, skipping the 2-byte permission prefix of
    /// each name vertex.
    pub fn write_deleted_names<W: std::io::Write, C: ChangeStore>(
        w: &mut W,
        changes: &C,
        del: &Atom<Option<Hash>>,
    ) -> Result<(), anyhow::Error> {
        if let Atom::EdgeMap(ref e) = del {
            let mut buf = Vec::new();
            let mut is_first = true;
            for d in e.edges.iter() {
                buf.clear();
                changes.get_contents_ext(d.to, &mut buf)?;
                if !buf.is_empty() {
                    let name = std::str::from_utf8(buf.split_at(2).1).unwrap();
                    write!(w, "{}{:?}", if is_first { "" } else { ", " }, name)?;
                    is_first = false;
                }
            }
        }
        Ok(())
    }
    /// Write the textual form of `flag` ("B", "F", "D", in that
    /// order). `PARENT`/`PSEUDO` must never appear in a serialized
    /// change.
    pub fn write_flag<W: std::io::Write>(mut w: W, flag: EdgeFlags) -> Result<(), anyhow::Error> {
        if flag.contains(EdgeFlags::BLOCK) {
            w.write_all(b"B")?;
        }
        if flag.contains(EdgeFlags::FOLDER) {
            w.write_all(b"F")?;
        }
        if flag.contains(EdgeFlags::DELETED) {
            w.write_all(b"D")?;
        }
        assert!(!flag.contains(EdgeFlags::PARENT));
        assert!(!flag.contains(EdgeFlags::PSEUDO));
        Ok(())
    }
    /// Inverse of `write_flag`; panics on any unexpected character.
    pub fn read_flag(s: &str) -> EdgeFlags {
        let mut f = EdgeFlags::empty();
        for i in s.chars() {
            match i {
                'B' => f |= EdgeFlags::BLOCK,
                'F' => f |= EdgeFlags::FOLDER,
                'D' => f |= EdgeFlags::DELETED,
                c => panic!("read_flag: {:?}", c),
            }
        }
        f
    }
    /// Write a position as "change.pos", encoding the change as 0
    /// (this change), 1 (`Hash::None`) or its number in `hashes`
    /// (panics if absent); inverse of `parse_pos`/`change_ref`.
    pub fn write_pos<W: std::io::Write>(
        mut w: W,
        hashes: &HashMap<Hash, usize>,
        pos: Position<Option<Hash>>,
    ) -> Result<(), anyhow::Error> {
        let change = if let Some(Hash::None) = pos.change {
            1
        } else if let Some(ref c) = pos.change {
            *hashes.get(c).unwrap()
        } else {
            0
        };
        write!(w, "{}.{}", change, pos.pos.0)?;
        Ok(())
    }
    /// Dispatch to `write_newvertex` or `write_edgemap`.
    pub fn write_atom<W: std::io::Write>(
        w: &mut W,
        hashes: &HashMap<Hash, usize>,
        atom: &Atom<Option<Hash>>,
    ) -> Result<(), anyhow::Error> {
        match atom {
            Atom::NewVertex(ref n) => write_newvertex(w, hashes, n),
            Atom::EdgeMap(ref n) => write_edgemap(w, hashes, n),
        }
    }
    /// Write a new vertex as " up P…, new start:end[, down P…]";
    /// parsed back by the CONTEXT regex in `Record::read`.
    pub fn write_newvertex<W: std::io::Write>(
        mut w: W,
        hashes: &HashMap<Hash, usize>,
        n: &NewVertex<Option<Hash>>,
    ) -> Result<(), anyhow::Error> {
        write!(w, " up")?;
        for c in n.up_context.iter() {
            write!(w, " ")?;
            write_pos(&mut w, hashes, *c)?
        }
        write!(w, ", new {}:{}", n.start.0, n.end.0)?;
        if !n.down_context.is_empty() {
            write!(w, ", down")?;
            for c in n.down_context.iter() {
                write!(w, " ")?;
                write_pos(&mut w, hashes, *c)?
            }
        }
        w.write_all(b"\n")?;
        Ok(())
    }
    /// Write an edge map as a comma-separated edge list; parsed back
    /// by `parse_edges`. Panics if an `introduced_by` hash is not
    /// numbered in `hashes`.
    pub fn write_edgemap<W: std::io::Write>(
        mut w: W,
        hashes: &HashMap<Hash, usize>,
        n: &EdgeMap<Option<Hash>>,
    ) -> Result<(), anyhow::Error> {
        let mut is_first = true;
        for c in n.edges.iter() {
            if !is_first {
                write!(w, ", ")?;
            }
            is_first = false;
            write_flag(&mut w, c.previous)?;
            write!(w, ":")?;
            write_flag(&mut w, c.flag)?;
            write!(w, " ")?;
            write_pos(&mut w, hashes, c.from)?;
            write!(w, " -> ")?;
            write_pos(&mut w, hashes, c.to.start_pos())?;
            let h = if let Some(h) = hashes.get(c.introduced_by.as_ref().unwrap()) {
                h
            } else {
                panic!("introduced_by = {:?}, not found", c.introduced_by);
            };
            write!(w, ":{}/{}", c.to.end.0, h)?;
        }
        write!(w, "\n")?;
        Ok(())
    }
    /// Parser state for the textual change format: which top-level
    /// section of the file is currently being read.
    #[cfg(feature = "text-changes")]
    #[derive(Debug, Clone, PartialEq, Eq)]
    pub enum Section {
        Header(String),
        Deps,
        Changes {
            changes: Vec<Record<Option<Hash>, Local>>,
            current: Option<Record<Option<Hash>, Local>>,
            offsets: HashMap<u64, ChangePosition>,
        },
    }
}
// org id XDu/0rcMPSdaCWLiWWDiw32Rl45EX9uwvy9vprRXJbg=
/// An open, seekable change file.
#[cfg(feature = "zstd")]
pub struct ChangeFile<'a> {
    // Seekable decompressor over the "contents" section; `None` when
    // the file ends before the contents offset (see `open`).
    s: Option<zstd_seekable::Seekable<'a, OffFile>>,
    // Decompressed, deserialized hashed part of the change.
    hashed: Hashed<Local>,
    // The change's hash (used in `read_contents` error reports).
    hash: Hash,
    // Optional unhashed TOML metadata section.
    unhashed: Option<toml::Value>,
}
/// A `std::fs::File` viewed from a fixed byte offset: seeks from the
/// start are shifted by `start` (see the `Seek` impl below), so that
/// offset 0 is the beginning of the compressed contents section.
struct OffFile {
    f: std::fs::File,
    start: u64,
}
// SAFETY: `OffFile` only holds a `std::fs::File` and a `u64`, both of
// which are `Send`. (NOTE(review): `Send` should already be derived
// automatically for this type — TODO confirm why the explicit unsafe
// impl is needed.)
unsafe impl Send for OffFile {}
impl std::io::Read for OffFile {
    /// Forward reads to the wrapped file; offset translation is done
    /// entirely by the `Seek` implementation.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
        let inner = &mut self.f;
        std::io::Read::read(inner, buf)
    }
}
impl std::io::Seek for OffFile {
    /// Seek in the wrapped file, shifting absolute positions so that
    /// offset 0 corresponds to `self.start` in the underlying file.
    fn seek(&mut self, from: std::io::SeekFrom) -> Result<u64, std::io::Error> {
        use std::io::SeekFrom;
        let from = match from {
            SeekFrom::Start(s) => SeekFrom::Start(s + self.start),
            // NOTE(review): `Current` and `End` pass through
            // untranslated, and the returned position is the offset in
            // the underlying file (not relative to `start`). Fine if
            // the consumer (zstd_seekable) only seeks from the start —
            // TODO confirm.
            c => c,
        };
        self.f.seek(from)
    }
}
#[cfg(feature = "zstd")]
impl<'a> ChangeFile<'a> {
    /// Open a change file from a path.
    ///
    /// Reads and validates the offsets header, decompresses and
    /// deserializes the hashed section, then the optional unhashed
    /// TOML section, and finally sets up a seekable decompressor over
    /// the contents section (if the file extends past `contents_off`).
    pub fn open(hash: Hash, path: &str) -> Result<Self, anyhow::Error> {
        use std::io::Read;
        let mut r = std::fs::File::open(path)?;
        let mut buf = Vec::new();
        buf.resize(Change::OFFSETS_SIZE as usize, 0);
        r.read_exact(&mut buf)?;
        let offsets: Offsets = bincode::deserialize(&buf)?;
        if offsets.version != VERSION {
            return Err(Error::VersionMismatch.into());
        }
        // Compressed hashed section: from the end of the offsets
        // header up to `unhashed_off`.
        buf.clear();
        buf.resize((offsets.unhashed_off - Change::OFFSETS_SIZE) as usize, 0);
        r.read_exact(&mut buf)?;
        let mut buf2 = vec![0u8; offsets.hashed_len as usize];
        let hashed: Hashed<Local> = {
            let mut s = zstd_seekable::Seekable::init_buf(&buf)?;
            s.decompress(&mut buf2, 0)?;
            bincode::deserialize(&buf2)?
        };
        // Optional unhashed section, between `unhashed_off` and
        // `contents_off`; a zero-length span means it is absent.
        buf.resize((offsets.contents_off - offsets.unhashed_off) as usize, 0);
        let unhashed = if buf.is_empty() {
            None
        } else {
            r.read_exact(&mut buf)?;
            let mut s = zstd_seekable::Seekable::init_buf(&buf)?;
            buf2.resize(offsets.unhashed_len as usize, 0);
            s.decompress(&mut buf2, 0)?;
            Some(toml::de::from_slice(&buf2)?)
        };
        // Contents section: only present if the file extends past
        // `contents_off`; read lazily through an offset `OffFile`.
        let m = r.metadata()?;
        let s = if offsets.contents_off >= m.len() {
            None
        } else {
            Some(zstd_seekable::Seekable::init(Box::new(OffFile {
                f: r,
                start: offsets.contents_off,
            }))?)
        };
        Ok(ChangeFile {
            s,
            hashed,
            hash,
            unhashed,
        })
    }
    /// Whether this change file has a contents section.
    pub fn has_contents(&self) -> bool {
        self.s.is_some()
    }
    /// Reads the contents at an offset into `buf`, and returns the
    /// number of bytes read. The bounds of the change's "contents"
    /// section are not checked.
    pub fn read_contents(&mut self, offset: u64, buf: &mut [u8]) -> Result<usize, anyhow::Error> {
        debug!("read_contents {:?} {:?}", offset, buf.len());
        if let Some(ref mut s) = self.s {
            Ok(s.decompress(buf, offset)?)
        } else {
            Err((Error::MissingContents {
                hash: self.hash.to_base32(),
            })
            .into())
        }
    }
    /// The hashed (content-addressed) part of this change.
    pub fn hashed(&self) -> &Hashed<Local> {
        &self.hashed
    }
    /// The unhashed TOML metadata, if any.
    pub fn unhashed(&self) -> &Option<toml::Value> {
        &self.unhashed
    }
}
use super::*;
/// Version-3 on-disk representation of an atom (see `Atom` for v4).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
enum Atom3<Change> {
    NewVertex(NewVertex<Change>),
    EdgeMap(EdgeMap3<Change>),
}
impl Atom3<Option<Hash>> {
fn to_v4(self, translation: &HashMap<Option<Hash>, Option<Hash>>) -> Atom<Option<Hash>> {
match self {
Atom3::NewVertex(n) => Atom::NewVertex(NewVertex {
up_context: n
.up_context
.into_iter()
.map(|x| Position {
change: *translation.get(&x.change).unwrap(),
..x
})
.collect(),
down_context: n
.down_context
.into_iter()
.map(|x| Position {
change: *translation.get(&x.change).unwrap(),
..x
})
.collect(),
flag: n.flag,
start: n.start,
end: n.end,
inode: Position {
change: *translation.get(&n.inode.change).unwrap(),
..n.inode
},
}),
Atom3::EdgeMap(e) => Atom::EdgeMap(e.to_v4(translation)),
}
}
}
/// Version-3 edge map: unlike v4's `EdgeMap`, the `previous`/`flag`
/// edge flags are stored once per map rather than once per edge
/// (`to_v4` copies them onto every edge).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
struct EdgeMap3<Change> {
    pub previous: EdgeFlags,
    pub flag: EdgeFlags,
    pub edges: Vec<NewEdge3<Change>>,
    pub inode: Position<Change>,
}
impl EdgeMap3<Option<Hash>> {
fn to_v4(self, translations: &HashMap<Option<Hash>, Option<Hash>>) -> EdgeMap<Option<Hash>> {
let flag = self.flag;
let previous = self.previous;
EdgeMap {
inode: Position {
change: *translations.get(&self.inode.change).unwrap(),
..self.inode
},
edges: self
.edges
.into_iter()
.map(|e| NewEdge {
previous,
flag,
from: Position {
change: *translations.get(&e.from.change).unwrap(),
..e.from
},
to: Vertex {
change: *translations.get(&e.to.change).unwrap(),
..e.to
},
introduced_by: *translations.get(&e.introduced_by).unwrap(),
})
.collect(),
}
}
}
/// Version-3 edge: like v4's `NewEdge` but without per-edge flags
/// (those live on the enclosing `EdgeMap3`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
struct NewEdge3<Change> {
    pub from: Position<Change>,
    pub to: Vertex<Change>,
    pub introduced_by: Change,
}
/// Version-3 record (hunk); see `Record` for v4. The main difference
/// is the extra `confirm` (and `dead`) atoms, which `to_v4` merges
/// into the primary edge maps.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
enum Record3<Hash, Local> {
    FileMove {
        del: Atom3<Hash>,
        add: Atom3<Hash>,
        confirm: Option<Atom3<Hash>>,
        path: String,
    },
    FileDel {
        del: Atom3<Hash>,
        confirm: Option<Atom3<Hash>>,
        contents: Option<Atom3<Hash>>,
        path: String,
    },
    FileUndel {
        undel: Atom3<Hash>,
        confirm: Option<Atom3<Hash>>,
        contents: Option<Atom3<Hash>>,
        path: String,
    },
    FileAdd {
        add_name: Atom3<Hash>,
        add_inode: Atom3<Hash>,
        contents: Option<Atom3<Hash>>,
        path: String,
    },
    SolveNameConflict {
        name: Atom3<Hash>,
        confirm: Option<Atom3<Hash>>,
        path: String,
    },
    UnsolveNameConflict {
        name: Atom3<Hash>,
        confirm: Option<Atom3<Hash>>,
        path: String,
    },
    Edit {
        change: Atom3<Hash>,
        local: Local,
    },
    Replacement {
        change: Atom3<Hash>,
        replacement: Atom3<Hash>,
        local: Local,
    },
    SolveOrderConflict {
        change: Atom3<Hash>,
        local: Local,
    },
    UnsolveOrderConflict {
        change: Atom3<Hash>,
        local: Local,
    },
    ResurrectZombies {
        alive: Atom3<Hash>,
        dead: Atom3<Hash>,
        local: Local,
    },
}
impl Record3<Option<Hash>, Local> {
fn to_v4(
self,
translation: &HashMap<Option<Hash>, Option<Hash>>,
) -> Record<Option<Hash>, Local> {
match self {
Record3::FileMove {
del,
add,
confirm,
path,
} => {
let mut del = del.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = del {
if let Some(Atom3::EdgeMap(c)) = confirm {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
let add = add.to_v4(translation);
Record::FileMove { del, add, path }
}
Record3::FileDel {
del,
confirm,
contents,
path,
} => {
let mut del = del.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = del {
if let Some(Atom3::EdgeMap(c)) = confirm {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
let contents = contents.map(|c| c.to_v4(translation));
Record::FileDel {
del,
contents,
path,
}
}
Record3::FileUndel {
undel,
confirm,
contents,
path,
} => {
let mut undel = undel.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = undel {
if let Some(Atom3::EdgeMap(c)) = confirm {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
let contents = contents.map(|c| c.to_v4(translation));
Record::FileUndel {
undel,
contents,
path,
}
}
Record3::FileAdd {
add_name,
add_inode,
contents,
path,
} => Record::FileAdd {
add_name: add_name.to_v4(translation),
add_inode: add_inode.to_v4(translation),
contents: contents.map(|c| c.to_v4(translation)),
path,
},
Record3::SolveNameConflict {
name,
confirm,
path,
} => {
let mut name = name.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = name {
if let Some(Atom3::EdgeMap(c)) = confirm {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
Record::SolveNameConflict { name, path }
}
Record3::UnsolveNameConflict {
name,
confirm,
path,
} => {
let mut name = name.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = name {
if let Some(Atom3::EdgeMap(c)) = confirm {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
Record::UnsolveNameConflict { name, path }
}
Record3::Edit { change, local } => Record::Edit {
change: change.to_v4(translation),
local,
},
Record3::Replacement {
change,
replacement,
local,
} => Record::Replacement {
change: change.to_v4(translation),
replacement: replacement.to_v4(translation),
local,
},
Record3::SolveOrderConflict { change, local } => Record::SolveOrderConflict {
change: change.to_v4(translation),
local,
},
Record3::UnsolveOrderConflict { change, local } => Record::UnsolveOrderConflict {
change: change.to_v4(translation),
local,
},
Record3::ResurrectZombies { alive, dead, local } => {
let mut change = alive.to_v4(translation);
if let Atom::EdgeMap(ref mut e) = change {
if let Atom3::EdgeMap(c) = dead {
let c = c.to_v4(translation);
e.edges.extend(c.edges.into_iter())
}
}
Record::ResurrectZombies { change, local }
}
}
}
}
/// A change as stored on disk in format version 3, before conversion
/// to the v4 representation (see `LocalChange3::to_v4`).
#[derive(Clone, Debug, PartialEq)]
pub struct LocalChange3<Local> {
    // Byte offsets of the sections of the on-disk change file.
    offsets: Offsets,
    // The integrity-protected part of the change (covered by its hash).
    hashed: Hashed3<Local>,
    // Optional TOML metadata, not covered by the hash.
    unhashed: Option<toml::Value>,
    // Decompressed contents section of the change file.
    contents: Vec<u8>,
}
/// On-disk format version handled by this (v3) change representation.
const VERSION: u64 = 3;
/// The hashed (integrity-protected) part of a v3 change.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct Hashed3<Local> {
    // Format version of this structure (3 here; 4 after conversion).
    version: u64,
    // Header of the change (authorship/description metadata).
    header: ChangeHeader,
    // Changes that must be applied before this one.
    dependencies: BTreeSet<Hash>,
    // Extra changes known to this change beyond its direct dependencies.
    extra_known: BTreeSet<Hash>,
    // Opaque metadata bytes, carried through conversion unchanged.
    metadata: Vec<u8>,
    // The edit records of this change.
    changes: Vec<Record3<Option<Hash>, Local>>,
    // Hash of the contents section of the change file.
    contents_hash: Hash,
}
impl Hashed3<Local> {
    /// Convert this v3 hashed change to the v4 representation,
    /// rewriting every hash through `translation`.
    ///
    /// Panics (via the `unwrap`s) if a dependency has no translation;
    /// untranslatable `extra_known` entries are silently dropped
    /// instead, which is a deliberate asymmetry.
    fn to_v4(self, translation: &HashMap<Option<Hash>, Option<Hash>>) -> Hashed<Local> {
        Hashed {
            version: 4,
            header: self.header,
            // Dependencies are mandatory: a missing translation is a bug.
            dependencies: self
                .dependencies
                .into_iter()
                .map(|x| translation.get(&Some(x)).unwrap().unwrap())
                .collect(),
            // Best-effort: entries without a translation are dropped.
            extra_known: self
                .extra_known
                .into_iter()
                .filter_map(|x| translation.get(&Some(x)).map(|x| x.unwrap()))
                .collect(),
            metadata: self.metadata,
            contents_hash: self.contents_hash,
            changes: self
                .changes
                .into_iter()
                .map(|x| x.to_v4(translation))
                .collect(),
        }
    }
}
impl LocalChange3<Local> {
    /// Serialized size (in bytes) of the `Offsets` header at the
    /// start of a change file.
    const OFFSETS_SIZE: u64 = 56;
    /// Read and decompress a v3 change file at path `file`. If `hash`
    /// is given, the hash of the decompressed hashed section is
    /// verified against it.
    ///
    /// Fails with `VersionMismatch` when the file is not version 3,
    /// and with `ChangeHashMismatch` when the hash check fails.
    pub fn deserialize(file: &str, hash: Option<&Hash>) -> Result<Self, anyhow::Error> {
        use std::io::Read;
        let mut r = std::fs::File::open(file)?;
        // Read the fixed-size offsets header first.
        let mut buf = vec![0u8; Self::OFFSETS_SIZE as usize];
        r.read_exact(&mut buf)?;
        let offsets: Offsets = bincode::deserialize(&buf)?;
        if offsets.version != VERSION {
            return Err(Error::VersionMismatch.into());
        }
        debug!("offsets = {:?}", offsets);
        // Hashed section: zstd-compressed bincode, hash-checked below.
        buf.clear();
        buf.resize((offsets.unhashed_off - Self::OFFSETS_SIZE) as usize, 0);
        r.read_exact(&mut buf)?;
        let hashed: Hashed3<Local> = {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.hashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            // The change hash is computed over the decompressed bytes.
            let mut hasher = Hasher::default();
            hasher.update(&out);
            let computed_hash = hasher.finish();
            if let Some(hash) = hash {
                if &computed_hash != hash {
                    return Err((Error::ChangeHashMismatch {
                        claimed: *hash,
                        computed: computed_hash,
                    })
                    .into());
                }
            }
            bincode::deserialize_from(&out[..])?
        };
        // Unhashed section: optional zstd-compressed TOML.
        buf.clear();
        buf.resize((offsets.contents_off - offsets.unhashed_off) as usize, 0);
        let unhashed = if buf.is_empty() {
            None
        } else {
            r.read_exact(&mut buf)?;
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut out = vec![0u8; offsets.unhashed_len as usize];
            s.decompress(&mut out[..], 0)?;
            Some(toml::de::from_slice(&out)?)
        };
        debug!("unhashed = {:?}", unhashed);
        // Contents section: a short read here is treated as "no
        // contents" rather than an error.
        buf.clear();
        buf.resize((offsets.total - offsets.contents_off) as usize, 0);
        let contents = if r.read_exact(&mut buf).is_ok() {
            let mut s = zstd_seekable::Seekable::init_buf(&buf[..])?;
            let mut contents = vec![0u8; offsets.contents_len as usize];
            s.decompress(&mut contents[..], 0)?;
            contents
        } else {
            Vec::new()
        };
        debug!("contents = {:?}", contents);
        Ok(LocalChange3 {
            offsets,
            hashed,
            unhashed,
            contents,
        })
    }
    /// Convert this change to the v4 representation, translating all
    /// hashes through `translation`. Offsets, unhashed metadata and
    /// contents are carried over unchanged.
    pub fn to_v4(self, translation: &HashMap<Option<Hash>, Option<Hash>>) -> LocalChange<Local> {
        LocalChange {
            offsets: self.offsets,
            hashed: self.hashed.to_v4(translation),
            unhashed: self.unhashed,
            contents: self.contents,
        }
    }
}
// org id g5piHODDo9cz2FFDRisMQw4h8fj1SqTfQ3I/i2p+EGc=
//! Apply a change.
use crate::change::{Atom, Change, NewEdge, NewVertex};
use crate::changestore::ChangeStore;
use crate::missing_context::*;
use crate::pristine::*;
use crate::record::InodeUpdate;
use crate::Error;
use std::collections::{HashMap, HashSet};
// org id Fn0IzU5GRCA4xymOneQzYPhm7xvcbR8viQt+xz5AzSU=
/// Apply a change to a channel. This function does not update the
/// inodes/tree tables, i.e. the correspondence between the pristine
/// and the working copy. Therefore, this function must be used only
/// on remote changes, or on "bare" repositories.
///
/// Returns the channel's apply counter as read before this
/// application, together with the new Merkle state. Fails with
/// `DependencyMissing` when a dependency of `hash` is not already on
/// the channel.
pub fn apply_change_ws<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    workspace: &mut Workspace,
) -> Result<(u64, Merkle), anyhow::Error> {
    debug!("apply_change {:?}", hash.to_base32());
    workspace.clear();
    let mut channel = channel.r.borrow_mut();
    let change = changes.get_change(&hash)?;
    // Check that every dependency is already on this channel.
    for &hash in change.dependencies.iter() {
        if let Hash::None = hash {
            continue;
        }
        if let Some(int) = txn.get_internal(hash) {
            if txn.get_changeset(&channel.changes, int, None).is_some() {
                continue;
            }
        }
        return Err((Error::DependencyMissing { hash }).into());
    }
    // Reuse an existing internal id for this hash, or register one.
    let internal = if let Some(p) = txn.get_internal(hash) {
        p
    } else {
        let internal: ChangeId = txn.make_changeid(&hash);
        txn.register_change(internal, hash, &change)?;
        internal
    };
    debug!("internal = {:?}", internal);
    apply_change_to_channel(
        |h, from, to, flag| changes.has_edge(h, from, to, flag),
        txn,
        &mut channel,
        internal,
        &hash,
        &change,
        workspace,
    )
}
/// Apply a change and all of its not-yet-applied dependencies to a
/// channel, dependencies first. Like [apply_change_ws], this does not
/// update the inodes/tree tables. When `deps_only` is true, only the
/// dependencies of `hash` are applied, not `hash` itself.
pub fn apply_change_rec_ws<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    workspace: &mut Workspace,
    deps_only: bool,
) -> Result<(), anyhow::Error> {
    debug!("apply_change {:?}", hash.to_base32());
    workspace.clear();
    let mut channel = channel.r.borrow_mut();
    // Explicit DFS stack of (hash, first_visit, actually_apply).
    let mut dep_stack = vec![(hash, true, !deps_only)];
    while let Some((hash, first, actually_apply)) = dep_stack.pop() {
        let change = changes.get_change(&hash)?;
        if first {
            // First visit: re-push this change below its
            // dependencies, so dependencies are applied before it.
            dep_stack.push((hash, false, actually_apply));
            for &hash in change.dependencies.iter() {
                if let Hash::None = hash {
                    continue;
                }
                dep_stack.push((hash, true, true))
            }
        } else if actually_apply {
            // Second visit: all dependencies have been handled.
            let applied = if let Some(int) = txn.get_internal(hash) {
                txn.get_changeset(&channel.changes, int, None).is_some()
            } else {
                false
            };
            if !applied {
                let internal = if let Some(p) = txn.get_internal(hash) {
                    p
                } else {
                    let internal: ChangeId = txn.make_changeid(&hash);
                    txn.register_change(internal, hash, &change)?;
                    internal
                };
                debug!("internal = {:?}", internal);
                workspace.clear();
                apply_change_to_channel(
                    |h, from, to, flag| changes.has_edge(h, from, to, flag),
                    txn,
                    &mut channel,
                    internal,
                    &hash,
                    &change,
                    workspace,
                )?;
            }
        }
    }
    Ok(())
}
/// Same as [apply_change_ws], but allocates its own workspace.
pub fn apply_change<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
) -> Result<(u64, Merkle), anyhow::Error> {
    let mut ws = Workspace::new();
    apply_change_ws(changes, txn, channel, hash, &mut ws)
}
/// Same as [apply_change_rec_ws], but allocates its own workspace.
pub fn apply_change_rec<T: MutTxnT, P: ChangeStore>(
    changes: &P,
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    hash: Hash,
    deps_only: bool,
) -> Result<(), anyhow::Error> {
    let mut ws = Workspace::new();
    apply_change_rec_ws(changes, txn, channel, hash, &mut ws, deps_only)
}
// org id T9nfH63JFCwYHwPZjZB+12xQEo2fUyo58K1R1FFRZaE=
/// Core of change application: record the change on the channel,
/// insert all its new vertices and edges into the graph, then clean
/// obsolete pseudo-edges and repair missing contexts and cyclic
/// paths. Returns the apply counter read before this application and
/// the new Merkle state.
///
/// `changes` is the edge-existence oracle used by [put_newedge] to
/// validate edges whose previous version is no longer in the graph.
fn apply_change_to_channel<
    T: MutTxnT,
    F: FnMut(
        Hash,
        Position<Option<Hash>>,
        Position<Option<Hash>>,
        EdgeFlags,
    ) -> Result<bool, anyhow::Error>,
>(
    mut changes: F,
    txn: &mut T,
    channel: &mut Channel<T>,
    change_id: ChangeId,
    hash: &Hash,
    change: &Change,
    ws: &mut Workspace,
) -> Result<(u64, Merkle), anyhow::Error> {
    ws.assert_empty();
    let n = channel.apply_counter;
    let merkle =
        if let Some(m) = txn.put_changes(channel, change_id, channel.apply_counter, hash)? {
            m
        } else {
            return Err((Error::ChangeAlreadyOnChannel { hash: *hash }).into());
        };
    debug!("apply change to channel");
    let now = std::time::Instant::now();
    // First pass: new vertices and non-deleting edge maps.
    for change_ in change.changes.iter() {
        debug!("Applying {:?} (1)", change_);
        for change_ in change_.iter() {
            match *change_ {
                Atom::NewVertex(ref n) => put_newvertex(txn, channel, change, ws, change_id, n)?,
                Atom::EdgeMap(ref n) => {
                    for edge in n.edges.iter() {
                        if !edge.flag.contains(EdgeFlags::DELETED) {
                            put_newedge(
                                &mut changes,
                                txn,
                                channel,
                                ws,
                                change_id,
                                n.inode,
                                edge,
                                |_, _, _, _| Ok(true),
                            )?
                        }
                    }
                }
            }
        }
    }
    // Second pass: deleting edges, which additionally collect the
    // zombie context around what they delete.
    for change_ in change.changes.iter() {
        debug!("Applying {:?} (2)", change_);
        for change_ in change_.iter() {
            match *change_ {
                Atom::EdgeMap(ref n) => {
                    for edge in n.edges.iter() {
                        if edge.flag.contains(EdgeFlags::DELETED) {
                            put_newedge(
                                &mut changes,
                                txn,
                                channel,
                                ws,
                                change_id,
                                n.inode,
                                edge,
                                |_, _, _, _| Ok(true),
                            )?;
                            // org id aNVimxa4oSqbTYJ033w2QbZTCVMG/+tO2ymc1QSWPH0=
                            crate::missing_context::collect_zombie_context(
                                txn,
                                channel,
                                &mut ws.missing_context,
                                n.inode,
                                edge,
                                change_id,
                                |h| change.knows(&h),
                            )?
                            // org id hXW9eN25ZM8EQ5B2Ew8NMra+xi7hFv6CN8no+qg8GpY=
                        }
                    }
                }
                _ => {}
            }
        }
    }
    crate::TIMERS.lock().unwrap().apply += now.elapsed();
    clean_obsolete_pseudo_edges(txn, channel, ws)?;
    info!("repairing missing contexts");
    repair_missing_contexts(txn, channel, ws, change_id, change)?;
    repair_cyclic_paths(txn, channel, ws, change_id, change)?;
    info!("done applying change");
    // Record the wall-clock time of the last modification.
    channel.last_modified = std::time::SystemTime::now()
        .duration_since(std::time::SystemTime::UNIX_EPOCH)?
        .as_secs();
    Ok((n, merkle))
}
// org id 7IPbKRayK1ey50vdV6FP2KOmzRGn2tCT3qapFICvlNc=
/// Apply a change created locally: serialize it, compute its hash, and
/// apply it. This function also registers changes in the filesystem
/// introduced by the change (file additions, deletions and moves), to
/// synchronise the pristine and the working copy after the
/// application.
pub fn apply_local_change_ws<T: MutTxnT>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    change: &Change,
    hash: Hash,
    inode_updates: &HashMap<usize, InodeUpdate>,
    workspace: &mut Workspace,
) -> Result<(u64, Merkle), anyhow::Error> {
    // org id GEylov+bdcaQUmngYlibUCe0K6dMYx9lmeEmNPGCFUQ=
    let mut channel = channel.r.borrow_mut();
    let internal: ChangeId = txn.make_changeid(&hash);
    txn.register_change(internal, hash, &change)?;
    // org id O9lNVqhAbRqj2tE6JW0Ax8U3WSetv22rYrSXHQlT78A=
    // Unlike remote application, no edge validation is needed here
    // (hence the always-true oracle).
    let n = apply_change_to_channel(
        |_, _, _, _| Ok(true),
        txn,
        &mut channel,
        internal,
        &hash,
        &change,
        workspace,
    )?;
    // org id w0Bu10TO1Kbn1WbRcFHyU/0xBR4Yi61jj66D/1uiicM=
    // Synchronise the inodes/tree tables with the working copy.
    for (_, update) in inode_updates.iter() {
        info!("updating {:?}", update);
        update_inode(txn, &channel, internal, update)?;
    }
    Ok(n)
}
/// Same as [apply_local_change_ws], but allocates its own workspace.
pub fn apply_local_change<T: MutTxnT>(
    txn: &mut T,
    channel: &mut ChannelRef<T>,
    change: &Change,
    hash: Hash,
    inode_updates: &HashMap<usize, InodeUpdate>,
) -> Result<(u64, Merkle), anyhow::Error> {
    let mut ws = Workspace::new();
    apply_local_change_ws(txn, channel, change, hash, inode_updates, &mut ws)
}
// org id zzKI7nUCrGP6P9+0nk29zZnRY1qcdFJH2tXwOxTn/xM=
/// Apply one [InodeUpdate] to the inodes/revinodes (and, for
/// deletions, tree/revtree) tables after a local change.
fn update_inode<T: MutTxnT>(
    txn: &mut T,
    channel: &Channel<T>,
    internal: ChangeId,
    update: &InodeUpdate,
) -> Result<(), anyhow::Error> {
    debug!("update_inode {:?}", update);
    match *update {
        // org id 8cpuyZX4ja1VYT3lhtYBMXJenBtvUbVEGky3wZ6W/HM=
        InodeUpdate::Add { inode, pos, .. } => {
            let vertex = Position {
                change: internal,
                pos,
            };
            // Only record the inode↔vertex pair if the vertex is
            // actually present in the graph.
            if txn
                .get_graph(&channel.graph, vertex.inode_vertex(), None)
                .is_some()
            {
                debug!("Adding inodes: {:?} {:?}", inode, vertex);
                txn.put_inodes_with_rev(inode, vertex)?;
            } else {
                debug!("Not adding inodes: {:?} {:?}", inode, vertex);
            }
        }
        // org id iTzVDtyzdcIEH3EDPbaq8PfGH9WgwLTd5XSxI5nK4Kc=
        InodeUpdate::Deleted { inode } => {
            // Unlink the inode from its parent in both directions.
            if let Some(parent) = txn.get_revtree(inode, None).map(|x| x.to_owned()) {
                txn.del_revtree(inode, Some(parent.as_file_id()))?;
                txn.del_tree(parent.as_file_id(), Some(inode))?;
            }
            // Also remove the empty-basename self entry, if any.
            txn.del_tree(
                (OwnedPathId {
                    parent_inode: inode,
                    basename: crate::small_string::SmallString::new(),
                })
                .as_file_id(),
                Some(inode),
            )?;
            // Finally drop the inode↔vertex mapping.
            if let Some(vertex) = txn.get_inodes(inode, None) {
                txn.del_inodes(inode, Some(vertex))?;
                txn.del_revinodes(vertex, Some(inode))?;
            }
        }
    }
    Ok(())
}
// org id QIECdrRSNOnrMRRD9QO87SUCSg4dt9OUS5V6GZPPmeM=
/// Insert a new vertex into the graph, connecting it to its up and
/// down contexts with the appropriate edges.
///
/// For non-FOLDER vertices whose context was deleted by changes this
/// change does not know (collected in `ws.deleted_by` by
/// [put_up_context]/[put_down_context]), an extra DELETED edge is
/// inserted for each such deleting change.
fn put_newvertex<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ch: &Change,
    ws: &mut Workspace,
    change: ChangeId,
    n: &NewVertex<Option<Hash>>,
) -> Result<(), anyhow::Error> {
    let vertex = Vertex {
        change,
        start: n.start,
        end: n.end,
    };
    debug!(
        "put_newvertex {:?} {:?} {:?} {:?} {:?}",
        vertex, n.up_context, n.down_context, n.flag, change
    );
    assert!(ws.deleted_by.is_empty());
    // Resolve the contexts to internal vertices. This fills
    // ws.up_context, ws.down_context and ws.deleted_by.
    for up in n.up_context.iter() {
        let up = txn.internal_pos(up, change)?;
        put_up_context(txn, channel, ch, n.inode, ws, up)?;
    }
    for down in n.down_context.iter() {
        let down = txn.internal_pos(down, change)?;
        put_down_context(txn, channel, ch, n.inode, ws, down)?;
    }
    debug!("deleted by: {:?}", ws.deleted_by);
    // Connect the up context to the new vertex.
    for up in ws.up_context.drain(..) {
        assert_ne!(up, vertex);
        if !n.flag.contains(EdgeFlags::FOLDER) {
            for (change, _) in ws.deleted_by.iter() {
                let flag = n.flag | EdgeFlags::BLOCK | EdgeFlags::DELETED;
                txn.put_graph_with_rev(channel, flag, up, vertex, *change)?;
            }
        }
        txn.put_graph_with_rev(channel, n.flag | EdgeFlags::BLOCK, up, vertex, change)?;
    }
    debug!("down_context {:?}", ws.down_context);
    // Connect the new vertex to its down context.
    for (down, alive_parent) in ws.down_context.drain(..) {
        assert_ne!(down, vertex);
        if !alive_parent && !n.flag.contains(EdgeFlags::FOLDER) {
            for (change, _) in ws.deleted_by.iter() {
                let flag = n.flag | EdgeFlags::BLOCK | EdgeFlags::DELETED;
                txn.put_graph_with_rev(channel, flag, vertex, down, *change)?;
            }
        } else if n.flag.contains(EdgeFlags::FOLDER) {
            txn.put_graph_with_rev(channel, n.flag | EdgeFlags::BLOCK, vertex, down, change)?;
        } else {
            txn.put_graph_with_rev(channel, n.flag, vertex, down, change)?;
        }
    }
    ws.deleted_by.clear();
    Ok(())
}
// org id qJsZsuGOM0pVWr+gR/Q1oondyvfulu/VvSZEXxvzUww=
/// Resolve one up-context position to a vertex, splitting the
/// containing block when the position falls strictly inside it, and
/// record in `ws.deleted_by` the changes unknown to `ch` that deleted
/// this context. The resulting vertex is pushed onto `ws.up_context`.
fn put_up_context<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ch: &Change,
    inode: Position<Option<Hash>>,
    ws: &mut Workspace,
    up: Position<ChangeId>,
) -> Result<(), anyhow::Error> {
    let up_vertex = if up.change.is_root() {
        Vertex::ROOT
    } else {
        debug!("put_up_context {:?}", up);
        let k = txn.find_block_end(channel, up)?;
        assert_eq!(k.change, up.change);
        assert!(k.start <= up.pos);
        debug!("k = {:?}", k);
        if k.start < up.pos && k.end > up.pos {
            // The position is strictly inside `k`: split the block,
            // keeping the missing-context graph in sync.
            if let Some((_, vids)) = ws.missing_context.graphs.get_mut(&inode) {
                if let Some(vid) = vids.remove(&k) {
                    vids.insert(Vertex { end: up.pos, ..k }, vid);
                    vids.insert(Vertex { start: up.pos, ..k }, vid);
                }
            }
            txn.split_block(channel, k, up.pos)?
        }
        Vertex {
            change: k.change,
            start: k.start,
            end: up.pos,
        }
    };
    debug!("up_vertex {:?}", up_vertex);
    // Record the deleting parent edges whose introducing change is
    // unknown to `ch`.
    let flag0 = EdgeFlags::PARENT | EdgeFlags::DELETED;
    let flag1 = flag0 | EdgeFlags::FOLDER | EdgeFlags::BLOCK;
    for parent in txn.iter_adjacent(&channel, up_vertex, flag0, flag1) {
        // This unwrap is ok: `parent` is in the channel.
        let introduced_by = txn.get_external(parent.introduced_by).unwrap();
        if !ch.knows(&introduced_by) {
            ws.deleted_by
                .insert((parent.introduced_by, parent.flag - EdgeFlags::PARENT));
        }
    }
    ws.up_context.push(up_vertex);
    Ok(())
}
// org id wmLgN7MtIkCgY6DOEzzeaGOQtP0X7+eXRCRk23sR/F8=
/// Resolve one down-context position to a vertex, splitting the
/// containing block when the position falls strictly inside it.
/// Records in `ws.deleted_by` the changes unknown to `ch` that
/// deleted this context, and pushes onto `ws.down_context` the vertex
/// together with a flag saying whether it still has an alive parent.
fn put_down_context<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ch: &Change,
    inode: Position<Option<Hash>>,
    ws: &mut Workspace,
    down: Position<ChangeId>,
) -> Result<Vertex<ChangeId>, anyhow::Error> {
    let k = txn.find_block(&channel, down)?;
    assert_eq!(k.change, down.change);
    assert!(k.end >= down.pos);
    if k.start < down.pos && k.end > down.pos {
        // Strictly inside `k`: split the block, keeping the
        // missing-context graph in sync.
        if let Some((_, vids)) = ws.missing_context.graphs.get_mut(&inode) {
            if let Some(vid) = vids.remove(&k) {
                vids.insert(Vertex { end: down.pos, ..k }, vid);
                vids.insert(
                    Vertex {
                        start: down.pos,
                        ..k
                    },
                    vid,
                );
            }
        }
        txn.split_block(channel, k, down.pos)?
    }
    let down_vertex = Vertex {
        change: k.change,
        start: down.pos,
        end: k.end,
    };
    debug!("down_vertex {:?}", down_vertex);
    // Inspect the parent edges: record unknown deleting changes and
    // detect whether any parent is still alive.
    let flag0 = EdgeFlags::PARENT;
    let flag1 = flag0 | EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::DELETED;
    let mut alive_parent = false;
    for parent in txn.iter_adjacent(&channel, down_vertex, flag0, flag1) {
        if parent.flag.contains(EdgeFlags::PARENT) {
            if parent.flag.contains(EdgeFlags::DELETED) {
                // This unwrap is ok: `parent` is in the channel.
                let introduced_by = txn.get_external(parent.introduced_by).unwrap();
                if !ch.knows(&introduced_by) {
                    ws.deleted_by
                        .insert((parent.introduced_by, parent.flag - EdgeFlags::PARENT));
                }
            } else {
                alive_parent = true
            }
        }
    }
    ws.down_context.push((down_vertex, alive_parent));
    Ok(down_vertex)
}
// org id Z9HKWXTmLYMKPH1tSnFV+QYAj3myOwZiyHaaocqlvvU=
/// Scratch buffers reused across change applications, so buffers are
/// allocated once instead of per change.
pub struct Workspace {
    // Alive parents collected around a deleted vertex.
    parents: HashSet<Vertex<ChangeId>>,
    // Children collected around a deleted vertex.
    children: HashSet<Vertex<ChangeId>>,
    // Pseudo-edges seen during deletion, candidates for cleanup.
    pseudo: Vec<(Vertex<ChangeId>, Edge, Position<Option<Hash>>)>,
    // Changes (with edge flags) that deleted a context vertex.
    deleted_by: HashSet<(ChangeId, EdgeFlags)>,
    // Resolved up-context vertices of a new vertex.
    up_context: Vec<Vertex<ChangeId>>,
    // Resolved down-context vertices, with an "alive parent" flag.
    down_context: Vec<(Vertex<ChangeId>, bool)>,
    pub(crate) missing_context: crate::missing_context::Workspace,
    // Memoised results of `is_rooted`.
    rooted: HashMap<Vertex<ChangeId>, bool>,
}
impl Workspace {
    /// Create an empty workspace.
    pub fn new() -> Self {
        Workspace {
            parents: HashSet::new(),
            children: HashSet::new(),
            pseudo: Vec::new(),
            deleted_by: HashSet::new(),
            up_context: Vec::new(),
            down_context: Vec::new(),
            missing_context: crate::missing_context::Workspace::new(),
            rooted: HashMap::new(),
        }
    }
    /// Empty every buffer, keeping allocations for reuse.
    fn clear(&mut self) {
        let Workspace {
            parents,
            children,
            pseudo,
            deleted_by,
            up_context,
            down_context,
            missing_context,
            rooted,
        } = self;
        parents.clear();
        children.clear();
        pseudo.clear();
        deleted_by.clear();
        up_context.clear();
        down_context.clear();
        missing_context.clear();
        rooted.clear();
    }
    /// Sanity check: every buffer must be empty before a new change
    /// is applied.
    fn assert_empty(&self) {
        assert!(self.parents.is_empty());
        assert!(self.children.is_empty());
        assert!(self.pseudo.is_empty());
        assert!(self.deleted_by.is_empty());
        assert!(self.up_context.is_empty());
        assert!(self.down_context.is_empty());
        self.missing_context.assert_empty();
        assert!(self.rooted.is_empty())
    }
}
// org id wwZDqGk+qPaB9Oabmhr4ziOpXgWClHLLbxpCOAo1zpI=
/// Insert one [NewEdge] into the graph: delete (or validate) the
/// previous edge, insert the new one, and walk forward block by block
/// until the whole target span `[n.to.start, n.to.end)` is covered,
/// splitting blocks as needed.
///
/// For DELETED edges, also collects the pseudo-edges around each
/// deleted vertex and reconnects alive parents to children.
/// `validate_edge` checks (against the change store) that an edge
/// whose deletion found nothing actually existed at some point;
/// `apply_check` can veto the insertion of the new edge.
pub(crate) fn put_newedge<T, C, F>(
    mut validate_edge: C,
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    n: &NewEdge<Option<Hash>>,
    apply_check: F,
) -> Result<(), anyhow::Error>
where
    T: MutTxnT,
    C: FnMut(
        Hash,
        Position<Option<Hash>>,
        Position<Option<Hash>>,
        EdgeFlags,
    ) -> Result<bool, anyhow::Error>,
    F: Fn(
        &mut T,
        &mut Channel<T>,
        Vertex<ChangeId>,
        Vertex<ChangeId>,
    ) -> Result<bool, anyhow::Error>,
{
    if n.flag.contains(EdgeFlags::DELETED) {
        // Deletions need this inode's graph for the pseudo-edge
        // reconnection below.
        ws.missing_context.load_graph(txn, channel, inode)?;
    }
    assert!(ws.children.is_empty());
    assert!(ws.parents.is_empty());
    debug!("put_newedge {:?} {:?}", n, change);
    let n_introduced_by = if let Some(n) = txn.internal(&n.introduced_by, change) {
        n
    } else {
        return Err(crate::Error::InconsistentChange.into());
    };
    let mut source = find_source_vertex(txn, channel, &n.from, change, inode, n.flag, ws)?;
    let mut target = find_target_vertex(txn, channel, &n.to, change, inode, n.flag, ws)?;
    loop {
        if target.end > n.to.end {
            // The current block extends past the edge target: split
            // it, keeping the missing-context graph in sync.
            assert!(!n.flag.contains(EdgeFlags::FOLDER));
            if let Some((_, vids)) = ws.missing_context.graphs.get_mut(&inode) {
                if let Some(vid) = vids.remove(&target) {
                    vids.insert(
                        Vertex {
                            end: n.to.end,
                            ..target
                        },
                        vid,
                    );
                    vids.insert(
                        Vertex {
                            start: n.to.end,
                            ..target
                        },
                        vid,
                    );
                }
            }
            txn.split_block(channel, target, n.to.end)?;
            target.end = n.to.end
        }
        if n.flag.contains(EdgeFlags::DELETED) {
            // Collect surrounding pseudo-edges, then keep the alive
            // graph connected across the deleted vertex.
            collect_pseudo_edges(txn, channel, ws, inode, target)?;
            if !n.flag.contains(EdgeFlags::FOLDER) {
                reconnect_pseudo_edges(txn, channel, inode, ws, target)?;
            }
            ws.children.clear();
        }
        debug!("deleting {:?} {:?}", n.previous, n.flag);
        let del = txn.del_graph_with_rev(channel, n.previous, source, target, n_introduced_by)?;
        debug!("deleted: {:?}", del);
        if !del {
            if let Some(intro) = n.introduced_by {
                // Check that the edge actually existed at some point.
                if !validate_edge(intro, n.from, n.to.start_pos(), n.previous)? {
                    debug!("edge doesn't exist: {:?}", n);
                    return Err(crate::Error::InconsistentChange.into())
                }
            }
        }
        debug!("put_graph {:?} {:?}, intro {:?}", source, target, change);
        assert_ne!(source, target);
        if apply_check(txn, channel, source, target)? {
            txn.put_graph_with_rev(channel, n.flag, source, target, change)?;
        }
        if target.end >= n.to.end {
            assert_eq!(target.end, n.to.end);
            break;
        }
        // Advance to the next block of the target span.
        source = target;
        target = txn.find_block(channel, target.end_pos())?;
        assert_ne!(source, target);
    }
    Ok(())
}
// org id 7zbypTSSsas1XfpHlx5O3tLiuSmZZ9+Eq05hBASSbn4=
/// Find the internal vertex ending at `from`, splitting the
/// containing block when the position falls strictly inside it (and
/// keeping the missing-context graph of `inode` in sync).
fn find_source_vertex<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    from: &Position<Option<Hash>>,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    ws: &mut Workspace,
) -> Result<Vertex<ChangeId>, anyhow::Error> {
    debug!("find_source_vertex");
    let pos = txn.internal_pos(&from, change)?;
    let mut block = txn.find_block_end(&channel, pos)?;
    debug!("source = {:?}", block);
    if block.start < from.pos && block.end > from.pos {
        // FOLDER vertices are never split.
        assert!(!flag.contains(EdgeFlags::FOLDER));
        // Mirror the split in the missing-context graph, if loaded.
        if let Some((_, vids)) = ws.missing_context.graphs.get_mut(&inode) {
            if let Some(vid) = vids.remove(&block) {
                let left = Vertex {
                    end: from.pos,
                    ..block
                };
                let right = Vertex {
                    start: from.pos,
                    ..block
                };
                vids.insert(left, vid);
                vids.insert(right, vid);
            }
        }
        txn.split_block(channel, block, from.pos)?;
        block.end = from.pos;
    }
    Ok(block)
}
/// Find the internal vertex starting at `to`'s start position,
/// splitting the containing block when the position falls strictly
/// inside it (and keeping the missing-context graph of `inode` in
/// sync). Counterpart of [find_source_vertex].
fn find_target_vertex<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    to: &Vertex<Option<Hash>>,
    change: ChangeId,
    inode: Position<Option<Hash>>,
    flag: EdgeFlags,
    ws: &mut Workspace,
) -> Result<Vertex<ChangeId>, anyhow::Error> {
    let to_pos = txn.internal_pos(&to.start_pos(), change)?;
    debug!("find_target_vertex");
    let mut target = txn.find_block(channel, to_pos)?;
    debug!("target = {:?}", target);
    if target.start < to.start {
        // FOLDER vertices are never split.
        assert!(!flag.contains(EdgeFlags::FOLDER));
        // Mirror the split in the missing-context graph, if loaded.
        if let Some((_, vids)) = ws.missing_context.graphs.get_mut(&inode) {
            if let Some(vid) = vids.remove(&target) {
                vids.insert(
                    Vertex {
                        end: to.start,
                        ..target
                    },
                    vid,
                );
                vids.insert(
                    Vertex {
                        start: to.start,
                        ..target
                    },
                    vid,
                );
            }
        }
        txn.split_block(channel, target, to.start)?;
        target.start = to.start;
    }
    Ok(target)
}
// org id szeri9Xjwvhn/QRxGYNQRM76Obj5Mxd6Z6KYP1vDdik=
/// Collect, around vertex `v`, the alive parents and the children
/// (into `apply.parents`/`apply.children`) of all non-deleted,
/// non-folder edges, and record every pseudo-edge seen into
/// `apply.pseudo` for later cleanup.
fn collect_pseudo_edges<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    apply: &mut Workspace,
    inode: Position<Option<Hash>>,
    v: Vertex<ChangeId>,
) -> Result<(), anyhow::Error> {
    let min_flag = EdgeFlags::empty();
    let max_flag = EdgeFlags::all() - EdgeFlags::DELETED;
    for e in txn.iter_adjacent(&channel, v, min_flag, max_flag) {
        debug!("collect_pseudo_edges {:?} {:?}", v, e);
        if !e.flag.contains(EdgeFlags::FOLDER) {
            if e.flag.contains(EdgeFlags::PARENT) {
                // Edge towards a parent: keep it only if still alive.
                let parent = txn.find_block_end(channel, e.dest)?;
                if txn.is_alive(channel, parent) {
                    apply.parents.insert(parent);
                }
            } else {
                // Edge towards a child.
                let child = txn.find_block(channel, e.dest)?;
                apply.children.insert(child);
            }
        }
        if e.flag.contains(EdgeFlags::PSEUDO) {
            apply.pseudo.push((v, e, inode));
        }
    }
    Ok(())
}
// org id mkvErN7T/SkmZqqWYdWC3oNTXKi28ZS8zsmBdylcngA=
/// After deleting `target`, reconnect its alive parents
/// (`ws.parents`) to its children (`ws.children`) with pseudo-edges,
/// skipping redundant ones, so that the alive graph stays connected.
fn reconnect_pseudo_edges<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    inode: Position<Option<Hash>>,
    ws: &mut Workspace,
    target: Vertex<ChangeId>,
) -> Result<(), anyhow::Error> {
    debug!(
        "reconnect: parents.len() = {:?} to children.len() = {:?}",
        ws.parents.len(),
        ws.children.len()
    );
    debug!(
        "reconnect: parents = {:#?} to children = {:#?}",
        ws.parents, ws.children
    );
    // Nothing to reconnect if either side is empty.
    if ws.parents.is_empty() {
        ws.children.clear();
        return Ok(());
    }
    if ws.children.is_empty() {
        ws.parents.clear();
        return Ok(());
    }
    let (graph, vids) = if let Some(n) = ws.missing_context.graphs.get(&inode) {
        n
    } else {
        return Err(crate::Error::InconsistentChange.into());
    };
    // Drop parents that are already covered by other parents.
    crate::alive::remove_redundant_parents(
        &graph,
        &vids,
        &mut ws.parents,
        &mut ws.missing_context.covered_parents,
        target,
    );
    for &p in ws.parents.iter() {
        ws.missing_context.covered_parents.insert((p, target));
    }
    crate::alive::remove_redundant_children(&graph, &vids, &mut ws.children, target);
    debug!(
        "reconnect (nonredundant) {:?} to {:?}",
        ws.parents, ws.children
    );
    // Connect every remaining alive parent to every alive child.
    for p in ws.parents.drain() {
        for c in ws.children.iter() {
            if p != *c && txn.is_alive(channel, p) && txn.is_alive(channel, *c) {
                txn.put_graph_with_rev(channel, EdgeFlags::PSEUDO, p, *c, ChangeId::ROOT)?;
            }
        }
    }
    Ok(())
}
// org id 98Q84FaUOk/7navHiQObtyTKOkDVixV8OQF97nuKTPs=
/// Delete the pseudo-edges recorded in `ws.pseudo` that have become
/// obsolete, i.e. those with at least one dead endpoint.
///
/// When exactly one endpoint is still alive, the missing context
/// around the dead endpoint is repaired (down-context when the
/// target died, up-context when the source died).
pub(crate) fn clean_obsolete_pseudo_edges<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    debug!("pseudo = {:#?}", ws.pseudo);
    for (next_vertex, p, inode) in ws.pseudo.drain(..) {
        // Reorder next_vertex and p.dest to reformulate this as an "a -> b" edge.
        debug!("{:?} {:?}", next_vertex, p);
        let (a, b) = if p.flag.contains(EdgeFlags::PARENT) {
            if let Ok(dest) = txn.find_block_end(channel, p.dest) {
                (dest, next_vertex)
            } else {
                continue;
            }
        } else if let Ok(dest) = txn.find_block(channel, p.dest) {
            (next_vertex, dest)
        } else {
            continue;
        };
        // A vertex is alive if it is the root, or has at least one
        // non-pseudo, non-deleted parent edge.
        let a_is_alive = a.is_root()
            || txn
                .iter_adjacent(
                    channel,
                    a,
                    EdgeFlags::PARENT,
                    EdgeFlags::all() - EdgeFlags::DELETED,
                )
                .any(|e| !e.flag.contains(EdgeFlags::PSEUDO));
        let b_is_alive = b.is_root()
            || txn
                .iter_adjacent(
                    channel,
                    b,
                    EdgeFlags::PARENT,
                    EdgeFlags::all() - EdgeFlags::DELETED,
                )
                .any(|e| !e.flag.contains(EdgeFlags::PSEUDO));
        debug!("{:?} {:?}", a_is_alive, b_is_alive);
        if a_is_alive {
            if !b_is_alive {
                debug!("deleting {:?} -> {:?}", a, b);
                if txn.del_graph_with_rev(
                    channel,
                    p.flag - EdgeFlags::PARENT,
                    a,
                    b,
                    p.introduced_by,
                )? {
                    // repair down context.
                    crate::missing_context::repair_missing_down_context(
                        txn,
                        channel,
                        &mut ws.missing_context,
                        inode,
                        b,
                        &[a],
                    )?
                }
            }
        } else if b_is_alive {
            debug!("deleting {:?} -> {:?}", a, b);
            if txn.del_graph_with_rev(channel, p.flag - EdgeFlags::PARENT, a, b, p.introduced_by)? {
                // repair up context.
                crate::missing_context::repair_missing_up_context(
                    txn,
                    channel,
                    &mut ws.missing_context,
                    inode,
                    a,
                    &[b],
                    p.flag.contains(EdgeFlags::FOLDER),
                )?
            }
        } else {
            // Both endpoints are dead: just drop the pseudo-edge.
            txn.del_graph_with_rev(channel, p.flag - EdgeFlags::PARENT, a, b, p.introduced_by)?;
        }
    }
    Ok(())
}
// org id aio1AtR3Nlw9db3yFD42mkoPf3Bx0O6CI+hHFGjyUO0=
/// Repair the contexts broken by this change: dead up/down contexts
/// of new vertices and non-deleted edges first, then (in a second
/// pass) the contexts of deleted edges.
fn repair_missing_contexts<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    change_id: ChangeId,
    change: &Change,
) -> Result<(), anyhow::Error> {
    let now = std::time::Instant::now();
    crate::missing_context::repair_parents_of_deleted(txn, channel, &mut ws.missing_context)?;
    for atom in change.changes.iter().flat_map(|r| r.iter()) {
        match atom {
            Atom::NewVertex(ref n) => {
                let vertex = Vertex {
                    change: change_id,
                    start: n.start,
                    end: n.end,
                };
                debug!("repairing missing context for {:?}", vertex);
                // Dead up contexts need repairing.
                for up in n.up_context.iter() {
                    let up = txn.find_block_end(channel, txn.internal_pos(&up, change_id)?)?;
                    if !txn.is_alive(channel, up) {
                        debug!("repairing missing up context {:?} {:?}", up, vertex);
                        repair_missing_up_context(
                            txn,
                            channel,
                            &mut ws.missing_context,
                            n.inode,
                            up,
                            &[vertex],
                            n.flag.contains(EdgeFlags::FOLDER),
                        )?
                    }
                }
                if !n.flag.contains(EdgeFlags::FOLDER) {
                    // Down contexts whose only alive parent edges
                    // were introduced by this very change also need
                    // repairing.
                    for down in n.down_context.iter() {
                        let down = txn.find_block(channel, txn.internal_pos(&down, change_id)?)?;
                        if txn
                            .iter_adjacent(
                                channel,
                                down,
                                EdgeFlags::PARENT,
                                EdgeFlags::all() - EdgeFlags::DELETED,
                            )
                            .filter(|e| e.introduced_by != change_id)
                            .next()
                            .is_none()
                        {
                            debug!("repairing missing down context {:?} {:?}", down, vertex);
                            repair_missing_down_context(
                                txn,
                                channel,
                                &mut ws.missing_context,
                                n.inode,
                                down,
                                &[vertex],
                            )?
                        }
                    }
                }
                debug!("done repairing contexts for {:?}", vertex);
            }
            Atom::EdgeMap(ref n) => {
                for e in n.edges.iter() {
                    assert!(!e.flag.contains(EdgeFlags::PARENT));
                    if !e.flag.contains(EdgeFlags::DELETED) {
                        trace!("repairing context nondeleted {:?}", e);
                        repair_context_nondeleted(
                            txn,
                            channel,
                            &mut ws.missing_context,
                            n.inode,
                            change_id,
                            |h| change.knows(&h),
                            e,
                        )?
                    }
                }
            }
        }
    }
    // Second pass: deleted edges, after all non-deleted repairs.
    for atom in change.changes.iter().flat_map(|r| r.iter()) {
        match atom {
            Atom::EdgeMap(ref n) => {
                for e in n.edges.iter() {
                    if e.flag.contains(EdgeFlags::DELETED) {
                        trace!("repairing context deleted {:?}", e);
                        repair_context_deleted(
                            txn,
                            channel,
                            &mut ws.missing_context,
                            n.inode,
                            change_id,
                            |h| change.knows(&h),
                            e,
                        )?
                    }
                }
            }
            _ => {}
        }
    }
    crate::missing_context::delete_pseudo_edges(txn, channel, &mut ws.missing_context)?;
    crate::TIMERS.lock().unwrap().repair_context += now.elapsed();
    Ok(())
}
// org id JQod1ELUh8e4H59MbZI504jS4RLdF1g3gcZQR16F44o=
/// For every folder deletion in `change` that deletes a name (a
/// FOLDER|DELETED edge with a non-empty target), check whether the
/// descendants of that name are still rooted, and repair them if not
/// (see [repair_edge]).
fn repair_cyclic_paths<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    ws: &mut Workspace,
    change_id: ChangeId,
    change: &Change,
) -> Result<(), anyhow::Error> {
    let now = std::time::Instant::now();
    for atom in change.changes.iter().flat_map(|r| r.iter()) {
        // Only edge maps are relevant here.
        let n = match atom {
            Atom::EdgeMap(ref n) => n,
            _ => continue,
        };
        for e in n.edges.iter() {
            assert!(!e.flag.contains(EdgeFlags::PARENT));
            if e.flag.contains(EdgeFlags::FOLDER | EdgeFlags::DELETED) && e.to.len() > 0 {
                repair_edge(txn, channel, e, change_id, ws)?;
            }
        }
    }
    crate::TIMERS.lock().unwrap().check_cyclic_paths += now.elapsed();
    Ok(())
}
/// Repair a folder deletion that removed a name without deleting the
/// whole file: if the inode descendants of the deleted name are no
/// longer rooted, add a pseudo folder edge to keep them reachable.
fn repair_edge<T: MutTxnT>(
    txn: &mut T,
    channel: &mut Channel<T>,
    e: &NewEdge<Option<Hash>>,
    change_id: ChangeId,
    ws: &mut Workspace,
) -> Result<(), anyhow::Error> {
    // If we're deleting a name and not a whole file.
    let h = if let Some(h) = e.to.change {
        h
    } else {
        return Ok(());
    };
    let to = Vertex {
        change: if let Some(h) = txn.get_internal(h) {
            h
        } else {
            return Err(crate::Error::InconsistentChange.into());
        },
        start: e.to.start,
        end: e.to.end,
    };
    // Check that the inode descendant of this name is rooted.
    let mut unrooted = false;
    let f0 = EdgeFlags::FOLDER;
    let f1 = EdgeFlags::FOLDER | EdgeFlags::BLOCK | EdgeFlags::PSEUDO;
    for ee in txn.iter_adjacent(channel, to, f0, f1) {
        if !is_rooted(txn, channel, ee.dest.inode_vertex(), ws)? {
            unrooted = true;
        }
    }
    // If not, repair.
    if unrooted {
        let from = txn.find_block_end(channel, txn.internal_pos(&e.from, change_id)?)?;
        debug!("repairing unrooted: {:?} {:?}", from, to);
        txn.put_graph_with_rev(
            channel,
            EdgeFlags::FOLDER | EdgeFlags::PSEUDO,
            from,
            to,
            ChangeId::ROOT,
        )?;
    }
    Ok(())
}
/// Check whether `v` is connected to the root through folder parent
/// edges, memoising the per-vertex answer in `ws.rooted`.
fn is_rooted<T: TxnT>(
    txn: &T,
    channel: &Channel<T>,
    v: Vertex<ChangeId>,
    ws: &mut Workspace,
) -> Result<bool, crate::Error> {
    // Recycling ws.up_context and ws.parents as a stack and a
    // "visited" hashset, respectively.
    let ref mut stack = ws.up_context;
    stack.clear();
    stack.push(v);
    let ref mut visited = ws.parents;
    visited.clear();
    while let Some(to) = stack.pop() {
        debug!("is_rooted, pop = {:?}", to);
        if to.is_root() {
            // Reached the root: everything visited on the way is
            // rooted as well.
            stack.clear();
            for v in visited.drain() {
                ws.rooted.insert(v, true);
            }
            return Ok(true);
        }
        if !visited.insert(to) {
            continue;
        }
        // Reuse a memoised answer when available.
        if let Some(&rooted) = ws.rooted.get(&to) {
            if rooted {
                for v in visited.drain() {
                    ws.rooted.insert(v, true);
                }
                return Ok(true);
            } else {
                continue;
            }
        }
        // Walk up through folder parent edges.
        let f = EdgeFlags::PARENT | EdgeFlags::FOLDER;
        for parent in txn.iter_adjacent(channel, to, f, f | EdgeFlags::PSEUDO | EdgeFlags::BLOCK) {
            debug!("is_rooted, parent = {:?}", parent);
            let parent = txn.find_block_end(channel, parent.dest)?;
            stack.push(parent)
        }
    }
    // Exhausted the ancestors without reaching the root.
    for v in visited.drain() {
        ws.rooted.insert(v, false);
    }
    Ok(false)
}
// org id jGPI4uvXHOqlYmQOZfGf5v7mWvmbC5zvHIE109pMmp8=
use super::{Flags, Graph, VertexId};
use crate::vector2::*;
use std::cmp::min;
impl Graph {
    /// Compute the strongly connected components of this graph using
    /// an iterative (explicit call stack) formulation of Tarjan's
    /// algorithm, starting from vertex 1 (vertex 0 is the dummy).
    pub(crate) fn tarjan(&mut self) -> Vector2<VertexId> {
        // Trivial graph: one component containing vertex 0.
        if self.lines.len() <= 1 {
            let mut sccs = Vector2::with_capacities(self.lines.len(), self.lines.len());
            sccs.push();
            sccs.push_to_last(VertexId(0));
            return sccs;
        }
        // Call-stack entries: (vertex, child index, first_visit).
        let mut call_stack = vec![(VertexId(1), 0, true)];
        let mut index = 0;
        let mut stack = Vec::new();
        let mut scc = Vector2::new();
        'recursion: while let Some((n_l, i, first_visit)) = call_stack.pop() {
            if first_visit {
                // First time on this vertex: initialise its indices.
                let ref mut l = self[n_l];
                l.index = index;
                l.lowlink = index;
                l.flags = l.flags | Flags::ONSTACK | Flags::VISITED;
                stack.push(n_l);
                index = index + 1;
            } else {
                // Returning from the simulated recursion on child `i`.
                let &(_, n_child) = self.child(n_l, i);
                self[n_l].lowlink = self[n_l].lowlink.min(self[n_child].lowlink);
            }
            for j in i..self[n_l].n_children {
                let &(_, n_child) = self.child(n_l, j);
                if !self[n_child].flags.contains(Flags::VISITED) {
                    // Simulate recursing into the unvisited child.
                    call_stack.push((n_l, j, false));
                    call_stack.push((n_child, 0, true));
                    continue 'recursion;
                } else if self[n_child].flags.contains(Flags::ONSTACK) {
                    self[n_l].lowlink = min(self[n_l].lowlink, self[n_child].index)
                }
            }
            // This vertex is the root of an SCC: pop the component.
            if self[n_l].index == self[n_l].lowlink {
                let n_scc = scc.len();
                scc.push();
                loop {
                    match stack.pop() {
                        None => break,
                        Some(n_p) => {
                            self[n_p].scc = n_scc;
                            // ONSTACK is set here, so XOR clears it.
                            self[n_p].flags = self[n_p].flags ^ Flags::ONSTACK;
                            scc.push_to_last(n_p);
                            if n_p == n_l {
                                break;
                            }
                        }
                    }
                }
            }
        }
        scc
    }
}
// org id hGK6MGS40qzDFMBQyv5XVltMB8aIcok+Pgym9VgR9Vk=
use super::{AliveVertex, Flags, Graph, VertexId};
use crate::pristine::*;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
/// Retrieve, as an in-memory [Graph], the subgraph of `channel`
/// reachable from `pos0` through non-parent edges (up to
/// PSEUDO|BLOCK flags).
pub fn retrieve<T: TxnT>(txn: &T, channel: &Channel<T>, pos0: Position<ChangeId>) -> Graph {
    // org id sYJLuP12Bb9ojQy2BgMV3s1jy9cGxExEdwA79BNti8w=
    let now = std::time::Instant::now();
    let mut graph = Graph {
        lines: Vec::new(),
        children: Vec::new(),
        total_bytes: 0,
    };
    // org id 5vWbyONVh9JXfLbAXyWtgOPI0Uw7ePLizs5Ktr69a7A=
    // Map from channel positions to vertex ids in `graph.lines`.
    let mut cache: HashMap<Position<ChangeId>, VertexId> = HashMap::new();
    graph.lines.push(AliveVertex::DUMMY);
    cache.insert(Position::BOTTOM, VertexId(0));
    graph.lines.push(AliveVertex {
        vertex: pos0.inode_vertex(),
        flags: Flags::empty(),
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    });
    cache.insert(pos0, VertexId(1));
    // org id +NO0a8x0MX1UOumulz5JeM8HzIIpUzB+FDngTJ+n9I8=
    // Depth-first traversal, creating vertices on first visit.
    let mut stack = vec![VertexId(1)];
    while let Some(vid) = stack.pop() {
        debug!("vid {:?}", vid);
        graph[vid].children = graph.children.len();
        for e in txn.iter_adjacent(
            &channel,
            graph[vid].vertex,
            EdgeFlags::empty(),
            EdgeFlags::PSEUDO | EdgeFlags::BLOCK,
        ) {
            let dest_vid = match cache.entry(e.dest) {
                Entry::Vacant(ent) => {
                    let n = VertexId(graph.lines.len());
                    ent.insert(n);
                    let alive = new_vertex(txn, channel, e.dest);
                    graph.total_bytes += alive.vertex.len();
                    graph.lines.push(alive);
                    stack.push(n);
                    n
                }
                Entry::Occupied(e) => *e.get(),
            };
            assert_ne!(graph[vid].vertex.start_pos(), e.dest);
            debug!("child {:?}", dest_vid);
            graph.children.push((Some(e), dest_vid));
            graph[vid].n_children += 1;
        }
        // org id hsj9MpTqN7i3gJbLw17mqAbo+0+lz2RNkm24v+T+WvQ=
        // Terminator entry pointing at the dummy vertex.
        graph.children.push((None, VertexId::DUMMY));
        graph[vid].n_children += 1;
    }
    crate::TIMERS.lock().unwrap().alive_retrieve += now.elapsed();
    graph
}
// org id xfKIMD6U0jHcxTiNzXKvDewGhKlvjnNe3nmaoh6nnz4=
/// Build a fresh `AliveVertex` for the block at `pos`, marking it as a
/// zombie when it has at least one parent edge flagged DELETED.
fn new_vertex<T: TxnT>(txn: &T, channel: &Channel<T>, pos: Position<ChangeId>) -> AliveVertex {
    // Resolve the position to a concrete vertex of the channel graph.
    let vertex = txn.find_block(&channel, pos).unwrap();
    // Zombie detection: any adjacent edge carrying PARENT | DELETED.
    let is_zombie = txn
        .iter_adjacent(
            &channel,
            vertex,
            EdgeFlags::PARENT | EdgeFlags::DELETED,
            EdgeFlags::all(),
        )
        .next()
        .is_some();
    let flags = if is_zombie {
        Flags::ZOMBIE
    } else {
        Flags::empty()
    };
    AliveVertex {
        vertex,
        flags,
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    }
}
// org id jpSLF26wQdmz0kBfuXYavQ2T6SBQYdE5u9SfkPOypeg=
pub(crate) fn remove_forward_edges<T: MutTxnT>(
txn: &mut T,
channel: &mut Channel<T>,
pos: Position<ChangeId>,
) -> Result<(), anyhow::Error> {
let mut graph = retrieve(txn, channel, pos);
let scc = graph.tarjan(); // SCCs are given here in reverse order.
let (_, forward_scc) = graph.dfs(&scc);
let mut forward = Vec::new();
graph.collect_forward_edges(txn, channel, &scc, &forward_scc, &mut forward);
for &(vertex, edge) in forward.iter() {
let dest = txn.find_block(&channel, edge.dest).unwrap();
debug!(target:"libpijul::forward", "deleting forward edge {:?} {:?} {:?}", vertex, dest, edge);
txn.del_graph_with_rev(channel, edge.flag, vertex, dest, edge.introduced_by)?;
}
Ok(())
}
// org id rugGQSda+7h3fgWnFg5o6rrh0FxPthevqf1C7yFpDXo=
use super::dfs::{Path, PathElement};
use super::{Flags, Graph, VertexId};
use crate::changestore::ChangeStore;
use crate::pristine::*;
use crate::vector2::Vector2;
use crate::vertex_buffer::VertexBuffer;
// org id 83qmEYXouVuMdaYiRx+rqfb1HtaYFvovt7XmeQIpuuI=
/// One frame of the explicit traversal stack used by `output_conflict`.
#[derive(Debug)]
struct ConflictStackElt {
    /// The alternative sides of the conflict being output.
    conflict: Vec<Path>,
    /// Index of the side currently being written.
    side: usize,
    /// Position reached inside `conflict[side].path`.
    idx: usize,
}
/// Write a (possibly nested) conflict tree to `line_buf`, using an
/// explicit stack instead of recursion. Conflict markers are emitted via
/// the `VertexBuffer` callbacks (`begin_conflict`, `conflict_next`,
/// `end_conflict`).
fn output_conflict<T: TxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    channel: &Channel<T>,
    line_buf: &mut B,
    graph: &Graph,
    sccs: &Vector2<VertexId>,
    conflict: Path,
) -> Result<(), anyhow::Error> {
    let mut stack = vec![ConflictStackElt {
        conflict: vec![conflict],
        side: 0,
        idx: 0,
    }];
    while let Some(mut elt) = stack.pop() {
        let n_sides = elt.conflict.len();
        // org id OQlVwkimOiJb/Ruz+/oEO0Pz1khIxRkvwN+/W8om9Nc=
        // First time this conflict is visited (side 0, index 0): open the
        // conflict and order its sides deterministically by oldest vertex.
        if n_sides > 1 && elt.side == 0 && elt.idx == 0 {
            line_buf.begin_conflict()?;
            // org id roGnEVojWYAOJ0zF3ObAAA8Kuqfjs3on9NNRV0mgRIc=
            elt.conflict.sort_by(|a, b| {
                let a_ = a
                    .path
                    .iter()
                    .map(|a| a.oldest_vertex(changes, txn, channel, graph, sccs))
                    .min()
                    .unwrap();
                let b_ = b
                    .path
                    .iter()
                    .map(|b| b.oldest_vertex(changes, txn, channel, graph, sccs))
                    .min()
                    .unwrap();
                a_.cmp(&b_)
            });
        }
        // org id GmZmOnTyQotgknRlrewBJc6+O1mQfhuF2lNOYlgkys4=
        // `next` holds a nested conflict discovered while outputting a
        // side; when set, we suspend this frame and recurse into it.
        let mut next = None;
        'outer: while elt.side < n_sides {
            if elt.side > 0 && elt.idx == 0 {
                line_buf.conflict_next()?;
            }
            while elt.idx < elt.conflict[elt.side].path.len() {
                match elt.conflict[elt.side].path[elt.idx] {
                    PathElement::Scc { scc } => {
                        output_scc(changes, txn, graph, &sccs[scc], line_buf)?;
                        elt.idx += 1;
                    }
                    PathElement::Conflict { ref mut sides } => {
                        // Take the sides out of the tree (they are output
                        // by the nested frame, not needed here anymore).
                        let sides = std::mem::replace(sides, Vec::new());
                        elt.idx += 1;
                        next = Some(ConflictStackElt {
                            side: 0,
                            idx: 0,
                            conflict: sides,
                        });
                        break 'outer;
                    }
                }
            }
            elt.side += 1;
            elt.idx = 0;
        }
        // org id TAF52f6cZFJyHIBx5wfYk6RR2l7NG8tOHboIyrGFakc=
        if elt.side >= n_sides {
            // All sides done: close the conflict if one was opened.
            if n_sides > 1 {
                line_buf.end_conflict()?;
            }
        } else {
            // Suspend this frame and process the nested conflict first.
            stack.push(elt);
            stack.push(next.unwrap())
        }
    }
    Ok(())
}
// org id U/M8FDWU06DAEBXf7bvPvuN33e6GPJLmgvUK84CFuJg=
impl PathElement {
    /// Smallest changeset value (as returned by `get_changeset` —
    /// presumably the position of the change in the channel log; confirm
    /// against `TxnT::get_changeset`) over all vertices reachable from
    /// this path element. Used to order conflict sides deterministically.
    ///
    /// Panics if a vertex of the element is not in the channel (after
    /// dumping a GraphViz debug file when debug logging is enabled).
    fn oldest_vertex<T: TxnT, C: ChangeStore>(
        &self,
        changes: &C,
        txn: &T,
        channel: &Channel<T>,
        graph: &Graph,
        sccs: &Vector2<VertexId>,
    ) -> u64 {
        match *self {
            PathElement::Scc { ref scc } => sccs[*scc]
                .iter()
                .map(|x| {
                    if let Some(t) =
                        txn.get_changeset(&channel.changes, graph[*x].vertex.change, None)
                    {
                        t
                    } else {
                        // Broken invariant: dump the graph for inspection,
                        // then panic.
                        if log_enabled!(log::Level::Debug) {
                            let f = std::fs::File::create("debug_oldest").unwrap();
                            graph.debug(changes, txn, channel, false, true, f).unwrap();
                        }
                        panic!("vertex not in channel: {:?}", graph[*x].vertex)
                    }
                })
                .min()
                .unwrap(),
            // A conflict's age is the age of its oldest side.
            PathElement::Conflict { ref sides } => sides
                .iter()
                .map(|x| {
                    x.path
                        .iter()
                        .map(|x| x.oldest_vertex(changes, txn, channel, graph, sccs))
                        .min()
                        .unwrap()
                })
                .min()
                .unwrap(),
        }
    }
}
// org id 4fCwUx872WHp3z4W0/WwwMhX1cFRa2iPigNaXNpaEpA=
/// Output one strongly connected component: a multi-vertex component is
/// a cycle and is wrapped in cyclic-conflict markers; zombie vertices
/// are wrapped in zombie-conflict markers.
fn output_scc<T: TxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    graph: &Graph,
    scc: &[VertexId],
    vbuf: &mut B,
) -> Result<(), anyhow::Error> {
    if scc.len() > 1 {
        vbuf.begin_cyclic_conflict()?
    }
    for &v in scc.iter() {
        let now = std::time::Instant::now();
        if graph[v].flags.contains(Flags::ZOMBIE) {
            vbuf.begin_zombie_conflict()?;
        }
        crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
        let vertex = graph[v].vertex;
        // Contents are fetched lazily, when `output_line` calls this
        // closure. FIX: the previous version started/stopped the
        // `alive_contents` timer around the *creation* of the closure, so
        // the actual retrieval was never measured; time it inside the
        // closure instead, where `get_contents` really runs.
        let get_contents = |buf: &mut Vec<u8>| {
            let now = std::time::Instant::now();
            changes.get_contents(|p| txn.get_external(p), vertex, buf)?;
            crate::TIMERS.lock().unwrap().alive_contents += now.elapsed();
            Ok(())
        };
        let now = std::time::Instant::now();
        debug!("outputting {:?}", vertex);
        vbuf.output_line(vertex, get_contents)?;
        if graph[v].flags.contains(Flags::ZOMBIE) {
            vbuf.end_conflict()?;
        }
        crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
    }
    let now = std::time::Instant::now();
    if scc.len() > 1 {
        vbuf.end_cyclic_conflict()?;
    }
    crate::TIMERS.lock().unwrap().alive_write += now.elapsed();
    Ok(())
}
// org id RA5o6M3qAtAD3/CSapAI6tIWd7yWNmsb/uCKXtvMf6c=
/// Compute the SCCs and conflict tree of `graph`, then write its whole
/// contents (including conflict markers) to `line_buf`. Forward edges
/// found along the way are appended to `forward` for later deletion.
pub fn output_graph<T: TxnT, B: VertexBuffer, P: ChangeStore>(
    changes: &P,
    txn: &T,
    channel: &Channel<T>,
    line_buf: &mut B,
    graph: &mut Graph,
    forward: &mut Vec<(Vertex<ChangeId>, Edge)>,
) -> Result<(), anyhow::Error> {
    // A graph containing only the dummy vertex has nothing to output.
    if graph.lines.len() <= 1 {
        return Ok(());
    }
    let graph_timer = std::time::Instant::now();
    // Tarjan's algorithm yields the SCCs in reverse order.
    let scc = graph.tarjan();
    let (conflict_tree, forward_scc) = graph.dfs(&scc);
    graph.collect_forward_edges(txn, channel, &scc, &forward_scc, forward);
    crate::TIMERS.lock().unwrap().alive_graph += graph_timer.elapsed();
    let output_timer = std::time::Instant::now();
    debug!("conflict_tree = {:?}", conflict_tree);
    output_conflict(changes, txn, channel, line_buf, graph, &scc, conflict_tree)?;
    crate::TIMERS.lock().unwrap().alive_output += output_timer.elapsed();
    Ok(())
}
// org id ae+cN7ii/nf6mJV7ygVX+QzaT1iUIKfCuf/x9VMpjjQ=
use crate::pristine::{ChangeId, Edge, Vertex};
mod debug;
mod dfs;
mod output;
pub mod retrieve;
mod tarjan;
pub(crate) use output::*;
pub(crate) use retrieve::*;
// org id ccK061RVWq8mU8p92w938EIRcX5xf53Y/5klJaCYV5c=
/// A vertex of the alive graph, with the bookkeeping fields used by
/// Tarjan's algorithm and the DFS.
#[derive(Debug, Clone)]
pub(crate) struct AliveVertex {
    pub vertex: Vertex<ChangeId>,
    /// ZOMBIE / VISITED / ONSTACK state bits.
    flags: Flags,
    /// Start of this vertex's adjacency slice in `Graph::children`.
    children: usize,
    /// Length of this vertex's adjacency slice in `Graph::children`.
    n_children: usize,
    /// Tarjan discovery index.
    index: usize,
    /// Tarjan lowlink.
    lowlink: usize,
    /// Index of this vertex's strongly connected component.
    pub scc: usize,
}
bitflags! {
    /// Per-vertex state bits used during graph traversals.
    struct Flags: u8 {
        /// Alive, but at least one parent edge is marked DELETED
        /// (see `new_vertex`).
        const ZOMBIE = 4;
        /// Discovered by Tarjan's algorithm.
        const VISITED = 2;
        /// Currently on Tarjan's component stack.
        const ONSTACK = 1;
    }
}
// org id vnQ9DifkEHpAgUd3zjGq5N9u+oBkZGdVua6BJxF9jTg=
/// Index of a vertex in `Graph::lines`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub(crate) struct VertexId(pub(crate) usize);
impl VertexId {
    /// Index of the dummy sink vertex, stored first in every graph.
    const DUMMY: VertexId = VertexId(0);
}
impl AliveVertex {
    /// The dummy sink vertex stored at `VertexId::DUMMY` in every graph.
    const DUMMY: AliveVertex = AliveVertex {
        vertex: Vertex::BOTTOM,
        flags: Flags::empty(),
        children: 0,
        n_children: 0,
        index: 0,
        lowlink: 0,
        scc: 0,
    };
}
/// The graph of alive vertices. `lines` owns the vertices; `children`
/// stores every adjacency list back-to-back, each vertex indexing its
/// own slice through its `children`/`n_children` fields.
#[derive(Debug)]
pub struct Graph {
    pub(crate) lines: Vec<AliveVertex>,
    children: Vec<(Option<Edge>, VertexId)>,
    /// Sum of the byte lengths of all vertices.
    total_bytes: usize,
}
impl Graph {
    /// Number of vertices, including the dummy sink vertex.
    pub fn len_vertices(&self) -> usize {
        self.lines.len()
    }
    /// Total length in bytes of all vertices.
    pub fn len_bytes(&self) -> usize {
        self.total_bytes
    }
}
impl std::ops::Index<VertexId> for Graph {
    type Output = AliveVertex;
    /// Look up the vertex stored at `idx`.
    fn index(&self, idx: VertexId) -> &Self::Output {
        &self.lines[idx.0]
    }
}
impl std::ops::IndexMut<VertexId> for Graph {
    /// Mutable lookup of the vertex stored at `idx`.
    fn index_mut(&mut self, idx: VertexId) -> &mut Self::Output {
        &mut self.lines[idx.0]
    }
}
// org id SzNAy0qgvhYjwiibrE05FzjomcwRYrc00kLuPE6TWsA=
impl Graph {
    /// The `(edge, child)` adjacency slice of vertex `i`.
    pub(crate) fn children(&self, i: VertexId) -> &[(Option<Edge>, VertexId)] {
        let line = &self[i];
        let start = line.children;
        &self.children[start..start + line.n_children]
    }
    /// The `j`-th `(edge, child)` pair of vertex `i`.
    fn child(&self, i: VertexId, j: usize) -> &(Option<Edge>, VertexId) {
        let base = self[i].children;
        &self.children[base + j]
    }
}
// org id IhdHg3+NO19nL+0XTy56j0KXgReFfEe/3TjQW3vU47s=
use std::collections::{HashMap, HashSet};
/// Remove from `vertices` every vertex that is a descendant of another
/// member of `vertices` (relative to `target`), i.e. keep only the
/// "maximal" children. SCC indices come from Tarjan and are in reverse
/// topological order: a parent's scc index is greater than its child's.
pub(crate) fn remove_redundant_children(
    graph: &Graph,
    vids: &HashMap<Vertex<ChangeId>, crate::alive::VertexId>,
    vertices: &mut HashSet<Vertex<ChangeId>>,
    target: Vertex<ChangeId>,
) {
    // `min` is the smallest SCC index among the starting vertices; the
    // walk below never descends past it.
    let mut min = std::usize::MAX;
    let mut stack = Vec::new();
    for p in vertices.iter() {
        let vid = if let Some(vid) = vids.get(p) {
            *vid
        } else {
            // Vertex not in the graph: leave it in `vertices` untouched.
            continue;
        };
        min = min.min(graph[vid].scc);
        stack.push(vid);
    }
    let target_scc = if let Some(&target) = vids.get(&target) {
        graph[target].scc
    } else {
        std::usize::MAX
    };
    let mut visited = HashSet::new();
    while let Some(p) = stack.pop() {
        if !visited.insert(p) {
            continue;
        }
        for (_, child) in graph.children(p) {
            // A strict descendant of `p` (below `target`) is redundant.
            if graph[p].scc < target_scc && graph[p].scc != graph[*child].scc {
                assert!(graph[p].scc > graph[*child].scc);
                vertices.remove(&graph[*child].vertex);
            }
            if graph[*child].scc >= min {
                stack.push(*child);
            }
        }
    }
}
// org id YjNtUksz6F4FDjsnoWIdFtsnyqrhhwGB81kMPoQh1y0=
/// Dual of `remove_redundant_children`: remove from `vertices` every
/// vertex that is an ancestor of another member (relative to `target`),
/// recording the removed `(vertex, target)` pairs in `covered` so later
/// calls can reuse them.
pub(crate) fn remove_redundant_parents(
    graph: &Graph,
    vids: &HashMap<Vertex<ChangeId>, crate::alive::VertexId>,
    vertices: &mut HashSet<Vertex<ChangeId>>,
    covered: &mut HashSet<(Vertex<ChangeId>, Vertex<ChangeId>)>,
    target: Vertex<ChangeId>,
) {
    let mut min = std::usize::MAX;
    // Stack entries are (vertex, is-on-current-path).
    let mut stack = Vec::new();
    for p in vertices.iter() {
        let vid = if let Some(vid) = vids.get(p) {
            *vid
        } else {
            continue;
        };
        min = min.min(graph[vid].scc);
        stack.push((vid, false));
    }
    // Visit larger SCC indices (ancestors) first.
    stack.sort_by(|(a, _), (b, _)| graph[*a].scc.cmp(&graph[*b].scc));
    let target_scc = if let Some(&target) = vids.get(&target) {
        graph[target].scc
    } else {
        0
    };
    let mut visited = HashSet::new();
    while let Some((p, _)) = stack.pop() {
        if !visited.insert(p) {
            continue;
        }
        if graph[p].scc > target_scc
            && (vertices.contains(&graph[p].vertex) || covered.contains(&(graph[p].vertex, target)))
        {
            // `p` is a member (or already covered): any on-path ancestor
            // still on the stack is redundant.
            for (pp, pp_on_path) in stack.iter() {
                if graph[*pp].scc != graph[p].scc && *pp_on_path {
                    vertices.remove(&graph[*pp].vertex);
                    covered.insert((graph[*pp].vertex, target));
                }
            }
        }
        // Re-push `p` marked as on-path, so it is seen by the loop above
        // while its descendants are processed.
        stack.push((p, true));
        for (_, child) in graph.children(p) {
            if graph[*child].scc >= min {
                stack.push((*child, false));
            }
            if graph[p].scc > target_scc
                && graph[*child].scc != graph[p].scc
                && covered.contains(&(graph[*child].vertex, target))
            {
                assert!(graph[*child].scc < graph[p].scc);
                vertices.remove(&graph[p].vertex);
                covered.insert((graph[p].vertex, target));
            }
        }
    }
}
// org id FN4X/UQj4sOrdr1L/34stLjMnN7aRX6dGB8ovnXm4g4=
use super::{Graph, VertexId};
use crate::pristine::*;
use crate::vector2::Vector2;
use std::collections::HashSet;
// org id a6g8r8xMc72herurxBoGK09TlqlNFesqMxOlGye7+wU=
/// A linear run of SCCs (possibly containing nested conflicts), produced
/// by `Graph::dfs` and consumed by `output_conflict`.
#[derive(Debug)]
pub(super) struct Path {
    pub path: Vec<PathElement>,
    /// All SCC indices appearing anywhere in `path`.
    pub sccs: HashSet<usize>,
    /// The largest already-visited SCC reachable from this path
    /// (see `first_visit`); 0 when there is none.
    pub end: usize,
}
/// One element of a `Path`: either a single SCC, or a conflict between
/// several alternative sub-paths.
#[derive(Debug)]
pub(super) enum PathElement {
    Scc { scc: usize },
    Conflict { sides: Vec<Path> },
}
impl Path {
    /// An empty path: no elements, no SCCs, end 0.
    fn new() -> Self {
        Path {
            sccs: HashSet::new(),
            path: Vec::new(),
            end: 0,
        }
    }
}
// org id Evvo1XwskiRIPyqRTa8pr3UJx6bL0a0ky0UqCjDbj7k=
/// Visit bookkeeping for the DFS over the SCC DAG.
struct DFS {
    /// One `Visits` entry per SCC, indexed by SCC number.
    visits: Vec<Visits>,
    /// Monotonic visit counter; starts at 1 so 0 means "never visited".
    counter: usize,
}
/// Discovery and completion timestamps of one SCC (0 = not yet).
#[derive(Clone, Debug)]
struct Visits {
    first: usize,
    last: usize,
}
impl DFS {
    /// Allocate visit bookkeeping for `n` SCCs. The counter starts at 1
    /// so that `first == 0` can mean "never visited".
    pub fn new(n: usize) -> Self {
        let unvisited = Visits { first: 0, last: 0 };
        DFS {
            visits: vec![unvisited; n],
            counter: 1,
        }
    }
}
// org id rs9ua+JLr5uzyFxLi79zXVVGmVsbPAbn4+J7Pz+itfQ=
/// One frame of the iterative DFS in `Graph::dfs`.
#[derive(Debug)]
struct State {
    /// The SCC visited by this frame.
    n_scc: usize,
    // org id 1jLjvnQC+CzGcbSN/yGdM+O7tiu5ytCDkvQ7ddM/K8s=
    /// `Some((i, j))` when resuming: row `i` of the descendants table,
    /// with `j` children still to process. `None` on first visit.
    descendants: Option<(usize, usize)>,
    // org id ops5Ku702mQyuAsCratzGLyafN90+9XdMoqHYonqqiY=
    /// Path accumulated so far along the current branch.
    current_path: Path,
    // org id I6sttXHyuv42p25LtpQ6rm+GHXz6XX3RSkXdPtw++rs=
    /// Length of `current_path.path` when this frame was created.
    current_path_len: usize,
    // org id tLdXR6Nj0fAJXI2MuYhja197v7oPjfGj5UsZwSsPFfc=
    /// Paths returned by already-completed children of this frame.
    return_values: Vec<Path>,
}
// org id ZKZCQZOOiAuzQ2hplUivR12avMGPrw2HbcBPp1dM96U=
/// Build the initial DFS stack: a single frame for the last SCC (the
/// root in the reverse order produced by Tarjan's algorithm), with room
/// reserved for up to `n` frames.
fn init_stack(n: usize) -> Vec<State> {
    let root = State {
        n_scc: n - 1,
        descendants: None,
        current_path: Path::new(),
        current_path_len: 0,
        return_values: Vec::new(),
    };
    let mut stack = Vec::with_capacity(n);
    stack.push(root);
    stack
}
/// Regression test for a fixed 13-vertex graph (named after change
/// 4165): runs Tarjan + DFS and dumps the graph to "debug4165".
#[test]
fn test4165() {
    env_logger::try_init().unwrap_or(());
    use super::*;
    use crate::pristine::*;
    // 13 identical vertices; their `change` ids are fixed up below.
    let mut graph = Graph {
        lines: vec![
            AliveVertex {
                vertex: Vertex {
                    change: ChangeId(1),
                    start: ChangePosition(0),
                    end: ChangePosition(1)
                },
                flags: super::Flags::empty(),
                children: 0,
                n_children: 0,
                index: 0,
                lowlink: 0,
                scc: 0,
            };
            13
        ],
        children: Vec::new(),
        total_bytes: 0,
    };
    for i in 0..13 {
        graph.lines[i].vertex.change = ChangeId(i as u64);
    }
    // Fixed adjacency lists: entry `i` lists the children of vertex `i`.
    for (i, &children) in [
        &[][..],
        &[4, 2, 6, 7, 5, 3][..],
        &[12, 4, 7, 6][..],
        &[5][..],
        &[10, 8, 9][..],
        &[6, 7, 4][..],
        &[4, 7][..],
        &[4][..],
        &[10][..],
        &[10][..],
        &[11][..],
        &[0][..],
        &[4][..],
    ]
    .iter()
    .enumerate()
    {
        graph.lines[i].children = graph.children.len();
        graph.lines[i].n_children = children.len();
        for &chi in children.iter() {
            graph.children.push((
                Some(Edge {
                    dest: graph.lines[chi].vertex.start_pos(),
                    flag: EdgeFlags::empty(),
                    introduced_by: ChangeId(4165),
                }),
                VertexId(chi),
            ))
        }
    }
    let scc = graph.tarjan();
    // Record each vertex's component before running the DFS.
    for i in 0..scc.len() {
        for &j in scc[i].iter() {
            graph[j].scc = i
        }
    }
    let mut f = std::fs::File::create("debug4165").unwrap();
    graph.debug_raw(&mut f).unwrap();
    println!("{:#?}", graph.dfs(&scc))
}
impl Graph {
    /// Iterative depth-first traversal of the SCC DAG (starting from the
    /// last SCC, the root in Tarjan's reverse order). Returns the
    /// conflict tree of the graph as a `Path`, together with the set of
    /// `(parent_scc, child_scc)` pairs connected by forward edges.
    ///
    /// Recursion is simulated with an explicit `stack` of `State`
    /// frames; `return_value` carries the `Path` returned by the most
    /// recently completed frame.
    pub(super) fn dfs(&mut self, scc: &Vector2<VertexId>) -> (Path, HashSet<(usize, usize)>) {
        let mut dfs = DFS::new(scc.len());
        let mut stack = init_stack(scc.len());
        let mut forward_scc = HashSet::new();
        // Edges confirmed as tree ("regular") or cross edges; a pair in
        // here is never reported as forward.
        let mut regular_scc = HashSet::new();
        let mut return_value = None;
        let mut descendants = Vector2::with_capacities(scc.len(), scc.len());
        // org id fTBj4yfBA4uziK5nATQHePJ03VSVY5lNtqx3jpU4djc=
        'recursion: while let Some(mut state) = stack.pop() {
            debug!("dfs state = {:?}", state);
            // (i, j): row of the descendants table, and number of
            // children still to process in that row.
            let (i, mut j) = if let Some(n) = state.descendants {
                n
            } else {
                first_visit(self, &mut dfs, scc, &mut descendants, &mut state)
            };
            debug!("i = {:?}, j = {:?}", i, j);
            // org id GWpsS0aAAfDTvTAz0PS0OxCQeAXMwg/YHlEzPlHN6OA=
            let scc_vertices: Vec<_> = scc[state.n_scc].iter().map(|x| &self[*x]).collect();
            debug!("scc_vertices = {:?}", scc_vertices);
            while j > 0 {
                let child = descendants[i][j - 1];
                let scc_child: Vec<_> = scc[child].iter().map(|x| &self[*x]).collect();
                debug!("dfs child = {:?} {:?}", child, scc_child);
                if dfs.visits[state.n_scc].first < dfs.visits[child].first {
                    // This is a forward edge.
                    if child > 0 && !regular_scc.contains(&(state.n_scc, child)) {
                        debug!("forward edge");
                        forward_scc.insert((state.n_scc, child));
                    }
                } else if dfs.visits[child].first == 0 {
                    // Regular edge.
                    regular_scc.insert((state.n_scc, child));
                    debug!("regular edge, return_value {:?}", return_value);
                    if let Some(return_value) = return_value.take() {
                        state.return_values.push(return_value)
                    }
                    recurse(state, (i, j), child, &mut stack);
                    continue 'recursion;
                } else {
                    // Cross edge.
                    regular_scc.insert((state.n_scc, child));
                    debug!("cross edge");
                }
                j -= 1
            }
            // org id +IS4jNPNEDK5HDBm4U7DLoNFfhwvHzn40JAm4tKnBHk=
            // All children processed: fold this frame's results into the
            // value returned to the parent frame. Multiple returned
            // sub-paths mean a conflict.
            return_value = Some(if let Some(return_value_) = return_value.take() {
                dfs.visits[state.n_scc].last = dfs.counter;
                dfs.counter += 1;
                if state.return_values.is_empty() {
                    return_value_
                } else {
                    state.return_values.push(return_value_);
                    make_conflict(&mut state)
                }
            } else {
                state.current_path
            });
            debug!("end of loop, returning {:?}", return_value);
        }
        // org id FHiJPM0yAi0g9oxzzog93HBlJMVO+zkyPcyOIqCcvwI=
        // `unwrap_or_else` avoids eagerly building the default Path
        // (clippy `or_fun_call`; the original used `unwrap_or`).
        (return_value.unwrap_or_else(Path::new), forward_scc)
    }
}
// org id iFf2gT1lTCyh5S5G9chCOQSV/yv6LUhW94o0G2+xy8s=
/// First visit of the SCC in `state`: stamp its discovery time, append
/// it to the current path, and build a new row of the `descendants`
/// table holding its not-yet-visited child components. Returns the row
/// index and the number of children to process.
fn first_visit(
    graph: &Graph,
    dfs: &mut DFS,
    scc: &Vector2<VertexId>,
    descendants: &mut Vector2<usize>,
    state: &mut State,
) -> (usize, usize) {
    // org id +ptaxUWOVF6qdFxwHAnI2hr3kJfCBRh7am0SWZ+zdOg=
    // This SCC must not have been visited before.
    assert_eq!(dfs.visits[state.n_scc].first, 0);
    dfs.visits[state.n_scc].first = dfs.counter;
    dfs.counter += 1;
    // org id gf22ooM9FpFRowUF1cPE6ISPMkymBIWZOjPS3Y1ktgQ=
    state
        .current_path
        .path
        .push(PathElement::Scc { scc: state.n_scc });
    state.current_path.sccs.insert(state.n_scc);
    // org id I/pQaV+bTryz+byZO/sdFhxqPD1EYTVf6PbC+JNJFTM=
    let i = descendants.len();
    descendants.push();
    let mut descendants_end = 0;
    for cousin in scc[state.n_scc].iter() {
        for &(_, n_child) in graph.children(*cousin) {
            let child_component = graph[n_child].scc;
            if child_component > state.n_scc {
                // SCC order violation: children must have smaller indices
                // (Tarjan's reverse topological order).
                // let mut f = std::fs::File::create("debug_alive").unwrap();
                // graph.debug_raw(&mut f).unwrap();
                panic!("{} > {}", child_component, state.n_scc);
            } else if child_component == state.n_scc {
                debug!("cyclic component {:?}", child_component);
                continue;
            }
            if dfs.visits[child_component].first == 0 {
                descendants.push_to_last(child_component)
            } else {
                // Already visited: only remember the largest such
                // component, recorded as the path's `end`.
                descendants_end = descendants_end.max(child_component);
            }
        }
    }
    state.current_path.end = descendants_end;
    // org id tppibf8fu/NgBP2gA4yEeGMCsZjARmQ3FsU1mfY8/VY=
    let d = descendants.last_mut().unwrap();
    d.sort();
    debug!(
        "first visit, n_scc = {:?}, state.current_path = {:?}, descendants = {:?}",
        state.n_scc, state.current_path, d
    );
    (i, d.len())
}
// org id PAb27lKMvw3ODopZ9ed0R1+VLBapBvGicfts0Qfgoyk=
/// Simulate a recursive call into `child`: push a resume frame for the
/// current state (with `j - 1` children left), then a fresh frame for
/// `child` that takes over the current path.
fn recurse(mut state: State, (i, j): (usize, usize), child: usize, stack: &mut Vec<State>) {
    // Move the path into the child frame; the parent keeps an empty one
    // until the child returns.
    let current_path = std::mem::replace(&mut state.current_path, Path::new());
    // `len` marks the first of the two frames pushed below (debug only).
    let len = stack.len();
    stack.push(State {
        descendants: Some((i, j - 1)),
        ..state
    });
    stack.push(State {
        n_scc: child,
        descendants: None,
        current_path_len: current_path.path.len(),
        current_path,
        return_values: Vec::new(),
    });
    debug!("recursing {:?}", &stack[len..]);
}
// org id +EtEUqu55ysMiPbQtSxlpzsP1Ghbgr/qJg/w1dYu0Tw=
/// Merge the sub-paths returned by the children of `state` into a single
/// `Path` containing a conflict. Sides whose `end` falls inside another
/// side are folded into nested conflicts; the remaining independent
/// sides become one `PathElement::Conflict`.
fn make_conflict(state: &mut State) -> Path {
    // Split the first returned path at the point where this frame's own
    // SCC was appended: `main_path` becomes the shared prefix, and
    // return_values[0] keeps only its divergent suffix.
    let mut main_path = state.return_values[0]
        .path
        .split_off(state.current_path_len + 1);
    std::mem::swap(&mut state.return_values[0].path, &mut main_path);
    debug!(
        "make_conflict {:#?} {:#?}",
        state.return_values[0].path, main_path
    );
    // org id UnhSWIMI7GsPiN8g1SS8LsKzx++g5mrRn7+M1c+VfV4=
    state.return_values.sort_by(|a, b| a.end.cmp(&b.end));
    // org id Bd49Fx5qUr95VSDjZvf4LiwAmliSU/tlBb1NId/a1H8=
    // Union of the SCCs of all sides, for the resulting path.
    let sccs = state
        .return_values
        .iter()
        .flat_map(|side| side.sccs.iter())
        .map(|&x| x)
        .collect();
    // org id Kh9EOwJWoqxbWMdM/TBylYAtuhQKoDTzX9am56rGfAU=
    let mut conflict_sides = Vec::new();
    while let Some(side) = state.return_values.pop() {
        debug!("side = {:#?}", side);
        // org id eDtZjLZ89YmK+LQZwf0JXAHwC8hahJ+UmZGJJY9FPyw=
        // Does this side end inside one of the remaining sides? If not,
        // it is an independent side of the top-level conflict.
        let main_side = if let Some(n) = state
            .return_values
            .iter()
            .position(|side_| side_.sccs.contains(&side.end))
        {
            n
        } else {
            conflict_sides.push(side);
            continue;
        };
        // org id X+Vkv6E9j5wcfbqV6wd0zJcU5h4pk/xoB0fNy+jTt1w=
        // If the target side already starts with a conflict ending at the
        // same SCC, just add this side to it.
        if let PathElement::Conflict { ref mut sides, .. } = state.return_values[main_side].path[0]
        {
            if sides[0].end == side.end {
                sides.push(side);
                continue;
            }
        }
        // org id Jzso6NctQ7yQrJvt+kp4n6+R+C/j21VYuljDPYbovE0=
        create_nested_conflict(&mut state.return_values[main_side], side);
    }
    // org id XJWvAQc9UuYzztlyavXZtb9f5v+8Q3s9dKVWBgc9Cnc=
    if conflict_sides.len() > 1 {
        main_path.push(PathElement::Conflict {
            sides: conflict_sides,
        })
    } else {
        // NOTE(review): this assumes at least one independent side always
        // remains — confirm this invariant holds for all conflict shapes.
        main_path.extend(conflict_sides.pop().unwrap().path.into_iter())
    }
    Path {
        path: main_path,
        sccs,
        end: 0,
    }
}
// org id 99XChhgQdUuM9P0XMZXaiLSvZkshZYwisljAQKwv5ZA=
/// Splice `side` into `main_side` as a nested conflict: split
/// `main_side.path` at the element containing `side.end`, and replace
/// its prefix with a `Conflict` whose two sides are that prefix and
/// `side`.
fn create_nested_conflict(main_side: &mut Path, side: Path) {
    // Position of the element of `main_side` where `side` re-joins it.
    let end = main_side
        .path
        .iter()
        .position(|v| match v {
            PathElement::Scc { ref scc } => *scc == side.end,
            PathElement::Conflict { ref sides } => {
                sides.iter().any(|side_| side_.sccs.contains(&side.end))
            }
        })
        .unwrap();
    // org id t4p2vXw81IHi1A61czKB1ySau+YllpLXCs+03dU030Q=
    // New shape: [Conflict placeholder] + suffix; `side0` is the old
    // prefix, which becomes one side of the conflict.
    let mut v = vec![PathElement::Conflict { sides: Vec::new() }];
    v.extend(main_side.path.drain(end..));
    let side0 = std::mem::replace(&mut main_side.path, v);
    // org id XkcQPnFnid03vRjpdNnv8fkp8TtbYYOh0VE3JOtW6xw=
    // Collect the SCCs of the prefix for its `Path` record.
    let mut sccs0 = HashSet::new();
    for elt in side0.iter() {
        match *elt {
            PathElement::Scc { scc } => {
                sccs0.insert(scc);
            }
            PathElement::Conflict { ref sides } => {
                for side in sides {
                    for &scc in side.sccs.iter() {
                        sccs0.insert(scc);
                    }
                }
            }
        }
    }
    // org id qQ8ru9vhgn8xaCAW63XcBxFqrHy0K6dYJZQWCeoXNMA=
    main_side.sccs.extend(side.sccs.iter().map(|&x| x));
    main_side.path[0] = PathElement::Conflict {
        sides: vec![
            Path {
                path: side0,
                sccs: sccs0,
                end: side.end,
            },
            side,
        ],
    };
}
// org id LsKadSAjaUm19infXAsCqtTnGC1RMQq6d6h3WfZwioo=
impl Graph {
    /// Translate the SCC-level forward pairs found by `dfs` back into
    /// concrete `(vertex, edge)` pairs, appended to `forward`. Only
    /// PSEUDO edges are candidates for deletion, and only when no
    /// DELETED edge connects the same endpoints.
    pub(super) fn collect_forward_edges<T: TxnT>(
        &self,
        txn: &T,
        channel: &Channel<T>,
        scc: &Vector2<VertexId>,
        forward_scc: &HashSet<(usize, usize)>,
        forward: &mut Vec<(Vertex<ChangeId>, Edge)>,
    ) {
        for &(a, b) in forward_scc.iter() {
            for cousin in scc[a].iter() {
                for &(edge, n_child) in self.children(*cousin) {
                    // Keep only edges landing in the forward target SCC.
                    if self[n_child].scc != b {
                        continue;
                    }
                    if let Some(edge) = edge {
                        // org id duMgQ0jpJDLTmJMPqpgkKy6QBJyDJpNZzOsgqHhs/EI=
                        if edge.flag.contains(EdgeFlags::PSEUDO)
                            && !txn.test_edge(
                                channel,
                                Position {
                                    change: self[*cousin].vertex.change,
                                    pos: self[*cousin].vertex.start,
                                },
                                edge.dest,
                                EdgeFlags::DELETED,
                                EdgeFlags::DELETED,
                            )
                        {
                            forward.push((self[*cousin].vertex, edge))
                        }
                    }
                }
            }
        }
    }
}
// org id rTQeEfoXKaEkHQXFyQtWhOFf2crR1lhKU0pRUjLxfb0=
use super::{Graph, VertexId};
use crate::changestore::*;
use crate::pristine::{Base32, Channel, Position, TxnT};
use std::collections::{HashMap, HashSet};
use std::io::Write;
impl Graph {
    /// Write a graph to an `std::io::Write` in GraphViz (dot) format.
    ///
    /// Each vertex becomes a node labelled with its index, SCC, change
    /// hash, byte range and (escaped, truncated) contents. When
    /// `add_others` is true, edges found in the pristine but absent from
    /// this graph are drawn in red; `introduced_by` appends the
    /// introducing change to each edge label.
    #[allow(dead_code)]
    pub fn debug<W: Write, T: TxnT, P: ChangeStore>(
        &self,
        changes: &P,
        txn: &T,
        channel: &Channel<T>,
        add_others: bool,
        introduced_by: bool,
        mut w: W,
    ) -> Result<(), anyhow::Error> {
        writeln!(w, "digraph {{")?;
        let mut buf = Vec::new();
        // Maps a pristine position to its node index, to resolve the
        // targets of "other" (red) edges.
        let mut cache = HashMap::new();
        if add_others {
            for (line, i) in self.lines.iter().zip(0..) {
                cache.insert(
                    Position {
                        change: line.vertex.change,
                        pos: line.vertex.start,
                    },
                    i,
                );
            }
        }
        // Positions outside the graph for which a node was already emitted.
        let mut others = HashSet::new();
        for (line, i) in self.lines.iter().zip(0..) {
            changes.get_contents(|h| txn.get_external(h), line.vertex, &mut buf)?;
            let contents = &buf;
            // Produce an escaped string.
            let contents = format!(
                "{:?}",
                if let Ok(contents) = std::str::from_utf8(contents) {
                    contents.chars().take(100).collect()
                } else {
                    "<INVALID UTF8>".to_string()
                }
            );
            // Remove the quotes around the escaped string.
            let contents = contents.split_at(contents.len() - 1).0.split_at(1).1;
            writeln!(
                w,
                "n_{}[label=\"{}({}): {}.[{};{}[: {}\"];",
                i,
                i,
                line.scc,
                line.vertex.change.to_base32(),
                line.vertex.start.0,
                line.vertex.end.0,
                contents
            )?;
            if add_others && !line.vertex.is_root() {
                // Draw, in red, the pristine edges of this vertex.
                for (_, v) in txn
                    .iter_graph(&channel.graph, line.vertex, None)
                    .take_while(|&(k, _)| k == line.vertex)
                {
                    if let Some(dest) = cache.get(&v.dest) {
                        writeln!(
                            w,
                            "n_{} -> n_{}[color=red,label=\"{:?}{}{}\"];",
                            i,
                            dest,
                            v.flag.bits(),
                            if introduced_by { " " } else { "" },
                            if introduced_by {
                                v.introduced_by.to_base32()
                            } else {
                                String::new()
                            }
                        )?;
                    } else {
                        // Target is not in this graph: emit a synthetic
                        // node for it (once), then the edge.
                        if !others.contains(&v.dest) {
                            others.insert(v.dest);
                            writeln!(
                                w,
                                "n_{}_{}[label=\"{}.{}\",color=red];",
                                v.dest.change.to_base32(),
                                v.dest.pos.0,
                                v.dest.change.to_base32(),
                                v.dest.pos.0
                            )?;
                        }
                        writeln!(
                            w,
                            "n_{} -> n_{}_{}[color=red,label=\"{:?}{}{}\"];",
                            i,
                            v.dest.change.to_base32(),
                            v.dest.pos.0,
                            v.flag.bits(),
                            if introduced_by { " " } else { "" },
                            if introduced_by {
                                v.introduced_by.to_base32()
                            } else {
                                String::new()
                            }
                        )?;
                    }
                }
            }
            // The graph's own adjacency list (`None` is the dummy edge).
            for &(edge, VertexId(j)) in
                &self.children[line.children..line.children + line.n_children]
            {
                if let Some(ref edge) = edge {
                    writeln!(
                        w,
                        "n_{}->n_{}[label=\"{:?}{}{}\"];",
                        i,
                        j,
                        edge.flag.bits(),
                        if introduced_by { " " } else { "" },
                        if introduced_by {
                            edge.introduced_by.to_base32()
                        } else {
                            String::new()
                        }
                    )?
                } else {
                    writeln!(w, "n_{}->n_0[label=\"none\"];", i)?
                }
            }
        }
        writeln!(w, "}}")?;
        Ok(())
    }
    /// Like `debug`, but without contents or pristine lookups: only the
    /// graph's own structure, in GraphViz (dot) format.
    #[allow(dead_code)]
    pub fn debug_raw<W: Write>(&self, mut w: W) -> Result<(), anyhow::Error> {
        writeln!(w, "digraph {{")?;
        for (line, i) in self.lines.iter().zip(0..) {
            // Remove the quotes around the escaped string.
            writeln!(
                w,
                "n_{}[label=\"{}(scc {}): {}.[{};{}[\"];",
                i,
                i,
                line.scc,
                line.vertex.change.to_base32(),
                line.vertex.start.0,
                line.vertex.end.0,
            )?;
            for &(edge, VertexId(j)) in
                &self.children[line.children..line.children + line.n_children]
            {
                if let Some(ref edge) = edge {
                    writeln!(
                        w,
                        "n_{}->n_{}[label=\"{:?} {}\"];",
                        i,
                        j,
                        edge.flag.bits(),
                        edge.introduced_by.to_base32()
                    )?
                } else {
                    writeln!(w, "n_{}->n_0[label=\"none\"];", i)?
                }
            }
        }
        writeln!(w, "}}")?;
        Ok(())
    }
}
[package]
name = "libpijul"
description = "Core library of Pijul, a distributed version control system based on a sound theory of collaborative work."
version = "1.0.0-alpha.1"
repository = "https://nest.pijul.com/pijul/libpijul"
documentation = "https://docs.rs/libpijul"
authors = ["Pierre-Étienne Meunier <pe@pijul.org>"]
edition = "2018"
license = "GPL-2.0"
include = [
"Cargo.toml",
"src",
"src/apply.rs",
"src/missing_context.rs",
"src/vector2.rs",
"src/path.rs",
"src/working_copy",
"src/working_copy/filesystem.rs",
"src/working_copy/mod.rs",
"src/working_copy/memory.rs",
"src/unrecord",
"src/unrecord/mod.rs",
"src/unrecord/working_copy.rs",
"src/record.rs",
"src/change.rs",
"src/alive",
"src/alive/tarjan.rs",
"src/alive/debug.rs",
"src/alive/retrieve.rs",
"src/alive/dfs.rs",
"src/alive/mod.rs",
"src/alive/output.rs",
"src/fs.rs",
"src/vertex_buffer.rs",
"src/changestore",
"src/changestore/filesystem.rs",
"src/changestore/mod.rs",
"src/changestore/memory.rs",
"src/small_string.rs",
"src/pristine",
"src/pristine/path_id.rs",
"src/pristine/edge.rs",
"src/pristine/merkle.rs",
"src/pristine/channel_dump.rs",
"src/pristine/patch_id.rs",
"src/pristine/inode_metadata.rs",
"src/pristine/inode.rs",
"src/pristine/sanakirja.rs",
"src/pristine/mod.rs",
"src/pristine/vertex.rs",
"src/pristine/hash.rs",
"src/pristine/change_id.rs",
"src/pristine/inode_vertex.rs",
"src/find_alive.rs",
"src/tests",
"src/tests/performance.rs",
"src/tests/file_conflicts.rs",
"src/tests/filesystem.rs",
"src/tests/missing_context.rs",
"src/tests/conflict.rs",
"src/tests/clone.rs",
"src/tests/change.rs",
"src/tests/unrecord.rs",
"src/tests/partial.rs",
"src/tests/rm_file.rs",
"src/tests/mod.rs",
"src/tests/add_file.rs",
"src/tests/patch.rs",
"src/output",
"src/output/mod.rs",
"src/output/archive.rs",
"src/output/output.rs",
"src/diff",
"src/diff/replace.rs",
"src/diff/split.rs",
"src/diff/diff.rs",
"src/diff/mod.rs",
"src/diff/delete.rs",
"src/diff/vertex_buffer.rs",
"src/lib.rs"
]
[features]
ondisk-repos = [ "mmap", "zstd", "ignore" ]
mmap = [ "sanakirja/mmap" ]
zstd = [ "zstd-seekable" ]
text-changes = [ "regex" ]
dump = [ "tokio" ]
default = [ "ondisk-repos", "text-changes", "dump" ]
tarball = [ "tar", "flate2" ]
[dependencies]
sanakirja = "0.13.1"
byteorder = "1.3"
log = "0.4"
serde = "1.0"
serde_derive = "1.0"
bitflags = "1.2"
anyhow = "1.0"
thiserror = "1.0"
rand = "0.7"
blake3 = "0.3"
chrono = { version = "0.4", features = ["serde"] }
pijul-macros = "0.1.0"
bincode = "1.3"
data-encoding = "2.3"
lru-cache = "0.1"
diffs = "0.4"
tempfile = "3.1"
toml = "0.5"
lazy_static = "1.4"
zstd-seekable = { version = "0.1.0", optional = true }
regex = { version = "1.4", optional = true }
tokio = { version = "0.2", optional = true, features = ["io-util"] }
curve25519-dalek = { version = "3", features = [ "serde" ] }
ignore = { version = "0.4", optional = true }
tar = { version = "0.4", optional = true }
flate2 = { version = "1.0", optional = true }
[dev-dependencies]
env_logger = "0.8"
# Nix flake: development shell and packages for Pijul.
{
  description = "pijul, the sound distributed version control system";
  inputs.nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
  outputs = { self, nixpkgs }:
    let
      # Small reimplementations of nixpkgs lib helpers, so nixpkgs is
      # only imported once per system below.
      nameValuePair = name: value: { inherit name value; };
      genAttrs = names: f: builtins.listToAttrs (map (n: nameValuePair n (f n)) names);
      allSystems = [ "x86_64-linux" "aarch64-linux" "i686-linux" "x86_64-darwin" ];
      # Instantiate `f` once per supported system.
      forAllSystems = f: genAttrs allSystems (system: f {
        inherit system;
        pkgs = import nixpkgs { inherit system; };
      });
      # Build dependencies shared by the dev shell and the package.
      deps = pkgs: with pkgs; {
        nativeBuildInputs = [
          clang
          pkg-config
        ];
        buildInputs = [
          openssl
          llvmPackages.libclang
          libsodium
          xxHash
          zstd
        ] ++ lib.optionals stdenv.isDarwin [
          CoreServices
          Security
          SystemConfiguration
        ];
      };
    in
    {
      # `nix develop`: Rust toolchain plus the C dependencies above.
      devShell = forAllSystems ({ system, pkgs, ... }:
        pkgs.mkShell {
          name = "pijul";
          inherit (deps pkgs) nativeBuildInputs;
          buildInputs = with pkgs; [
            rustPackages.cargo
            rustPackages.rustfmt
            rustPackages.clippy
            libgit2
          ] ++ (deps pkgs).buildInputs;
          LIBCLANG_PATH = "${pkgs.llvmPackages.libclang}/lib";
        });
      packages = forAllSystems
        ({ system, pkgs, ... }:
          let
            # The pijul binary, optionally built with the Git import feature.
            pijul = { withGitImport ? false }: with pkgs;
              rustPlatform.buildRustPackage {
                pname = "pijul";
                version = "1.0.0-alpha";
                src = ./.;
                cargoSha256 = "05mvnzw9ms1p5hqp2yjrql42z3qvl460rd92akmx9amnhr62g6y2";
                # https://github.com/rust-lang/cargo/pull/8074
                cargoBuildFlags = lib.optionals withGitImport [ "-Zpackage-features" "--features=pijul/git" ];
                LIBCLANG_PATH = "${llvmPackages.libclang}/lib";
                # Tests are broken at the moment.
                doCheck = false;
                inherit (deps pkgs) nativeBuildInputs;
                buildInputs = (deps pkgs).buildInputs
                  ++ lib.optional withGitImport libgit2;
              };
          in
          {
            pijul = pijul { };
            pijul-git = pijul { withGitImport = true; };
          });
      defaultPackage = forAllSystems ({ system, ... }: self.packages.${system}.pijul);
    };
}
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1604762992,
"narHash": "sha256-7VB4fm5uIyqcMaUAgyx4MCqQEjsl9xZDdDzJMoi1DHo=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a52e974cff8fb80c427e0d55c01b3b8c770ccec4",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}
# Pijul
This is the repository of Pijul, a sound and fast distributed version control system based on a mathematical theory of asynchronous work.
## License
The license is GPL-2.0, meaning in particular that you can't relicense it under an incompatible license. Also, if you decide to make your own version of Pijul after looking at this code, this *is* derived work under copyright law, and you still have to respect this license.
## Documentation
While we are working on documentation, here are a few useful commands:
### Create a repository
~~~
$ pijul init
~~~
## Add files
If you want to track all the files in a folder, and record that change, do:
~~~
$ pijul rec that_folder
~~~
If you want to add files to track:
~~~
$ pijul add these_files
~~~
## Make a change
Pijul is based on changes, so perhaps the most important command is the one that creates them:
~~~
$ pijul rec
~~~
You will be presented with a change draft, which you can approve or edit by deleting sections, where sections are introduced by a number (of the form `1.`) followed by the name of an operation (example: `File addition: "my_file" in "/" 420`).
You can of course try other edits, but they are not guaranteed to work.
## Collaborate
A *remote* is the reference to another repository, for example `pijul@nest.pijul.com:manual` for this repository, or `me@nest.pijul.com:pijul/manual`, `https://nest.pijul.com/pijul/manual`, or a local path `/path/to/my/repository`.
The `remote` command allows one to view the saved remotes and possibly delete them.
The `push` and `pull` commands exchange changes with remotes.
Cloning a repository needs a target directory at the moment; if none is given, the current directory is used as the target:
~~~
$ pijul clone https://nest.pijul.com/pijul/pijul
~~~
Hint: clones over SSH are almost always faster.
## Going back in time
If you want to reset your files to the last recorded version, just do:
~~~
$ pijul reset
~~~
If you want to remove some changes from the history:
~~~
$ pijul unrecord PREFIX_OF_CHANGE_HASH
~~~
Where `PREFIX_OF_CHANGE_HASH` is an unambiguous prefix of a change hash, which can be found by doing `pijul log`.
## Import a Git repository
If you have compiled Pijul with `--features git`, the `git` command allows one to import changes from a Git repository. This works by replaying the repository history, and is not particularly optimised, hence it may take a long time on large repositories.
One feature missing from the Git import at the moment is symbolic links, which are treated as regular files by that command (i.e. the same file might get imported multiple times).
## About channels
Channels are a way to maintain two related versions of a repository in the same place (a bit like branches in Git).
However, channels are different from Git branches, and do not serve the same purpose. In Pijul, **independent changes commute**, which makes **branches useless**.
The main differences with Git branches are:
- The identity of a change doesn't depend on the branch it is on, or in other words, rebase and merge are the same operation in Pijul.
- This implies that conflicts do not mysteriously come back after you solve them (which is what `git rerere` is for).
- Also, conflicts are between changes, so the resolution of a conflict on one channel solves the same conflict in all other channels.
[workspace]
members = [ "pijul-macros", "pijul", "libpijul" ]
[patch.crates-io]
pijul = { path = "pijul" }
pijul-macros = { path = "pijul-macros" }
libpijul = { path = "libpijul" }
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "addr2line"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
[[package]]
name = "aho-corasick"
version = "0.7.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
dependencies = [
"memchr",
]
[[package]]
name = "ansi_term"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "anyhow"
version = "1.0.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7"
[[package]]
name = "arrayref"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"
[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "backtrace"
version = "0.3.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2baad346b2d4e94a24347adeee9c7a93f412ee94b9cc26e5b59dea23848e9f28"
dependencies = [
"addr2line",
"cfg-if 1.0.0",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
[[package]]
name = "bincode"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d"
dependencies = [
"byteorder",
"serde",
]
[[package]]
name = "bindgen"
version = "0.55.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b13ce559e6433d360c26305643803cb52cfbabbc2b9c47ce04a58493dfb443"
dependencies = [
"bitflags",
"cexpr",
"cfg-if 0.1.10",
"clang-sys",
"clap 2.33.3",
"env_logger 0.7.1",
"lazy_static",
"lazycell",
"log",
"peeking_take_while",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"which",
]
[[package]]
name = "bit-vec"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0dc55f2d8a1a85650ac47858bb001b4c0dd73d79e3c455a842925e68d29cd3"
[[package]]
name = "bitflags"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "blake2b_simd"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "blake3"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9ff35b701f3914bdb8fad3368d822c766ef2858b2583198e41639b936f09d3f"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if 0.1.10",
"constant_time_eq",
"crypto-mac",
"digest",
]
[[package]]
name = "bstr"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "473fc6b38233f9af7baa94fb5852dca389e3d95b8e21c8e3719301462c5d9faf"
dependencies = [
"memchr",
]
[[package]]
name = "bumpalo"
version = "3.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820"
[[package]]
name = "byteorder"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
[[package]]
name = "bytes"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38"
[[package]]
name = "cc"
version = "1.0.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1770ced377336a88a67c473594ccc14eca6f4559217c34f64aac8f83d641b40"
dependencies = [
"jobserver",
]
[[package]]
name = "cexpr"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"serde",
"time",
"winapi 0.3.9",
]
[[package]]
name = "clang-sys"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa785e9017cb8e8c8045e3f096b7d1ebc4d7337cceccdca8d678a27f788ac133"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "clap"
version = "2.33.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
dependencies = [
"ansi_term",
"atty",
"bitflags",
"strsim 0.8.0",
"textwrap 0.11.0",
"unicode-width",
"vec_map",
]
[[package]]
name = "clap"
version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bd1061998a501ee7d4b6d449020df3266ca3124b941ec56cf2005c3779ca142"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"indexmap",
"lazy_static",
"os_str_bytes",
"strsim 0.10.0",
"termcolor",
"textwrap 0.12.1",
"unicode-width",
"vec_map",
]
[[package]]
name = "clap_derive"
version = "3.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "370f715b81112975b1b69db93e0b56ea4cd4e5002ac43b2da8474106a54096a1"
dependencies = [
"heck",
"proc-macro-error",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "cloudabi"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4344512281c643ae7638bbabc3af17a11307803ec8f0fcad9fae512a8bf36467"
dependencies = [
"bitflags",
]
[[package]]
name = "cmake"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e56268c17a6248366d66d4a47a3381369d068cce8409bb1716ed77ea32163bb"
dependencies = [
"cc",
]
[[package]]
name = "constant_time_eq"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
[[package]]
name = "core-foundation"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "core-foundation-sys"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b"
[[package]]
name = "crc32fast"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "crossbeam-utils"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
dependencies = [
"autocfg",
"cfg-if 0.1.10",
"lazy_static",
]
[[package]]
name = "crypto-mac"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab"
dependencies = [
"generic-array",
"subtle",
]
[[package]]
name = "cryptovec"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41a62a89db1efd82c9e3ebe8a3091a9f045087415b9e9842c5daf8895bed11d2"
dependencies = [
"kernel32-sys",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "curve25519-dalek"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8492de420e9e60bc9a1d66e2dbb91825390b738a388606600663fc529b4b307"
dependencies = [
"byteorder",
"digest",
"rand_core",
"serde",
"subtle",
"zeroize",
]
[[package]]
name = "data-encoding"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993a608597367c6377b258c25d7120740f00ed23a2252b729b1932dd7866f908"
[[package]]
name = "diffs"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb4e4582f27b475e229dc0926b12082a6fb8662c7cb25ed1465243486fe181a2"
[[package]]
name = "digest"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
]
[[package]]
name = "dirs"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3"
dependencies = [
"cfg-if 0.1.10",
"dirs-sys",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if 1.0.0",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99de365f605554ae33f115102a02057d4fc18b01f3284d6870be0938743cfe7d"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
]
[[package]]
name = "dtoa"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b"
[[package]]
name = "edit"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "323032447eba6f5aca88b46d6e7815151c16c53e4128569420c09d7840db3bfc"
dependencies = [
"tempfile",
"which",
]
[[package]]
name = "encoding_rs"
version = "0.8.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "801bbab217d7f79c0062f4f7205b5d4427c6d1a7bd7aafdd1475f7c59d62b283"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "env_logger"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
dependencies = [
"atty",
"humantime 1.3.0",
"log",
"regex",
"termcolor",
]
[[package]]
name = "env_logger"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54532e3223c5af90a6a757c90b5c5521564b07e5e7a958681bcd2afad421cdcd"
dependencies = [
"atty",
"humantime 2.0.1",
"log",
"regex",
"termcolor",
]
[[package]]
name = "filetime"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c122a393ea57648015bf06fbd3d372378992e86b9ff5a7a497b076a28c79efe"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall",
"winapi 0.3.9",
]
[[package]]
name = "flate2"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
dependencies = [
"cfg-if 1.0.0",
"crc32fast",
"libc",
"miniz_oxide",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "form_urlencoded"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ece68d15c92e84fa4f19d3780f1294e5ca82a78a6d515f1efaabcc144688be00"
dependencies = [
"matches",
"percent-encoding",
]
[[package]]
name = "fs2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
dependencies = [
"libc",
"winapi 0.3.9",
]
[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
dependencies = [
"bitflags",
"fuchsia-zircon-sys",
]
[[package]]
name = "fuchsia-zircon-sys"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "futures"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b3b0c040a1fe6529d30b3c5944b280c7f0dcb2930d2c3062bca967b602583d0"
dependencies = [
"futures-channel",
"futures-core",
"futures-executor",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b7109687aa4e177ef6fe84553af6280ef2778bdb7783ba44c9dc3399110fe64"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
name = "futures-core"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "847ce131b72ffb13b6109a221da9ad97a64cbe48feb1028356b836b47b8f1748"
[[package]]
name = "futures-executor"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4caa2b2b68b880003057c1dd49f1ed937e38f22fcf6c212188a121f08cf40a65"
dependencies = [
"futures-core",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-io"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "611834ce18aaa1bd13c4b374f5d653e1027cf99b6b502584ff8c9a64413b30bb"
[[package]]
name = "futures-macro"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77408a692f1f97bcc61dc001d752e00643408fbc922e4d634c655df50d595556"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "futures-sink"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f878195a49cee50e006b02b93cf7e0a95a38ac7b776b4c4d9cc1207cd20fcb3d"
[[package]]
name = "futures-task"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c554eb5bf48b2426c4771ab68c6b14468b6e76cc90996f528c3338d761a4d0d"
dependencies = [
"once_cell",
]
[[package]]
name = "futures-util"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d304cff4a7b99cfb7986f7d43fbe93d175e72e704a8860787cc95e9ffd85cbd2"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-macro",
"futures-sink",
"futures-task",
"memchr",
"pin-project 1.0.1",
"pin-utils",
"proc-macro-hack",
"proc-macro-nested",
"slab",
]
[[package]]
name = "generic-array"
version = "0.14.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
dependencies = [
"cfg-if 0.1.10",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
name = "gimli"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce"
[[package]]
name = "git2"
version = "0.13.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca6f1a0238d7f8f8fd5ee642f4ebac4dbc03e03d1f78fbe7a3ede35dcf7e2224"
dependencies = [
"bitflags",
"libc",
"libgit2-sys",
"log",
"openssl-probe",
"openssl-sys",
"url",
]
[[package]]
name = "glob"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "globset"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a"
dependencies = [
"aho-corasick",
"bstr",
"fnv",
"log",
"regex",
]
[[package]]
name = "h2"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e4728fd124914ad25e99e3d15a9361a879f6620f63cb56bbb08f95abb97a535"
dependencies = [
"bytes",
"fnv",
"futures-core",
"futures-sink",
"futures-util",
"http",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
"tracing-futures",
]
[[package]]
name = "hashbrown"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
[[package]]
name = "heck"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
dependencies = [
"libc",
]
[[package]]
name = "http"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http-body"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b"
dependencies = [
"bytes",
"http",
]
[[package]]
name = "httparse"
version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9"
[[package]]
name = "httpdate"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47"
[[package]]
name = "human-panic"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39f357a500abcbd7c5f967c1d45c8838585b36743823b9d43488f24850534e36"
dependencies = [
"backtrace",
"os_type",
"serde",
"serde_derive",
"termcolor",
"toml",
"uuid",
]
[[package]]
name = "humantime"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
dependencies = [
"quick-error",
]
[[package]]
name = "humantime"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c1ad908cc71012b7bea4d0c53ba96a8cba9962f048fa68d143376143d863b7a"
[[package]]
name = "hyper"
version = "0.13.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ad767baac13b44d4529fcf58ba2cd0995e36e7b435bc5b039de6f47e880dbf"
dependencies = [
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"h2",
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
"pin-project 1.0.1",
"socket2",
"tokio",
"tower-service",
"tracing",
"want",
]
[[package]]
name = "hyper-tls"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d979acc56dcb5b8dddba3917601745e877576475aa046df3226eabdecef78eed"
dependencies = [
"bytes",
"hyper",
"native-tls",
"tokio",
"tokio-tls",
]
[[package]]
name = "idna"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9"
dependencies = [
"matches",
"unicode-bidi",
"unicode-normalization",
]
[[package]]
name = "ignore"
version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22dcbf2a4a289528dbef21686354904e1c694ac642610a9bff9e7df730d9ec72"
dependencies = [
"crossbeam-utils",
"globset",
"lazy_static",
"log",
"memchr",
"regex",
"same-file",
"thread_local",
"walkdir",
"winapi-util",
]
[[package]]
name = "indexmap"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2"
dependencies = [
"autocfg",
"hashbrown",
]
[[package]]
name = "instant"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb1fc4429a33e1f80d41dc9fea4d108a88bec1de8053878898ae448a0b52f613"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "iovec"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
dependencies = [
"libc",
]
[[package]]
name = "ipnet"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135"
[[package]]
name = "itoa"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6"
[[package]]
name = "jobserver"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
dependencies = [
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca059e81d9486668f12d455a4ea6daa600bd408134cd17e3d3fb5a32d1f016f8"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
version = "0.2.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614"
[[package]]
name = "libgit2-sys"
version = "0.12.14+1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f25af58e6495f7caf2919d08f212de550cfa3ed2f5e744988938ea292b9f549"
dependencies = [
"cc",
"libc",
"libssh2-sys",
"libz-sys",
"openssl-sys",
"pkg-config",
]
[[package]]
name = "libloading"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1090080fe06ec2648d0da3881d9453d97e71a45f00eb179af7fdd7e3f686fdb0"
dependencies = [
"cfg-if 1.0.0",
"winapi 0.3.9",
]
[[package]]
name = "libpijul"
version = "1.0.0-alpha.1"
dependencies = [
"anyhow",
"bincode",
"bitflags",
"blake3",
"byteorder",
"chrono",
"curve25519-dalek",
"data-encoding",
"diffs",
"env_logger 0.8.1",
"flate2",
"ignore",
"lazy_static",
"log",
"lru-cache",
"pijul-macros",
"rand",
"regex",
"sanakirja",
"serde",
"serde_derive",
"tar",
"tempfile",
"thiserror",
"tokio",
"toml",
"zstd-seekable",
]
[[package]]
name = "libsodium-sys"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a685b64f837b339074115f2e7f7b431ac73681d08d75b389db7498b8892b8a58"
dependencies = [
"cc",
"libc",
"pkg-config",
]
[[package]]
name = "libssh2-sys"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca46220853ba1c512fc82826d0834d87b06bcd3c2a42241b7de72f3d2fe17056"
dependencies = [
"cc",
"libc",
"libz-sys",
"openssl-sys",
"pkg-config",
"vcpkg",
]
[[package]]
name = "libz-sys"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linked-hash-map"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a"
[[package]]
name = "lock_api"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28247cc5a5be2f05fbcd76dd0cf2c7d3b5400cb978a28042abcd4fa0b3f8261c"
dependencies = [
"scopeguard",
]
[[package]]
name = "log"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
dependencies = [
"cfg-if 0.1.10",
]
[[package]]
name = "lru-cache"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "matches"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
[[package]]
name = "memchr"
version = "2.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
[[package]]
name = "memmap"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
dependencies = [
"libc",
"winapi 0.3.9",
]
[[package]]
name = "mime"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
[[package]]
name = "mime_guess"
version = "2.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212"
dependencies = [
"mime",
"unicase",
]
[[package]]
name = "miniz_oxide"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "mio"
version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
dependencies = [
"cfg-if 0.1.10",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"iovec",
"kernel32-sys",
"libc",
"log",
"miow",
"net2",
"slab",
"winapi 0.2.8",
]
[[package]]
name = "mio-uds"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0"
dependencies = [
"iovec",
"libc",
"mio",
]
[[package]]
name = "miow"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
dependencies = [
"kernel32-sys",
"net2",
"winapi 0.2.8",
"ws2_32-sys",
]
[[package]]
name = "native-tls"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fcc7939b5edc4e4f86b1b4a04bb1498afaaf871b1a6691838ed06fcb48d3a3f"
dependencies = [
"lazy_static",
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]]
name = "net2"
version = "0.2.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ebc3ec692ed7c9a255596c67808dee269f64655d8baf7b4f0638e51ba1d6853"
dependencies = [
"cfg-if 0.1.10",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "nom"
version = "5.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
dependencies = [
"memchr",
"version_check",
]
[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "object"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397"
[[package]]
name = "once_cell"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0"
[[package]]
name = "openssl"
version = "0.10.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d575eff3665419f9b83678ff2815858ad9d11567e082f5ac1814baba4e2bcb4"
dependencies = [
"bitflags",
"cfg-if 0.1.10",
"foreign-types",
"lazy_static",
"libc",
"openssl-sys",
]
[[package]]
name = "openssl-probe"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"
[[package]]
name = "openssl-sys"
version = "0.9.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a842db4709b604f0fe5d1170ae3565899be2ad3d9cbc72dedc789ac0511f78de"
dependencies = [
"autocfg",
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "os_str_bytes"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb2e1c3ee07430c2cf76151675e583e0f19985fa6efae47d6848a3e2c824f85"
[[package]]
name = "os_type"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edc011af0ae98b7f88cf7e4a83b70a54a75d2b8cb013d6efd02e5956207e9eb"
dependencies = [
"regex",
]
[[package]]
name = "parking_lot"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4893845fa2ca272e647da5d0e46660a314ead9c2fdd9a883aabc32e481a8733"
dependencies = [
"instant",
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b"
dependencies = [
"cfg-if 0.1.10",
"cloudabi",
"instant",
"libc",
"redox_syscall",
"smallvec",
"winapi 0.3.9",
]
[[package]]
name = "peeking_take_while"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
[[package]]
name = "percent-encoding"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pijul"
version = "1.0.0-alpha.1"
dependencies = [
"anyhow",
"byteorder",
"chrono",
"clap 3.0.0-beta.2",
"cryptovec",
"data-encoding",
"dirs-next",
"edit",
"env_logger 0.8.1",
"futures",
"futures-util",
"git2",
"human-panic",
"ignore",
"lazy_static",
"libpijul",
"log",
"rand",
"regex",
"reqwest",
"rpassword",
"sanakirja",
"serde",
"serde_derive",
"serde_json",
"tempfile",
"thiserror",
"thrussh",
"thrussh-keys",
"tokio",
"toml",
"whoami",
]
[[package]]
name = "pijul-macros"
version = "0.1.0"
dependencies = [
"proc-macro2",
"quote",
"regex",
"syn",
]
[[package]]
name = "pin-project"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15"
dependencies = [
"pin-project-internal 0.4.27",
]
[[package]]
name = "pin-project"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee41d838744f60d959d7074e3afb6b35c7456d0f61cad38a24e35e6553f73841"
dependencies = [
"pin-project-internal 1.0.1",
]
[[package]]
name = "pin-project-internal"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-internal"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81a4ffa594b66bff340084d4081df649a7dc049ac8d7fc458d8e628bfbbb2f86"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-lite"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c917123afa01924fc84bb20c4c03f004d9c38e5127e3c039bbf7f4b9c76a2f6b"
[[package]]
name = "pin-utils"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkg-config"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
[[package]]
name = "ppv-lite86"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2",
"quote",
"syn",
"version_check",
]
[[package]]
name = "proc-macro-error-attr"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2",
"quote",
"version_check",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro-nested"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a"
[[package]]
name = "proc-macro2"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
dependencies = [
"unicode-xid",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom",
"libc",
"rand_chacha",
"rand_core",
"rand_hc",
]
[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core",
]
[[package]]
name = "redox_syscall"
version = "0.1.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
[[package]]
name = "redox_users"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d"
dependencies = [
"getrandom",
"redox_syscall",
"rust-argon2",
]
[[package]]
name = "regex"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
"thread_local",
]
[[package]]
name = "regex-syntax"
version = "0.6.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
[[package]]
name = "remove_dir_all"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "reqwest"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9eaa17ac5d7b838b7503d118fa16ad88f440498bf9ffe5424e621f93190d61e"
dependencies = [
"base64",
"bytes",
"encoding_rs",
"futures-core",
"futures-util",
"http",
"http-body",
"hyper",
"hyper-tls",
"ipnet",
"js-sys",
"lazy_static",
"log",
"mime",
"mime_guess",
"native-tls",
"percent-encoding",
"pin-project-lite",
"serde",
"serde_urlencoded",
"tokio",
"tokio-tls",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"winreg",
]
[[package]]
name = "rpassword"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d755237fc0f99d98641540e66abac8bc46a0652f19148ac9e21de2da06b326c9"
dependencies = [
"libc",
"winapi 0.3.9",
]
[[package]]
name = "rust-argon2"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dab61250775933275e84053ac235621dfb739556d5c54a2f2e9313b7cf43a19"
dependencies = [
"base64",
"blake2b_simd",
"constant_time_eq",
"crossbeam-utils",
]
[[package]]
name = "rustc-demangle"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "ryu"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "sanakirja"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f32a19c77f82ac8c2c3a3d7a6eb8b3faca7c5f6eb5d8071b2046bf11cd257f9f"
dependencies = [
"fs2",
"git2",
"log",
"memmap",
"parking_lot",
"rand",
]
[[package]]
name = "schannel"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75"
dependencies = [
"lazy_static",
"winapi 0.3.9",
]
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "security-framework"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1759c2e3c8580017a484a7ac56d3abc5a6c1feadf88db2f3633f12ae4268c69"
dependencies = [
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f99b9d5e26d2a71633cc4f2ebae7cc9f874044e0c351a27e17892d76dce5678b"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]]
name = "serde"
version = "1.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.117"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbd1ae72adb44aab48f325a02444a5fc079349a8d804c1fc922aed3f7454c74e"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.59"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97"
dependencies = [
"dtoa",
"itoa",
"serde",
"url",
]
[[package]]
name = "shlex"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
[[package]]
name = "slab"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252"
[[package]]
name = "socket2"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fd8b795c389288baa5f355489c65e71fd48a02104600d15c4cfbc561e9e429d"
dependencies = [
"cfg-if 0.1.10",
"libc",
"redox_syscall",
"winapi 0.3.9",
]
[[package]]
name = "strsim"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "subtle"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "343f3f510c2915908f155e94f17220b19ccfacf2a64a2a5d8004f2c3e311e7fd"
[[package]]
name = "syn"
version = "1.0.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc371affeffc477f42a221a1e4297aedcea33d47d19b61455588bd9d8f6b19ac"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "tar"
version = "0.4.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "489997b7557e9a43e192c527face4feacc78bfbe6eed67fd55c4c9e381cba290"
dependencies = [
"filetime",
"libc",
"redox_syscall",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
dependencies = [
"cfg-if 0.1.10",
"libc",
"rand",
"redox_syscall",
"remove_dir_all",
"winapi 0.3.9",
]
[[package]]
name = "termcolor"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f"
dependencies = [
"winapi-util",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "textwrap"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "203008d98caf094106cfaba70acfed15e18ed3ddb7d94e49baec153a2b462789"
dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
dependencies = [
"lazy_static",
]
[[package]]
name = "threadpool"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
[[package]]
name = "thrussh"
version = "0.29.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e80d1ea46aaf8cbd6431c3b0b264251ce78f1ab3a7258d3b4ce6a8e7eae6d0b4"
dependencies = [
"anyhow",
"bitflags",
"byteorder",
"cryptovec",
"flate2",
"futures",
"log",
"openssl",
"thiserror",
"thrussh-keys",
"thrussh-libsodium",
"tokio",
]
[[package]]
name = "thrussh-keys"
version = "0.18.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a61b8ff0dc67e4fcf82904da73cd18a2ec6b93c92bc049826f567a3574e17c2"
dependencies = [
"anyhow",
"bit-vec",
"byteorder",
"cryptovec",
"data-encoding",
"dirs",
"futures",
"log",
"num-bigint",
"num-integer",
"openssl",
"serde",
"serde_derive",
"thiserror",
"thrussh-libsodium",
"tokio",
"yasna",
]
[[package]]
name = "thrussh-libsodium"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe89c70d27b1cb92e13bc8af63493e890d0de46dae4df0e28233f62b4ed9500"
dependencies = [
"lazy_static",
"libc",
"libsodium-sys",
"pkg-config",
"vcpkg",
]
[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"libc",
"wasi 0.10.0+wasi-snapshot-preview1",
"winapi 0.3.9",
]
[[package]]
name = "tinyvec"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b78a366903f506d2ad52ca8dc552102ffdd3e937ba8a227f024dc1d1eae28575"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6d7ad61edd59bfcc7e80dababf0f4aed2e6d5e0ba1659356ae889752dfc12ff"
dependencies = [
"bytes",
"fnv",
"futures-core",
"iovec",
"lazy_static",
"libc",
"memchr",
"mio",
"mio-uds",
"num_cpus",
"pin-project-lite",
"slab",
"tokio-macros",
]
[[package]]
name = "tokio-macros"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tokio-tls"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a70f4fcd7b3b24fb194f837560168208f669ca8cb70d0c4b862944452396343"
dependencies = [
"native-tls",
"tokio",
]
[[package]]
name = "tokio-util"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"log",
"pin-project-lite",
"tokio",
]
[[package]]
name = "toml"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75cf45bb0bef80604d001caaec0d09da99611b3c0fd39d3080468875cdb65645"
dependencies = [
"serde",
]
[[package]]
name = "tower-service"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860"
[[package]]
name = "tracing"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0987850db3733619253fe60e17cb59b82d37c7e6c0236bb81e4d6b87c879f27"
dependencies = [
"cfg-if 0.1.10",
"log",
"pin-project-lite",
"tracing-core",
]
[[package]]
name = "tracing-core"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f"
dependencies = [
"lazy_static",
]
[[package]]
name = "tracing-futures"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c"
dependencies = [
"pin-project 0.4.27",
"tracing",
]
[[package]]
name = "try-lock"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
[[package]]
name = "typenum"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
[[package]]
name = "unicase"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
dependencies = [
"version_check",
]
[[package]]
name = "unicode-bidi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
dependencies = [
"matches",
]
[[package]]
name = "unicode-normalization"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7f98e67a4d84f730d343392f9bfff7d21e3fca562b9cb7a43b768350beeddc6"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db8716a166f290ff49dabc18b44aa407cb7c6dbe1aa0971b44b8a24b0ca35aae"
[[package]]
name = "unicode-width"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
[[package]]
name = "unicode-xid"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
[[package]]
name = "url"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5909f2b0817350449ed73e8bcd81c8c3c8d9a7a5d8acba4b27db277f1868976e"
dependencies = [
"form_urlencoded",
"idna",
"matches",
"percent-encoding",
]
[[package]]
name = "uuid"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11"
dependencies = [
"rand",
]
[[package]]
name = "vcpkg"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6454029bf181f092ad1b853286f23e2c507d8e8194d01d92da4a55c274a5508c"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
[[package]]
name = "walkdir"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d"
dependencies = [
"same-file",
"winapi 0.3.9",
"winapi-util",
]
[[package]]
name = "want"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
dependencies = [
"log",
"try-lock",
]
[[package]]
name = "wasi"
version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "wasm-bindgen"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ac64ead5ea5f05873d7c12b545865ca2b8d28adfc50a49b84770a3a97265d42"
dependencies = [
"cfg-if 0.1.10",
"serde",
"serde_json",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f22b422e2a757c35a73774860af8e112bff612ce6cb604224e8e47641a9e4f68"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7866cab0aa01de1edf8b5d7936938a7e397ee50ce24119aef3e1eaa3b6171da"
dependencies = [
"cfg-if 0.1.10",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b13312a745c08c469f0b292dd2fcd6411dba5f7160f593da6ef69b64e407038"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f249f06ef7ee334cc3b8ff031bfc11ec99d00f34d86da7498396dc1e3b1498fe"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d649a3145108d7d3fbcde896a468d1bd636791823c9921135218ad89be08307"
[[package]]
name = "web-sys"
version = "0.3.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bf6ef87ad7ae8008e15a355ce696bed26012b7caa21605188cfd8214ab51e2d"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "which"
version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724"
dependencies = [
"libc",
]
[[package]]
name = "whoami"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7884773ab69074615cb8f8425d0e53f11710786158704fca70f53e71b0e05504"
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "winreg"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69"
dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "ws2_32-sys"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "xattr"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c"
dependencies = [
"libc",
]
[[package]]
name = "yasna"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de7bff972b4f2a06c85f6d8454b09df153af7e3a4ec2aac81db1b105b684ddb"
dependencies = [
"bit-vec",
"num-bigint",
]
[[package]]
name = "zeroize"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f33972566adbd2d3588b0491eb94b98b43695c4ef897903470ede4f3f5a28a"
[[package]]
name = "zstd-seekable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6da0308f103ce1f046228c831786f60ebad8efff658d7e056fd94c761956ec2d"
dependencies = [
"anyhow",
"bindgen",
"cc",
"cmake",
"libc",
"thiserror",
"threadpool",
]