pijul_org / pijul

Init

By pmeunier on February 27, 2018
This patch is not signed.
9tNKV1UL5cYbw3gpDScSfoZSRthQ3a6KHHKMQTjA1CyCumqrVcXey6t37qJMHURpZQwZt1UsKoj3bhNPfevwdhTK
This patch is in the following branches:
latest
master
testing


version = "0.9.1"
thrussh-keys = "0.9.4"



















































































































































































































































































                self.delete_old_pseudo_edges(branch, patch_id, to, children)?
    fn delete_old_pseudo_edges(&mut self, branch: &mut Branch, patch_id: PatchId, to: Key<PatchId>, pseudo_edges: &mut HashSet<Edge>) -> Result<()> {
            let is_zombie_marker = to_edge.introduced_by != patch_id && loop {
                match self.iter_nodes(branch, Some((to, Some(&e)))).next() {
                    Some((k, v)) if k == to && v.dest == e.dest && v.flag == e.flag =>
                        if v.introduced_by != patch_id {
                            break true
                        },
                    _ => break false
            };
    /// Recursively find all ancestors by doing a DFS, and collect all
    /// edges until finding an alive ancestor. This is especially
    /// useful to compute zombie edges resulting from a missing
    /// context.
    ///
    /// * `find_alive` - work stack of keys still to visit; drained by
    ///   this call.
    /// * `branch` - branch whose graph is traversed.
    /// * `alive` - receives every key found alive (via `is_alive`).
    /// * `ancestors` - receives each `(key, edge)` pair traversed while
    ///   climbing through non-alive nodes.
    pub fn find_alive_ancestors_with_edges(&self,
                                           find_alive: &mut FindAlive,
                                           branch: &Branch,
                                           alive: &mut Vec<Key<PatchId>>,
                                           ancestors: &mut Vec<(Key<PatchId>, Edge)>) {

        while let Some(a) = find_alive.pop() {

            if self.is_alive(branch, a) {
                // This node is alive.
                alive.push(a);
            } else {
                // Dead node: walk its parent edges upwards. Iteration
                // starts at PARENT_EDGE|DELETED_EDGE; the take_while
                // accepts flags up to PARENT|DELETED|FOLDER.
                // NOTE(review): this relies on the numeric ordering of
                // EdgeFlags values to bound the scan — confirm against
                // the flag bit layout before changing it.
                let e = Edge::zero(EdgeFlags::PARENT_EDGE|EdgeFlags::DELETED_EDGE);
                for (_, v) in self.iter_nodes(&branch, Some((a, Some(&e))))
                    .take_while(|&(k, v)| k == a && v.flag <= e.flag | EdgeFlags::FOLDER_EDGE) {

                        debug!("find_alive_ancestors_with_edges: {:?}", v);
                        // Record the traversed edge and continue the DFS
                        // from its destination (the parent node).
                        ancestors.push((a, *v));
                        find_alive.push(v.dest)
                    }
            }
        }
    }

    pub fn find_alive_ancestors(&self,
                                find_alive: &mut FindAlive,
                                branch: &Branch,
                                alive: &mut Vec<Key<PatchId>>,
                                files: &mut HashSet<(Key<PatchId>, Edge)>) -> bool {
                    .take_while(|&(k, v)| k == a) {
                            file = Some(a) //.insert((a, *v));

                        .take_while(|&(k, v)| k == a) {
                            debug!("find_alive_ancestors: {:?}", v);
                            files.insert((a, *v));
    /// Depth-first search from every key on the `find_alive` stack:
    /// keys found alive are collected into `alive`; dead keys are
    /// expanded through their non-parent edges (iteration starting at
    /// the DELETED_EDGE flag), pushing deleted targets back onto the
    /// stack and collecting non-deleted targets as alive.
    ///
    /// NOTE(review): `descendants` is not written to anywhere in this
    /// body; it is kept for interface parity with the ancestor variant.
    pub fn find_alive_descendants_with_edges(&self,
                                         find_alive: &mut FindAlive,
                                         branch: &Branch,
                                         alive: &mut Vec<Key<PatchId>>,
                                         descendants: &mut Vec<(Key<PatchId>, Edge)>) {
        while let Some(node) = find_alive.pop() {
            debug!("find_alive_descendants_with_edges: popped {:?}", node);
            if !self.is_alive(branch, node) {
                // Dead node: scan its edges, skipping parent edges.
                let start = Edge::zero(EdgeFlags::DELETED_EDGE);
                let outgoing = self.iter_nodes(&branch, Some((node, Some(&start))))
                    .take_while(|&(k, _)| k == node)
                    .filter(|&(_, e)| !e.flag.contains(EdgeFlags::PARENT_EDGE));
                for (_, edge) in outgoing {
                    debug!("find_alive_descendants_with_edges: {:?}", edge);
                    if edge.flag.contains(EdgeFlags::DELETED_EDGE) {
                        // Still deleted: keep descending from here.
                        debug!("find_alive_descendants: {:?}", edge);
                        find_alive.push(edge.dest)
                    } else {
                        // Non-deleted target: it is an alive descendant.
                        debug!("alive in for: {:?}", edge.dest);
                        alive.push(edge.dest)
                    }
                }
            } else {
                alive.push(node);
            }
        }
    }

    /// Recursively find all descendants by doing a DFS on deleted
    /// edges (including folder edges), and collect all edges until
    /// finding an alive or zombie descendant.
    pub fn find_alive_descendants(&self,
                                  find_alive: &mut FindAlive,
                                  branch: &Branch,
                                  alive: &mut Vec<Key<PatchId>>) -> bool {
                    .take_while(|&(k, v)| k == a)
                    .filter(|&(k, v)| !v.flag.contains(EdgeFlags::PARENT_EDGE))
                                  branch: &Branch,
                                  find_alive: &mut FindAlive,
                                  alive: &mut HashSet<Key<PatchId>>,
                                  file: &mut Option<Key<PatchId>>,
                                  current: Key<PatchId>) {
                                branch: &Branch,
                                find_alive: &mut FindAlive,
                                alive: &mut HashSet<Key<PatchId>>,
                                file: &mut Option<Key<PatchId>>,
                                current: Key<PatchId>) {
        let (pending, local_pending) = {
            let mut record = RecordState::new();
            self.record(&mut record, branch_name, &r, None)?;
            let (changes, local) = record.finish();
            let mut p = UnsignedPatch::empty();
            p.changes = changes.into_iter().flat_map(|x| x.into_iter()).map(|x| self.globalize_change(x)).collect();
            p.dependencies = self.dependencies(&branch, p.changes.iter());
            (p.leave_unsigned(), local)
        };
                    info!("get_patch returned {:?}", self.get_patch(&branch.patches, internal));
            info!("sanakirja put internal {:?} {:?}", patch_hash, internal);
                             inode_updates: &[InodeUpdate],
        info!("registering a patch with {} changes: {:?}",
              patch.changes().len(),
              patch);
        debug!("applying patch");

        let mut relatives = Vec::new();
        let mut unknown_children = Vec::new();
                Change::NewEdges { flag, ref edges, .. } => {
                            &mut relatives,
                            &mut relatives,
                            &mut unknown_children,
                        self.repair_missing_up_context(
                            &mut find_alive,
                            branch,
                            c,
                            patch_id,
                            &mut relatives,
                        )?
                        self.repair_missing_down_context(
                            &mut find_alive,
                            branch,
                            c,
                            patch_id,
                            &mut relatives,
                        )?
        relatives: &mut Vec<(Key<PatchId>, Edge)>,
            self.repair_missing_up_context(
                find_alive,
                branch,
                up_context,
                patch_id,
                relatives,
            )?;

            self.repair_missing_down_context(
                find_alive,
                branch,
                down_context,
                patch_id,
                relatives,
            )?;
        relatives: &mut Vec<(Key<PatchId>, Edge)>,
        unknown_children: &mut Vec<(Key<PatchId>, PatchId)>,
            unknown_children.clear();
            unknown_children.extend(
                self.iter_nodes(branch, Some((dest, None)))
                    .take_while(|&(k, v)| {
                        k == dest &&
                            v.flag | EdgeFlags::FOLDER_EDGE <=
                                EdgeFlags::PSEUDO_EDGE | EdgeFlags::FOLDER_EDGE
                    })
                    .filter(|&(_, v)| {
                        let ext = self.external_hash(v.introduced_by).to_owned();
                        let is_unknown = v.introduced_by != patch_id &&
                            !dependencies.contains(&ext);
                        debug!(
                            "child introduced_by = {:?}, ext = {:?}",
                            v.introduced_by,
                            ext
                        );
                        debug!("child is_unknown({}): {:?} {:?}", line!(), v, is_unknown);
                        is_unknown
                    })
                    .map(|(k, v)| (k.to_owned(), v.introduced_by)),
            );
            for (ch, patch) in unknown_children.drain(..) {
                self.repair_missing_up_context(
                    find_alive,
                    branch,
                    ch,
                    patch,
                    relatives,
                )?
            unknown_children.extend(
                self.iter_nodes(branch, Some((dest, Some(&e))))
                    .take_while(|&(k, v)| {
                        k == dest &&
                            v.flag | EdgeFlags::FOLDER_EDGE ==
                                EdgeFlags::PARENT_EDGE | EdgeFlags::FOLDER_EDGE
                    })
                    .filter(|&(_, v)| {
                        let ext = self.external_hash(v.introduced_by).to_owned();
                        let is_unknown = v.introduced_by != patch_id &&
                            !dependencies.contains(&ext);
                        debug!("parent is_unknown({}): {:?} {:?}", line!(), v, is_unknown);
                        is_unknown
                    })
                    .map(|(k, v)| (k.to_owned(), v.introduced_by)),
            );
            for (ch, patch) in unknown_children.drain(..) {
                self.repair_missing_down_context(
                    find_alive,
                    branch,
                    ch,
                    patch,
                    relatives,
                )?
                k == context && v.flag | EdgeFlags::FOLDER_EDGE == e.flag | EdgeFlags::FOLDER_EDGE
            })
        {
                context_deleted = false;
                break;
        patch_id: PatchId,
        relatives: &mut Vec<(Key<PatchId>, Edge)>,
        // Is the up context deleted by another patch, and the
        // deletion was not also confirmed by this patch?
        let up_context_deleted = self.was_context_deleted(branch, patch_id, context);

        debug!(
            "up_context_deleted: patch_id = {:?} context = {:?} up_context_deleted = {:?}",
            patch_id,
            context,
            up_context_deleted
        );
        // If so, collect edges to add.
        if up_context_deleted {
            // Alright, the up context was deleted, so the alive
            // component of the graph might be disconnected, and needs
            // a repair.

            // Follow all paths upwards (in the direction of
            // DELETED_EDGE|PARENT_EDGE) until finding an alive
            // ancestor, and turn them all into zombie edges.
            find_alive.clear();
            find_alive.push(context.to_owned());
            relatives.clear();
            let mut alive = Vec::new();
            self.find_alive_ancestors_with_edges(find_alive, &branch, &mut alive, relatives);
            for (key, mut edge) in relatives.drain(..) {
                if !self.is_connected(branch, key, edge.dest) {
                    edge.introduced_by = patch_id.clone();
                    edge.flag = EdgeFlags::PSEUDO_EDGE | EdgeFlags::PARENT_EDGE |
                        (edge.flag & EdgeFlags::FOLDER_EDGE);
                    debug!("repairing up context: {:?} {:?}", key, edge);
                    self.put_nodes_with_rev(branch, key, edge)?;
                }
        patch_id: PatchId,
        relatives: &mut Vec<(Key<PatchId>, Edge)>,
        let down_context_deleted = self.was_context_deleted(branch, patch_id, context);

        debug!("down_context_deleted: {:?}", down_context_deleted);

        if down_context_deleted {
            // Find all alive descendants, as well as the paths
            // leading to them, and double these edges with
            // pseudo-edges everywhere.
            let mut alive = Vec::new();
            find_alive.clear();
            let e = Edge::zero(EdgeFlags::DELETED_EDGE);
            for (_, v) in self.iter_nodes(branch, Some((context, Some(&e))))
                .take_while(|&(k, v)| k == context && v.flag == e.flag)
            {
                find_alive.push(v.dest)
            }
            self.find_alive_descendants(find_alive, &branch, &mut alive);
            debug!("down context relatives: {:?}", alive);
            let mut edge = Edge::zero(EdgeFlags::PSEUDO_EDGE);
            for key in alive.drain(..) {
                if !self.is_connected(branch, key, edge.dest) {
                    edge.dest = key;
                    edge.introduced_by = patch_id.clone();
                    edge.flag = EdgeFlags::PSEUDO_EDGE | (edge.flag & EdgeFlags::FOLDER_EDGE);
                    debug!("repairing down context: {:?} {:?}", key, edge);
                    self.put_nodes_with_rev(branch, context, edge)?;
                }

                (try!(self.txn.create_db()), try!(self.txn.create_db()), try!(self.txn.create_db()), 0)




















































































































































































































































































































        let child_inode = match child_inode {
            None => self.create_new_inode(),
            Some(i) => i.clone(),
        };
        // Avoid inserting a name that already exists.
        if self.get_tree(&parent_id.as_file_id()).is_none() {
                // If this is a directory, add a name-less file id without
                // a reverse in revtree.
        } else {
            Err(ErrorKind::AlreadyAdded.into())
    pub fn rec_delete(&mut self, key: Inode) -> Result<()> {
        {
            for (a, b) in children {
                self.rec_delete(b)?;
                // println!("deleting {:?} {:?}", a, b);
                debug!("deleting from tree");
                self.del_tree(&a.as_file_id(), Some(b))?;
                self.del_revtree(b, Some(&a.as_file_id()))?;
                debug!("done deleting from tree");
            }
        debug!("b");
        } else {
        Ok(())
                                 -> Vec<PathBuf> {
    w.add_ignore(repo_root.join(PIJUL_DIR_NAME).join("local").join("ignore"));
    let mut ret = vec![];
            ret.push(pb);
        let mut stack = vec![key0];
            if !is_first && !side.is_empty() {
                        conflicts.begin_conflict(buf)?;
                        conflicts.end_conflict(buf)?;
        graph: &Graph,
extern crate log;
extern crate bitflags;
extern crate sanakirja;
extern crate bincode;
extern crate hex;
extern crate tempdir;
#[macro_use]
extern crate error_chain;
extern crate bs58;
extern crate base64;
extern crate ignore;
extern crate openssl;
            _ => false
pub use backend::{DEFAULT_BRANCH, Repository, MutTxn, LineId, PatchId, EdgeFlags,
                  Hash, HashRef, Key, Edge, Txn, Branch,
                  Inode, ROOT_INODE, ROOT_KEY, SmallString, SmallStr, ApplyTimestamp,
                  FileMetadata, GenericTxn, FileStatus, OwnedFileId, FileId};
pub use record::{RecordState, InodeUpdate};
    pub fn output_changes_file<P: AsRef<Path>>(&mut self,
                                               branch: &Branch,
                                               path: P)
                                               -> Result<()> {
        let changes_file = fs_representation::branch_changes_file(path.as_ref(),
                                                                  branch.name.as_str());

    fn file_nodes_fold_<A, F: FnMut(A, Key<PatchId>) -> A>(&self,
                                                           branch: &Branch,
                                                           root: Key<PatchId>,
                                                           level: usize,
                                                           mut init: A,
                                                           f: &mut F)
                                                           -> Result<A> {

            .take_while(|&(k, v)| k.is_root() && v.flag.contains(EdgeFlags::FOLDER_EDGE)
                        && !v.flag.contains(EdgeFlags::PARENT_EDGE)) {

                debug!("file_nodes_fold_: {:?} {:?}", k, v);
                if level & 1 == 0 && level > 0 {
                    init = f(init, k)
                }
                init = self.file_nodes_fold_(branch, v.dest, level + 1, init, f)?

    pub fn file_nodes_fold<A, F: FnMut(A, Key<PatchId>) -> A>(&self,
                                                              branch: &Branch,
                                                              init: A,
                                                              mut f: F)
                                                              -> Result<A> {


        key == ROOT_KEY
            || self.has_edge(branch, key, EdgeFlags::PARENT_EDGE, EdgeFlags::PARENT_EDGE)
            || self.has_edge(branch, key, EdgeFlags::PARENT_EDGE|EdgeFlags::FOLDER_EDGE, EdgeFlags::PARENT_EDGE|EdgeFlags::FOLDER_EDGE)
        key.is_root() || self.has_edge(branch, key, EdgeFlags::PARENT_EDGE, EdgeFlags::PARENT_EDGE|EdgeFlags::FOLDER_EDGE)
    pub fn has_edge(&self,
                    branch: &Branch,
                    key: Key<PatchId>,
                    min: EdgeFlags,
                    max: EdgeFlags)
                    -> bool {


                        .take_while(|&(k, _)| k == key) {
                            debug!("all_edges: {:?}", v);
                        }
                        .take_while(|&(k, _)| k == key) {

                            debug!("get_file {:?}", v);
                            if v.flag|EdgeFlags::PSEUDO_EDGE == EdgeFlags::PARENT_EDGE|EdgeFlags::PSEUDO_EDGE {
                                debug!("push!");
                                stack.push(v.dest.clone())
                            } else if v.flag.contains(EdgeFlags::PARENT_EDGE|EdgeFlags::FOLDER_EDGE) {
                                names.push(key);
                            }
    pub fn get_file_names<'a>(&'a self, branch: &Branch, key: Key<PatchId>) -> Vec<(Key<PatchId>, Vec<&'a str>)> {

                        break

fn make_remote<'a, I: Iterator<Item = &'a Hash>>
    (target: &Path,
     remote: I)
     -> Result<(Vec<(Hash, Patch)>, usize)> {


pub fn apply_resize<'a, I, F>(target: &Path,
                              branch_name: &str,
                              remote: I,
                              apply_cb: F)
                              -> Result<()>
    where I: Iterator<Item = &'a Hash>,
          F: FnMut(usize, &Hash)
pub fn apply_resize_no_output<'a, I: Iterator<Item = &'a Hash>>(target: &Path,
                                                                branch_name: &str,
                                                                remote: I)
                                                                -> Result<()> {
pub fn unrecord_no_resize(repo_dir: &Path,
                          repo_root: &Path,
                          branch_name: &str,
                          selected: &mut Vec<(Hash, Patch)>,
                          increase: u64)
                          -> Result<()> {

            .take_while(|&(k, b)| k == key && b.flag <= EdgeFlags::FOLDER_EDGE | EdgeFlags::PSEUDO_EDGE) {
        let ext = self.get_external(name_key.patch).unwrap().to_base58();
            format!("{}.{}", basename, &ext[..10])
                                self.put_tree(&file_id.as_file_id(), inode)?;
                                self.put_revtree(inode, &file_id.as_file_id())?;
                            header.status = FileStatus::Ok;
                                status: FileStatus::Ok
                            status: FileStatus::Ok
                        self.remove_redundant_edges(branch, &l, &forward)?
                    if key.key.patch == pending_patch_id || self.is_alive(branch, key.key) {
        local_pending: &[InodeUpdate]
use std::io::{Read, BufRead, Write};
use std::fs::{File, OpenOptions, metadata};
use {Result, ErrorKind};

            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
    pub fn read_dependencies<R:Read>(mut r: R) -> Result<Vec<Hash>> {
        let version:u32 = deserialize_from(&mut r, Infinite)?;
        let _header:PatchHeader = deserialize_from(&mut r, Infinite)?;


        inode: Key<Option<Hash>>
    pub fn from_reader_nochanges<R:Read>(mut r: R) -> Result<PatchHeader> {
        let version:u32 = deserialize_from(&mut r, Infinite)?;

    FileDel { name: String, del: Change<Context> },
    FileAdd { name: String, add: Change<Context> },
    Change { file: Rc<PathBuf>, change: Change<Context>, conflict_reordering: Vec<Change<Context>> },
    Replace { file: Rc<PathBuf>, adds: Change<Context>, dels: Change<Context>, conflict_reordering: Vec<Change<Context>> },
                Record::FileDel { del: c, .. } |
                Record::FileAdd { add: c, .. } |
                Record::Change { change: c, .. } => Some(c),
                },
                Record::FileMove { ref del, ref add, .. } => {
                Record::FileDel { del: ref c, .. } |
                Record::FileAdd { add: ref c, .. } |
                Record::Change { change: ref c, .. } => Some(c),
                Record::Replace { ref adds, ref dels, .. } => {
                Change::NewNodes { ref up_context, flag, line_num, ref nodes, ref inode, .. } => {
                    let edges = up_context.iter().map(|up| {
                        NewEdge {
                                    None => Some(hash.clone())
                                line: line_num
                            introduced_by: Some(hash.clone())
                        }
                    }).chain((1..nodes.len()).map(|i| {
                        NewEdge {
                                line: line_num + (i -1),
                        }
                    })).collect();
                Change::NewEdges { previous, flag, ref edges, ref inode } => {
                    changes.push(Change::NewEdges {
                        previous: flag,
                        flag: previous,
                        inode: inode.clone(),
                        edges: edges.iter().map(|e| {
                            NewEdge {
                                from: e.from.clone(),
                                to: e.to.clone(),
                                introduced_by: Some(hash.clone())
                            }
                        }).collect()
                    })
                }
                },
                Change::NewEdges { ref edges, .. } => {
                    size += edges.len() * 2048
                    return Err(ErrorKind::EOF.into())
                return Err(ErrorKind::EOF.into())
                Hash::from_binary(&base64::decode_config(filename, base64::URL_SAFE_NO_PAD).unwrap()).unwrap()

            Patch::Signed0 | Patch::Unsigned0 => panic!("refusing to interact with old patch version"),
                if &filename.0[..] == &hash.0[..] => Ok(()),
            let mut signatures: SignatureFile = serde_json::from_reader(&mut file)
                .unwrap_or(SignatureFile {
                    signatures: HashMap::new()
            signatures.signatures.insert(
                public_key,
                bs58::encode(&signature.as_ref()).into_string(),
            );
            return Err(ErrorKind::WrongPatchSignature.into())
pub struct Signatures<'a, R:Read>(serde_json::StreamDeserializer<'a, serde_json::de::IoRead<R>, SignatureFile>);
pub fn read_signatures<'a, R:Read>(r: R) -> Signatures<'a, R> {
impl<'a, R:Read> Iterator for Signatures<'a, R> {







        match (sp.next().and_then(Hash::from_base58), sp.next().and_then(|s| s.parse().ok())) {
            (Some(h), Some(s)) => { result.insert(h, s); }
pub fn read_changes_from_file<P: AsRef<Path>>(changes_file: P)
                                              -> Result<HashMap<Hash, ApplyTimestamp>> {
    pub fn new_patch<I:Iterator<Item = Hash>>(
        flag: PatchFlags


    pub fn dependencies<'a, I: Iterator<Item = &'a Change<ChangeContext<Hash>>>>(&self,
                                                                              branch: &Branch,
                                                                              changes: I)
                                                                              -> HashSet<Hash> {
                Change::NewNodes { ref up_context,
                                   ref down_context,
                                   .. } => {
                    for c in up_context.iter().chain(down_context.iter()) {
                        match c.patch {
                            None | Some(Hash::None) => {}
                            Some(ref dep) => {
                                deps.insert(dep.clone());
                            }
                }
                Change::NewEdges { flag, ref edges, .. } => {
                        match e.from.patch {
                                deps.insert(h.clone());
                                if flag.contains(EdgeFlags::DELETED_EDGE|EdgeFlags::PARENT_EDGE) {
                                        patch: self.get_internal(h.as_ref())
                                            .unwrap().to_owned(),
                                        line: e.from.line.clone(),
                        match e.to.patch {
                                deps.insert(h.clone());
                                if flag.contains(EdgeFlags::DELETED_EDGE) && !flag.contains(EdgeFlags::PARENT_EDGE) {
                                        patch: self.get_internal(h.as_ref())
                                            .unwrap()
                                            .to_owned(),
                                        line: e.to.line.clone(),
                                deps.insert(h.clone());
            // deps.insert(z);
        // deps


                let already_covered =
                    covered.get(&current).is_some() || (current != dep && {
                        let current_ext = self.get_external(current).unwrap();
                        deps.get(&current_ext.to_owned()).is_some()
                    });
                    break
                    for (_, parent) in self.iter_revdep(Some((current, None))).take_while(|k| k.0 == current) {
            .take_while(|&(k_, e_)| k_ == k && e_.flag <= EdgeFlags::PSEUDO_EDGE) {

                let ext = self.get_external(edge.dest.patch).unwrap().to_owned();
            deps.insert(ext);


            line: key.line

            patch: self.external_hash_opt(h.patch)
    updatables: Vec<InodeUpdate>,
#[derive(Debug)]
        st.updatables.push(InodeUpdate::Add {
                })
                            st.updatables.push(InodeUpdate::Deleted {
        // There's always at least one edge to kill (for the file
        // itself), so `edges` is not empty.
        st.actions.push(Record::FileDel {
            name: realpath.to_string_lossy().to_string(),
            del: Change::NewEdges {
                edges: edges,
                previous: EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE,
                flag: EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE | EdgeFlags::DELETED_EDGE,
                inode: self.external_key(&current_node).unwrap(),
            },
        });
        if file_edges.len() > 0 {
                st.updatables.push(InodeUpdate::Moved {
        let mut e = Edge::zero(EdgeFlags::PARENT_EDGE|EdgeFlags::FOLDER_EDGE|EdgeFlags::DELETED_EDGE);
            updatables: Vec::new(),
    pub fn finish(self) -> (Vec<Record<Rc<RefCell<ChangeContext<PatchId>>>>>, Vec<InodeUpdate>) {
            self.rec_delete(inode)?






                            Some((k_, v_)) if k_ == key && v_.dest == edge.dest && v_.flag == edge.flag => {
                            _ => {
use std::cmp::min;
        .skip(1)
        if leading_equals >= min(diff.contents_a.len(), b.len()) {
            (&diff.contents_a[leading_equals..]).iter().rev()












































































            if flags.contains(EdgeFlags::FOLDER_EDGE) {
                // Here, this patch (Q) reintroduced a file F deleted
                // by a previous patch P, which means that we're
                // deleting the file again by unrecording Q.
                //
                // Now, unrecording Q deletes F again, so if other
                // (for instance later) patches R edited F, unrecording
                // Q must create a conflict (between P and R).

                // self.reconnect_deleted_folder_nodes(branch, &targets)?
            } else {
                self.reconnect_across_deleted_nodes(branch, &targets)?
            }
            if flags.contains(EdgeFlags::FOLDER_EDGE) {
                // We might need to do something here, in the
                // following case: a file is being reintroduced as a
                // result of the unrecord of patch A, but its
                // hierarchy has been deleted by another patch B.

                // Since B can be created independently, we cannot
                // solve this problem by adding dependencies to
                // prevent this unrecord.

                // Therefore, this unrecord must recreate the zombie
                // hierarchy.
                // But actually, this must also happen in the "else"
                // branch of this "if", if the reintroduced *lines*
                // are in a deleted file, so `reconnect_deletions`
                // must be common.
            } else {
                // If we're reintroducing a non-deleted edge, there is
                // no reason why the deleted part is still connected
                // to the alive component of the graph, so we must
                // reconnect the deleted part to its alive ancestors
                // and descendants.
                self.reconnect_deletions(branch, patch_id, edges, flags, find_alive)?
            }
                // This is actually a file deletion, so it's not
                // in the tree anymore. Put it back into
                // tree/revtrees, and to inodes/revinodes.
                //
                // bottom, source's parent is an inode, and must
                // be in inodes/revinodes.
                    self.get_revinodes(source_parent).unwrap()
                        self.put_nodes_with_rev(branch, source, edge)?;
            let mut files = HashSet::new();
            if self.find_alive_ancestors(find_alive, branch, &mut alive_relatives, &mut files) {
                for (mut k, mut v) in files.drain() {
        let mut files = HashSet::new();
        // find alive ancestors of the deleted nodes.
                self.find_alive_ancestors(&mut find_alive, branch, &mut alive_ancestors, &mut files);
            }
        }
        // file_edges should contain the now zombie files.
        debug!("file: {:?}", files);
        for (mut k, mut v) in files.drain() {
            assert!(v.flag.contains(EdgeFlags::DELETED_EDGE));
            v.flag = (v.flag | EdgeFlags::PSEUDO_EDGE) ^ EdgeFlags::DELETED_EDGE;
            self.put_nodes(branch, k, &v)?;
        // find alive descendants of the deleted nodes.
        let mut alive_descendants = Vec::new();
        if !alive_ancestors.is_empty() {
                    self.find_alive_descendants(&mut find_alive, branch, &mut alive_descendants);
        }
        debug!(
            "ancestors = {:?}, descendants = {:?}",
            alive_ancestors,
            alive_descendants
        );
        for ancestor in alive_ancestors.iter() {
            for descendant in alive_descendants.iter() {
                let mut edge = Edge::zero(EdgeFlags::PSEUDO_EDGE);
                edge.dest = *descendant;
                debug!("adding {:?} -> {:?}", ancestor, edge);
                self.put_nodes_with_rev(branch, *ancestor, edge)?;
            self.reconnect_across_deleted_nodes(branch, &internal_down_context)?











version = "0.9.1"
thrussh = "0.19.2"
thrussh-keys = "0.9.4"
term = "0.4"
libpijul = "0.9.1"
liner = { git = "https://github.com/P-E-Meunier/liner" }
termion = "1.5"

[target.'cfg(windows)'.dependencies]
rustyline = "1.0"






































































































































































































































































































































































































































































































































































































































































































































































































































































































use libpijul::patch::{Change, Patch, Record, PatchHeader, ChangeContext};
use libpijul::{MutTxn, LineId, EdgeFlags, Hash, PatchId};
#[cfg(unix)] use liner::Context;
#[cfg(windows)] use rustyline;
use term::{StdoutTerminal, Attr};
#[derive(Clone,Copy)]
pub fn print_patch_descr(term: &mut Option<Box<StdoutTerminal>>, hash: &Hash, internal: Option<PatchId>, patch: &PatchHeader) {


fn check_forced_decision(command: Command,
                         choices: &HashMap<&Hash, bool>,
                         rev_dependencies: &HashMap<&Hash, Vec<&Hash>>,
                         a: &Hash,
                         b: &Patch)
                         -> Option<bool> {

fn interactive_ask(getch: &getch::Getch,
                   a: &Hash,
                   patchid: Option<PatchId>,
                   b: &Patch,
                   command_name: Command)
                   -> Result<(char, Option<bool>), Error> {
    let mut term =
        if stdout_isatty() {
            term::stdout()
        } else {
            None
        };
    print!("{} [ynkad] ",
           match command_name {
               Command::Push => "Shall I push this patch?",
               Command::Pull => "Shall I pull this patch?",
               Command::Unrecord => "Shall I unrecord this patch?",
           });


pub fn ask_patches(command: Command, patches: &[(Hash, Option<PatchId>, Patch)]) -> Result<HashSet<Hash>, Error> {


                    Command::Pull | Command::Push => {
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
                    Command::Unrecord => {
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
    Ok(choices.into_iter()
        .filter(|&(_, selected)| selected)
        .map(|(x, _)| x.to_owned())
        .collect())
fn change_deps(id: usize, c: &Record<ChangeContext<Hash>>, provided_by: &mut HashMap<LineId, usize>) -> HashSet<LineId> {
            Change::NewNodes { ref up_context, ref down_context, ref line_num, ref nodes, .. } => {

            Change::NewEdges { ref edges, .. } => {
                for e in edges {
                    if e.from.patch.is_none() && !e.from.line.is_root() {
                        s.insert(e.from.line.clone());
                    }
                    if e.to.patch.is_none() && !e.from.line.is_root() {
                        s.insert(e.to.line.clone());
                    }
            }
fn print_change<T: rand::Rng>(term: &mut Option<Box<StdoutTerminal>>,
                              repo: &MutTxn<T>,
                              current_file: &mut Option<Rc<PathBuf>>,
                              c: &Record<ChangeContext<Hash>>)
                              -> Result<(), Error> {

            println!("{}", name);
            println!("{}", name);
            println!("{}", new_name);
        Record::Replace { ref adds, ref dels, ref file, .. } => {
            let r = Record::Change { change: dels.clone(), file: file.clone(), conflict_reordering: Vec::new() };
            print_change(term, repo, current_file, &r)?;
            let r = Record::Change { change: adds.clone(), file: file.clone(), conflict_reordering: Vec::new() };
            print_change(term, repo, current_file, &r)
        Record::Change { ref change, ref file, .. } => {
                Change::NewNodes { // ref up_context,ref down_context,ref line_num,
                                   ref flag,
                                   ref nodes,
                                   .. } => {
                                println!("In file {:?}\n", file);
                Change::NewEdges { ref edges, flag, .. } => {
                        let (target, flag) =
                            if !flag.contains(EdgeFlags::PARENT_EDGE) {
                                if h_targets.insert(&e.to) {
                                    (Some(&e.to), flag)
                                } else {
                                    (None, flag)
                                }
                                if h_targets.insert(&e.from) {
                                    (Some(&e.from), flag)
                                } else {
                                    (None, flag)
                                }
                            };
                                println!("In file {:?}\n", file);

    Revert
            _ => false
pub fn ask_changes<T: rand::Rng>(repository: &MutTxn<T>,
                                 changes: &[Record<ChangeContext<Hash>>],
                                 direction: ChangesDirection)
                                 -> Result<HashMap<usize, bool>, Error> {
    debug!("changes: {:?}", changes);
    let mut terminal =
        if stdout_isatty(){
            term::stdout()
        } else {
            None
        let e = match decision {
            Some(true) => 'Y',
            Some(false) => 'N',
                    d
                    debug!("changes: {:?}", changes[i]);
                    try!(print_change(&mut terminal, repository, &mut current_file, &changes[i]));
                    println!("");
                    print!("Shall I {} this change? ({}/{}) [ynkad] ", direction.verb(), i+1, changes.len());
                    try!(stdout().flush());
                    match getch.getch().ok().and_then(|x| from_u32(x as u32)) {
                        Some(e) => {
                            println!("{}\n", e);
                            let e = e.to_uppercase().next().unwrap_or('\0');
                            match e {
                                'A' => {
                                    final_decision = Some('Y');
                                    'Y'
                                }
                                'D' => {
                                    final_decision = Some('N');
                                    'N'
                                }
                                e => e,
                            }
                        }
                        _ => '\0',
                    }
            }
    Ok(choices)
#[cfg(unix)]
    let mut con = Context::new();
    if let Ok(res) = con.read_line(s, &mut |_| {}) {
        res.into()

    #[cfg(unix)]
    {
        Ok(vec![read_line("What is your name <and email address>? ")])
    }

    #[cfg(windows)]
    {
        let mut rl = rustyline::Editor::<()>::new();
        let input = rl.readline("What is your name <and email address>? ")?;
        Ok(vec![input])
    }
pub fn ask_patch_name(repo_root: &Path,
                      maybe_editor: Option<&String>)
                      -> Result<(String, Option<String>), Error> {
        let mut editor_cmd = editor.trim().split(" ").map(OsString::from).collect::<Vec<_>>();
            .map_err(|e| ErrorKind::CannotSpawnEditor(editor.to_owned(), e.to_string()))?;
        let mut patch_name = File::open(patch_name_file.as_path())
            .map_err(|_| ErrorKind::EmptyPatchName)?;
        #[cfg(unix)]
        {
            let res = read_line("What is the name of this patch? ");
            debug!("res = {:?}", res);
            if res.trim().is_empty() {
                Err(ErrorKind::EmptyPatchName.into())
            } else {
                Ok((res, None))
            }
        }

        #[cfg(windows)]
        {
            let mut rl = rustyline::Editor::<()>::new();
            let input = rl.readline("What is the name of this patch? ")?;
            debug!("input = {:?}", input);
            if input.trim().is_empty() {
                Err(ErrorKind::EmptyPatchName.into())
            } else {
                Ok((input, None))
            }
    print!("The authenticity of host {:?}:{} cannot be established.\nThe fingerprint is {:?}.",
           host,
           port,
           fingerprint);
    #[cfg(unix)]
    {
        let input = read_line("Are you sure you want to continue (yes/no)? ");
        let input = input.trim().to_uppercase();
        Ok(input == "YES")
    }

    #[cfg(windows)]
    {
        let mut rl = rustyline::Editor::<()>::new();
        let input = rl.readline("Are you sure you want to continue (yes/no)? ")?;
        let input = input.to_uppercase();
        Ok(input.trim() == "YES")
    }
pub fn print_status<T: rand::Rng>(repository: &MutTxn<T>,
                                  changes: &[Record<ChangeContext<Hash>>]) -> Result<(), Error> {

    let mut terminal =
        if stdout_isatty(){
            term::stdout()
        } else {
            None
        };
        try!(print_change(&mut terminal, repository, &mut current_file, &changes[i]));
}

#[cfg(unix)]
pub fn password() -> Result<String, Error> {
    // Prompt without a trailing newline, then flush so the prompt is
    // visible before we block on input.
    print!("Password: ");
    std::io::stdout().flush()?;
    use std::io::{stdin, stdout};
    use termion::input::TermRead;
    // Lock both handles for the duration of the read: termion's
    // `read_passwd` writes to stdout (to suppress echo) while reading
    // from stdin.
    let stdout = stdout();
    let mut stdout = stdout.lock();
    let stdin = stdin();
    let mut stdin = stdin.lock();
    // `read_passwd` yields `Ok(None)` when input is interrupted
    // (e.g. Ctrl-D); treat that as an empty password rather than an
    // error, matching the previous behavior.
    Ok(stdin.read_passwd(&mut stdout)?.unwrap_or_default())
}

#[cfg(not(unix))]
pub fn password() -> Result<String, Error> {
    print!("Password: ");
    std::io::stdout().flush()?;
    let mut terminal =
        if stdout_isatty() {
            term::stdout()
        } else {
            None
        };
    if let Some(ref mut terminal) = terminal {
        terminal.attr(term::Attr::Secure)?;
    }
    let stdin = std::io::stdin();
    let line = stdin.lock().lines().next().unwrap_or(Ok(String::new()))?;
    Ok(line)
            &Vec::new()
    super::ask::print_status(&txn, &changes)?;
use libpijul::fs_representation::find_repo_root;
    // TODO: make this configurable.
    let allow_nested = false;
    match find_repo_root(&wd) {
        Some(_) if allow_nested => create_repo(&wd),
        Some(r) => Err(ErrorKind::InARepository(r).into()),
        None => create_repo(&wd),
    }
                    let cap =
                            cap
                            return Err(ErrorKind::CannotParseRemote.into())
                    let user = &cap[1];
                    let server = &cap[2];
                    let client = SshClient::new(port, server, key, &h);
                        (server, port), config, None, client,
                            AuthAttemptFuture::new(connection, AuthAttempts::new(user.to_string(), repository, use_agent), user.to_string())
    type FutureSign = futures::future::FutureResult<(SshClient, thrussh::CryptoVec), Error>;
use std::fs::{File, canonicalize, metadata};
    let mut path = fs_representation::repo_dir(root);
    let mut path = fs_representation::repo_dir(root);
    if fs_representation::find_repo_root(dir).is_some() {
        Err(ErrorKind::InARepository(dir.to_owned()).into())
    } else {
        Ok(())
    let repo_dir = fs_representation::repo_dir(dir);
    if let Ok(attrs) = metadata(&repo_dir) {
        if attrs.is_dir() {
            return Err(ErrorKind::InARepository(dir.to_owned()).into());
        }
        let repo_root = if let Some(r) = fs_representation::find_repo_root(&wd) {
use clap::{SubCommand, ArgMatches, Arg, ArgGroup};
use commands::{BasicOptions, StaticSubcommand, default_explain};
use libpijul::{Hash, Key, PatchId, Branch, Txn,
               Transaction, Value, LineId, ROOT_KEY, EdgeFlags};
use libpijul::patch::{NewEdge, Patch, Change};
use std::io::{stdout, copy, BufReader};
use std::collections::{HashSet, HashMap};
        .arg(Arg::with_name("bin")
             .long("bin")
             .help("Output the patch in binary.")
        )
        .arg(Arg::with_name("name")
             .long("name")
             .help("Output the patch name.")
        .arg(Arg::with_name("description")
             .long("description")
             .help("Output the patch description.")
        .arg(Arg::with_name("authors")
             .long("authors")
             .help("Output the patch authors.")
        .arg(Arg::with_name("date")
             .long("date")
             .help("Output the patch date.")
        .group(ArgGroup::with_name("details")
               .required(false)
               .args(&["bin", "name", "description", "date", "authors"])
    let v:  View = match ( args.is_present("bin")
          , args.is_present("name")
          , args.is_present("description")
          , args.is_present("date")
          , args.is_present("authors")
          ) {
        (_, _, _, _, _)    => View::Normal
        };
            _ => { // it cannot be View::Bin, so it has to be View::Normal
                        Change::NewEdges { ref edges, flag, .. } => {
                            render_new_edges(&mut terminal, &txn, &branch, &mut buf, internal, edges, flag)?
                        }
            txn.get_file_names(branch, x.clone()).into_iter().map(|(_,
              name)| {
                name
            })
    assert_eq!(file_names.len(), 1);
        print!("(also known as {:?}", file_names[0]);
        txn.find_alive_nonfolder_ancestors(branch, &mut find_alive, &mut alive, &mut file, up.clone());
        self.numbers.insert(
            key.clone(),
            (self.current_file.clone(), self.n),
        );
    flag: EdgeFlags

        txn.find_alive_nonfolder_ancestors(branch, &mut find_alive, &mut alive, &mut file, to.clone());
                    txn.get_file_names(branch, x.clone()).into_iter().map(|(_,
                      name)| {
                        name
                    })

use libpijul::{Repository, Hash, InodeUpdate, Patch, PatchId, MutTxn, RecordState};
use libpijul::patch::PatchFlags;
use libpijul::fs_representation::{patches_dir, untracked_files};
use std::fs::{canonicalize};
use std::fs::metadata;
use std::str::{FromStr};
            .arg(Arg::with_name("all")
                 .short("a")
                 .long("all")
                 .help("Answer 'y' to all questions")
                 .takes_value(false))
            .arg(Arg::with_name("add-new-files")
                 .short("n")
                 .long("add-new-files")
                 .help("Offer to add files that have been created since the last record")
                 .takes_value(false))
            .arg(Arg::with_name("depends-on")
                 .help("Add a dependency to this patch (internal id or hash accepted)")
                 .long("depends-on")
                 .takes_value(true)
                 .multiple(true))
            .arg(Arg::with_name("prefix")
                 .help("Prefix to start from")
                 .takes_value(true)
                 .multiple(true))
    )
fn add_untracked_files<T: rand::Rng>(txn: &mut MutTxn<T>, repo_root: &Path)
    -> Result<()>
{
        txn.add_file(&file, m.is_dir())?;
    let patch_date = args.value_of("date")
        .map_or(Ok(chrono::Utc::now()),
                |x| chrono::DateTime::from_str(x)
                        .map_err(|_| ErrorKind::InvalidDate(String::from(x)))
    )?;
    let (changes, syncs) = {
        // Increase by 100 pages. The most things record can
        // write is one write in the branches table, affecting
        // at most O(log n) blocks.
        let repo = opts.open_and_grow_repo(409600)?;
        let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
        if add_new_files { add_untracked_files(&mut txn, &opts.repo_root)? };
        let (changes, syncs) = changes_from_prefixes(
            &opts.repo_root,
            &mut txn,
            &branch_name,
            prefix.as_ref()
        )?;
        let changes:Vec<_> = changes.into_iter().map(|x| txn.globalize_record(x)).collect();
        if !yes_to_all {
            let c = ask_changes(&txn, &changes, ChangesDirection::Record)?;
            let selected = changes.into_iter()
                .enumerate()
                .filter(|&(i, _)| *(c.get(&i).unwrap_or(&false)))
                .map(|(_, x)| x)
                .collect();
            txn.commit()?;
            (selected, syncs)
        } else {
            txn.commit()?;
            (changes, syncs)
        }
    };
                (m.to_string(), patch_descr_arg.map(|x| String::from(x.trim())))
                                return Err(ErrorKind::ExtraDepNotOnBranch(hash).into())
                            return Err(ErrorKind::PatchNotFound(opts.repo_root(), hash).into())
                                return Err(ErrorKind::ExtraDepNotOnBranch(hash.to_owned()).into())
                        return Err(ErrorKind::WrongHash.into())
            txn.new_patch(&branch,
                          authors,
                          patch_name,
                          description,
                          patch_date,
                          changes,
                          extra_deps.into_iter(),
                          PatchFlags::empty())
            match record_no_resize(&pristine_dir, &opts.repo_root, &branch_name, &hash, &patch,
                                   &syncs, increase) {
                Err(ref e) if e.lacks_space() => { increase *= 2 },
                e => break e
pub fn record_no_resize(pristine_dir: &Path, r: &Path, branch_name: &str, hash: &Hash,
                        patch: &Patch, syncs: &[InodeUpdate], increase: u64)
                        -> Result<Option<Hash>> {

        Err(x) => {
            return Err(ErrorKind::Repository(x).into())
        }
    txn.apply_local_patch(&branch_name, r, &hash, &patch, &syncs, false)?;
pub fn changes_from_prefixes<T: rand::Rng>(repo_root: &Path, txn: &mut MutTxn<T>, branch_name: &str, prefix: Option<&HashSet<PathBuf>>)
                                           -> Result<(Vec<libpijul::patch::Record<Rc<RefCell<libpijul::patch::ChangeContext<PatchId>>>>>, Vec<libpijul::InodeUpdate>)>  {
            txn.record(&mut record, &branch_name, repo_root, Some(prefix.as_path()))?;
        let prefixes:Result<HashSet<_>> = prefixes
                let p = if let Ok(p) = canonicalize(&p) {
                    p
                } else {
                    p
                };
    Ssh {
        l: tokio_core::reactor::Core,
        id: &'a str,
        path: &'a str,
        pijul_cmd: &'a str,
        session: Option<thrussh::client::Connection<TcpStream, Client>>,
    },
    Uri {
        l: tokio_core::reactor::Core,
        uri: &'a str,
        client: reqwest_async::Client
    },
    Local { path: &'a Path },
impl <'a> Drop for Session<'a> {
        match *self {
            Session::Ssh { ref mut l, ref mut session, .. } => {
                if let Some(mut session) = session.take() {
                    debug!("disconnecting");
                    session.disconnect(thrussh::Disconnect::ByApplication, "finished", "EN");
                    if let Err(e) = l.run(session) {
                        error!("While dropping SSH Session: {:?}", e);
                    }
                }
            _ => {}
impl<'a> Session<'a> {
        match *self {
            Session::Ssh { ref mut l, ref path, ref mut session, ref pijul_cmd, .. } => {

                let esc_path = escape(Cow::Borrowed(path));
                let cmd = format!("{} log --repository {} --branch {:?} --hash-only",
                                  pijul_cmd,
                                  esc_path,
                                  branch);
                if let Some(ref mut session) = *session {
                    session.handler_mut().state = State::Changes { changes: HashMap::new() }
                }
                let mut channel = None;
                *session = Some(l.run(session.take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut connection, chan)| {
                            debug!("exec: {:?}", cmd);
                            channel = Some(chan);
                            connection.handler_mut().exit_status.remove(&chan);
                            connection.handler_mut().channel = Some(chan);
                            connection.exec(chan, false, &cmd);
                            connection.channel_eof(chan);
                            // Wait until channel close.
                            debug!("waiting channel close");
                            connection.wait(move |session| {
                                session.handler().exit_status.get(&chan).is_some()
                            }).and_then(move |mut session| {
                                if session.is_channel_open(chan) {
                                    session.channel_close(chan);
                                }
                                session.wait(move |session| !session.is_channel_open(chan))
                            })
                        }))
                    .unwrap());
                if let Some(ref session) = *session {
                    if let Some(channel) = channel {
                        if let Some(&exit_code) = session.handler().exit_status.get(&channel) {
                            debug!("exit_code = {:?}", exit_code);
                            if exit_code != 0 {
                                return Ok(HashMap::new())
                            }
                        }
                    }
                }
                if let Some(ref mut session) = *session {
                    match std::mem::replace(&mut session.handler_mut().state, State::None) {
                        State::Changes { changes } => {
                            debug!("changes: {:?}", changes);
                            Ok(changes)
                        }
                        _ => unreachable!(),
                } else {
                    unreachable!()
            Session::Local { path } => {
                let repo_dir = pristine_dir(&path);
                let repo = Repository::open(&repo_dir, None)?;
                let txn = repo.txn_begin()?;
                Ok(if let Some(branch) = txn.get_branch(&branch) {
                    txn.iter_patches(&branch, None)
                        .map(|(hash, s)| (txn.get_external(hash).unwrap().to_owned(), s))
                        .collect()
                } else {
                    HashMap::new()
                })
            }
            Session::Uri { ref mut l, uri, ref mut client } => {
                let mut uri = uri.to_string();
                uri = uri + "/" + PIJUL_DIR_NAME + "/" + &branch_changes_base_path(branch);
                let mut req = reqwest_async::Request::new(
                    reqwest::Method::Get,
                    uri.parse().unwrap()
                );
                req.headers_mut().set_raw("connection", "close");
                let res:Vec<u8> = l.run(
                    client.execute(req)
                        .and_then(|resp: reqwest_async::Response| {
                            let res = Vec::new();
                            let body = resp.into_body();
                            body.fold(res, |mut res, x| {
                                res.extend(x.iter());
                                futures::finished::<_, reqwest::Error>(res)
                            })
                        })
                )?;
                let changes = read_changes(&mut &res[..]).unwrap_or(HashMap::new());
                debug!("http: {:?}", changes);
                Ok(changes)
    pub fn download_patch(&mut self,
                          repo_root: &Path,
                          patch_hash: &Hash)
                          -> Result<PathBuf> {
        let patches_dir_ = patches_dir(repo_root);
        let local_file = patches_dir_.join(&patch_file_name(patch_hash.as_ref()));
        if metadata(&local_file).is_ok() {
            Ok(local_file)
        } else {
            match *self {
                Session::Local { path } => {
                    debug!("local downloading {:?}", patch_hash);
                    let mut remote_file = patches_dir(path).join(&patch_file_name(patch_hash.as_ref()));
                    debug!("hard linking {:?} to {:?}", remote_file, local_file);
                    if hard_link(&remote_file, &local_file).is_err() {
                        copy(&remote_file, &local_file)?;
                    }
                    Ok(local_file)
                }
                Session::Ssh { ref mut l, ref path, ref mut session, ref pijul_cmd, .. } => {
                    let esc_path = escape(Cow::Borrowed(path));
                    let cmd = format!("{} patch --repository {} --bin {}",
                                      pijul_cmd,
                                      esc_path,
                                      patch_hash.to_base58());
                    debug!("cmd {:?} {:?}", cmd, local_file);
                    let tmp_dir = TempDir::new_in(&patches_dir_, "pijul_patch")?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    if let Some(ref mut session) = *session {
                        session.handler_mut().state =
                            State::DownloadPatch {
                                file: try!(File::create(&local_tmp_file))
                            };
                        session.handler_mut().channel = None;
                    }
                    *session = Some(l.run(session.take()
                            .unwrap()
                            .channel_open_session()
                            .and_then(move |(mut connection, chan)| {
                                connection.handler_mut().exit_status.remove(&chan);
                                connection.handler_mut().channel = Some(chan);
                                connection.exec(chan, false, &cmd);
                                connection.channel_eof(chan);
                                connection.wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                }).and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                            }))
                    if let Some(ref mut session) = *session {
                        if let State::DownloadPatch { mut file, .. } = std::mem::replace(&mut session.handler_mut().state, State::None) {
                            file.flush()?;
                            rename(&local_tmp_file, &local_file)?;
                        }
                    }
                    Ok(local_file)
                }
                Session::Uri { ref mut l, ref mut client, uri } => {
                    let uri =
                        uri.to_string() + "/" + PIJUL_DIR_NAME + "/patches/" +
                        &patch_hash.to_base58() + ".gz";
                    debug!("downloading uri {:?}", uri);
                    let mut req = reqwest_async::Request::new(
                        reqwest::Method::Get,
                        uri.parse().unwrap()
                    );
                    req.headers_mut().set_raw("connection", "close");
                    let tmp_dir = TempDir::new_in(&patches_dir_, "pijul_patch")?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    let res = l.run(
                        client.execute(req)
                            .and_then(|resp| {
                                if resp.status() == reqwest::StatusCode::Ok {
                                    let res = Vec::new();
                                    futures::future::Either::A(resp.into_body().fold(res, |mut res, x| {
                                        res.extend(x.iter());
                                        futures::finished::<_, reqwest::Error>(res)
                                    }).map(|body| {
                                        debug!("response={:?}", body);
                                        let mut f = File::create(&local_tmp_file).unwrap();
                                        f.write_all(&body).unwrap();
                                        debug!("patch downloaded through http: {:?}", body);
                                        Some(local_file)
                                    }))
                                } else {
                                    futures::future::Either::B(futures::finished(None))
                                }
                            })
                    ).unwrap();
                    if let Some(local_file) = res {
                        rename(&local_tmp_file, &local_file)?;
                        Ok(local_file)
                        Err(ErrorKind::PatchNotFound(
                            repo_root.to_path_buf(),
                            patch_hash.to_owned()
                        ).into())
                }
            }
    fn remote_apply(&mut self,
                    repo_root: &Path,
                    remote_branch: &str,
                    patch_hashes: &HashSet<Hash>)
                    -> Result<()> {
        match *self {
            Session::Ssh { ref mut l, ref mut session, ref path, ref pijul_cmd, .. } => {
                let pdir = patches_dir(repo_root);
                let mut exit_status = None;
                *session = Some(l.run(session.take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut session, chan)| {
                            session.handler_mut().exit_status.remove(&chan);
                            session.handler_mut().channel = Some(chan);
                            let esc_path = escape(Cow::Borrowed(path));
                            debug!("exec {:?}",
                                   format!("{} apply --repository {} --branch {:?}",
                                           pijul_cmd,
                                           esc_path,
                                           remote_branch));
                            session.exec(chan,
                                         false,
                                         &format!("{} apply --repository {} --branch {:?}",
                                                  pijul_cmd,
                                                  esc_path,
                                                  remote_branch));
                            futures::stream::iter_ok(patch_hashes.iter())
                                .fold((session, Vec::new()), move |(session, buf), hash| {
                                    let mut pdir = pdir.clone();
                                    pdir.push(hash.to_base58());
                                    pdir.set_extension("gz");
                                    let f = std::fs::File::open(&pdir).unwrap();
                                    pdir.pop();
                                    SendFile {
                                        f: f,
                                        buf: Some(buf),
                                        chan: chan,
                                        state: Some(SendFileState::Read(session)),
                                    }
                                })
                                .and_then(move |(mut session, _)| {
                                    session.channel_eof(chan);
                                    session.wait(move |session| {
                                        session.handler().exit_status.get(&chan).is_some()
                                    }).map(move |mut session| {
                                        exit_status = session.handler().exit_status.get(&chan).map(|x| *x);
                                        session.channel_close(chan);
                                        session
                                    })
                                })
                                .map_err(From::from)
                        }))
                                .unwrap());
                if let Some(ref session) = *session {
                    debug!("exit status = {:?}", session.handler().exit_status);
                }
                Ok(())
            }
            Session::Local { path } => {
                let mut remote_path = patches_dir(path);
                let mut local_path = patches_dir(repo_root);
                for hash in patch_hashes {
                    remote_path.push(&hash.to_base58());
                    remote_path.set_extension("gz");
                    local_path.push(&hash.to_base58());
                    debug!("hard linking {:?} to {:?}", local_path, remote_path);
                    if metadata(&remote_path).is_err() {
                        if metadata(&local_path).is_err() {
                            local_path.pop();
                            let b = hash.as_ref().to_binary();
                            local_path.push(&base64::encode_config(&b, base64::URL_SAFE_NO_PAD));
                            local_path.set_extension("gz");
                            debug!("local_file: {:?}", local_path);
                            let f = BufReader::new(std::fs::File::open(&local_path).unwrap());
                            let mut rr = flate2::bufread::GzDecoder::new(f);
                            let f = File::create(&remote_path).unwrap();
                            let stem = hash.to_base58();
                            let mut w = flate2::GzBuilder::new()
                                .filename(stem.as_bytes())
                                .write(f, flate2::Compression::best());
                            std::io::copy(&mut rr, &mut w).unwrap();
                            w.finish().unwrap();
                        } else if hard_link(&local_path, &remote_path).is_err() {
                            copy(&local_path, &remote_path)?;
                        }
                    }
                    local_path.pop();
                    remote_path.pop();
                }
                loop {
                    match apply_resize(&path, &remote_branch, patch_hashes.iter(), |_, _| {}) {
                        Err(ref e) if e.lacks_space() => {},
                        Ok(()) => return Ok(()),
                        Err(e) => return Err(From::from(e))
                    }
            _ => panic!("upload to URI impossible"),
    pub fn remote_init(&mut self) -> Result<()> {
            Session::Ssh { ref mut l, ref mut session, ref path, ref pijul_cmd, .. } => {
                let esc_path = escape(Cow::Borrowed(path));
                let cmd = format!("{} init {}", pijul_cmd, esc_path);
                debug!("command line:{:?}", cmd);
                *session = Some(l.run(session.take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut session, chan)| {
                            debug!("chan = {:?}", chan);
                            session.handler_mut().exit_status.remove(&chan);
                            session.handler_mut().channel = Some(chan);
                            session.exec(chan, false, &cmd);
                            session.channel_eof(chan);
                            // Wait until channel close.
                            session
                                .wait(move |session| session.handler().exit_status.get(&chan).is_some())
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }))
                    .unwrap());
                Ok(())
            }
            Session::Local { path } => {
                try!(assert_no_containing_repo(path));
                create_repo(path)
        // fn from_remote(remote:&Remote<'a>) -> Result<Session<'a>,Error> {
            Remote::Local { ref path } => Ok(Session::Local { path: path.as_path() }),
                Ok(Session::Uri {
                })
            Remote::Ssh { ref user, ref host, port, ref path, ref id, ref local_repo_root, ref pijul_cmd } => {
                Ok(Session::Ssh {
                    id,
                })
    let (pending, pending_syncs):(_,Vec<_>) =
                    (Vec::new(), Vec::new())
                    let c = ask_changes(&txn, &changes, ChangesDirection::Revert)?;
            (UnsignedPatch::empty().leave_unsigned(), Vec::new())
fn output_repository(r: &Path, pristine_dir: &Path, branch: &str, size_increase: Option<u64>, prefixes: Option<&HashSet<PathBuf>>, pending: &Patch, pending_syncs: &[InodeUpdate]) -> Result<()> {
    let mut meta = match Meta::load(&opts.repo_root) {
use super::ask;
                    if let Ok(password) = ask::password() {
                    let password = ask::password()?;
                    untracked: Vec<PathBuf>,
                     untracked: Vec<PathBuf>,
/// Produce a PathBuf with the relative path from cwd to the given file.
///
/// We assume `cwd` is a folder and `file` is a file. We also assume
/// they share a common root (at least the repository root), and that
/// `file` has both a parent and a file name (so `file` must not be
/// the filesystem root).
///
/// cwd=/a/b/c/e
/// fil=/a/b/c/d/foo.rs
///
/// relativize(cwd, fil) #=> ../d/foo.rs
fn relativize(cwd: &Path, file: &Path) -> PathBuf {
    let mut p = PathBuf::new();
    let mut c1 = cwd.components();
    let mut c2 = file.parent().unwrap().components();
    loop {
        match (c1.next(), c2.next()) {
            (Some(r1), Some(r2)) if r1 != r2 => {
                // Paths diverge here: climb out of the remaining
                // components of `cwd`, then descend into the rest of
                // `file`'s directory.
                p.push("..");
                for _ in c1 {
                    p.push("..");
                }
                p.push(r2.as_os_str());
                p.push(c2.as_path());
                break;
            }
            (None, Some(r2)) => {
                // `cwd` is a prefix of `file`'s directory: the
                // remaining components already form the relative path.
                p.push(r2.as_os_str());
                p.push(c2.as_path());
                break;
            }
            (Some(_), None) => {
                // `file`'s directory is a prefix of `cwd`: climb up
                // once per remaining component of `cwd`.
                p.push("..");
                for _ in c1 {
                    p.push("..");
                }
                break;
            }
            (None, None) => {
                break;
            }
            // Components equal so far: keep walking both paths.
            (Some(_), Some(_)) => {}
        }
    }
    // `file_name()` is the final component; pushing it makes it the
    // file name of `p`. (The former `p.set_file_name(name)` right
    // after this push was a no-op and has been removed.)
    let name = file.file_name().unwrap();
    p.push(name);
    debug!("rel({}, {})={}", cwd.display(), file.display(), p.display());
    p
}


#[cfg(test)]
mod test {
    use super::relativize;
    use std::path::{Path, PathBuf};

    #[test]
    fn test_relativize() {
        // Each case is (cwd, file, expected relative path).
        let cases = [
            ("/a/b/c", "/a/b/c/foo.rs", "foo.rs"),
            ("/a/b/c", "/a/b/c/d/foo.rs", "d/foo.rs"),
            ("/a/b/c/e", "/a/b/c/foo.rs", "../foo.rs"),
            ("/a/b/c/e", "/a/b/c/d/foo.rs", "../d/foo.rs"),
            ("/a/b/c/d/e", "/a/b/c/foo.rs", "../../foo.rs"),
            ("/home/foo/rust/pijul", "/home/foo/rust/pijul/Cargo.lock", "Cargo.lock"),
        ];
        for &(root, file, expected) in cases.iter() {
            let rel = relativize(Path::new(root), Path::new(file));
            assert_eq!(PathBuf::from(expected), rel);
        }
    }
}
                                       &[], increase) {






















use {thrussh, thrussh_keys, libpijul, reqwest, rustyline, term, toml, hex, bs58, regex};
        Rustyline(rustyline::error::ReadlineError);
        PatchNotFound(repo_root: PathBuf, patch_hash: libpijul::Hash) {
        PatchNotFound(repo_root: PathBuf, patch_hash: libpijul::Hash) {
extern crate liner;
#[cfg(unix)]
#[cfg(windows)]
extern crate rustyline;

#[cfg(unix)]
extern crate termion;
    let version = crate_version!();
    let app = clap::App::new("pijul")
        .version(&version[..])
        .author("Pierre-√Čtienne Meunier and Florent Becker")
        .about("Version Control: fast, distributed, easy to use; pick any three");
    let app = app.subcommands(commands::all_command_invocations());
    if let Some(mut path) = std::env::home_dir() {
        path.push(".pijulconfig");
        Ok(path)
    } else {
        Err(ErrorKind::NoHomeDir.into())