pijul_org / pijul

libpijul ergonomics: Introduce a type libpijul::fs_representation::RepoRoot for paths which point to the root of some repo

By FlorentBecker on December 18, 2018
This patch is not signed.
91JpazeAsFXvCn58Hs2kwG6YYM6JFQUuLDyMz1fhuryad3g2jDBPgUE39W2XtnWiUZucwNfWczMQq9zbrcFxbtar
This patch is in the following branches:
latest
master
testing


1
2
3
4
5
6
7
8
9
10
11


14


17

19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61


62
63
64
65
66
67
68
69
70
71

72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96

97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191


192
193
            self.record(diff_algorithm, &mut record, branch, r, None)?;
            self.output_repository(branch, r, &partial_paths, &pending, &local_pending)?;
use backend::*;
use patch::*;
use rand;
use record::{InodeUpdate, RecordState};
use std::collections::HashSet;
use std::path::Path;
use {Error, Result};
mod apply;
pub mod find_alive;
mod repair_deleted_context;
use diff;

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
use fs_representation::RepoRoot;

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
use fs_representation::{RepoRoot, in_repo_root};

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
use output;
use output::ConflictingFile;

impl<U: Transaction, R> GenericTxn<U, R> {
    /// Return the patch id corresponding to `e`, or `internal` if `e==None`.
    pub fn internal_hash(&self, e: &Option<Hash>, internal: PatchId) -> PatchId {
        match *e {
            // `Hash::None` denotes the root patch of the repository.
            Some(Hash::None) => ROOT_PATCH_ID.clone(),
            // NOTE(review): the unwrap assumes `h` is already registered in
            // the internal table — confirm callers guarantee this.
            Some(ref h) => self.get_internal(h.as_ref()).unwrap().to_owned(),
            None => internal.clone(),
        }
    }

    /// Fetch the internal key for this external key (or `internal` if
    /// `key.patch` is `None`).
    pub fn internal_key(&self, key: &Key<Option<Hash>>, internal: PatchId) -> Key<PatchId> {
        // debug!("internal_key: {:?} {:?}", key, internal);
        Key {
            patch: self.internal_hash(&key.patch, internal),
            line: key.line.clone(),
        }
    }

    /// Like `internal_key`, but panics if `key.patch` is `None` or if the
    /// hash has no registered internal id.
    pub fn internal_key_unwrap(&self, key: &Key<Option<Hash>>) -> Key<PatchId> {
        Key {
            patch: self
                .get_internal(key.patch.as_ref().unwrap().as_ref())
                .unwrap()
                .to_owned(),
            line: key.line.clone(),
        }
    }
}

impl<'env, T: rand::Rng> MutTxn<'env, T> {
    /// Assumes all patches have been downloaded. The third argument
    /// `remote_patches` needs to contain at least all the patches we
    /// want to apply, and the fourth one `local_patches` at least all
    /// the patches the other repository doesn't have.
    ///
    /// NOTE(review): this rendering interleaves the pre- and post-patch
    /// versions of several lines (the duplicated `r` parameters, `record`
    /// calls and `output_repository` calls below); exactly one line of
    /// each pair can be kept for this to be well-formed Rust.
    pub fn apply_patches<F, P: output::ToPrefixes>(
        &mut self,
        diff_algorithm: diff::Algorithm,
        branch: &mut Branch,
        r: &Path,
        r: &Path,
        r: &RepoRoot<impl AsRef<Path>>,
        remote_patches: &[(Hash, Patch)],
        partial_paths: P,
        mut f: F,
    ) -> Result<Vec<ConflictingFile>>
    where
        F: FnMut(usize, &Hash),
    {
        // Record the pending (uncommitted) local changes first, so they can
        // be replayed on top of the newly applied remote patches.
        let (pending, local_pending) = {
            let mut record = RecordState::new();
            self.record(diff_algorithm, &mut record, branch, &r, None)?;
            self.record(diff_algorithm, &mut record, branch, r, &in_repo_root())?;
            let (changes, local) = record.finish();
            let mut p = UnsignedPatch::empty();
            p.changes = changes
                .into_iter()
                .flat_map(|x| x.into_iter())
                .map(|x| self.globalize_change(x))
                .collect();
            p.dependencies = self.dependencies(&branch, p.changes.iter());
            (p.leave_unsigned(), local)
        };

        let mut new_patches_count = 0;
        for &(ref p, ref patch) in remote_patches.iter() {
            debug!("apply_patches: {:?}", p);
            self.apply_patches_rec(branch, remote_patches, p, patch, &mut new_patches_count)?;
            f(new_patches_count, p);
        }
        debug!("{} patches applied", new_patches_count);

        if new_patches_count > 0 {
            // Only touch the working copy when something was actually applied.
            let partial_paths = partial_paths.to_prefixes(self, &branch);
            self.output_changes_file(&branch, r)?;
            debug!("output_repository");
            self.output_partials(branch.name.as_str(), &partial_paths)?;
            self.output_repository(branch, &r, &partial_paths, &pending, &local_pending)?;
            self.output_repository(branch, r, &partial_paths, &pending, &local_pending)
        } else {
            debug!("finished apply_patches");
            Ok(Vec::new())
        }
    }

    /// Lower-level applier. This function only applies patches as
    /// found in `patches_dir`, following dependencies recursively. It
    /// outputs neither the repository nor the "changes file" of the
    /// branch, necessary to exchange patches locally or over HTTP.
    pub fn apply_patches_rec(
        &mut self,
        branch: &mut Branch,
        patches: &[(Hash, Patch)],
        patch_hash: &Hash,
        patch: &Patch,
        new_patches_count: &mut usize,
    ) -> Result<()> {
        let internal = {
            if let Some(internal) = self.get_internal(patch_hash.as_ref()) {
                if self.get_patch(&branch.patches, internal).is_some() {
                    debug!(
                        "get_patch returned {:?}",
                        self.get_patch(&branch.patches, internal)
                    );
                    None
                } else {
                    // Doesn't have patch, but the patch is known in
                    // another branch
                    Some(internal.to_owned())
                }
            } else {
                // The patch is totally new to the repository.
                let internal = self.new_internal(patch_hash.as_ref());
                Some(internal)
            }
        };
        if let Some(internal) = internal {
            info!(
                "Now applying patch {:?} {:?} to branch {:?}",
                patch.name, patch_hash, branch
            );
            if patch.dependencies().is_empty() {
                info!("Patch {:?} has no dependencies", patch_hash);
            }
            for dep in patch.dependencies().iter() {
                info!("Applying dependency {:?}", dep);
                info!("dep hash {:?}", dep.to_base58());
                let is_applied = {
                    if let Some(dep_internal) = self.get_internal(dep.as_ref()) {
                        self.get_patch(&branch.patches, dep_internal).is_some()
                    } else {
                        false
                    }
                };
                if !is_applied {
                    info!("Not applied");
                    // If `patches` is sorted in topological order,
                    // this shouldn't happen, because the dependencies
                    // have been applied before.
                    if let Some(&(_, ref patch)) = patches.iter().find(|&&(ref a, _)| a == dep) {
                        self.apply_patches_rec(branch, patches, &dep, patch, new_patches_count)?;
                    } else {
                        error!("Dependency not found");
                        return Err(Error::MissingDependency(dep.to_owned()));
                    }
                } else {
                    info!("Already applied");
                }
                let dep_internal = self.get_internal(dep.as_ref()).unwrap().to_owned();
                self.put_revdep(dep_internal, internal)?;
                self.put_dep(internal, dep_internal)?;
            }

            // Sanakirja doesn't let us insert the same pair twice.
            self.register_patch(internal, patch_hash, patch)?;

            let now = branch.apply_counter;
            branch.apply_counter += 1;
            self.apply(branch, &patch, internal, now)?;

            *new_patches_count += 1;

            Ok(())
        } else {
            info!("Patch {:?} has already been applied", patch_hash);
            Ok(())
        }
    }

    /// Apply a patch from a local record: register it, give it a hash, and then apply.
    pub fn apply_local_patch(
        &mut self,
        branch: &mut Branch,
        working_copy: &Path,
        working_copy: &Path,
        working_copy: &RepoRoot<impl AsRef<Path>>,
        hash: &Hash,

























1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25






26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115





116
117
118
119
120
121
122
123
124
125
126
127



128
129
130
131
132



133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242





243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290

291
292
293
294
295
296
297

298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314




315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355

357


























































































































358
/// A directory at the root of a pijul repository.
pub struct RepoRoot<P: AsRef<Path>>{
    pub repo_root: P
    
    
    
    
    
    



impl<P: AsRef<Path>+'static> RepoRoot<P> {
    pub fn untracked_files<T: rand::Rng, Q:AsRef<Path>>(&self, txn: &MutTxn<T>, path: Q) -> impl Iterator<Item = PathBuf> + '_ {
        // not fail as the glob is hard-coded
                    return None
                let pb = p.to_path_buf();
                if let Ok(stripped) = p.strip_prefix(self.repo_root.as_ref()) {
                    if known_files.iter().any(|t| *t == stripped) {
                        return None
                    }
                Some(pb)

                return Some(RepoRoot {repo_root: p});
        repo_root: canonicalize(dir)?
//! Layout of a repository (files in `.pijul`) on the disk. This
//! module exports both high-level functions that require no knowledge
//! of the repository, and lower-level constants documented on
//! [pijul.org/documentation/repository](https://pijul.org/documentation/repository),
//! used for instance for downloading files from remote repositories.

use super::Repository;
use backend::DEFAULT_BRANCH;
use backend::{Hash, HashRef};
use backend::{MutTxn, ROOT_INODE};
use bs58;
use flate2;
use ignore::overrides::OverrideBuilder;
use ignore::WalkBuilder;
use patch::{Patch, PatchHeader};
use rand::distributions::Alphanumeric;
use rand::Rng;
use std;
use std::fs::canonicalize;
use std::fs::{create_dir_all, metadata, File};
use std::io::{BufReader, Read, Write};
use std::path::{Path, PathBuf};
use {Result, Error};
use std::ffi::OsStr;

/// Name of the root directory, i.e. `.pijul`.
pub const PIJUL_DIR_NAME: &'static str = ".pijul";

/// Concatenate the parameter with `PIJUL_DIR_NAME`.
pub fn repo_dir<P: AsRef<Path>>(p: P) -> PathBuf {
    p.as_ref().join(PIJUL_DIR_NAME)
/// Given a `Path`-like type `P`, a `RepoPath<P>` is a path relative to the
/// root of some repository (a `fs_representation::RepoRoot`).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
pub struct RepoPath<P: ?Sized>(pub P);

/// The `RepoPath` of the repository root itself: the empty relative path.
pub fn in_repo_root() -> RepoPath<&'static Path> {
    RepoPath(Path::new(""))
}

impl RepoPath<std::path::PathBuf> {
    /// Append one component (delegates to `PathBuf::push`).
    pub fn push(&mut self, x: &str) {
        self.0.push(x)
    }

    /// Remove the last component; `false` if the path was already empty.
    pub fn pop(&mut self) -> bool {
        self.0.pop()
    }

    /// The empty in-repository path (the repository root).
    pub fn new() -> Self {
        RepoPath(PathBuf::new())
    }

    /// Borrow this owned path as a `RepoPath<&Path>`.
    pub fn as_ref(&self) -> RepoPath<&Path> {
        RepoPath(self.0.as_ref())
    }

    /// Replace the final component (delegates to `PathBuf::set_file_name`).
    pub fn set_file_name(&mut self, filename: &OsStr) {
        self.0.set_file_name(filename)
    }

    /// Wrap a `String` as an in-repository path. Assumes the string is
    /// already relative to the repository root — TODO confirm at callers.
    pub fn from_string(path: String) -> Self {
        RepoPath(PathBuf::from(path))
    }
}

    
// Consolidated: the original had three separate `impl<P: AsRef<Path>>
// RepoPath<P>` blocks with identical bounds; one impl block is clearer.
impl<P: AsRef<Path>> RepoPath<P> {
    /// View this in-repository path as a plain `&Path` (still relative
    /// to the repository root).
    pub fn as_path(&self) -> &std::path::Path {
        self.0.as_ref()
    }

    /// The parent directory, or `None` for the repository root.
    pub fn parent(&self) -> Option<RepoPath<&std::path::Path>> {
        self.as_path().parent().map(RepoPath)
    }

    /// The final component of the path, if any.
    pub fn file_name(&self) -> Option<&OsStr> {
        self.as_path().file_name()
    }

    /// Split into `(parent, file_name)`; `None` for the repository root.
    pub fn split(&self) -> Option<(RepoPath<&std::path::Path>, &OsStr)> {
        self.parent().map(|p| {(p, self.file_name().expect("file_name and parent should be consistent"))})
    }

    /// Iterate over the path's components.
    pub fn components(&self) -> std::path::Components {
        self.as_path().components()
    }

    /// An owned `PathBuf` copy of the relative path.
    pub fn to_path_buf(&self) -> PathBuf {
        self.as_path().to_path_buf()
    }

    /// A displayable form of the path, for user-facing output.
    pub fn display(&self) -> std::path::Display {
        self.as_path().display()
    }

    /// An owned copy of this in-repository path.
    pub fn to_owned(&self) -> RepoPath<PathBuf> {
        RepoPath(self.0.as_ref().to_path_buf())
    }

    /// Append `path`, yielding an owned `RepoPath`.
    pub fn join(&self, path: &Path) -> RepoPath<PathBuf> {
        // TODO: check that the joined path is indeed inside the repo
        let joined_path = self.as_path().join(path);
        RepoPath(joined_path)
    }

    /// `true` iff this is the repository root (the empty relative path).
    pub fn empty(&self) -> bool {
        self.as_path() == Path::new("")
    }
}

/// A directory at the root of a pijul repository.
#[derive(Clone, Copy, Debug)]
pub struct RepoRoot<P: AsRef<Path>> {
    /// Path of the repository root, i.e. the directory containing `.pijul`.
    pub repo_root: P,
}

/// Directory where the patches are. `patches_dir("/a/b") = "/a/b/.pijul/patches"`.
pub fn patches_dir<P: AsRef<Path>>(p: P) -> PathBuf {
    // Tail expression instead of `return …;` (idiomatic Rust).
    p.as_ref().join(PIJUL_DIR_NAME).join("patches")
}
}
/// Name of the root directory, i.e. `.pijul`.
pub const PIJUL_DIR_NAME: &'static str = ".pijul";

/// Basename of the changes file for branch `b`. This file is only
/// used when pulling/pushing over HTTP (where calling remote programs
/// to list patches is impossible).
///
/// The changes file contains the same information as the one returned by `pijul log --hash-only`.
pub fn branch_changes_base_path(b: &str) -> String {
    format!("changes.{}", bs58::encode(b.as_bytes()).into_string())
}

/// Changes file from the repository root and branch name.
pub fn branch_changes_file(p: &Path, b: &str) -> PathBuf {
    p.join(PIJUL_DIR_NAME).join(branch_changes_base_path(b))
/// Basename of the patch file for the given patch hash (base58 + `.gz`).
pub fn patch_file_name(hash: HashRef) -> String {
    format!("{}.gz", hash.to_base58())
}

/// The meta file, where user preferences are stored.
pub fn meta_file(p: &Path) -> PathBuf {
    p.join(PIJUL_DIR_NAME).join("meta.toml")
impl<P: AsRef<Path>> RepoRoot<P> {
    /// The subdirectory of `self` with pijul's metadata, i.e. `.pijul`.
    pub fn repo_dir(&self) -> PathBuf {
        self.repo_root.as_ref().join(PIJUL_DIR_NAME)
    }

    /// Directory where the pristine of `self` is.
    /// For instance, if the repository is in `/a/b`,
    /// `self.pristine_dir()` is `/a/b/.pijul/pristine`.
    pub fn pristine_dir(&self) -> PathBuf {
        self.repo_dir().join("pristine")
    }

    /// Directory where the patches are. For instance, if the repository
    /// is in `/a/b`, `self.patches_dir()` is `/a/b/.pijul/patches`.
    pub fn patches_dir(&self) -> PathBuf {
        self.repo_dir().join("patches")
    }

    /// The location of the changes file for the branch `b`.
    ///
    /// The changes file contains the same information as the one returned by `pijul log --hash-only`.
    pub fn branch_changes_file(&self, b: &str) -> PathBuf {
        self.repo_dir().join(branch_changes_base_path(b))
    }

    /// The meta file, where user preferences are stored.
    pub fn meta_file(&self) -> PathBuf {
        self.repo_dir().join("meta.toml")
    }

    /// The id file is used for remote operations, to identify a
    /// repository and save bandwidth when the remote state is partially
    /// known.
    pub fn id_file(&self) -> PathBuf {
        self.repo_dir().join("id")
    }

    /// Read a complete patch.
    pub fn read_patch(&self, hash: HashRef) -> Result<Patch> {
        let patch_dir = self.patches_dir();
        let path = patch_dir.join(&patch_file_name(hash));
        let f = File::open(path)?;
        let mut f = BufReader::new(f);
        let (_, _, patch) = Patch::from_reader_compressed(&mut f)?;
        Ok(patch)
    }

    /// Read a patch, but without the "changes" part, i.e. the actual
    /// contents of the patch.
    pub fn read_patch_nochanges(&self, hash: HashRef) -> Result<PatchHeader> {
        let patch_dir = self.patches_dir();
        let path = patch_dir.join(&patch_file_name(hash));
        let f = File::open(path)?;
        let mut f = flate2::bufread::GzDecoder::new(BufReader::new(f));
        Ok(PatchHeader::from_reader_nochanges(&mut f)?)
    }

    /// Read only the dependencies of a patch, without decoding the
    /// actual contents of the patch.
    pub fn read_dependencies(&self, hash: HashRef) -> Result<Vec<Hash>> {
        let patch_dir = self.patches_dir();
        let path = patch_dir.join(&patch_file_name(hash));
        let f = File::open(path)?;
        let mut f = flate2::bufread::GzDecoder::new(BufReader::new(f));
        Ok(Patch::read_dependencies(&mut f)?)
    }

    /// The ignore file that is _not_ tracked by pijul.
    pub fn local_ignore_file(&self) -> PathBuf {
        self.repo_dir().join("local").join("ignore")
    }

    /// Read the current branch name from `.pijul/current_branch`,
    /// falling back to the default branch when the file is absent.
    pub fn get_current_branch(&self) -> Result<String> {
        let mut path = self.repo_dir();
        path.push("current_branch");
        if let Ok(mut f) = File::open(&path) {
            let mut s = String::new();
            f.read_to_string(&mut s)?;
            Ok(s.trim().to_string())
        } else {
            Ok(DEFAULT_BRANCH.to_string())
        }
    }

    /// Persist `branch` (trimmed, newline-terminated) as the current branch.
    pub fn set_current_branch(&self, branch: &str) -> Result<()> {
        let mut path = self.repo_dir();
        path.push("current_branch");
        let mut f = File::create(&path)?;
        f.write_all(branch.trim().as_ref())?;
        f.write_all(b"\n")?;
        Ok(())
    }

    /// Open the pristine database of this repository.
    pub fn open_repo(&self, increase: Option<u64>) -> Result<Repository> {
        Repository::open(self.pristine_dir(), increase)
    }

    /// Express `path` relative to the repository root, or fail with
    /// `Error::FileNotInRepo` if `path` is not inside the repository.
    pub fn relativize<'a>(&self, path: &'a Path) -> Result<RepoPath<&'a Path>> {
        match path.strip_prefix(&self.repo_root) {
            Ok(p) => Ok(RepoPath(p)),
            Err(_) => Err(Error::FileNotInRepo(path.to_path_buf()))
        }
    }

    /// Inverse of `relativize`: turn an in-repository path into a full
    /// path under the repository root.
    pub fn absolutize<'a>(&self, path: &RepoPath<impl AsRef<Path>>) -> PathBuf {
        self.repo_root.as_ref().join(path.as_path())
    }
}

/// The id file is used for remote operations, to identify a
/// repository and save bandwidth when the remote state is partially
/// known.
pub fn id_file(p: &Path) -> PathBuf {
    p.join(PIJUL_DIR_NAME).join("id")
impl<P: AsRef<Path> + 'static> RepoRoot<P> {
    /// Walk the working copy under `path` and yield every file that is
    /// neither tracked in the pristine nor ignored (via `.pijulignore`
    /// or `.pijul/local/ignore`), as paths relative to the repo root.
    pub fn untracked_files<T: rand::Rng, Q: AsRef<Path>>(
        &self,
        txn: &MutTxn<T>,
        path: Q,
    ) -> impl Iterator<Item = RepoPath<PathBuf>> + '_ {
        let known_files = txn.list_files(ROOT_INODE).unwrap_or_else(|_| vec![]);

        // Always exclude the `.pijul` directory itself from the walk.
        let o = OverrideBuilder::new(self.repo_root.as_ref())
            .add("!.pijul")
            .unwrap()
            .build()
            .unwrap(); // we can be pretty confident these two calls will
                       // not fail as the glob is hard-coded

        let mut w = WalkBuilder::new(path.as_ref());
        w.git_ignore(false)
            .git_exclude(false)
            .git_global(false)
            .hidden(false)
            .add_custom_ignore_filename(".pijulignore");

        // add .pijul/local/ignore
        w.add_ignore(self.local_ignore_file());
        w.overrides(o);

        w.build().filter_map(move |f| {
            if let Ok(f) = f {
                let p = f.path();
                // The root itself is never reported.
                if p == self.repo_root.as_ref() {
                    return None;
                }

                let p_in_repo = self.relativize(&p).unwrap();
                // ^- cannot fail since p must be within repo_root.
                if known_files.iter().any(|t| t.as_ref() == p_in_repo) {
                    return None
                }
                Some(p_in_repo.to_owned())
            } else {
                // Walk errors (e.g. unreadable entries) are silently skipped.
                None
            }
        })
    }
}

/// Find the repository root from one of its descendant
/// directories. Return `None` iff `dir` is not in a repository.
pub fn find_repo_root<'a>(dir: &'a Path) -> Option<PathBuf> {
pub fn find_repo_root<'a>(dir: &'a Path) -> Option<RepoRoot<PathBuf>> {
    let mut p = dir.to_path_buf();
    loop {
        p.push(PIJUL_DIR_NAME);
        match metadata(&p) {
            Ok(ref attr) if attr.is_dir() => {
                p.pop();
                return Some(p);
                return Some(RepoRoot { repo_root: p });
            }
            _ => {}
        }
        p.pop();

        if !p.pop() {
            return None;
        }
    }
}

#[doc(hidden)]
pub const ID_LENGTH: usize = 100;

/// Create a repository. `dir` must be the repository root (a
/// `".pijul"` directory will be created in `dir`).
pub fn create<R: Rng>(dir: &Path, mut rng: R) -> std::io::Result<()> {
    debug!("create: {:?}", dir);
    debug!("create: {:?}", dir);
    let mut repo_dir = repo_dir(dir);
pub fn create<R: Rng>(dir: &Path, mut rng: R) -> std::io::Result<RepoRoot<PathBuf>> {
    let r = RepoRoot {
        repo_root: canonicalize(dir)?,
    };

    let mut repo_dir = r.repo_dir();
    create_dir_all(&repo_dir)?;

    repo_dir.push("pristine");
    create_dir_all(&repo_dir)?;
    repo_dir.pop();

    repo_dir.push("patches");
    create_dir_all(&repo_dir)?;
    repo_dir.pop();

    repo_dir.push("id");
    let mut f = std::fs::File::create(&repo_dir)?;
    let mut x = String::new();
    x.extend(rng.sample_iter(&Alphanumeric).take(ID_LENGTH));
    f.write_all(x.as_bytes())?;
    repo_dir.pop();

    repo_dir.push("version");
    let mut f = std::fs::File::create(&repo_dir)?;
    writeln!(f, "{}", env!("CARGO_PKG_VERSION"))?;
    repo_dir.pop();

    repo_dir.push("local");
    create_dir_all(&repo_dir)?;
    repo_dir.pop();

    repo_dir.push("hooks");
    create_dir_all(&repo_dir)?;
    repo_dir.pop();

    repo_dir.push("local");
    repo_dir.push("ignore");
    std::fs::File::create(&repo_dir)?;
    repo_dir.pop();
    repo_dir.pop();

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

    Ok(())
}

/// Basename of the patch file for the given patch hash (base58 + `.gz`).
pub fn patch_file_name(hash: HashRef) -> String {
    format!("{}.gz", hash.to_base58())
}

/// Read a complete patch from the `patches` directory of `repo`.
pub fn read_patch(repo: &Path, hash: HashRef) -> Result<Patch> {
    let patch_dir = patches_dir(repo);
    let path = patch_dir.join(&patch_file_name(hash));
    debug!("read_patch, reading from {:?}", path);
    let f = File::open(path)?;
    let mut f = BufReader::new(f);
    let (_, _, patch) = Patch::from_reader_compressed(&mut f)?;
    Ok(patch)
}

/// Read a patch, but without the "changes" part, i.e. the actual
/// contents of the patch. Only the gzipped header is decoded.
pub fn read_patch_nochanges(repo: &Path, hash: HashRef) -> Result<PatchHeader> {
    let patch_dir = patches_dir(repo);
    let path = patch_dir.join(&patch_file_name(hash));
    debug!("read_patch_nochanges, reading from {:?}", path);
    let f = File::open(path)?;
    let mut f = flate2::bufread::GzDecoder::new(BufReader::new(f));
    Ok(PatchHeader::from_reader_nochanges(&mut f)?)
}

/// Read only the dependencies of a patch, without decoding the actual
/// contents of the patch. (Doc and log message previously copy-pasted
/// from `read_patch_nochanges`.)
pub fn read_dependencies(repo: &Path, hash: HashRef) -> Result<Vec<Hash>> {
    let patch_dir = patches_dir(repo);
    let path = patch_dir.join(&patch_file_name(hash));
    debug!("read_dependencies, reading from {:?}", path);
    let f = File::open(path)?;
    let mut f = flate2::bufread::GzDecoder::new(BufReader::new(f));
    Ok(Patch::read_dependencies(&mut f)?)
}

/// Path of the untracked ignore file: `<repo_root>/.pijul/local/ignore`.
pub fn ignore_file(repo_root: &Path) -> PathBuf {
    let mut p = repo_root.join(PIJUL_DIR_NAME);
    p.push("local");
    p.push("ignore");
    p
}

pub fn untracked_files<T: rand::Rng, P:AsRef<Path>+'static, Q:AsRef<Path>>(txn: &MutTxn<T>, repo_root: P, path: Q) -> impl Iterator<Item = PathBuf> {
    let known_files = txn.list_files(ROOT_INODE).unwrap_or_else(|_| vec![]);
    let known_files = txn.list_files(ROOT_INODE).unwrap_or_else(|_| vec![]);


    let o = OverrideBuilder::new(repo_root.as_ref())
        .add("!.pijul")
        .add("!.pijul")
        .unwrap()
        .build()
        .unwrap(); // we can be pretty confident these two calls will
                   // not fail as the glob is hard-coded


    let mut w = WalkBuilder::new(path.as_ref());
    w.git_ignore(false)
    w.git_ignore(false)
        .git_exclude(false)
        .git_global(false)
        .hidden(false)
        .add_custom_ignore_filename(".pijulignore");


    // add .pijul/local/ignore
    w.add_ignore(ignore_file(repo_root.as_ref()));
    w.overrides(o);
    w.overrides(o);
    w.overrides(o);

    w.build().filter_map(move |f| {
        if let Ok(f) = f {
        if let Ok(f) = f {
            let p = f.path();
            if p == repo_root.as_ref() {
                return None
            }
            }
            let pb = p.to_path_buf();

            if let Ok(stripped) = p.strip_prefix(repo_root.as_ref()) {
                if known_files.iter().any(|t| *t == stripped) {
                if known_files.iter().any(|t| *t == stripped) {
                    return None
                }
                }
            }
            Some(pb)
        } else {
            None
        }
        }
        }
    })
}
}

/// Read the current branch name from `.pijul/current_branch` under the
/// canonicalized `root`, falling back to the default branch when the
/// file does not exist.
pub fn get_current_branch(root: &Path) -> Result<String> {
    debug!("path: {:?}", root);
    let mut path = repo_dir(&canonicalize(root)?);
    path.push("current_branch");
    if let Ok(mut f) = File::open(&path) {
        let mut s = String::new();
        f.read_to_string(&mut s)?;
        Ok(s.trim().to_string())
    } else {
        Ok(DEFAULT_BRANCH.to_string())
    }
}

pub fn set_current_branch(root: &Path, branch: &str) -> Result<()> {
    debug!("set current branch: root={:?}, branch={:?}", root, branch);
    let mut path = repo_dir(&canonicalize(root)?);
    path.push("current_branch");
    let mut f = File::create(&path)?;
    f.write_all(branch.trim().as_ref())?;
    f.write_all(b"\n")?;
    Ok(())
    Ok(r)



1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235



238


241
242

244
245
246
247
248
249
250
251
252
253
254

255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487

488
489
490
491
492
493
494

495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546


547
548
549
550
551
552
553
554
555
556
557


558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594

595
596
597
598
599
600
601
602
603
604


605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624

625
626
    pub fn output_changes_file<P: AsRef<Path>>(&mut self, branch: &Branch, fs_repo: &RepoRoot<P>) -> Result<()> {
            fs_repo.branch_changes_file(branch.name.as_str());
    target: & fs_representation::RepoRoot<impl AsRef<Path>>,
//! This crate contains the core API to access Pijul repositories.
//!
//! The key object is a `Repository`, on which `Txn` (immutable
//! transactions) and `MutTxn` (mutable transactions) can be started,
//! to perform a variety of operations.
//!
//! Another important object is a `Patch`, which encodes two different pieces of information:
//!
//! - Information about deleted and inserted lines between two versions of a file.
//!
//! - Information about file moves, additions and deletions.
//!
//! The standard layout of a repository is defined in module
//! `fs_representation`, and mainly consists of a directory called
//! `.pijul` at the root of the repository, containing:
//!
//! - a directory called `pristine`, containing a Sanakirja database
//! storing most of the repository information.
//!
//! - a directory called `patches`, actually containing the patches,
//! where each patch is a gzipped compression of the bincode encoding
//! of the `patch::Patch` type.
//!
//! At the moment, users of this library, such as the Pijul
//! command-line tool, may use other files in the `.pijul` directory,
//! such as user preferences, or information about remote branches and
//! repositories.
#![recursion_limit = "128"]
#[macro_use]
extern crate bitflags;
extern crate chrono;
#[macro_use]
extern crate log;

extern crate base64;
extern crate bincode;
extern crate bs58;
extern crate byteorder;
extern crate flate2;
extern crate hex;
extern crate ignore;
extern crate openssl;
extern crate rand;
extern crate sanakirja;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate diffs;
extern crate failure;
extern crate sequoia_openpgp;
extern crate serde_json;
extern crate tempdir;
extern crate toml;

pub use sanakirja::Transaction;
use std::collections::HashSet;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;

/// The error type of this library, wrapping both errors from
/// underlying libraries (I/O, Sanakirja, serializers, OpenSSL, …)
/// and repository-level failure conditions.
#[derive(Debug)]
pub enum Error {
    /// Wrapped `std::io::Error`.
    IO(std::io::Error),
    /// Error from the Sanakirja database backend.
    Sanakirja(sanakirja::Error),
    /// Bincode (de)serialization error (patches are bincode-encoded).
    Bincode(bincode::Error),
    /// Invalid UTF-8 was encountered.
    Utf8(std::str::Utf8Error),
    /// JSON (de)serialization error.
    Serde(serde_json::Error),
    /// A single OpenSSL error.
    OpenSSL(openssl::error::Error),
    /// An OpenSSL error stack.
    OpenSSLStack(openssl::error::ErrorStack),
    /// Base58 decoding failed (hashes are base58-encoded).
    Base58Decode(bs58::decode::DecodeError),
    /// Generic `failure`-based error.
    Failure(failure::Error),
    /// The file is already tracked.
    AlreadyAdded,
    /// The given file is not tracked in the repository.
    FileNotInRepo(PathBuf),
    /// One of the database tables is missing.
    NoDb(backend::Root),
    /// A hash did not match its expected value.
    WrongHash,
    /// Unexpected end of file.
    EOF,
    /// A patch signature failed to verify.
    WrongPatchSignature,
    /// Attempted to create a branch whose name is already taken.
    BranchNameAlreadyExists(String),
    /// Wrong file header (possible branch corruption).
    WrongFileHeader(Key<PatchId>),
    /// A file name doesn't have exactly one child.
    FileNameCount(Key<PatchId>),
    /// A dependency of a patch is missing.
    MissingDependency(Hash),
    /// The patch is not on the expected branch.
    PatchNotOnBranch(PatchId),
    /// Files or directories named `.pijul` cannot be added.
    CannotAddDotPijul,
    /// The signing key is encrypted and could not be used.
    KeyIsEncrypted,
}

// `From` conversions so `?` can lift errors from the underlying
// libraries into `Error`.

impl std::convert::From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::IO(e)
    }
}

impl std::convert::From<failure::Error> for Error {
    fn from(e: failure::Error) -> Self {
        Error::Failure(e)
    }
}

impl std::convert::From<sanakirja::Error> for Error {
    fn from(e: sanakirja::Error) -> Self {
        Error::Sanakirja(e)
    }
}

impl std::convert::From<bincode::Error> for Error {
    fn from(e: bincode::Error) -> Self {
        Error::Bincode(e)
    }
}

impl std::convert::From<serde_json::Error> for Error {
    fn from(e: serde_json::Error) -> Self {
        Error::Serde(e)
    }
}

impl std::convert::From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Self {
        Error::Utf8(e)
    }
}

impl std::convert::From<openssl::error::ErrorStack> for Error {
    fn from(e: openssl::error::ErrorStack) -> Self {
        Error::OpenSSLStack(e)
    }
}

impl std::convert::From<bs58::decode::DecodeError> for Error {
    fn from(e: bs58::decode::DecodeError) -> Self {
        Error::Base58Decode(e)
    }
}

/// The result type used throughout this crate.
pub type Result<A> = std::result::Result<A, Error>;

impl std::fmt::Display for Error {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            Error::IO(ref e) => e.fmt(fmt),
            Error::Sanakirja(ref e) => e.fmt(fmt),
            Error::Bincode(ref e) => e.fmt(fmt),
            Error::Utf8(ref e) => e.fmt(fmt),
            Error::Serde(ref e) => e.fmt(fmt),
            Error::OpenSSL(ref e) => e.fmt(fmt),
            Error::OpenSSLStack(ref e) => e.fmt(fmt),
            Error::Base58Decode(ref e) => e.fmt(fmt),
            Error::Failure(ref e) => e.fmt(fmt),
            Error::AlreadyAdded => write!(fmt, "Already added"),
            Error::FileNotInRepo(ref file) => write!(fmt, "File {:?} not tracked", file),
            Error::NoDb(ref e) => write!(fmt, "Table missing: {:?}", e),
            Error::WrongHash => write!(fmt, "Wrong hash"),
            Error::EOF => write!(fmt, "EOF"),
            Error::WrongPatchSignature => write!(fmt, "Wrong patch signature"),
            Error::BranchNameAlreadyExists(ref name) => {
                write!(fmt, "Branch {:?} already exists", name)
            }
            Error::WrongFileHeader(ref h) => write!(
                fmt,
                "Wrong file header (possible branch corruption): {:?}",
                h
            ),
            Error::FileNameCount(ref f) => {
                write!(fmt, "Name {:?} doesn't have exactly one child", f)
            }
            Error::MissingDependency(ref f) => write!(fmt, "Missing dependency: {:?}", f),
            Error::PatchNotOnBranch(ref f) => {
                write!(fmt, "The patch is not on this branch {:?}", f)
            }
            Error::CannotAddDotPijul => write!(fmt, "Cannot add a file or directory name .pijul"),
            Error::KeyIsEncrypted => write!(fmt, "Key is encrypted"),
        }
    }
}

impl std::error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::IO(ref e) => e.description(),
            Error::Sanakirja(ref e) => e.description(),
            Error::Bincode(ref e) => e.description(),
            Error::Utf8(ref e) => e.description(),
            Error::Serde(ref e) => e.description(),
            Error::OpenSSL(ref e) => e.description(),
            Error::OpenSSLStack(ref e) => e.description(),
            Error::Base58Decode(ref e) => e.description(),
            Error::Failure(ref e) => e.name().unwrap_or("Unknown Failure"),
            Error::AlreadyAdded => "Already added",
            Error::FileNotInRepo(_) => "File not tracked",
            Error::NoDb(_) => "One of the tables is missing",
            Error::WrongHash => "Wrong hash",
            Error::EOF => "EOF",
            Error::WrongPatchSignature => "Wrong patch signature",
            Error::BranchNameAlreadyExists(_) => "Branch name already exists",
            Error::WrongFileHeader(_) => "Wrong file header (possible branch corruption)",
            Error::FileNameCount(_) => "A file name doesn't have exactly one child",
            Error::MissingDependency(_) => "Missing dependency",
            Error::PatchNotOnBranch(_) => "The patch is not on this branch",
            Error::CannotAddDotPijul => "Cannot add a file or directory name .pijul",
            Error::KeyIsEncrypted => "Key is encrypted",
        }
    }
}

impl Error {
    pub fn lacks_space(&self) -> bool {
        match *self {
            Error::Sanakirja(sanakirja::Error::NotEnoughSpace) => true,
            _ => false,
        }
    }
}

#[macro_use]
mod backend;
mod file_operations;
pub mod fs_representation;

pub mod patch;
pub mod status;

pub mod apply;
mod conflict;
mod diff;
pub mod graph;
mod output;
mod record;
mod unrecord;

pub use backend::{
    ApplyTimestamp, Branch, Edge, EdgeFlags, FileId, FileMetadata, FileStatus, GenericTxn, Hash,
    HashRef, Inode, Key, LineId, MutTxn, OwnedFileId, PatchId, Repository, SmallStr, SmallString,
    Txn, DEFAULT_BRANCH, ROOT_INODE, ROOT_KEY,
};

use fs_representation::ID_LENGTH;

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
use fs_representation::{RepoRoot, ID_LENGTH};

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
pub use fs_representation::{RepoRoot, RepoPath};
use fs_representation::{ID_LENGTH};

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
pub use output::{ConflictingFile, Prefixes, ToPrefixes};
pub use patch::{Patch, PatchHeader};
use rand::distributions::Alphanumeric;
use rand::Rng;
pub use record::{InodeUpdate, RecordState};
pub use sanakirja::value::Value;
use std::io::Read;

pub use diff::Algorithm as DiffAlgorithm;

impl<'env, T: rand::Rng> backend::MutTxn<'env, T> {
    pub fn output_changes_file<P: AsRef<Path>>(&mut self, branch: &Branch, path: P) -> Result<()> {
    pub fn output_changes_file<P: AsRef<Path>>(
        &mut self,
        branch: &Branch,
        fs_repo: &RepoRoot<P>,
    ) -> Result<()> {
        let changes_file = fs_repo.branch_changes_file(branch.name.as_str());
        let mut branch_id: Vec<u8> = vec![b'\n'; ID_LENGTH + 1];
        {
            if let Ok(mut file) = std::fs::File::open(&changes_file) {
                file.read_exact(&mut branch_id)?;
            }
        }
        let mut branch_id = if let Ok(s) = String::from_utf8(branch_id) {
            s
        } else {
            "\n".to_string()
        };
        if branch_id.as_bytes()[0] == b'\n' {
            branch_id.truncate(0);
            let mut rng = rand::thread_rng();
            branch_id.extend(rng.sample_iter(&Alphanumeric).take(ID_LENGTH));
            branch_id.push('\n');
        }

        let mut file = std::fs::File::create(&changes_file)?;
        file.write_all(&branch_id.as_bytes())?;
        for (s, hash) in self.iter_applied(&branch, None) {
            let hash_ext = self.get_external(hash).unwrap();
            writeln!(file, "{}:{}", hash_ext.to_base58(), s)?
        }
        Ok(())
    }

    pub fn branch_patches(&mut self, branch: &Branch) -> HashSet<(backend::Hash, ApplyTimestamp)> {
        self.iter_patches(branch, None)
            .map(|(patch, time)| (self.external_hash(patch).to_owned(), time))
            .collect()
    }

    pub fn fork(&mut self, branch: &Branch, new_name: &str) -> Result<Branch> {
        if branch.name.as_str() == new_name {
            Err(Error::BranchNameAlreadyExists(new_name.to_string()))
        } else {
            Ok(Branch {
                db: self.txn.fork(&mut self.rng, &branch.db)?,
                patches: self.txn.fork(&mut self.rng, &branch.patches)?,
                revpatches: self.txn.fork(&mut self.rng, &branch.revpatches)?,
                name: SmallString::from_str(new_name),
                apply_counter: branch.apply_counter,
            })
        }
    }
    
    pub fn add_file<P: AsRef<Path>>(&mut self, path: &RepoPath<P>, is_dir: bool) -> Result<()> {
        self.add_inode(None, path, is_dir)
    }

    fn file_nodes_fold_<A, F: FnMut(A, Key<PatchId>) -> A>(
        &self,
        branch: &Branch,
        root: Key<PatchId>,
        level: usize,
        mut init: A,
        f: &mut F,
    ) -> Result<A> {
        for v in self
            .iter_adjacent(&branch, root, EdgeFlags::empty(), EdgeFlags::all())
            .take_while(|v| {
                v.flag.contains(EdgeFlags::FOLDER_EDGE) && !v.flag.contains(EdgeFlags::PARENT_EDGE)
            })
        {
            debug!("file_nodes_fold_: {:?} {:?}", root, v);
            if level & 1 == 0 && level > 0 {
                init = f(init, root)
            }
            init = self.file_nodes_fold_(branch, v.dest, level + 1, init, f)?
        }
        Ok(init)
    }

    pub fn file_nodes_fold<A, F: FnMut(A, Key<PatchId>) -> A>(
        &self,
        branch: &Branch,
        init: A,
        mut f: F,
    ) -> Result<A> {
        self.file_nodes_fold_(branch, ROOT_KEY, 0, init, &mut f)
    }
}

impl<T: Transaction, R> backend::GenericTxn<T, R> {
    /// Tells whether a `key` is alive in `branch`, i.e. is either the
    /// root, or all its ingoing edges are alive.
    pub fn is_alive(&self, branch: &Branch, key: Key<PatchId>) -> bool {
        debug!("is_alive {:?}?", key);
        let mut alive = key.is_root();
        let e = Edge::zero(EdgeFlags::PARENT_EDGE);
        // Walk edges starting at `key`, stopping as soon as the
        // iterator moves past `key`.
        for (k, v) in self.iter_nodes(&branch, Some((key, Some(e)))) {
            if k != key {
                break;
            }
            // An edge that is neither deleted nor pseudo makes the
            // key alive.
            alive = alive
                || (!v.flag.contains(EdgeFlags::DELETED_EDGE)
                    && !v.flag.contains(EdgeFlags::PSEUDO_EDGE))
        }
        alive
    }

    /// Tells whether a `key` is alive or zombie in `branch`, i.e. is
    /// either the root, or has at least one of its incoming alive
    /// edge is alive.
    pub fn is_alive_or_zombie(&self, branch: &Branch, key: Key<PatchId>) -> bool {
        debug!("is_alive_or_zombie {:?}?", key);
        if key == ROOT_KEY {
            return true;
        }
        let e = Edge::zero(EdgeFlags::PARENT_EDGE);
        for (k, v) in self.iter_nodes(&branch, Some((key, Some(e)))) {
            if k != key {
                break;
            }
            debug!("{:?}", v);
            // A single non-deleted parent edge suffices.
            if v.flag.contains(EdgeFlags::PARENT_EDGE) && !v.flag.contains(EdgeFlags::DELETED_EDGE)
            {
                return true;
            }
        }
        false
    }

    /// Test whether `key` has a neighbor with flag `flag0`. If
    /// `include_pseudo`, this includes pseudo-neighbors.
    ///
    /// NOTE(review): the doc above is stale — the actual parameters
    /// are flag bounds `min`/`max`, and only the first edge at or
    /// above `min` is examined.
    pub fn has_edge(
        &self,
        branch: &Branch,
        key: Key<PatchId>,
        min: EdgeFlags,
        max: EdgeFlags,
    ) -> bool {
        let e = Edge::zero(min);
        if let Some((k, v)) = self.iter_nodes(&branch, Some((key, Some(e)))).next() {
            debug!("has_edge {:?}", v.flag);
            k == key && (v.flag <= max)
        } else {
            false
        }
    }

    /// Tells which paths (of folder nodes) a key is in.
    pub fn get_file<'a>(&'a self, branch: &Branch, key: Key<PatchId>) -> Vec<Key<PatchId>> {
        // Depth-first walk towards the root along parent edges,
        // collecting keys that hang off a parent folder edge.
        let mut stack = vec![key.to_owned()];
        let mut seen = HashSet::new();
        let mut names = Vec::new();
        loop {
            match stack.pop() {
                None => break,
                Some(key) if !seen.contains(&key) => {
                    debug!("key {:?}, None", key);
                    seen.insert(key.clone());

                    // Debug-only dump of all edges of `key`.
                    for v in self.iter_adjacent(branch, key, EdgeFlags::empty(), EdgeFlags::all()) {
                        debug!("all_edges: {:?}", v);
                    }
                    for v in
                        self.iter_adjacent(branch, key, EdgeFlags::PARENT_EDGE, EdgeFlags::all())
                    {
                        debug!("get_file {:?}", v);
                        if v.flag | EdgeFlags::PSEUDO_EDGE
                            == EdgeFlags::PARENT_EDGE | EdgeFlags::PSEUDO_EDGE
                        {
                            // Plain (possibly pseudo) parent edge:
                            // keep climbing.
                            debug!("push!");
                            stack.push(v.dest.clone())
                        } else if v
                            .flag
                            .contains(EdgeFlags::PARENT_EDGE | EdgeFlags::FOLDER_EDGE)
                        {
                            // Parent folder edge: record this key.
                            names.push(key);
                        }
                    }
                }
                // Already-seen keys are skipped to avoid cycles.
                _ => {}
            }
        }
        debug!("get_file returning {:?}", names);
        names
    }

    /// Returns, for `key`, the file keys and the path components
    /// (root-first) under which they are reachable from the root.
    pub fn get_file_names<'a>(
        &'a self,
        branch: &Branch,
        key: Key<PatchId>,
    ) -> Vec<(Key<PatchId>, Vec<&'a str>)> {
        let mut names = vec![(key, Vec::new())];
        debug!("inode: {:?}", names);
        // Go back to the root.
        let mut next_names = Vec::new();
        let mut only_roots = false;
        let mut inodes = HashSet::new();
        while !only_roots {
            next_names.clear();
            only_roots = true;
            for (inode, names) in names.drain(..) {
                if !inodes.contains(&inode) {
                    inodes.insert(inode.clone());

                    if inode != ROOT_KEY {
                        only_roots = false;
                    }
                    let names_ = self.file_names(branch, inode);
                    if names_.is_empty() {
                        next_names.push((inode, names));
                        break;
                    } else {
                        debug!("names_ = {:?}", names_);
                        // Fan out: one entry per name under which
                        // `inode` is known to its parents.
                        for (inode_, _, base) in names_ {
                            let mut names = names.clone();
                            names.push(base);
                            next_names.push((inode_, names))
                        }
                    }
                }
            }
            std::mem::swap(&mut names, &mut next_names)
        }
        debug!("end: {:?}", names);
        // Components were collected leaf-to-root; reverse in place.
        for &mut (_, ref mut name) in names.iter_mut() {
            name.reverse()
        }
        names
    }
}

fn make_remote<'a, I: Iterator<Item = &'a Hash>>(
    target: &Path,
    target: &fs_representation::RepoRoot<impl AsRef<Path>>,
    remote: I,
) -> Result<(Vec<(Hash, Patch)>, usize)> {
    use fs_representation::*;
    use std::fs::File;
    use std::io::BufReader;
    let mut patches = Vec::new();
    let mut patches_dir = patches_dir(target).to_path_buf();
    let mut patches_dir = target.patches_dir();
    let mut size_increase = 0;

    for h in remote {
        patches_dir.push(&patch_file_name(h.as_ref()));

        debug!("opening {:?}", patches_dir);
        let file = try!(File::open(&patches_dir));
        let mut file = BufReader::new(file);
        let (h, _, patch) = Patch::from_reader_compressed(&mut file)?;

        size_increase += patch.size_upper_bound();
        patches.push((h.clone(), patch));

        patches_dir.pop();
    }
    Ok((patches, size_increase))
}

/// Apply a number of patches, guessing the new repository size.  If
/// this fails, the repository size is guaranteed to have been
/// increased by at least some pages, and it is safe to call this
/// function again.
///
/// Also, this function takes a file lock on the repository.
pub fn apply_resize<'a, I, F, P: output::ToPrefixes>(
    diff_algorithm: diff::Algorithm,
    target: &fs_representation::RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    remote: I,
    partial_paths: P,
    apply_cb: F,
) -> Result<Vec<ConflictingFile>>
where
    I: Iterator<Item = &'a Hash>,
    F: FnMut(usize, &Hash),
{
    // Load the patches (and a size estimate) first, then delegate to
    // `apply_resize_patches`.
    let (patches, size_increase) = make_remote(target, remote)?;
    apply_resize_patches(
        diff_algorithm,
        target,
        branch_name,
        &patches,
        size_increase,
        partial_paths,
        apply_cb,
    )
}

/// A version of `apply_resize` with the patches list already loaded.
///
/// Opens the pristine grown by `size_increase` bytes, applies the
/// given `patches` to `branch_name`, outputs the working copy under
/// `partial_paths`, and returns the files left in conflict.
pub fn apply_resize_patches<'a, F, P: output::ToPrefixes>(
    diff_algorithm: diff::Algorithm,
    target: &fs_representation::RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    patches: &[(Hash, Patch)],
    size_increase: usize,
    partial_paths: P,
    apply_cb: F,
) -> Result<Vec<ConflictingFile>>
where
    F: FnMut(usize, &Hash),
{
    info!("applying patches with size_increase {:?}", size_increase);
    let repo = target.open_repo(Some(size_increase as u64))?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let mut branch = txn.open_branch(branch_name)?;
    let conflicts = txn.apply_patches(
        diff_algorithm,
        &mut branch,
        target,
        &patches,
        partial_paths,
        apply_cb,
    )?;
    txn.commit_branch(branch)?;
    txn.commit()?;
    Ok(conflicts)
}

/// Apply a number of patches, guessing the new repository size.  If
/// this fails, the repository size is guaranteed to have been
/// increased by at least some pages, and it is safe to call this
/// function again.
///
/// Also, this function takes a file lock on the repository.
///
/// Unlike `apply_resize`, this variant does not output the working
/// copy.
pub fn apply_resize_no_output<'a, F, I>(
    target: &RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    remote: I,
    apply_cb: F,
) -> Result<()>
where
    I: Iterator<Item = &'a Hash>,
    F: FnMut(usize, &Hash),
{
    let (patches, size_increase) = make_remote(target, remote)?;
    apply_resize_patches_no_output(target, branch_name, &patches, size_increase, apply_cb)
}

pub fn apply_resize_patches_no_output<'a, F>(
    target: &Path,
    target: &RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    patches: &[(Hash, Patch)],
    size_increase: usize,
    mut apply_cb: F,
) -> Result<()>
where
    F: FnMut(usize, &Hash),
{
    debug!("apply_resize_no_output: patches = {:?}", patches);
    let pristine_dir = pristine_dir(target).to_path_buf();
    let repo = try!(Repository::open(pristine_dir, Some(size_increase as u64)));
    let repo = target.open_repo(Some(size_increase as u64))?;
    let mut txn = try!(repo.mut_txn_begin(rand::thread_rng()));
    let mut branch = txn.open_branch(branch_name)?;
    let mut new_patches_count = 0;
    for &(ref p, ref patch) in patches.iter() {
        debug!("apply_patches: {:?}", p);
        txn.apply_patches_rec(&mut branch, &patches, p, patch, &mut new_patches_count)?;
        apply_cb(new_patches_count, p);
    }
    info!("branch: {:?}", branch);
    txn.commit_branch(branch)?;
    txn.commit()?;
    Ok(())
}

/// Open the repository, and unrecord the patch, without increasing
/// the size. If this fails, the repository file is guaranteed to have
/// been increased by `increase` bytes.
pub fn unrecord_no_resize(
    repo_dir: &Path,
    repo_root: &Path,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    branch_name: &str,



1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223

225
226
227


230


233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440

441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461

462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500

501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544


545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578


579
580
use super::fs_representation::RepoRoot;
                        Some((k.to_owned(), v, self.filename_of_inode(v, working_copy.repo_root.as_ref())))
        self.output_alive_files(branch, prefixes, working_copy.repo_root.as_ref())?;
use backend::*;
use graph;
use patch::*;
use rand;
use record::InodeUpdate;
use std;
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use tempdir;
use {Error, Result};

use super::fs_representation::{RepoRoot, RepoPath, in_repo_root};

#[cfg(not(windows))]
use std::os::unix::fs::PermissionsExt;

#[cfg(not(windows))]
/// Sets the Unix permission bits of `name` to `permissions`, keeping
/// the rest of the file's permission state.
fn set_permissions(name: &Path, permissions: u16) -> Result<()> {
    let mut perms = std::fs::metadata(name)?.permissions();
    debug!(
        "setting mode for {:?} to {:?} (currently {:?})",
        name, permissions, perms
    );
    perms.set_mode(permissions as u32);
    std::fs::set_permissions(name, perms)?;
    Ok(())
}

#[cfg(windows)]
fn set_permissions(_name: &Path, _permissions: u16) -> Result<()> {
    // Unix permission bits have no direct Windows equivalent: no-op.
    Ok(())
}

/// An item scheduled for output into the working copy.
#[derive(Debug)]
struct OutputItem {
    /// Inode of the parent directory.
    parent: Inode,
    /// Metadata of the file.
    meta: FileMetadata,
    /// Graph key of the node.
    key: Key<PatchId>,
    /// Already-known inode of this item, if any.
    inode: Option<Inode>,
    /// Whether this node is in a zombie state — TODO confirm exact
    /// semantics against the code that sets it.
    is_zombie: bool,
    /// How this item relates to the requested prefixes.
    related: Related,
}

/// How a key relates to a set of requested prefixes (computed by
/// `is_related`).
#[derive(Debug, PartialEq, Eq)]
pub enum Related {
    /// The key appears in no requested prefix.
    No,
    /// The key appears in a prefix, but not as its first element.
    Ancestor,
    /// The key is the first element of a prefix (or no prefixes were
    /// requested).
    Exact,
}

/// A file left in a conflicted state after output.
pub struct ConflictingFile {
    /// Inode of the conflicted file.
    pub inode: Inode,
    /// Number of conflicts in the file.
    pub n_conflicts: usize,
    /// Path of the file inside the repository.
    pub path: RepoPath<PathBuf>,
}

/// Decide how `key` relates to the requested `prefixes`.
///
/// An empty prefix set matches everything exactly. Otherwise, a key
/// equal to the first element of some prefix chain is an exact match,
/// a key appearing later in a chain is an ancestor, and anything else
/// is unrelated.
fn is_related(prefixes: &Prefixes, key: Key<PatchId>) -> Related {
    if prefixes.0.is_empty() {
        return Related::Exact;
    }
    for pref in prefixes.0.iter() {
        for (i, &p) in pref.into_iter().enumerate() {
            if p == key {
                return if i == 0 {
                    Related::Exact
                } else {
                    Related::Ancestor
                };
            }
        }
    }
    Related::No
}

impl<'env, T: rand::Rng> MutTxn<'env, T> {
    /// Climb up the tree (using revtree) to reconstruct the path of
    /// `inode`, rooted at `working_copy`. Returns `None` if the inode
    /// is not registered in the tree.
    fn filename_of_inode(&self, inode: Inode, working_copy: &Path) -> Option<PathBuf> {
        // Walk towards the root, collecting basenames innermost-first.
        let mut names = Vec::new();
        let mut current = inode;
        loop {
            let entry = match self.get_revtree(current) {
                Some(entry) => entry,
                None => {
                    debug!("filename_of_inode: not in tree");
                    return None;
                }
            };
            names.push(entry.basename.to_owned());
            current = entry.parent_inode.clone();
            if current == ROOT_INODE {
                break;
            }
        }
        // Rebuild the path outermost-first.
        let mut path = working_copy.to_path_buf();
        for name in names.iter().rev() {
            path.push(name.as_small_str().as_str());
        }
        Some(path)
    }

    /// Collect all the children of key `key` into `files`.
    ///
    /// For every alive folder edge out of `key`, decode the child's
    /// metadata and basename, and — if the child is related to
    /// `prefixes` — record an `OutputItem` under `path/basename`.
    fn collect_children(
        &mut self,
        branch: &Branch,
        path: RepoPath<&Path>,
        key: Key<PatchId>,
        inode: Inode,
        base_path: &RepoPath<impl AsRef<Path> + std::fmt::Debug>,
        prefixes: &Prefixes,
        files: &mut HashMap<RepoPath<PathBuf>, HashMap<Key<PatchId>, OutputItem>>,
    ) -> Result<()> {
        debug!("collect_children {:?}", base_path);
        let f = EdgeFlags::FOLDER_EDGE | EdgeFlags::PSEUDO_EDGE | EdgeFlags::EPSILON_EDGE;
        for b in self.iter_adjacent(&branch, key, EdgeFlags::empty(), f) {
            debug!("b={:?}", b);
            // `b.dest` is the name node: 2 bytes of metadata followed
            // by the basename (checked below).
            let cont_b = self.get_contents(b.dest).unwrap();
            // Follow the folder edge from the name node to the node
            // carrying the file's contents.
            let (_, b_key) = self
                .iter_nodes(
                    &branch,
                    Some((b.dest, Some(Edge::zero(EdgeFlags::FOLDER_EDGE)))),
                )
                .next()
                .unwrap();
            let b_inode = self.get_revinodes(b_key.dest);

            // This is supposed to be a small string, so we can do
            // as_slice.
            if cont_b.as_slice().len() < 2 {
                error!("cont_b {:?} b.dest {:?}", cont_b, b.dest);
                return Err(Error::WrongFileHeader(b.dest));
            }
            let (perms, basename) = cont_b.as_slice().split_at(2);

            let perms = FileMetadata::from_contents(perms);
            let basename = std::str::from_utf8(basename).unwrap();
            debug!("filename: {:?} {:?}", perms, basename);
            let name = path.join(Path::new(basename));
            let related = is_related(&prefixes, b_key.dest);
            debug!("related {:?} = {:?}", base_path, related);
            if related != Related::No {
                let v = files.entry(name).or_insert(HashMap::new());
                if v.get(&b.dest).is_none() {
                    // The child is a zombie if it still has an incoming
                    // deleted folder edge.
                    let is_zombie = {
                        let f = EdgeFlags::FOLDER_EDGE
                            | EdgeFlags::PARENT_EDGE
                            | EdgeFlags::DELETED_EDGE;
                        self.iter_adjacent(&branch, b_key.dest, f, f)
                            .next()
                            .is_some()
                    };
                    debug!("is_zombie = {:?}", is_zombie);
                    v.insert(
                        b.dest,
                        OutputItem {
                            parent: inode,
                            meta: perms,
                            key: b_key.dest,
                            inode: b_inode,
                            is_zombie,
                            related,
                        },
                    );
                }
            }
        }
        Ok(())
    }

    /// Collect names of files with conflicts
    ///
    /// As conflicts have an internal representation, it can be determined
    /// exactly which files contain conflicts.
    pub fn list_conflict_files(
        &mut self,
        branch_name: &str,
        prefixes: &[RepoPath<&Path>],
    ) -> Result<Vec<RepoPath<PathBuf>>> {
        let mut files = HashMap::new();
        let mut next_files = HashMap::new();
        let branch = self.open_branch(branch_name)?;
        let mut base_path = in_repo_root();
        let prefixes = prefixes.to_prefixes(self, &branch);
        self.collect_children(
            &branch,
            in_repo_root(),
            ROOT_KEY,
            ROOT_INODE,
            &mut base_path,
            &prefixes,
            &mut files,
        )?;

        let mut ret = vec![];
        let mut forward = Vec::new();
        while !files.is_empty() {
            next_files.clear();
            for (a, b) in files.drain() {
                for (_, output_item) in b {
                    // (_, meta, inode_key, inode, is_zombie)
                    // Only bother with existing files
                    if let Some(inode) = output_item.inode {
                        if output_item.is_zombie {
                            ret.push(a.clone())
                        }
                        if output_item.meta.is_dir() {
                            self.collect_children(
                                &branch,
                                a.as_ref(),
                                output_item.key,
                                inode,
                                &mut base_path,
                                &prefixes,
                                &mut next_files,
                            )?;
                        } else {
                            let mut graph = self.retrieve(&branch, output_item.key);
                            let mut buf = graph::Writer::new(std::io::sink());

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            let n_conflicts =
                                self.output_file(&branch, &mut buf, &mut graph, &mut forward)?;
                            if n_conflicts > 0 {

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            if self.output_file(&branch, &mut buf, &mut graph, &mut forward)? {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                                ret.push(a.clone())
                            }
                        }
                    }
                }
            }
            std::mem::swap(&mut files, &mut next_files);
        }
        Ok(ret)
    }

    /// Disambiguate a conflicting file name by appending the base58
    /// hash of the patch that introduced it.
    fn make_conflicting_name(&self, name: &mut RepoPath<PathBuf>, name_key: Key<PatchId>) {
        let new_name = {
            let old = name.file_name().unwrap().to_string_lossy();
            format!("{}.{}", old, name_key.patch.to_base58())
        };
        name.set_file_name(std::ffi::OsStr::new(&new_name));
    }

    /// Write every alive file related to `prefixes` into the working
    /// copy rooted at `working_copy`, updating the tree/revtree and
    /// inodes/revinodes tables on the way. Conflicting files are
    /// reported in `conflicts`.
    fn output_alive_files(
        &mut self,
        branch: &mut Branch,
        prefixes: &Prefixes,
        working_copy: &Path,
        conflicts: &mut Vec<ConflictingFile>,
    ) -> Result<()> {
        debug!("working copy {:?}", working_copy);
        let mut files = HashMap::new();
        let mut next_files = HashMap::new();
        let mut base_path = RepoPath(PathBuf::new());
        // Seed the breadth-first traversal with the children of the root.
        self.collect_children(
            branch,
            in_repo_root(),
            ROOT_KEY,
            ROOT_INODE,
            &mut base_path,
            prefixes,
            &mut files,
        )?;

        let mut done = HashSet::new();
        // `files` holds the current directory level, `next_files`
        // accumulates the next one.
        while !files.is_empty() {
            debug!("files {:?}", files);
            next_files.clear();
            for (a, b) in files.drain() {
                let b_len = b.len();
                for (name_key, output_item) in b {
                    // (parent_inode, meta, inode_key, inode, is_zombie)
                    /*let has_several_names = {
                        let e = Edge::zero(EdgeFlags::PARENT_EDGE | EdgeFlags::FOLDER_EDGE);
                        let mut it = self.iter_nodes(branch, Some((inode_key, Some(&e))))
                            .take_while(|&(k, v)| {
                                k == inode_key && v.flag|EdgeFlags::PSEUDO_EDGE == e.flag|EdgeFlags::PSEUDO_EDGE
                            });
                        it.next();
                        it.next().is_some()
                    };*/
                    // Output each contents key at most once.
                    if !done.insert(output_item.key) {
                        debug!("already done {:?}", output_item.key);
                        continue;
                    }

                    // Name conflict: several keys share the path `a`,
                    // so disambiguate with the patch hash.
                    let name = if b_len > 1
                    /*|| has_several_names*/
                    {
                        // debug!("b_len = {:?}, has_several_names {:?}", b_len, has_several_names);
                        let mut name = a.clone();
                        self.make_conflicting_name(&mut name, name_key);
                        Cow::Owned(name.0)
                    } else {
                        Cow::Borrowed(a.as_path())
                    };
                    let file_name = name.file_name().unwrap().to_string_lossy();
                    base_path.push(&file_name);
                    let file_id = OwnedFileId {
                        parent_inode: output_item.parent,
                        basename: SmallString::from_str(&file_name),
                    };
                    let working_copy_name = working_copy.join(name.as_ref());

                    let status = if output_item.is_zombie {
                        FileStatus::Zombie
                    } else {
                        FileStatus::Ok
                    };

                    let inode = if let Some(inode) = output_item.inode {
                        // If the file already exists, find its
                        // current name and rename it if that name
                        // is different.
                        if let Some(ref current_name) = self.filename_of_inode(inode, "".as_ref()) {
                            if current_name != name.as_ref() {
                                let current_name = working_copy.join(current_name);
                                debug!("renaming {:?} to {:?}", current_name, working_copy_name);
                                let parent = self.get_revtree(inode).unwrap().to_owned();
                                self.del_revtree(inode, None)?;
                                self.del_tree(&parent.as_file_id(), None)?;

                                debug!("file_id: {:?}", file_id);
                                if let Some(p) = working_copy_name.parent() {
                                    std::fs::create_dir_all(p)?
                                }
                                // A failed rename is logged, not fatal:
                                // the file will be recreated below.
                                if let Err(e) = std::fs::rename(&current_name, &working_copy_name) {
                                    error!(
                                        "while renaming {:?} to {:?}: {:?}",
                                        current_name, working_copy_name, e
                                    )
                                }
                            }
                        }
                        self.put_tree(&file_id.as_file_id(), inode)?;
                        self.put_revtree(inode, &file_id.as_file_id())?;
                        // If the file had been marked for deletion, remove that mark.
                        if let Some(header) = self.get_inodes(inode) {
                            debug!("header {:?}", header);
                            let mut header = header.to_owned();
                            header.status = status;
                            self.replace_inodes(inode, header)?;
                        } else {
                            let header = FileHeader {
                                key: output_item.key,
                                metadata: output_item.meta,
                                status,
                            };
                            debug!("no header {:?}", header);
                            self.replace_inodes(inode, header)?;
                            self.replace_revinodes(output_item.key, inode)?;
                        }
                        inode
                    } else {
                        // Else, create new inode.
                        let inode = self.create_new_inode();
                        let file_header = FileHeader {
                            key: output_item.key,
                            metadata: output_item.meta,
                            status,
                        };
                        self.replace_inodes(inode, file_header)?;
                        self.replace_revinodes(output_item.key, inode)?;
                        debug!("file_id: {:?}", file_id);
                        self.put_tree(&file_id.as_file_id(), inode)?;
                        self.put_revtree(inode, &file_id.as_file_id())?;
                        inode
                    };
                    if output_item.meta.is_dir() {
                        // This is a directory, register it in inodes/trees.
                        std::fs::create_dir_all(&working_copy_name)?;
                        // Exact prefix matches recurse without further
                        // filtering; others keep filtering by prefix.
                        if let Related::Exact = output_item.related {
                            self.collect_children(
                                branch,
                                RepoPath(name.as_ref()),
                                output_item.key,
                                inode,
                                &mut base_path,
                                &Prefixes(Vec::new()),
                                &mut next_files,
                            )?
                        } else {
                            self.collect_children(
                                branch,
                                RepoPath(name.as_ref()),
                                output_item.key,
                                inode,
                                &mut base_path,
                                prefixes,
                                &mut next_files,
                            )?
                        }
                    } else {
                        // Output file.
                        info!(
                            "creating file {:?}, key {:?} {:?}",
                            &name, output_item.key, working_copy_name
                        );
                        let mut f =
                            graph::Writer::new(std::fs::File::create(&working_copy_name).unwrap());
                        debug!("done");
                        let mut l = self.retrieve(branch, output_item.key);
                        if log_enabled!(log::Level::Debug) {
                            // Dump the retrieved graph next to the file
                            // for debugging.
                            let mut w = working_copy_name.clone();
                            w.set_extension("pijul_debug");
                            let f = std::fs::File::create(&w)?;
                            l.debug(self, branch, false, false, f)?;
                        }
                        let mut forward = Vec::new();
                        let n_conflicts = self.output_file(branch, &mut f, &mut l, &mut forward)?;
                        if n_conflicts > 0 {
                            conflicts.push(ConflictingFile {
                                inode,
                                n_conflicts,
                                path: RepoPath(name.to_path_buf()),
                            })
                        }
                        self.remove_redundant_edges(branch, &forward)?
                    }
                    base_path.pop();
                    set_permissions(&working_copy_name, output_item.meta.permissions())?
                }
            }
            std::mem::swap(&mut files, &mut next_files);
        }
        Ok(())
    }

    /// Output the working copy, assuming all patches (including the
    /// pending one, identified by `pending_patch_id`) have already
    /// been applied: garbage-collect dead inodes, delete their files,
    /// then write out the alive files.
    fn output_repository_assuming_no_pending_patch(
        &mut self,
        prefixes: &Prefixes,
        branch: &mut Branch,
        working_copy: &RepoRoot<impl AsRef<Path>>,
        pending_patch_id: PatchId,
        conflicts: &mut Vec<ConflictingFile>,
    ) -> Result<()> {
        debug!(
            "inodes: {:?}",
            self.iter_inodes(None)
                .map(|(u, v)| (u.to_owned(), v.to_owned()))
                .collect::<Vec<_>>()
        );
        // Now, garbage collect dead inodes: an inode is dead if its key
        // is neither touched by the pending patch nor alive/zombie in
        // the graph.
        let dead: Vec<_> = self
            .iter_tree(None)
            .filter_map(|(k, v)| {
                debug!("{:?} {:?}", k, v);
                if let Some(key) = self.get_inodes(v) {
                    if key.key.patch == pending_patch_id || self.is_alive_or_zombie(branch, key.key)
                    {
                        // Don't delete.
                        None
                    } else {
                        Some((
                            k.to_owned(),
                            v,
                            self.filename_of_inode(v, working_copy.repo_root.as_ref()),
                        ))
                    }
                } else {
                    debug!("not in inodes");
                    Some((k.to_owned(), v, None))
                }
            })
            .collect();
        debug!("dead: {:?}", dead);

        // Now, "kill the deads": drop them from the databases and
        // delete their files from the working copy.
        for (ref parent, inode, ref name) in dead {
            self.remove_inode_rec(inode)?;
            debug!("removed");
            if let Some(ref name) = *name {
                debug!("deleting {:?}", name);
                if let Ok(meta) = fs::metadata(name) {
                    // Deletion failures are logged, not fatal.
                    if let Err(e) = if meta.is_dir() {
                        fs::remove_dir_all(name)
                    } else {
                        fs::remove_file(name)
                    } {
                        error!("while deleting {:?}: {:?}", name, e);
                    }
                }
            } else {
                self.del_tree(&parent.as_file_id(), Some(inode))?;
                self.del_revtree(inode, Some(&parent.as_file_id()))?;
            }
        }
        debug!("done deleting dead files");
        // Then output alive files. This has to be done *after*
        // removing files, because a file removed might have the
        // same name as a file added without there being a conflict
        // (depending on the relation between the two patches).
        self.output_alive_files(branch, prefixes, working_copy.repo_root.as_ref(), conflicts)?;
        debug!("done raw_output_repository");
        Ok(())
    }

    /// Remove `inode` — and, if it is a directory, all its descendants —
    /// from the inodes/revinodes and tree/revtree tables.
    fn remove_inode_rec(&mut self, inode: Inode) -> Result<()> {
        // Remove the inode from inodes/revinodes.
        let mut to_kill = vec![inode];
        while let Some(inode) = to_kill.pop() {
            debug!("kill dead {:?}", inode.to_hex());
            let header = self.get_inodes(inode).map(|x| x.to_owned());
            if let Some(header) = header {
                self.del_inodes(inode, None)?;
                self.del_revinodes(header.key, None)?;
                let mut kills = Vec::new();
                // Remove the inode from tree/revtree.
                // Collect first: we can't delete while iterating.
                for (k, v) in self
                    .iter_revtree(Some((inode, None)))
                    .take_while(|&(k, _)| k == inode)
                {
                    kills.push((k.clone(), v.to_owned()))
                }
                for &(k, ref v) in kills.iter() {
                    self.del_tree(&v.as_file_id(), Some(k))?;
                    self.del_revtree(k, Some(&v.as_file_id()))?;
                }
                // If the dead is a directory, remove its descendants.
                let inode_fileid = OwnedFileId {
                    parent_inode: inode.clone(),
                    basename: SmallString::from_str(""),
                };
                to_kill.extend(
                    self.iter_tree(Some((&inode_fileid.as_file_id(), None)))
                        .take_while(|&(ref k, _)| k.parent_inode == inode)
                        .map(|(_, v)| v.to_owned()),
                )
            }
        }
        Ok(())
    }

    /// Output the working copy at `working_copy`: apply the pending
    /// patch, write out the repository (collecting conflicts), then
    /// unrecord the pending patch again.
    pub fn output_repository(
        &mut self,
        branch: &mut Branch,
        working_copy: &RepoRoot<impl AsRef<Path>>,
        prefixes: &Prefixes,
        pending: &Patch,
        local_pending: &HashSet<InodeUpdate>,
    ) -> Result<Vec<ConflictingFile>> {
        debug!("begin output repository");

        debug!("applying pending patch");
        // Save the pending patch in a temporary directory so it can be
        // applied like any other patch.
        let tempdir = tempdir::TempDir::new("pijul")?;
        let hash = pending.save(tempdir.path(), None)?;
        let internal =
            self.apply_local_patch(branch, working_copy, &hash, pending, local_pending, true)?;

        debug!("applied as {:?}", internal.to_base58());

        // let prefixes = prefixes.to_prefixes(&self, &branch);
        debug!("prefixes {:?}", prefixes);
        let mut conflicts = Vec::new();
        self.output_repository_assuming_no_pending_patch(
            &prefixes,
            branch,
            working_copy,
            internal,
            &mut conflicts,
        )?;

        debug!("unrecording pending patch");
        self.unrecord(branch, internal, pending)?;
        Ok(conflicts)
    }

    pub fn output_repository_no_pending(
        &mut self,
        branch: &mut Branch,
        working_copy: &Path,
        working_copy: &Path,
        working_copy: &RepoRoot<impl AsRef<Path>>,
        prefixes: &Prefixes,



1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822



823
824
825
826
827
use fs_representation::RepoRoot;
        prefix: Option<&Path>,
            let mut realpath = PathBuf::from(working_copy.repo_root.as_ref());
use backend::*;
use graph;
use patch::*;
use {Error, Result};

use diff;
use rand;
use std;
use std::collections::HashSet;
use std::fs::metadata;
use std::io::BufRead;
use std::io::Read;
#[cfg(not(windows))]
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::rc::Rc;

use fs_representation::{RepoRoot, RepoPath};

/// Unix permission bits of `attr`, as a `usize`.
#[cfg(not(windows))]
fn permissions(attr: &std::fs::Metadata) -> Option<usize> {
    let mode = attr.permissions().mode();
    Some(mode as usize)
}
/// On Windows there are no Unix permission bits to report.
#[cfg(windows)]
fn permissions(_: &std::fs::Metadata) -> Option<usize> {
    None
}

/// Read `path`'s on-disk metadata and encode it as a `FileMetadata`
/// (permission bits masked to 0o777, defaulting to 0o755 where the
/// platform reports none, plus the directory flag).
fn file_metadata(path: &Path) -> Result<FileMetadata> {
    let attr = metadata(path)?;
    let perms = permissions(&attr).unwrap_or(0o755);
    debug!("permissions = {:?}", perms);
    Ok(FileMetadata::new(perms & 0o777, attr.is_dir()))
}

impl<U: Transaction, R> GenericTxn<U, R> {
    /// Translate a change from internal patch identifiers (`PatchId`)
    /// to external hashes (`Hash`), by mapping every context key
    /// through `external_key_opt`.
    pub fn globalize_change(
        &self,
        change: Change<ChangeContext<PatchId>>,
    ) -> Change<ChangeContext<Hash>> {
        match change {
            Change::NewNodes {
                up_context,
                down_context,
                flag,
                line_num,
                nodes,
                inode,
            } => Change::NewNodes {
                // Only the up/down contexts carry patch identifiers;
                // all other fields pass through unchanged.
                up_context: up_context
                    .iter()
                    .map(|&k| self.external_key_opt(k))
                    .collect(),
                down_context: down_context
                    .iter()
                    .map(|&k| self.external_key_opt(k))
                    .collect(),
                flag,
                line_num,
                nodes,
                inode,
            },
            Change::NewEdges {
                previous,
                flag,
                edges,
                inode,
            } => Change::NewEdges {
                // No context keys here: rebuilt field-by-field only to
                // change the type parameter.
                previous,
                flag,
                edges,
                inode,
            },
        }
    }
    /// Translate a record from internal patch identifiers to external
    /// hashes by globalizing each embedded change.
    pub fn globalize_record(
        &self,
        change: Record<ChangeContext<PatchId>>,
    ) -> Record<ChangeContext<Hash>> {
        match change {
            Record::FileMove { new_name, del, add } => Record::FileMove {
                new_name,
                del: self.globalize_change(del),
                add: self.globalize_change(add),
            },
            Record::FileDel {
                name,
                del,
                contents,
            } => Record::FileDel {
                name,
                del: self.globalize_change(del),
                contents: contents.map(|del| self.globalize_change(del)),
            },
            Record::FileAdd {
                name,
                add,
                contents,
            } => Record::FileAdd {
                name,
                add: self.globalize_change(add),
                contents: contents.map(|add| self.globalize_change(add)),
            },
            Record::Change {
                file,
                change,
                replacement,
                old_line,
                new_line,
            } => Record::Change {
                file,
                change: self.globalize_change(change),
                replacement: replacement.map(|x| self.globalize_change(x)),
                old_line,
                new_line,
            },
        }
    }
}

/// Mutable state threaded through the recording of a patch.
pub struct RecordState {
    // Next free line identifier for the patch under construction.
    line_num: LineId,
    // Inode updates to report back to the caller (see `InodeUpdate`).
    updatables: HashSet<InodeUpdate>,
    // Changes recorded so far.
    actions: Vec<Record<ChangeContext<PatchId>>>,
    // Edges found to be redundant during recording
    // (NOTE(review): presumably removed later — confirm with callers).
    redundant: Vec<(Key<PatchId>, Edge)>,
}

/// An account of the files that have been added, moved or deleted, as
/// returned by record, and used by apply (when applying a patch
/// created locally) to update the trees and inodes databases.
#[derive(Debug, Hash, PartialEq, Eq)]
pub enum InodeUpdate {
    /// A file was added.
    Add {
        /// `LineId` in the new patch.
        line: LineId,
        /// `FileMetadata` in the updated file.
        meta: FileMetadata,
        /// `Inode` added by this file addition.
        inode: Inode,
    },
    /// A file was moved.
    Moved {
        /// `Inode` of the moved file.
        inode: Inode,
        /// Metadata recorded for the moved file.
        metadata: FileMetadata,
    },
    /// A file was deleted.
    Deleted {
        /// `Inode` of the deleted file.
        inode: Inode,
    },
}

/// Status of a file in the working copy, compared to what the inodes
/// database records for it (see `inode_status`).
#[derive(Debug)]
pub enum WorkingFileStatus {
    /// The file was moved and/or its metadata changed.
    Moved {
        /// Metadata recorded in the inodes database.
        from: FileMetadata,
        /// Metadata currently found in the working copy.
        to: FileMetadata,
    },
    /// The file is gone from the working copy, or marked deleted.
    Deleted,
    /// The file is present and unchanged status-wise.
    Ok,
    /// The file is a zombie (alive here, deleted by another patch).
    Zombie,
}

/// Heuristic deciding whether `x` is text: it is considered binary
/// iff a NUL byte occurs among its first 8000 bytes.
pub(crate) fn is_text(x: &[u8]) -> bool {
    !x.iter().take(8000).any(|&byte| byte == 0)
}

impl<'env, R: rand::Rng> MutTxn<'env, R> {
    /// Create appropriate NewNodes for adding a file.
    ///
    /// Pushes a `Record::FileAdd` onto `st` for the file at
    /// `realpath` (base name `basename`, child of `parent_node`),
    /// reading its metadata and — for non-directories — its contents
    /// from the working copy rooted at `repo_root`.
    ///
    /// Returns the `LineId` of the new node when the added file is a
    /// directory (so the caller can recurse into it), `None`
    /// otherwise.
    fn record_file_addition(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        st: &mut RecordState,
        current_inode: Inode,
        parent_node: Key<Option<PatchId>>,
        realpath: &mut RepoPath<std::path::PathBuf>,
        basename: &str,
    ) -> Result<Option<LineId>> {
        // Allocate two fresh line ids: one for the name node, one for
        // the node representing the file itself.
        let name_line_num = st.line_num.clone();
        let blank_line_num = st.line_num + 1;
        st.line_num += 2;

        debug!("metadata for {:?}", realpath);
        let path = &repo_root.absolutize(realpath);
        let meta = file_metadata(&path)?;
        debug!("meta = {:?}", meta.is_dir());

        // Contents of the name node: two metadata bytes followed by
        // the base name.
        let mut name = Vec::with_capacity(basename.len() + 2);
        name.write_metadata(meta).unwrap(); // 2 bytes.
        name.extend(basename.as_bytes());

        let mut nodes = Vec::new();

        // Bind `current_inode` to the new node once this patch is
        // applied locally.
        st.updatables.insert(InodeUpdate::Add {
            line: blank_line_num.clone(),
            meta,
            inode: current_inode.clone(),
        });
        // Up context of the name node, with the parent's patch id
        // made external (`Hash::None` stands for the root).
        let up_context_ext = Key {
            patch: if parent_node.line.is_root() {
                Some(Hash::None)
            } else if let Some(patch_id) = parent_node.patch {
                Some(self.external_hash(patch_id).to_owned())
            } else {
                None
            },
            line: parent_node.line.clone(),
        };
        // Same up context, with internal patch ids.
        let up_context = Key {
            patch: if parent_node.line.is_root() {
                Some(ROOT_PATCH_ID)
            } else if let Some(patch_id) = parent_node.patch {
                Some(patch_id)
            } else {
                None
            },
            line: parent_node.line.clone(),
        };
        // The FileAdd carries two nodes: the name node and an empty
        // node representing the file itself.
        st.actions.push(Record::FileAdd {
            name: realpath.to_owned(),
            add: Change::NewNodes {
                up_context: vec![up_context],
                line_num: name_line_num,
                down_context: vec![],
                nodes: vec![name, vec![]],
                flag: EdgeFlags::FOLDER_EDGE,
                inode: up_context_ext.clone(),
            },
            contents: None,
        });
        // Reading the file
        if !meta.is_dir() {
            nodes.clear();

            let mut node = Vec::new();
            {
                let mut f = std::fs::File::open(path.as_path())?;
                f.read_to_end(&mut node)?;
            }

            let up_context = Key {
                patch: None,
                line: blank_line_num.clone(),
            };
            let up_context_ext = Key {
                patch: None,
                line: blank_line_num.clone(),
            };
            if is_text(&node) {
                // Text file: record its contents line by line.
                let mut line = Vec::new();
                let mut f = &node[..];
                loop {
                    match f.read_until(b'\n', &mut line) {
                        Ok(l) => {
                            if l > 0 {
                                nodes.push(line.clone());
                                line.clear()
                            } else {
                                break;
                            }
                        }
                        Err(_) => break,
                    }
                }
                let len = nodes.len();
                if !nodes.is_empty() {
                    // Attach the contents to the FileAdd pushed just
                    // above.
                    if let Some(Record::FileAdd {
                        ref mut contents, ..
                    }) = st.actions.last_mut()
                    {
                        *contents = Some(Change::NewNodes {
                            up_context: vec![up_context],
                            line_num: st.line_num,
                            down_context: vec![],
                            nodes,
                            flag: EdgeFlags::empty(),
                            inode: up_context_ext,
                        });
                    }
                }
                st.line_num += len;
            } else if let Some(Record::FileAdd {
                ref mut contents, ..
            }) = st.actions.last_mut()
            {
                // Binary file: record the whole contents as a single
                // node.
                *contents = Some(Change::NewNodes {
                    up_context: vec![up_context],
                    line_num: st.line_num,
                    down_context: vec![],
                    nodes: vec![node],
                    flag: EdgeFlags::empty(),
                    inode: up_context_ext,
                });
                st.line_num += 1;
            }
            Ok(None)
        } else {
            Ok(Some(blank_line_num))
        }
    }

    /// Diff for binary files, doesn't bother splitting the file in
    /// lines. This is wasteful, but doesn't break the format, and
    /// doesn't create conflicts inside binary files.
    fn diff_with_binary(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        algorithm: diff::Algorithm,
        inode: Key<Option<Hash>>,
        branch: &Branch,
        st: &mut RecordState,
        ret: &mut graph::Graph,
        path: Rc<RepoPath<PathBuf>>,
    ) -> Result<()> {
        // Load the current working-copy contents of `path`.
        let mut lines_b = Vec::new();
        {
            debug!("opening file for diff: {:?}", path);
            let mut f = std::fs::File::open(repo_root.absolutize(&path))?;
            f.read_to_end(&mut lines_b)?;
        }

        self.diff(
            algorithm,
            inode,
            branch,
            path,
            &mut st.line_num,
            &mut st.actions,
            &mut st.redundant,
            ret,
            &lines_b,
        )
    }

    /// Record a `Record::FileMove` for a file that the inode
    /// databases say has been moved or whose metadata changed, then
    /// diff its contents if it is not a directory.
    fn record_moved_file(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        diff_algorithm: diff::Algorithm,
        branch: &Branch,
        realpath: &mut RepoPath<std::path::PathBuf>,
        st: &mut RecordState,
        parent_node: Key<Option<PatchId>>,
        current_node: Key<PatchId>,
        basename: &str,
        new_meta: FileMetadata,
        old_meta: FileMetadata,
    ) -> Result<()> {
        debug!("record_moved_file: parent_node={:?}", parent_node);
        // Delete all former names.
        let mut edges = Vec::new();
        // Now take all grandparents of l2, delete them.

        // New name node contents: metadata bytes + base name.
        let mut name = Vec::with_capacity(basename.len() + 2);
        name.write_metadata(new_meta).unwrap();
        name.extend(basename.as_bytes());
        for parent in self.iter_parents(branch, current_node, EdgeFlags::FOLDER_EDGE) {
            debug!("iter_parents: {:?}", parent);
            let previous_name: &[u8] = match self.get_contents(parent.dest) {
                None => &[],
                Some(n) => n.as_slice(),
            };
            // The name changed if the base name differs (skipping the
            // two leading metadata bytes), or if the metadata changed
            // (metadata changes are ignored on windows).
            let name_changed =
                (&previous_name[2..] != &name[2..]) || (new_meta != old_meta && cfg!(not(windows)));

            for grandparent in self.iter_parents(branch, parent.dest, EdgeFlags::FOLDER_EDGE) {
                debug!("iter_parents: grandparent = {:?}", grandparent);
                // Did the parent directory change?
                let grandparent_changed = if let Some(ref parent_node_patch) = parent_node.patch {
                    *parent_node_patch != grandparent.dest.patch
                        || parent_node.line != grandparent.dest.line
                } else {
                    true
                };
                if grandparent_changed || name_changed {
                    edges.push(NewEdge {
                        from: Key {
                            line: parent.dest.line.clone(),
                            patch: Some(self.external_hash(parent.dest.patch).to_owned()),
                        },
                        to: Key {
                            line: grandparent.dest.line.clone(),
                            patch: Some(self.external_hash(grandparent.dest.patch).to_owned()),
                        },
                        introduced_by: Some(
                            self.external_hash(grandparent.introduced_by).to_owned(),
                        ),
                    })
                }
            }
        }
        debug!("edges:{:?}", edges);
        // Up context for the new name node, external and internal
        // versions (`Hash::None`/`ROOT_PATCH_ID` stand for the root).
        let up_context_ext = Key {
            patch: if parent_node.line.is_root() {
                Some(Hash::None)
            } else if let Some(parent_patch) = parent_node.patch {
                Some(self.external_hash(parent_patch).to_owned())
            } else {
                None
            },
            line: parent_node.line.clone(),
        };
        let up_context = Key {
            patch: if parent_node.line.is_root() {
                Some(ROOT_PATCH_ID)
            } else if let Some(parent_patch) = parent_node.patch {
                Some(parent_patch)
            } else {
                None
            },
            line: parent_node.line.clone(),
        };
        if !edges.is_empty() {
            // If this file's name or meta info has changed.
            st.actions.push(Record::FileMove {
                new_name: realpath.to_owned(),
                del: Change::NewEdges {
                    edges,
                    previous: EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE,
                    flag: EdgeFlags::DELETED_EDGE | EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE,
                    inode: up_context_ext.clone(),
                },
                add: Change::NewNodes {
                    up_context: vec![up_context],
                    line_num: st.line_num,
                    down_context: vec![Key {
                        patch: Some(current_node.patch),
                        line: current_node.line.clone(),
                    }],
                    nodes: vec![name],
                    flag: EdgeFlags::FOLDER_EDGE,
                    inode: up_context_ext.clone(),
                },
            });
            st.line_num += 1;
        }
        // The contents may have changed too: diff non-directories.
        if !old_meta.is_dir() {
            info!("retrieving");
            let mut ret = self.retrieve(branch, current_node);
            debug!("diff");
            let patch_ext = self.get_external(current_node.patch).unwrap();
            self.diff_with_binary(
                repo_root,
                diff_algorithm,
                Key {
                    patch: Some(patch_ext.to_owned()),
                    line: current_node.line,
                },
                branch,
                st,
                &mut ret,
                Rc::new(realpath.clone()),
            )?;
        };
        Ok(())
    }

    /// Record a `Record::FileDel` for the file at `realpath`,
    /// deleting its name edges and (recursively, via the retrieved
    /// graph) the contents of the file or directory rooted at
    /// `current_node`.
    fn record_deleted_file(
        &self,
        st: &mut RecordState,
        branch: &Branch,
        realpath: &RepoPath<impl AsRef<Path>>,
        current_node: Key<PatchId>,
    ) -> Result<()> {
        debug!("record_deleted_file");
        let mut edges = Vec::new();
        let mut previous = EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE;
        // Now take all grandparents of the current node, delete them.
        for parent in self.iter_parents(branch, current_node, EdgeFlags::FOLDER_EDGE) {
            for grandparent in self.iter_parents(branch, parent.dest, EdgeFlags::FOLDER_EDGE) {
                edges.push(NewEdge {
                    from: self.external_key(&parent.dest).unwrap(),
                    to: self.external_key(&grandparent.dest).unwrap(),
                    introduced_by: Some(self.external_hash(grandparent.introduced_by).to_owned()),
                });
                previous = grandparent.flag;
            }
        }
        // If the file is a directory, delete recursively
        let mut file_edges = vec![];
        {
            debug!("del={:?}", current_node);
            let ret = self.retrieve(branch, current_node);
            debug!("ret {:?}", ret);
            for l in ret.lines.iter() {
                if l.key != ROOT_KEY {
                    let ext_key = self.external_key(&l.key).unwrap();
                    debug!("ext_key={:?}", ext_key);
                    // Delete contents edges, and drop the deleted
                    // inodes from the local databases.
                    for v in self.iter_parents(branch, l.key, EdgeFlags::empty()) {
                        debug!("v={:?}", v);
                        file_edges.push(NewEdge {
                            from: ext_key.clone(),
                            to: self.external_key(&v.dest).unwrap(),
                            introduced_by: Some(self.external_hash(v.introduced_by).to_owned()),
                        });
                        if let Some(inode) = self.get_revinodes(v.dest) {
                            st.updatables.insert(InodeUpdate::Deleted {
                                inode: inode.to_owned(),
                            });
                        }
                    }
                    // Delete folder edges.
                    for v in self.iter_parents(branch, l.key, EdgeFlags::FOLDER_EDGE) {
                        debug!("v={:?}", v);
                        edges.push(NewEdge {
                            from: ext_key.clone(),
                            to: self.external_key(&v.dest).unwrap(),
                            introduced_by: Some(self.external_hash(v.introduced_by).to_owned()),
                        });
                    }
                }
            }
        }

        if !edges.is_empty() {
            st.actions.push(Record::FileDel {
                name: realpath.to_owned(),
                del: Change::NewEdges {
                    edges,
                    previous,
                    flag: EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE | EdgeFlags::DELETED_EDGE,
                    inode: self.external_key(&current_node).unwrap(),
                },
                contents: if file_edges.is_empty() {
                    None
                } else {
                    Some(Change::NewEdges {
                        edges: file_edges,
                        previous: EdgeFlags::PARENT_EDGE,
                        flag: EdgeFlags::PARENT_EDGE | EdgeFlags::DELETED_EDGE,
                        inode: self.external_key(&current_node).unwrap(),
                    })
                },
            });
        }
        Ok(())
    }

    /// Recurse into all children of `current_inode` (a directory) in
    /// the trees database, calling `record_inode` on each of them.
    fn record_children(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        diff_algorithm: diff::Algorithm,
        branch: &Branch,
        st: &mut RecordState,
        path: &mut RepoPath<std::path::PathBuf>,
        current_node: Key<Option<PatchId>>,
        current_inode: Inode,
        obsolete_inodes: &mut Vec<Inode>,
    ) -> Result<()> {
        debug!("children of current_inode {}", current_inode.to_hex());
        let file_id = OwnedFileId {
            parent_inode: current_inode.clone(),
            basename: SmallString::from_str(""),
        };
        debug!("iterating tree, starting from {:?}", file_id.as_file_id());
        for (k, v) in self
            .iter_tree(Some((&file_id.as_file_id(), None)))
            .take_while(|&(ref k, _)| k.parent_inode == current_inode)
        {
            debug!("calling record_all recursively, {}", line!());

            if k.basename.len() > 0 {
                // If this is an actual file and not just the "."
                self.record_inode(
                    repo_root,
                    diff_algorithm,
                    branch,
                    st,
                    current_node.clone(), // parent
                    v,                    // current_inode
                    path,
                    obsolete_inodes,
                    k.basename.as_str(),
                )?
            }
        }
        Ok(())
    }

    /// If `inode` is a file known to the current branch, return
    /// whether it's been moved, deleted, or its "status" (including
    /// permissions) has been changed.
    ///
    /// Returns `None` if `inode` is not known to the current branch.
    fn inode_status(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        inode: Inode,
        path: &RepoPath<impl AsRef<Path>>,
    ) -> Option<(WorkingFileStatus, FileHeader)> {
        match self.get_inodes(inode) {
            Some(file_header) => {
                let old_meta = file_header.metadata;
                // `None` here means the file is gone from the working
                // copy (or unreadable).
                let new_meta = file_metadata(&repo_root.absolutize(path)).ok();

                debug!("current_node={:?}", file_header);
                debug!("old_attr={:?},int_attr={:?}", old_meta, new_meta);

                let status = match (new_meta, file_header.status) {
                    (Some(new_meta), FileStatus::Moved) => WorkingFileStatus::Moved {
                        from: old_meta,
                        to: new_meta,
                    },
                    // Metadata changed on disk: treated as a move.
                    (Some(new_meta), _) if old_meta != new_meta => WorkingFileStatus::Moved {
                        from: old_meta,
                        to: new_meta,
                    },
                    (None, _) | (_, FileStatus::Deleted) => WorkingFileStatus::Deleted,
                    (Some(_), FileStatus::Ok) => WorkingFileStatus::Ok,
                    (Some(_), FileStatus::Zombie) => WorkingFileStatus::Zombie,
                };
                Some((status, file_header.clone()))
            }
            None => None,
        }
    }

    /// Record the changes for a single file or directory (addition,
    /// move, deletion, or content edits), then recurse into its
    /// children if it is a directory.
    ///
    /// `realpath` is the path of the parent directory; `basename` is
    /// pushed onto it on entry and popped before returning.
    fn record_inode(
        &self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        diff_algorithm: diff::Algorithm,
        branch: &Branch,
        st: &mut RecordState,
        parent_node: Key<Option<PatchId>>,
        current_inode: Inode,
        realpath: &mut RepoPath<std::path::PathBuf>,
        obsolete_inodes: &mut Vec<Inode>,
        basename: &str,
    ) -> Result<()> {
        realpath.push(basename);
        debug!("realpath: {:?}", realpath);
        debug!("inode: {:?}", current_inode);
        debug!("header: {:?}", self.get_inodes(current_inode));
        let status_header = self.inode_status(repo_root, current_inode, realpath);
        debug!("status_header: {:?}", status_header);
        let mut current_key = match &status_header {
            &Some((_, ref file_header)) => Some(Key {
                patch: Some(file_header.key.patch.clone()),
                line: file_header.key.line.clone(),
            }),
            &None => None,
        };

        match status_header {
            Some((
                WorkingFileStatus::Moved {
                    from: old_meta,
                    to: new_meta,
                },
                file_header,
            )) => {
                st.updatables.insert(InodeUpdate::Moved {
                    inode: current_inode.clone(),
                    metadata: new_meta,
                });
                self.record_moved_file(
                    repo_root,
                    diff_algorithm,
                    branch,
                    realpath,
                    st,
                    parent_node,
                    file_header.key,
                    basename,
                    new_meta,
                    old_meta,
                )?
            }
            Some((WorkingFileStatus::Deleted, file_header)) => {
                st.updatables.insert(InodeUpdate::Deleted {
                    inode: current_inode.clone(),
                });
                self.record_deleted_file(st, branch, realpath, file_header.key)?;
                // If we are deleting a directory, don't recurse,
                // because record_deleted_file already did it.
                realpath.pop();
                return Ok(());
            }
            Some((WorkingFileStatus::Ok, file_header)) => {
                if !file_header.metadata.is_dir() {
                    let mut ret = self.retrieve(branch, file_header.key);
                    debug!("now calling diff {:?}", file_header.key);
                    let inode = Key {
                        patch: Some(self.external_hash(file_header.key.patch).to_owned()),
                        line: file_header.key.line,
                    };
                    self.confirm_path(st, branch, realpath, file_header.key)?;
                    self.diff_with_binary(
                        repo_root,
                        diff_algorithm,
                        inode,
                        branch,
                        st,
                        &mut ret,
                        Rc::new(realpath.clone()),
                    )?;
                } else {
                    // Confirm
                    self.confirm_path(st, branch, &realpath, file_header.key)?;
                }
            }
            Some((WorkingFileStatus::Zombie, _)) => {
                // This file is a zombie, but the user has not
                // specified anything to do with this file, so leave
                // it alone.
            }
            None => {
                // Unknown to the branch: a new file. On failure, mark
                // the inode obsolete instead of aborting the record.
                if let Ok(new_key) =
                    self.record_file_addition(repo_root, st, current_inode, parent_node,
                                              realpath, basename)
                {
                    current_key = new_key.map(|next| Key {
                        patch: None,
                        line: next,
                    })
                } else {
                    obsolete_inodes.push(current_inode)
                }
            }
        }

        let current_key = current_key;
        debug!("current_node={:?}", current_key);
        // Recurse into directories (current_key is `None` for plain
        // files added just above).
        if let Some(current_node) = current_key {
            self.record_children(
                repo_root,
                diff_algorithm,
                branch,
                st,
                realpath,
                current_node,
                current_inode,
                obsolete_inodes,
            )?;
        };
        realpath.pop();
        Ok(())
    }

    /// Build a `NewEdge` from `from` to `to`, with all internal patch
    /// ids translated to external hashes.
    fn external_newedge(
        &self,
        from: Key<PatchId>,
        to: Key<PatchId>,
        introduced_by: PatchId,
    ) -> NewEdge {
        NewEdge {
            from: Key {
                patch: Some(self.external_hash(from.patch).to_owned()),
                line: from.line,
            },
            to: Key {
                patch: Some(self.external_hash(to.patch).to_owned()),
                line: to.line,
            },
            introduced_by: Some(self.external_hash(introduced_by).to_owned()),
        }
    }

    /// `key` must be a non-root inode key.
    ///
    /// If `key` has deleted folder parent edges (i.e. the file was
    /// deleted by some patch but is still present in the working
    /// copy), record a `FileAdd` turning those edges back into live
    /// folder edges, thereby confirming the path.
    fn confirm_path(
        &self,
        st: &mut RecordState,
        branch: &Branch,
        realpath: &RepoPath<impl AsRef<Path>>,
        key: Key<PatchId>,
    ) -> Result<()> {
        debug!("confirm_path");
        let f = EdgeFlags::PARENT_EDGE | EdgeFlags::FOLDER_EDGE | EdgeFlags::DELETED_EDGE;
        // Are there deleted parent edges?
        let mut edges = Vec::new();
        for v in self.iter_adjacent(branch, key, f, f) {
            debug!("confirm {:?}", v.dest);
            edges.push(self.external_newedge(key, v.dest, v.introduced_by));
            // Also confirm the parents of the name node.
            for v_ in self.iter_adjacent(branch, v.dest, f, f) {
                debug!("confirm 2 {:?}", v_.dest);
                edges.push(self.external_newedge(v.dest, v_.dest, v_.introduced_by));
            }
        }

        if !edges.is_empty() {
            let inode = Key {
                patch: Some(self.external_hash(key.patch).to_owned()),
                line: key.line.clone(),
            };
            st.actions.push(Record::FileAdd {
                name: realpath.to_owned(),
                add: Change::NewEdges {
                    edges,
                    previous: EdgeFlags::FOLDER_EDGE
                        | EdgeFlags::PARENT_EDGE
                        | EdgeFlags::DELETED_EDGE,
                    flag: EdgeFlags::FOLDER_EDGE | EdgeFlags::PARENT_EDGE,
                    inode,
                },
                contents: None,
            });
        }
        debug!("/confirm_path");

        Ok(())
    }
}

impl RecordState {
    /// Create an empty record state, with line numbering starting
    /// just past the initial `LineId`.
    pub fn new() -> Self {
        Self {
            actions: Vec::new(),
            updatables: HashSet::new(),
            redundant: Vec::new(),
            line_num: LineId::new() + 1,
        }
    }

    /// Consume the state, yielding the recorded changes together with
    /// the inode updates to replay on the local databases.
    pub fn finish(self) -> (Vec<Record<ChangeContext<PatchId>>>, HashSet<InodeUpdate>) {
        let Self {
            actions, updatables, ..
        } = self;
        (actions, updatables)
    }
}

impl<'env, T: rand::Rng> MutTxn<'env, T> {
    pub fn record(
        &mut self,
        diff_algorithm: diff::Algorithm,
        state: &mut RecordState,
        branch: &Branch,
        working_copy: &std::path::Path,
        working_copy: &std::path::Path,
        prefix: Option<&std::path::Path>,
        working_copy: &RepoRoot<impl AsRef<Path>>,
        prefix: &RepoPath<impl AsRef<Path>>,
    ) -> Result<()> {
        let mut obsolete_inodes = Vec::new();



2
3
4
5
6
7
8
9
10


13


16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77


79


82



86


89
use fs_representation::{RepoRoot};

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
use super::{EdgeFlags, MutTxn};
use backend::{ROOT_PATCH_ID, FileMetadata};
use fs_representation::{in_repo_root, RepoPath, RepoRoot};
use patch::{Change, Record};
use record::RecordState;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use Result;
use std::borrow::Cow;

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
use std::path::{PathBuf};

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

/// The kind of change a working-copy file has undergone relative to
/// the branch, as reported by `unrecorded_changes` (used for status
/// display).
#[derive(Debug)]
pub enum ChangeType {
    /// The file's contents were modified.
    Modified,
    /// The file is new.
    New,
    /// The file was deleted.
    Del,
    /// The file was moved/renamed.
    Move {
        new_name: Rc<RepoPath<PathBuf>>,
        /// Each former name is a list of (metadata, name) path
        /// components, joined with `/` for display.
        former_names: Vec<Vec<(FileMetadata, String)>>,
    },
}

impl ChangeType {
    pub fn short(&self) -> Cow<'static, str> {
        match *self {
            ChangeType::Modified => "M".into(),
            ChangeType::New => "A".into(),
            ChangeType::Del => "D".into(),
            ChangeType::Move { ref former_names, .. } => {
                let mut s = String::new();
                if former_names.len() == 1 {
                    for (i, (_, name)) in former_names[0].iter().enumerate() {
                        if i > 0 {
                            s.push_str("/");
                        }
                        s.push_str(name)
                    }
                    s.push_str(" →");
                } else if former_names.len() > 1 {
                    s.push_str("{ ");
                    for (k, former_names) in former_names.iter().enumerate() {
                        if k > 0 {
                            s.push_str(", ");
                        }
                        for (i, (_, name)) in former_names.iter().enumerate() {
                            if i > 0 {
                                s.push_str("/");
                            }
                            s.push_str(name)
                        }
                    }
                    s.push_str(" } →");
                } else {
                    s.push_str("→");
                }
                s.into()
            },
        }
    }

    pub fn long(&self) -> &str {
        match *self {
            ChangeType::Modified => "modified:",
            ChangeType::New => "new file:",
            ChangeType::Del => "deleted:",
            ChangeType::Move { .. } => "moved:",
        }
    }
}

pub fn unrecorded_changes<T: rand::Rng>(
    txn: &mut MutTxn<T>,
    repo_root: &Path,

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    repo_root: &RepoRoot<impl AsRef<Path>>,

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    repo_root: &PathBuf,

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    repo_root: &Path,

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    branch: &String,

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85

86
87
use super::{get_current_branch, validate_base58, BasicOptions};
use super::{validate_base58, BasicOptions};
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, StaticSubcommand};
use error::Error;
use libpijul::patch::Patch;
use libpijul::{apply_resize, apply_resize_no_output, Hash, RepoPath};
use std::collections::HashSet;
use std::fs::File;
use std::io::{stdin, Read, Write};
use std::path::Path;

/// Build the clap subcommand definition for `pijul apply`.
///
/// Declares the positional `patch` arguments (base58 hashes; when
/// absent, patches are read from standard input) and the
/// `--repository`, `--branch` and `--no-output` options.
pub fn invocation() -> StaticSubcommand {
    // The subcommand builder is the function's final expression; the
    // previous explicit `return ...;` was non-idiomatic.
    SubCommand::with_name("apply")
        .about("Apply a patch")
        .arg(
            Arg::with_name("patch")
                .help(
                    "Hash of the patch to apply, in base58. If no patch is given, patches are \
                     read from the standard input.",
                )
                .takes_value(true)
                .multiple(true)
                .validator(validate_base58),
        )
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help(
                    "Path to the repository where the patches will be applied. Defaults to the \
                     repository containing the current directory.",
                )
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help(
                    "The branch to which the patches will be applied. Defaults to the current \
                     branch.",
                )
                .takes_value(true),
        )
        .arg(
            Arg::with_name("no-output")
                .long("no-output")
                .help("Only apply the patch, don't output it to the repository."),
        )
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    debug!("applying");
    let mut remote = HashSet::new();

    // let remote: HashSet<Hash> =
    let mut has_patches = false;
    if let Some(hashes) = args.values_of("patch") {
        remote.extend(hashes.map(|h| Hash::from_base58(&h).unwrap()));
        has_patches = true
    }

    if !has_patches {
        // Read patches in gz format from stdin.
        let mut buf = Vec::new();
        stdin().read_to_end(&mut buf)?;

        let mut buf_ = &buf[..];
        let mut i = 0;
        while let Ok((h, _, patch)) = Patch::from_reader_compressed(&mut buf_) {
            debug!("{:?}", patch);

            {
                let mut path = opts.patches_dir();
                path.push(h.to_base58());
                path.set_extension("gz");
                let mut f = File::create(&path)?;
                f.write_all(&buf[i..(buf.len() - buf_.len())])?;
                i = buf.len() - buf_.len();
            }

            remote.insert(h);
        }
    }

    debug!("remote={:?}", remote);
    let is_current_branch = if let Ok(br) = get_current_branch(&opts.repo_root) {
    let is_current_branch = if let Ok(br) = opts.repo_root.get_current_branch() {
        br == opts.branch()


1
2
3
4
5
6
7
8
9
10
11
12

13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331

333
334


337


340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401

403


406

408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686

687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702

703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881

882
883
884
885
886
887
use libpijul::fs_representation::{RepoRoot, PIJUL_DIR_NAME};
        let repo_root = repo_root.repo_root.as_ref();
use getch;
use libpijul::patch::{Change, ChangeContext, Patch, PatchHeader, Record};
use std::io::prelude::*;

use std::collections::{HashMap, HashSet};
use std::ffi::OsString;
use std::io::stdout;
use std::path::{Path, PathBuf};
use std::rc::Rc;

use regex::Regex;

use libpijul::fs_representation::PIJUL_DIR_NAME;
use commands::pretty_repo_path;
use libpijul::fs_representation::{RepoPath, RepoRoot, PIJUL_DIR_NAME};

use atty;
use error::Error;
use libpijul::{EdgeFlags, Hash, LineId, MutTxn, PatchId};
use rand;
use std;
use std::char::from_u32;
use std::fs::{remove_file, File};
use std::process;
use std::str;
use term;
use term::{Attr, StdoutTerminal};

use ignore::gitignore::GitignoreBuilder;
use line;

/// Placeholder printed in place of line contents that are not valid UTF-8.
const BINARY_CONTENTS: &'static str = "<binary contents>";
/// Which interactive operation the user is being prompted about.
#[derive(Clone, Copy)]
pub enum Command {
    Pull,
    Push,
    Unrecord,
}

impl Command {
    /// The verb used in prompts for this command ("pull", "push" or
    /// "unrecord").
    fn verb(&self) -> &'static str {
        use self::Command::*;
        match *self {
            Pull => "pull",
            Push => "push",
            Unrecord => "unrecord",
        }
    }
}

/// Write `title` (in bold when a terminal is available) followed by
/// `contents` and a newline to stdout. All terminal and I/O errors are
/// deliberately ignored — output here is cosmetic.
fn print_section(term: &mut Option<Box<StdoutTerminal>>, title: &str, contents: &str) {
    let mut out = std::io::stdout();
    if let Some(ref mut t) = *term {
        t.attr(Attr::Bold).unwrap_or(());
    }
    write!(out, "{}", title).unwrap_or(());
    if let Some(ref mut t) = *term {
        t.reset().unwrap_or(());
    }
    writeln!(out, "{}", contents).unwrap_or(());
}

/// Strip every ASCII ESC (0x1b) character from `f`, so untrusted patch
/// metadata cannot inject terminal escape sequences. Borrows the input
/// unchanged when it contains no ESC byte; allocates only otherwise.
fn remove_escape_codes(f: &str) -> std::borrow::Cow<str> {
    const ESC: char = '\u{1b}';
    if !f.contains(ESC) {
        return std::borrow::Cow::Borrowed(f);
    }
    let cleaned: String = f.chars().filter(|&c| c != ESC).collect();
    std::borrow::Cow::Owned(cleaned)
}

/// Print a patch header to stdout: hash, optional internal id, authors,
/// timestamp, then the (possibly tagged) name and the description,
/// using bold section titles when a terminal is available.
pub fn print_patch_descr(
    term: &mut Option<Box<StdoutTerminal>>,
    hash: &Hash,
    internal: Option<PatchId>,
    patch: &PatchHeader,
) {
    print_section(term, "Hash:", &format!(" {}", &hash.to_base58()));
    if let Some(internal) = internal {
        print_section(term, "Internal id:", &format!(" {}", &internal.to_base58()));
    }

    print_section(
        term,
        "Authors:",
        // Escape codes are stripped so patch metadata cannot inject
        // terminal control sequences.
        &format!(" {}", remove_escape_codes(&patch.authors.join(", "))),
    );
    print_section(term, "Timestamp:", &format!(" {}", patch.timestamp));

    // A non-empty `flag` field marks the patch as a tag.
    let is_tag = if !patch.flag.is_empty() { "TAG: " } else { "" };

    let mut stdout = std::io::stdout();
    writeln!(
        stdout,
        "\n    {}{}",
        is_tag,
        remove_escape_codes(&patch.name)
    )
    .unwrap_or(());
    if let Some(ref d) = patch.description {
        // Blank line, then the description indented by four spaces.
        writeln!(stdout, "").unwrap_or(());
        let d = remove_escape_codes(d);
        for descr_line in d.lines() {
            writeln!(stdout, "    {}", descr_line).unwrap_or(());
        }
    }
    writeln!(stdout, "").unwrap_or(());
}

/// Decide whether the fate of patch `a` is already forced by choices the
/// user made on related patches, or `None` when the user still has to
/// be asked.
///
/// Pull and push are "covariant": selecting a patch drags its
/// dependencies in. Unrecord is "contravariant": unselecting a patch
/// drags the patches depending on it out.
fn check_forced_decision(
    command: Command,
    choices: &HashMap<&Hash, bool>,
    rev_dependencies: &HashMap<&Hash, Vec<&Hash>>,
    a: &Hash,
    b: &Patch,
) -> Option<bool> {
    let covariant = match command {
        Command::Pull | Command::Push => true,
        Command::Unrecord => false,
    };

    // Patches that depend on `a`: if any of them was decided "with the
    // grain" of the command, `a` is forced the same way.
    if let Some(dependents) = rev_dependencies.get(a) {
        let forced = dependents
            .iter()
            .filter_map(|y| choices.get(y))
            .any(|&choice| choice == covariant);
        if forced {
            return Some(covariant);
        }
    }

    // Dependencies of `a`: if any of them was decided "against the
    // grain", `a` is forced the opposite way.
    let forced = b
        .dependencies()
        .iter()
        .filter_map(|y| choices.get(&y))
        .any(|&choice| choice != covariant);
    if forced {
        return Some(!covariant);
    }

    None
}

/// Show patch `b` (hash `a`) and ask the user whether to apply
/// `command_name` to it.
///
/// Returns the uppercased key pressed and, for 'a'/'d', the decision to
/// apply to all remaining patches ('a' → select all, 'd' → skip all).
/// Unrecognised keys are returned as-is so the caller can re-prompt;
/// no input maps to '\0'.
fn interactive_ask(
    getch: &getch::Getch,
    a: &Hash,
    patchid: Option<PatchId>,
    b: &Patch,
    command_name: Command,
    show_help: bool,
) -> Result<(char, Option<bool>), Error> {
    // Only use terminal colours/attributes when stdout is a tty.
    let mut term = if atty::is(atty::Stream::Stdout) {
        term::stdout()
    } else {
        None
    };
    print_patch_descr(&mut term, a, patchid, b);

    if show_help {
        display_help(command_name);
        print!("Shall I {} this patch? ", command_name.verb());
    } else {
        print!("Shall I {} this patch? [ynkad?] ", command_name.verb());
    }

    stdout().flush()?;
    match getch.getch().ok().and_then(|x| from_u32(x as u32)) {
        Some(e) => {
            println!("{}", e);
            // Normalise the keypress to one uppercase char ('\0' if empty).
            let e = e.to_uppercase().next().unwrap_or('\0');
            match e {
                // 'a' = yes to this patch and every remaining one.
                'A' => Ok(('Y', Some(true))),
                // 'd' = no to this patch and every remaining one.
                'D' => Ok(('N', Some(false))),
                e => Ok((e, None)),
            }
        }
        _ => Ok(('\0', None)),
    }
}

/// Print the help text shown when the user answers '?' at the
/// patch-selection prompt.
fn display_help(c: Command) {
    let verb = c.verb();
    println!("Available options: ynkad?");
    println!("y: {} this patch", verb);
    println!("n: don't {} this patch", verb);
    println!("k: go bacK to the previous patch");
    println!("a: {} all remaining patches", verb);
    println!("d: finish, skipping all remaining patches");
    println!("")
}

/// Patches might have a dummy "changes" field here.
///
/// Interactively ask the user which of `patches` the `command` should
/// act on. Decisions are propagated along dependency edges (see
/// `check_forced_decision`), and 'a'/'d' answers decide everything
/// remaining. Returns the hashes of the selected patches.
pub fn ask_patches(
    command: Command,
    patches: &[(Hash, Option<PatchId>, Patch)],
) -> Result<Vec<Hash>, Error> {
    let getch = getch::Getch::new();
    let mut i = 0;

    // Record of the user's choices.
    let mut choices: HashMap<&Hash, bool> = HashMap::new();

    // For each patch, the list of patches that depend on it.
    let mut rev_dependencies: HashMap<&Hash, Vec<&Hash>> = HashMap::new();

    // Decision for the remaining patches ('a' or 'd'), if any.
    let mut final_decision = None;
    let mut show_help = false;

    while i < patches.len() {
        let (ref a, patchid, ref b) = patches[i];
        let forced_decision = check_forced_decision(command, &choices, &rev_dependencies, a, b);

        // Is the decision already forced by a previous choice?
        let e = match final_decision.or(forced_decision) {
            Some(true) => 'Y',
            Some(false) => 'N',
            None => {
                debug!("decision not forced");
                let (current, remaining) =
                    interactive_ask(&getch, a, patchid, b, command, show_help)?;
                final_decision = remaining;
                current
            }
        };

        show_help = false;

        debug!("decision: {:?}", e);
        match e {
            'Y' => {
                choices.insert(a, true);
                match command {
                    Command::Pull | Command::Push => {
                        // Remember that `a` depends on `dep`, so selecting a
                        // dependent later can force `dep` in.
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
                    Command::Unrecord => {}
                }
                i += 1
            }
            'N' => {
                choices.insert(a, false);
                match command {
                    Command::Unrecord => {
                        // For unrecord the propagation goes the other way:
                        // refusing `a` can force out patches it depends on.
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
                    Command::Pull | Command::Push => {}
                }
                i += 1
            }
            'K' if i > 0 => {
                // Go back to the previous patch.
                // NOTE(review): this removes the (not yet decided) choice for
                // patches[i] rather than patches[i - 1]; it looks like the
                // previous patch's answer was meant to be cleared — confirm.
                let (ref a, _, _) = patches[i];
                choices.remove(a);
                i -= 1
            }
            '?' => {
                show_help = true;
            }
            _ => {}
        }
    }
    // Keep only the patches the user (or propagation) answered "yes" to.
    Ok(patches
        .into_iter()
        .filter_map(|&(ref hash, _, _)| {
            if let Some(true) = choices.get(hash) {
                Some(hash.to_owned())
            } else {
                None
            }
        })
        .collect())
}

/// Compute the dependencies of this change.
///
/// Returns the set of lines created within the same record set (entries
/// with no explicit patch reference) that change `id` needs, and records
/// in `provided_by` which change provides each line that `c` creates.
fn change_deps(
    id: usize,
    c: &Record<ChangeContext<Hash>>,
    provided_by: &mut HashMap<LineId, usize>,
) -> HashSet<LineId> {
    let mut s = HashSet::new();
    for c in c.iter() {
        match *c {
            Change::NewNodes {
                ref up_context,
                ref down_context,
                ref line_num,
                ref nodes,
                ..
            } => {
                // Both contexts must exist before the new nodes can be
                // inserted, so they are dependencies of this change.
                for cont in up_context.iter().chain(down_context) {
                    if cont.patch.is_none() && !cont.line.is_root() {
                        s.insert(cont.line.clone());
                    }
                }
                // The freshly created lines are provided by this change.
                for i in 0..nodes.len() {
                    provided_by.insert(*line_num + i, id);
                }
            }
            Change::NewEdges { ref edges, .. } => {
                for e in edges {
                    if e.from.patch.is_none() && !e.from.line.is_root() {
                        s.insert(e.from.line.clone());
                    }
                    // Bug fix: this branch used to test `e.from.line.is_root()`
                    // (a copy-paste of the branch above), so the root line
                    // could be inserted — or a real dependency skipped — when
                    // only one endpoint of the edge is the root.
                    if e.to.patch.is_none() && !e.to.line.is_root() {
                        s.insert(e.to.line.clone());
                    }
                }
            }
        }
    }
    s
}


>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
fn print_record<T: rand::Rng>(
    repo_root: &RepoRoot<impl AsRef<Path>>,

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
fn print_change<T: rand::Rng>(

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    term: &mut Option<Box<StdoutTerminal>>,
    cwd: &Path,
    repo: &MutTxn<T>,
    current_file: &mut Option<Rc<RepoPath<PathBuf>>>,
    c: &Record<ChangeContext<Hash>>,
) -> Result<(), Error> {
    match *c {
        Record::FileAdd {
            ref name,
            ref contents,
            ..
        } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::CYAN).unwrap_or(());
            }
            print!("added file ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, name, cwd).display());
            if let Some(ref change) = contents {
                print_change(term, repo, 0, 0, change)?;
            }
            Ok(())
        }
        Record::FileDel {
            ref name,
            ref contents,
            ..
        } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::MAGENTA).unwrap_or(());
            }
            print!("deleted file: ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, name, cwd).display());
            if let Some(ref change) = contents {
                print_change(term, repo, 0, 0, change)?;
            }
            Ok(())
        }
        Record::FileMove { ref new_name, .. } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::YELLOW).unwrap_or(());
            }
            print!("file moved to: ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, new_name, cwd).display());
            Ok(())
        }
        Record::Change {
            ref change,
            ref replacement,
            ref file,
            old_line,
            new_line,
            ..
        } => {

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            if let Some(ref mut term) = *term {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
            let mut file_changed = true;
            if let Some(ref cur_file) = *current_file {
                if file == cur_file {
                    file_changed = false;
                }
            }
            if file_changed {
                if let Some(ref mut term) = *term {
                    term.attr(Attr::Bold).unwrap_or(());
                    term.attr(Attr::Underline(true)).unwrap_or(());
                }
                println!("{}", pretty_repo_path(repo_root, file, cwd).display());
                if let Some(ref mut term) = *term {
                    term.reset().unwrap_or(());
                }
                *current_file = Some(file.clone())
            }

            print_change(term, repo, old_line, new_line, change)?;
            if let Some(ref c) = *replacement {
                print_change(term, repo, old_line, new_line, c)?
            }
            Ok(())
        }
    }
}

/// Print one hunk of a change: '+' lines for added content (NewNodes)
/// and '+'/'-' lines for edge changes (NewEdges), colourised when a
/// terminal is available. `old_line`/`new_line` are the line positions
/// to announce; 0 suppresses the "From line" header.
fn print_change<T: rand::Rng>(
    term: &mut Option<Box<StdoutTerminal>>,
    repo: &MutTxn<T>,
    old_line: usize,
    new_line: usize,
    change: &Change<ChangeContext<Hash>>,
) -> Result<(), Error> {
    match *change {
        Change::NewNodes {
            // ref up_context,ref down_context,ref line_num,
            ref flag,
            ref nodes,
            ..
        } => {
            if flag.contains(EdgeFlags::FOLDER_EDGE) {
                // Folder edges describe file-tree entries: the node bytes
                // after the first two are printed as the file name
                // (presumably a 2-byte header — TODO confirm the encoding).
                for n in nodes {
                    if n.len() >= 2 {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::CYAN).unwrap_or(());
                        }
                        print!("new file ");
                        if let Some(ref mut term) = *term {
                            term.reset().unwrap_or(());
                        }
                        println!("{}", str::from_utf8(&n[2..]).unwrap_or(""));
                    }
                }
            } else {
                if new_line > 0 {
                    println!("From line {}\n", new_line);
                }

                for n in nodes {
                    let s = str::from_utf8(n).unwrap_or(BINARY_CONTENTS);
                    if let Some(ref mut term) = *term {
                        term.fg(term::color::GREEN).unwrap_or(());
                    }
                    print!("+ ");
                    if let Some(ref mut term) = *term {
                        term.reset().unwrap_or(());
                    }
                    // Avoid a double newline for contents that already end
                    // with one.
                    if s.ends_with("\n") {
                        print!("{}", s);
                    } else {
                        println!("{}", s);
                    }
                }
            }
            Ok(())
        }
        Change::NewEdges {
            ref edges, flag, ..
        } => {
            // Several edges may reference the same line; print each line
            // only once.
            let mut h_targets = HashSet::with_capacity(edges.len());

            if old_line > 0 {
                println!("From line {}\n", old_line);
            }
            for e in edges {
                // For parent edges the affected line is the source (`from`),
                // otherwise the destination (`to`).
                let (target, flag) = if !flag.contains(EdgeFlags::PARENT_EDGE) {
                    if h_targets.insert(&e.to) {
                        (Some(&e.to), flag)
                    } else {
                        (None, flag)
                    }
                } else {
                    if h_targets.insert(&e.from) {
                        (Some(&e.from), flag)
                    } else {
                        (None, flag)
                    }
                };
                if let Some(target) = target {
                    // Fetch the line's contents from the repository to show it.
                    let internal = repo.internal_key_unwrap(target);
                    let l = repo.get_contents(internal).unwrap();
                    let l = l.into_cow();
                    let s = str::from_utf8(&l).unwrap_or(BINARY_CONTENTS);

                    if flag.contains(EdgeFlags::DELETED_EDGE) {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::RED).unwrap_or(());
                        }
                        print!("- ");
                    } else {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::GREEN).unwrap_or(());
                        }
                        print!("+ ");
                    }
                    if let Some(ref mut term) = *term {
                        term.reset().unwrap_or(());
                    }
                    if s.ends_with("\n") {
                        print!("{}", s)
                    } else {
                        println!("{}", s)
                    }
                }
            }
            Ok(())
        }
    }
}

/// Whether the user is selecting changes to record or to revert.
#[derive(Clone, Copy, Debug)]
pub enum ChangesDirection {
    Record,
    Revert,
}

impl ChangesDirection {
    /// `true` when recording (as opposed to reverting).
    fn is_record(&self) -> bool {
        match *self {
            ChangesDirection::Record => true,
            ChangesDirection::Revert => false,
        }
    }

    /// The verb used in prompts for this direction.
    fn verb(&self) -> &str {
        match *self {
            ChangesDirection::Record => "record",
            ChangesDirection::Revert => "revert",
        }
    }
}

/// Print the help text shown when the user answers '?' at the
/// change-selection prompt. The 'i' option is only listed when the
/// current change is a file addition that could be ignored.
fn display_help_changes(
    potential_new_ignore: Option<&RepoPath<impl AsRef<Path>>>,
    direction: ChangesDirection,
) {
    let verb = direction.verb();
    println!("Available options:");
    println!("y: {} this change", verb);
    println!("n: don't {} this change", verb);
    println!(
        "f: {} the rest of the changes to this file",
        verb
    );
    println!(
        "s: don't {} the rest of the changes to this file",
        verb
    );
    println!("k: go back to the previous change");
    println!("a: {} all remaining changes", verb);
    println!("d: skip all remaining changes");
    if let Some(filename) = potential_new_ignore {
        println!("i: ignore file {}", filename.display());
    }
    println!("")
}

/// Display one change (via `print_record`) and ask the user whether to
/// record/revert it.
///
/// Returns `(answer, final_decision, file_decision)`: `answer` is the
/// normalised uppercase key (or '\0' for no input); 'a'/'d' set a
/// decision for all remaining changes, 'f'/'s' for the remaining
/// changes of the current file.
fn prompt_one_change<T: rand::Rng>(
    repository: &MutTxn<T>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    change: &Record<ChangeContext<Hash>>,
    current_file: &mut Option<Rc<RepoPath<PathBuf>>>,
    n_changes: usize,
    i: usize,
    direction: ChangesDirection,
    potential_new_ignore: Option<&RepoPath<impl AsRef<Path>>>,
    terminal: &mut Option<Box<StdoutTerminal>>,
    getch: &getch::Getch,
    cwd: &Path,
    show_help: bool,
) -> Result<(char, Option<char>, Option<char>), Error> {
    debug!("changes: {:?}", change);
    print_record(repo_root, terminal, cwd, repository, current_file, &change)?;
    println!("");
    // The 'i' (ignore) option only makes sense for newly added files.
    let choices = if potential_new_ignore.is_some() {
        "[ynsfkadi?]"
    } else {
        "[ynsfkad?]"
    };
    if show_help {
        display_help_changes(potential_new_ignore, direction);
        print!(
            "Shall I {} this change? ({}/{}) ",
            direction.verb(),
            i + 1,
            n_changes
        );
    } else {
        print!(
            "Shall I {} this change? ({}/{}) {} ",
            direction.verb(),
            i + 1,
            n_changes,
            choices
        );
    }
    stdout().flush()?;
    match getch.getch().ok().and_then(|x| from_u32(x as u32)) {
        Some(e) => {
            println!("{}\n", e);
            // Normalise the keypress to one uppercase char ('\0' if empty).
            let e = e.to_uppercase().next().unwrap_or('\0');
            match e {
                // 'a'/'d' decide everything remaining; 'f'/'s' decide only
                // the rest of the current file.
                'A' => Ok(('Y', Some('Y'), None)),
                'D' => Ok(('N', Some('N'), None)),
                'F' => Ok(('Y', None, Some('Y'))),
                'S' => Ok(('N', None, Some('N'))),
                e => Ok((e, None, None)),
            }
        }
        _ => Ok(('\0', None, None)),
    }
}

/// Interactively ask the user for an ignore pattern covering `file`,
/// looping until the entered pattern parses and actually matches the
/// file (or the user enters an empty line to give up). A valid pattern
/// is appended to `new_ignored_patterns` and to `new_ignore_builder`.
fn add_to_ignore_file(
    file: &RepoPath<impl AsRef<Path>>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    new_ignored_patterns: &mut Vec<String>,
    new_ignore_builder: &mut GitignoreBuilder,
) {
    loop {
        let pat = read_line_with_suggestion(
            "Pattern to add to ignore file (relative to repository root, empty to add nothing)? ",
            &file.as_path().to_string_lossy(),
        );
        if pat.is_empty() {
            return;
        };

        // Validate the pattern with a throw-away builder before committing
        // it: it must parse, build, and actually match `file`.
        let mut ignore_builder = GitignoreBuilder::new(&repo_root.repo_root);
        let add_ok = match ignore_builder.add_line(None, &pat) {
            Ok(i) => match i.build() {
                Ok(i) => i
                    .matched_path_or_any_parents(file.as_path(), false)
                    .is_ignore(),
                Err(e) => {
                    println!("could not match pattern {}: {}", &pat, e);
                    false
                }
            },
            Err(e) => {
                println!("did not understand pattern {}: {}", &pat, e);
                false
            }
        };
        if add_ok {
            // The pattern already passed `add_line` above, so this cannot fail.
            new_ignore_builder.add_line(None, &pat).unwrap();
            new_ignored_patterns.push(pat);
            return;
        }
        println!(
            "pattern {} is incorrect or does not match {}",
            pat,
            &file.display()
        );
    }
}

/// Interactively ask the user which of `changes` to record (or revert),
/// returning the per-change decisions (index → keep?) together with any
/// new ignore patterns added during the session.
///
/// Decisions propagate along line-level dependencies between changes:
/// unselecting something a change needs unselects that change, and
/// selecting something that needs a change selects it. Files the user
/// confirms adding are removed from `to_unadd`.
///
/// (This function previously contained unresolved patch residue —
/// duplicated `repo_root` parameters and a duplicated
/// `new_ignore_builder` initialisation — which would not compile; the
/// surviving `RepoRoot`-based side is kept here.)
pub fn ask_changes<T: rand::Rng>(
    repository: &MutTxn<T>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    cwd: &Path,
    changes: &[Record<ChangeContext<Hash>>],
    direction: ChangesDirection,
    to_unadd: &mut HashSet<RepoPath<PathBuf>>,
) -> Result<(HashMap<usize, bool>, Vec<String>), Error> {
    debug!("changes: {:?}", changes);
    // Only use terminal colours/attributes when stdout is a tty.
    let mut terminal = if atty::is(atty::Stream::Stdout) {
        term::stdout()
    } else {
        None
    };
    let getch = getch::Getch::new();
    let mut i = 0;
    let mut choices: HashMap<usize, bool> = HashMap::new();
    let mut new_ignored_patterns: Vec<String> = Vec::new();
    let mut new_ignore_builder = GitignoreBuilder::new(&repo_root.repo_root);
    // Decision for all remaining changes ('a' or 'd'), if any.
    let mut final_decision = None;
    // determines what decision to make on remaining change in the same file
    let mut file_decision: Option<char> = None;

    // Build the line-level dependency graph between changes: `deps[i]`
    // are the changes that i needs, `rev_deps[p]` the changes needing p.
    let mut provided_by = HashMap::new();
    let mut line_deps = Vec::with_capacity(changes.len());
    for i in 0..changes.len() {
        line_deps.push(change_deps(i, &changes[i], &mut provided_by));
    }
    let mut deps: HashMap<usize, Vec<usize>> = HashMap::new();
    let mut rev_deps: HashMap<usize, Vec<usize>> = HashMap::new();
    for i in 0..changes.len() {
        for dep in line_deps[i].iter() {
            debug!("provided: i {}, dep {:?}", i, dep);
            let p = provided_by.get(dep).unwrap();
            debug!("provided: p= {}", p);

            let e = deps.entry(i).or_insert(Vec::new());
            e.push(*p);

            let e = rev_deps.entry(*p).or_insert(Vec::new());
            e.push(i);
        }
    }

    let empty_deps = Vec::new();
    let mut current_file = None;
    let mut show_help = false;

    while i < changes.len() {
        let decision=
            // If one of our dependencies has been unselected (with "n")
            if deps.get(&i)
            .unwrap_or(&empty_deps)
            .iter()
            .any(|x| { ! *(choices.get(x).unwrap_or(&true)) }) {
                Some(false)
            } else if rev_deps.get(&i).unwrap_or(&empty_deps)
            .iter().any(|x| { *(choices.get(x).unwrap_or(&false)) }) {
                // If we are a dependency of someone selected (with "y").
                Some(true)
            } else {
                None
            };

        // New files matching an ignore pattern added earlier in this
        // session are deselected automatically.
        let decision = match changes[i] {
            Record::FileAdd { ref name, .. } => {
                let i = new_ignore_builder.build().unwrap();
                if i.matched_path_or_any_parents(name.as_path(), false)
                    .is_ignore()
                {
                    Some(false)
                } else {
                    None
                }
            }
            _ => decision,
        };
        // Only additions during a record can spawn new ignore patterns.
        let potential_new_ignore: Option<&RepoPath<PathBuf>> = match direction {
            ChangesDirection::Revert => None,
            ChangesDirection::Record => match changes[i] {
                Record::FileAdd { ref name, .. } => Some(&name),
                _ => None,
            },
        };
        let (e, f, file_d) = match decision {
            Some(true) => ('Y', final_decision, file_decision),
            Some(false) => ('N', final_decision, file_decision),
            None => {
                if let Some(d) = final_decision {
                    (d, Some(d), file_decision)
                } else {
                    // Apply a pending per-file decision ('f'/'s') to further
                    // changes in the same file.
                    let command_decisions = if let Some(ref f) = current_file {
                        file_decision.and_then(|d| match changes[i] {
                            Record::Change { ref file, .. } => {
                                if f == file {
                                    Some((d, final_decision, Some(d)))
                                } else {
                                    None
                                }
                            }
                            _ => None,
                        })
                    } else {
                        None
                    };

                    if let Some(res) = command_decisions {
                        res
                    } else {
                        prompt_one_change(
                            repository,
                            repo_root,
                            &changes[i],
                            &mut current_file,
                            changes.len(),
                            i,
                            direction,
                            potential_new_ignore,
                            &mut terminal,
                            &getch,
                            cwd,
                            show_help,
                        )?
                    }
                }
            }
        };

        show_help = false;

        final_decision = f;
        file_decision = file_d;
        match e {
            'Y' => {
                choices.insert(i, direction.is_record());
                match changes[i] {
                    Record::FileAdd { ref name, .. } => {
                        // The user confirmed the addition: stop proposing to
                        // un-add this file.
                        to_unadd.remove(&name);
                    }
                    _ => (),
                }
                i += 1
            }
            'N' => {
                choices.insert(i, !direction.is_record());
                i += 1
            }
            'K' if i > 0 => {
                // Go back to the previous change.
                choices.remove(&i);
                i -= 1
            }
            'I' => match potential_new_ignore {
                Some(file) => {
                    add_to_ignore_file(
                        file,
                        repo_root,
                        &mut new_ignored_patterns,
                        &mut new_ignore_builder,
                    );
                    choices.insert(i, !direction.is_record());
                    i += 1;
                }
                _ => {}
            },
            '?' => {
                show_help = true;
            }
            _ => {}
        }
    }
    Ok((choices, new_ignored_patterns))
}

fn read_line(s: &str) -> String {
    print!("{}", s);
    if let Some(mut term) = line::Terminal::new() {
        term.read_line().unwrap()
    } else {
        let stdin = std::io::stdin();
        let mut stdin = stdin.lock().lines();
        if let Some(Ok(x)) = stdin.next() {
            x
        } else {
            String::new()
        }
    }
}

/// Prompt for a line of input, eventually pre-filled with `_suggestion`.
///
/// NOTE(review): the suggestion is currently ignored — this simply
/// delegates to `read_line` with the plain prompt.
pub fn read_line_with_suggestion(prompt: &str, _suggestion: &str) -> String {
    read_line(prompt)
}

/// Interactively ask for the patch author.
///
/// Returns a single-element vector containing whatever the user typed
/// (expected to be a name, optionally followed by an email address).
pub fn ask_authors() -> Result<Vec<String>, Error> {
    std::io::stdout().flush()?;
    let author = read_line("What is your name <and email address>? ");
    Ok(vec![author])
}

pub fn ask_patch_name(
    repo_root: &Path,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    maybe_editor: Option<&String>,
    template: String,
) -> Result<(String, Option<String>), Error> {
    let repo_root = repo_root.repo_root.as_ref();
    if let Some(editor) = maybe_editor {
1
2

3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23

24
25
use clap::{Arg, ArgMatches, SubCommand};

use super::{default_explain, get_current_branch, BasicOptions, StaticSubcommand};
use super::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;

/// Build the clap definition of the `branches` subcommand.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .help(
            "Path to a pijul repository. Defaults to the repository containing the \
             current directory.",
        )
        .takes_value(true);
    SubCommand::with_name("branches")
        .about("List all branches")
        .arg(repository)
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let repo = opts.open_repo()?;
    let txn = repo.txn_begin()?;
    let current_branch = get_current_branch(&opts.repo_root)?;
    let current_branch = opts.repo_root.get_current_branch()?;
    for branch in txn.iter_branches(None) {
82
83

84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138

139
140

141
142
    let repo = opts.open_and_grow_repo(provision)?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let current_branch = get_current_branch(&opts.repo_root)?;
    let current_branch = opts.repo_root.get_current_branch()?;
    // We need to check at least that there are no file
    // moves/additions/deletions, because these would be
    // overwritten by the checkout, sometimes causing Pijul to
    // panic.
    if force {
        // Check whether there are file moves.
        if txn
            .iter_inodes(None)
            .any(|(_, ch)| ch.status != FileStatus::Ok)
        {
            return Err(Error::PendingChanges);
        }
    } else {
        // Check whether there are more general changes.
        let mut record = RecordState::new();
        let current_branch = txn.open_branch(&current_branch)?;
        txn.record(
            libpijul::DiffAlgorithm::default(),
            &mut record,
            &current_branch,
            &opts.repo_root,
            &in_repo_root(),
        )?;
        txn.commit_branch(current_branch)?;
        let (changes, _) = record.finish();

        if !changes.is_empty() {
            return Err(Error::PendingChanges);
        }
    }

    debug!("output repository");

    let mut branch = if let Some(branch) = txn.get_branch(branch_name) {
        branch
    } else {
        return Err(Error::NoSuchBranch);
    };
    let pref = if let Some(partial) = partial_path {
        (&[partial][..]).to_prefixes(&txn, &branch)
    } else {
        (&[][..] as &[RepoPath<&Path>]).to_prefixes(&txn, &branch)
    };
    txn.output_repository(
        &mut branch,
        &opts.repo_root,
        &pref,
        &UnsignedPatch::empty().leave_unsigned(),
        &HashSet::new(),
    )?;
    txn.commit_branch(branch)?;

    txn.commit()?;

    set_current_branch(&opts.repo_root, branch_name)?;
    opts.repo_root.set_current_branch(branch_name)?;

    println!("Current branch: {:?}", get_current_branch(&opts.repo_root)?);
    println!("Current branch: {:?}", opts.repo_root.get_current_branch()?);
    Ok(())





1
2
3
4


5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118


119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154


155
156
157
158
159
160
161
162

163
164
165
166
167
168
169
170
171
            create_repo(&repo_root.repo_root)?;
                .pullable_patches(args.from_branch, args.to_branch, &repo_root, &args.from_path)?
            session.pull(&repo_root, args.to_branch, &mut pullable, &args.from_path, true)?;
            repo_root.set_current_branch(args.to_branch).map_err(|x| x.into())
                    path.set_current_branch(args.to_branch).map_err(|x| x.into())
use clap::{Arg, ArgMatches, SubCommand};
use commands::remote::{parse_remote, Remote};
use commands::{assert_no_containing_repo, create_repo, default_explain, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::set_current_branch;
use libpijul::fs_representation::set_current_branch;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::{Hash, DEFAULT_BRANCH};
use regex::Regex;
use std::io::{stderr, Write};
use std::path::Path;
use std::process::exit;
use tempfile::tempdir_in;

/// Build the clap definition of the `clone` subcommand.
pub fn invocation() -> StaticSubcommand {
    let from = Arg::with_name("from")
        .help("Repository to clone.")
        .required(true);
    let from_branch = Arg::with_name("from_branch")
        .long("from-branch")
        .help("The branch to pull from")
        .takes_value(true);
    let to_branch = Arg::with_name("to_branch")
        .long("to-branch")
        .help("The branch to pull into")
        .takes_value(true);
    let to = Arg::with_name("to").help("Target.");
    let from_path = Arg::with_name("from_path")
        .long("path")
        .help("Only pull patches relative to that path.")
        .takes_value(true);
    let patch = Arg::with_name("patch")
        .long("patch")
        .help("Pull a patch and its dependencies.")
        .takes_value(true)
        .multiple(true);
    // Reject any port value that does not parse as a u16.
    let port = Arg::with_name("port")
        .short("p")
        .long("port")
        .help("Port of the remote ssh server.")
        .takes_value(true)
        .validator(|val| {
            let x: Result<u16, _> = val.parse();
            match x {
                Ok(_) => Ok(()),
                Err(_) => Err(val),
            }
        });
    SubCommand::with_name("clone")
        .about("Clone a remote branch")
        .arg(from)
        .arg(from_branch)
        .arg(to_branch)
        .arg(to)
        .arg(from_path)
        .arg(patch)
        .arg(port)
}
/// Parsed command-line parameters for `clone`.
#[derive(Debug)]
pub struct Params<'a> {
    // Remote (or local path) to clone from.
    pub from: Remote<'a>,
    // Branch to pull from on the source side.
    pub from_branch: &'a str,
    // Restrict the clone to patches touching these in-repo paths.
    pub from_path: Vec<RepoPath<&'a Path>>,
    // Destination of the clone.
    pub to: Remote<'a>,
    // Branch to pull into on the destination side.
    pub to_branch: &'a str,
}

/// Translate raw clap matches into `Params` for the `clone` command.
///
/// When no explicit target is given, the destination directory name is
/// derived from the last path/host component of the source. Panics if a
/// target cannot be derived.
pub fn parse_args<'a>(args: &'a ArgMatches) -> Params<'a> {
    // Both remotes share the same optional port; parse it once. The value
    // was already validated by clap, so `unwrap` cannot fire on valid input.
    let port = args.value_of("port").map(|x| x.parse().unwrap());
    let from_arg = args.value_of("from").unwrap();
    // At least one must not use its "port" argument
    let from = parse_remote(from_arg, port, None, None);
    let to = match args.value_of("to") {
        Some(to) => parse_remote(to, port, None, None),
        None => {
            // Derive the target from the last non-separator component of
            // the source spec (e.g. "host:path/repo" -> "repo").
            let basename = Regex::new(r"([^/:]+)").unwrap();
            match basename
                .captures_iter(from_arg)
                .last()
                .and_then(|cap| cap.get(1))
            {
                Some(to) => parse_remote(to.as_str(), port, None, None),
                None => panic!("Could not parse target"),
            }
        }
    };
    let from_branch = args.value_of("from_branch").unwrap_or(DEFAULT_BRANCH);
    let from_path = args
        .values_of("from_path")
        .map(|paths| paths.map(|p| RepoPath(Path::new(p))).collect())
        .unwrap_or_else(Vec::new);
    let to_branch = args.value_of("to_branch").unwrap_or(from_branch);
    Params {
        from,
        from_branch,
        from_path,
        to,
        to_branch,
    }
}

pub fn run(args_: &ArgMatches) -> Result<(), Error> {
    let args = parse_args(args_);
    debug!("{:?}", args);
    match args.to {
        Remote::Local { ref path } => {
        Remote::Local { ref path } => {
        Remote::Local { path: repo_root } => {
            assert_no_containing_repo(&repo_root.repo_root)?;

            let parent = repo_root.repo_root.parent().unwrap();
            let tmp_dir = tempdir_in(parent)?;
            {
                create_repo(tmp_dir.path())?;
                let tmp_root = RepoRoot {
                    repo_root: tmp_dir.path(),
                };
                let mut session = args.from.session()?;
                let mut pullable: Vec<_> = if let Some(patches) = args_.values_of("patch") {
                    let mut p = Vec::new();
                    for x in patches {
                        p.push((Hash::from_base58(x).unwrap(), 0))
                    }
                    p
                } else {
                    session.changes(args.from_branch, &args.from_path[..])?
                };
                session.pull(
                    &tmp_root,
                    args.to_branch,
                    &mut pullable,
                    &args.from_path,
                    true,
                )?;
                tmp_root.set_current_branch(args.to_branch)?;
            }
            let path = tmp_dir.into_path();
            std::fs::rename(&path, &repo_root.repo_root)?;
            Ok(())
        }
        _ => {
            // Clone between remote repositories.
            match args.from {
                Remote::Local { ref path } => {
                Remote::Local { ref path } => {
                Remote::Local { path } => {
                    let mut to_session = args.to.session()?;
                    debug!("remote init");
                    to_session.remote_init()?;
                    debug!("pushable?");
                    let pushable = to_session.pushable_patches(
                        args.from_branch,
                        args.to_branch,
                        path,
                        &path,
                        &args.from_path,
                    )?;
                    debug!("pushable = {:?}", pushable);
                    let pushable = pushable.pushable.into_iter().map(|(h, _, _)| h).collect();
                    to_session.push(&path, args.to_branch, pushable)?;
                    path.set_current_branch(args.to_branch)
                        .map_err(|x| x.into())
                }


1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38

39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59

60
61
62

63
64
65
66
67
68
69

70
71
72
73
74
75
76

77
78
    if let Ok(file) = p.strip_prefix(&opts.repo_root.repo_root) {
                target: opts.repo_root,
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::RepoRoot;
use libpijul::graph::LineBuffer;
use libpijul::{Key, PatchId, Txn, Value};
use std::fs::canonicalize;
use std::io::{stdout, Stdout};
use std::path::Path;

/// Build the clap definition of the `credit` subcommand.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .takes_value(true)
        .help("Local repository.");
    let branch = Arg::with_name("branch")
        .long("branch")
        .help("The branch to annotate, defaults to the current branch.")
        .takes_value(true)
        .required(false);
    let file = Arg::with_name("file")
        .help("File to annotate.")
        .required(true)
        .takes_value(true);
    SubCommand::with_name("credit")
        .about("Show what patch introduced each line of a file.")
        .arg(repository)
        .arg(branch)
        .arg(file)
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let file = Path::new(args.value_of("file").unwrap());
    let p = canonicalize(opts.cwd.join(file))?;
    if let Ok(file) = p.strip_prefix(&opts.repo_root) {

    let file = opts.repo_root.relativize(&p)?;
    let repo = opts.open_repo()?;
    let txn = repo.txn_begin()?;
    if let Some(branch) = txn.get_branch(&opts.branch()) {
        let inode = txn.find_inode(&file)?;
        if txn.is_directory(&inode) {
            return Err(Error::IsDirectory);
        }
        let node = txn.get_inodes(inode).unwrap();
        let mut graph = txn.retrieve(&branch, node.key);
        let mut buf = OutBuffer {
            stdout: stdout(),
            txn: &txn,
            target: opts.repo_root,
        };
        txn.output_file(&branch, &mut buf, &mut graph, &mut Vec::new())?;
    }
    Ok(())
}

struct OutBuffer<'a> {
struct OutBuffer<'a, P: AsRef<Path> + 'a> {
    stdout: Stdout,
    txn: &'a Txn<'a>,
    target: &'a Path,
    target: RepoRoot<P>,
}

use libpijul;
use libpijul::Transaction;
use std::io::Write;

impl<'a, T: 'a + Transaction> LineBuffer<'a, T> for OutBuffer<'a> {
impl<'a, P: AsRef<Path>, T: 'a + Transaction> LineBuffer<'a, T> for OutBuffer<'a, P> {
    fn output_line(
        &mut self,
        key: &Key<PatchId>,
        contents: Value<'a, T>,
    ) -> Result<(), libpijul::Error> {
        let ext = self.txn.get_external(key.patch).unwrap();
        let patch = read_patch_nochanges(self.target, ext)?;
        let patch = self.target.read_patch_nochanges(ext)?;
        write!(



1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121


122
123
124
125
126
127
128

129
130
131
132
133
134
135
136
137
138
139
140

141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172

173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204

205
206
use libpijul::fs_representation::{find_repo_root};
use libpijul::{Hash, DEFAULT_BRANCH};
use super::{default_explain, get_wd, StaticSubcommand, validate_base58};
use super::{default_explain, get_wd, validate_base58, StaticSubcommand};
use clap::{Arg, ArgMatches, SubCommand};
use error::Error;
use libpijul::fs_representation::find_repo_root;
use libpijul::{Hash, DEFAULT_BRANCH};
use std::collections::HashSet;
use std::mem;
use std::path::Path;
use std::string::String;

/// Build the clap definition of the `dependencies` (alias `deps`) subcommand.
pub fn invocation() -> StaticSubcommand {
    let hash = Arg::with_name("hash")
        .help("Hash of a patch.")
        .takes_value(true)
        .required(false)
        .multiple(true)
        .validator(validate_base58);
    // The exploration depth must be a positive integer.
    let depth = Arg::with_name("depth")
        .long("depth")
        .help("The depth of the dependencies graph")
        .takes_value(true)
        .required(false)
        .validator(|x| {
            if let Ok(x) = x.parse::<usize>() {
                if x >= 1 {
                    return Ok(());
                }
            }
            Err("The depth argument must be an integer, and at least 1".to_owned())
        });
    let repository = Arg::with_name("repository")
        .long("repository")
        .help("Local repository.")
        .multiple(true)
        .takes_value(true);
    let branch = Arg::with_name("branch")
        .long("branch")
        .help("Branch.")
        .takes_value(true)
        .required(false);
    SubCommand::with_name("dependencies")
        .alias("deps")
        .about("Print the patch dependencies using the DOT syntax in stdout")
        .arg(hash)
        .arg(depth)
        .arg(repository)
        .arg(branch)
}

/// What the `dependencies` command should graph.
enum Target<'a> {
    // A whole branch (falls back to the current branch when `None`).
    Branch(Option<&'a str>),
    // A set of base58 patch hashes, explored up to the given depth.
    Hash(Vec<&'a str>, usize),
}

/// Parsed command-line parameters for `dependencies`.
pub struct Params<'a> {
    // Repository path given on the command line, if any.
    pub repository: Option<&'a Path>,
    // What to graph: a branch or a set of hashes.
    target: Target<'a>,
}

/// Translate raw clap matches into `Params` for the `dependencies` command.
///
/// When hashes are given, the depth defaults to 1; the value was already
/// validated by clap, so parsing cannot fail here.
pub fn parse_args<'a>(args: &'a ArgMatches) -> Result<Params<'a>, Error> {
    let target = match args.values_of("hash") {
        Some(hashes) => {
            let depth = args
                .value_of("depth")
                .unwrap_or("1")
                .parse::<usize>()
                .unwrap();
            Target::Hash(hashes.collect(), depth)
        }
        None => Target::Branch(args.value_of("branch")),
    };

    Ok(Params {
        repository: args.value_of("repository").map(|x| Path::new(x)),
        target,
    })
}

/// Turn a patch name into a safe, readable dot label.
///
/// Double quotes are escaped (they would otherwise terminate the dot
/// label), and a literal `\n` escape sequence is inserted before every
/// sixth word so nodes do not grow too wide. Surrounding/duplicate
/// whitespace is collapsed to single spaces in the process.
fn label_sanitize(str: String) -> String {
    // Escape quotes first, so the escaping applies to the original text.
    let escaped = str.replace("\"", "\\\"");

    let mut out = String::new();
    for (idx, word) in escaped.split_whitespace().enumerate() {
        if idx > 0 {
            // Start a new label line after each group of six words;
            // otherwise separate words with a single space.
            if idx % 6 == 0 {
                out.push_str("\\n");
            } else {
                out.push_str(" ");
            }
        }
        out.push_str(word);
    }

    out
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let args = parse_args(args)?;
    let wd = get_wd(args.repository)?;
    let target = if let Some(r) = find_repo_root(&wd) {
        r
    } else {
        return Err(Error::NotInARepository);
    };
    let repo_dir = pristine_dir(&target);
    let repo = Repository::open(&repo_dir, None)?;
    let repo = target.open_repo(None)?;
    let txn = repo.txn_begin()?;

    match args.target {
        Target::Branch(branch_arg) => {
            let branch_name = if let Some(b) = branch_arg {
                b.to_string()
            } else if let Ok(b) = get_current_branch(&target) {
            } else if let Ok(b) = target.get_current_branch() {
                b
            } else {
                DEFAULT_BRANCH.to_string()
            };

            if let Some(branch) = txn.get_branch(&branch_name) {
                println!("digraph dependencies {{");
                println!("  graph [rankdir=LR];");

                for (_, hash) in txn.rev_iter_applied(&branch, None) {
                    let hash_ext = txn.get_external(hash).unwrap();
                    let patch = read_patch(&target, hash_ext)?;
                    let patch = target.read_patch(hash_ext)?;

                    patch_node(
                        hash_ext.to_base58(),
                        patch.header().name.clone(),
                        patch.is_tag(),
                    );

                    let deps = txn.minimize_deps(patch.dependencies());
                    for hash_dep in deps {
                        println!("  N{} -> N{}", hash_ext.to_base58(), hash_dep.to_base58());
                    }
                }
                println!("}}");
            }
        }
        Target::Hash(hashes, depth) => {
            let mut seen = HashSet::new();
            let mut vec: Vec<_> = hashes
                .iter()
                .map(|h| Hash::from_base58(h).unwrap())
                .collect();
            let mut next = Vec::new();

            println!("digraph dependencies {{");
            println!("  graph [rankdir=LR];");

            for _ in 0..depth {
                for hash in vec.drain(..) {
                    debug!("hash: {:?}", hash);
                    seen.insert(hash.clone());
                    let hash_ext = hash.as_ref();
                    let patch = read_patch(&target, hash_ext)?;
                    let patch = target.read_patch(hash_ext)?;

                    patch_node(
                        hash_ext.to_base58(),
                        patch.header().name.clone(),
                        patch.is_tag(),
                    );

                    let deps = txn.minimize_deps(patch.dependencies());
                    for hash_dep in deps.iter() {
                        debug!("dep: {:?}", hash_dep);
                        println!("  N{} -> N{}", hash_ext.to_base58(), hash_dep.to_base58());

                        let h = hash_dep.to_owned();

                        if !seen.contains(&h) {
                            seen.insert(h.clone());
                            next.push(h);
                        }
                    }
                }

                // vec should be empty, as it has been consumed by drain
                // on the other hand, next contains all the
                // dependencies to walk into in the next loop
                // iteration
                mem::swap(&mut next, &mut vec);
            }

            // lets have a last for to get the name of the last dependencies
            for hash in vec.drain(..) {
                let hash_ext = hash.as_ref();
                let patch = read_patch(&target, hash_ext)?;
                let patch = target.read_patch(hash_ext)?;


1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55

        if let Ok(file) = p.strip_prefix(&opts.repo_root.repo_root) {
use clap::{Arg, ArgMatches, SubCommand};
use commands::{BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::in_repo_root;
use libpijul::RecordState;
use rand;
use std::fs::canonicalize;
use std::io::{stderr, Write};
use std::process::exit;

/// Build the clap definition of the `diff` subcommand.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .help("The repository to show, defaults to the current directory.")
        .takes_value(true)
        .required(false);
    let branch = Arg::with_name("branch")
        .long("branch")
        .help("The branch to show, defaults to the current branch.")
        .takes_value(true)
        .required(false);
    let prefix = Arg::with_name("prefix")
        .help("Prefix to start from")
        .takes_value(true)
        .multiple(true);
    // The two diff-algorithm flags are mutually exclusive.
    let patience = Arg::with_name("patience")
        .long("patience")
        .help("Use patience diff instead of the default (Myers diff)")
        .conflicts_with("myers")
        .takes_value(false);
    let myers = Arg::with_name("myers")
        .long("myers")
        .help("Use Myers diff")
        .conflicts_with("patience")
        .takes_value(false);
    SubCommand::with_name("diff")
        .about("Show what would be recorded if record were called")
        .arg(repository)
        .arg(branch)
        .arg(prefix)
        .arg(patience)
        .arg(myers)
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let repo = opts.open_repo()?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let prefix = if let Some(prefix) = args.value_of("prefix") {
        let p = canonicalize(opts.cwd.join(prefix))?;
        if let Ok(file) = p.strip_prefix(&opts.repo_root) {

1
2


3
4
5
6

7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45

46
47
48
49
50
51
52
53
54
55
56

57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121

122
123
fn patch_dependencies(hash_str: &str, repo_root: &RepoRoot<impl AsRef<Path>>) -> Result<Vec<Hash>, Error> {
use clap::{Arg, ArgGroup, ArgMatches, SubCommand};
use commands::checkout::checkout;
use libpijul::fs_representation::{read_patch, set_current_branch};
use libpijul::fs_representation::{read_patch, set_current_branch};
use libpijul::fs_representation::RepoRoot;
use libpijul::{apply_resize_no_output, Hash};
use rand;
use std::mem;
use std::path::PathBuf;
use std::path::Path; // PathBuf;

use super::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;

/// Build the clap definition of the `fork` subcommand.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .help("Local repository.")
        .takes_value(true);
    let branch = Arg::with_name("branch")
        .long("branch")
        .help("Branch.")
        .takes_value(true);
    let patch = Arg::with_name("patch")
        .long("patch")
        .help("A patch hash, preferably a tag.")
        .takes_value(true);
    let to = Arg::with_name("to")
        .help("Name of the new branch.")
        .takes_value(true)
        .required(true);
    // The fork source is either a branch or a patch, never both.
    let source = ArgGroup::with_name("source")
        .required(false)
        .args(&["branch", "patch"]);
    SubCommand::with_name("fork")
        .about("Create a new branch")
        .arg(repository)
        .arg(branch)
        .arg(patch)
        .arg(to)
        .group(source)
}

fn patch_dependencies(hash_str: &str, repo_root: &PathBuf) -> Result<Vec<Hash>, Error> {
fn patch_dependencies(
    hash_str: &str,
    repo_root: &RepoRoot<impl AsRef<Path>>,
) -> Result<Vec<Hash>, Error> {
    let mut deps = Vec::new();
    let mut current = vec![Hash::from_base58(hash_str).ok_or::<Error>(Error::WrongHash)?];
    let mut next = Vec::new();

    while !current.is_empty() {
        for hash in current.drain(..) {
            deps.push(hash.clone());
            let patch = read_patch(&repo_root, hash.as_ref())?;
            let patch = repo_root.read_patch(hash.as_ref())?;

            for hash_dep in patch.dependencies().iter() {
                let h = hash_dep.to_owned();

                if !deps.contains(&h) {
                    next.push(h);
                }
            }
        }

        mem::swap(&mut next, &mut current);
    }

    deps.reverse();

    Ok(deps)
}

/// Check (in a read-only transaction) whether the repository referenced by
/// `opts` already contains a branch named `branch_name`.
pub fn has_branch(opts: &BasicOptions, branch_name: &str) -> Result<bool, Error> {
    let repo = opts.open_repo()?;
    let txn = repo.txn_begin()?;

    Ok(txn.has_branch(branch_name))
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let to = args.value_of("to").unwrap();

    if !has_branch(&opts, to)? {
        if let Some(ref hash) = args.value_of("patch") {
            debug!(
                "Creating a new branch {:?} with dependencies of {:?}",
                to, hash
            );

            let deps = patch_dependencies(hash, &opts.repo_root)?;

            apply_resize_no_output(&opts.repo_root, to, deps.iter(), |_, _| ())?;

            println!("Branch {:?} has been created.", to);

            checkout(&opts, to, false, None)
        } else {
            let repo = opts.open_repo()?;
            let mut txn = repo.mut_txn_begin(rand::thread_rng())?;

            let br = opts.branch();
            let branch = txn.open_branch(&br)?;
            let new_branch = txn.fork(&branch, to)?;

            txn.commit_branch(branch)?;
            txn.commit_branch(new_branch)?;

            let partials = txn
                .iter_partials(&br)
                .take_while(|&(k, _)| k.as_str() == &br)
                .map(|(_, v)| v)
                .collect::<Vec<_>>();
            for &key in partials.iter() {
                txn.put_partials(to, key)?;
            }
            txn.commit()?;

            set_current_branch(&opts.repo_root, to)?;
            opts.repo_root.set_current_branch(to)?;




1
2
3
4

5
6
7
8
9
10
11
12
13
14
15
16
17
18
19

20
21
22
23
24
25
26
27
28


29
30
31
32
33
34
35
36
37
38
39
40
41
42

43
44
45

46
47
48
49
50
51
52

53
54
55
56
57
58
59
60

61
62
63
use libpijul::fs_representation::RepoRoot;
    let r = &repo_dir.repo_root;
                    for file in repo_dir.untracked_files(&txn, &p) {
use clap::ArgMatches;
use commands::BasicOptions;
use error::Error;
use libpijul;
use libpijul::Repository;
use libpijul::fs_representation::RepoRoot;
use rand;
use std::fs::{canonicalize, metadata};
use std::path::{Path, PathBuf};

/// Which tracking operation to perform on the given files:
/// start tracking them (`Add`) or stop tracking them (`Remove`).
#[derive(Debug, Clone, Copy)]
pub enum Operation {
    Add,
    Remove,
}

pub fn run(args: &ArgMatches, op: Operation) -> Result<(), Error> {
    debug!("fs_operation {:?}", op);
    let opts = BasicOptions::from_args(args)?;

    debug!("repo {:?}", opts.repo_root);
    debug!("repo {:?}", &opts.repo_root);
    let mut extra_space = 409600;
    let recursive = args.is_present("recursive");
    loop {
        let touched_files = match args.values_of("files") {
            Some(l) => l.map(|p| Path::new(p).to_owned()).collect(),
            None => vec![],
        };
        match really_run(
            &opts.pristine_dir(),
            &opts.cwd,
            &opts.repo_root,
            &opts.cwd,
            touched_files,
            recursive,
            op,
            extra_space,
        ) {
            Err(ref e) if e.lacks_space() => extra_space *= 2,
            e => return e,
        }
    }
}

fn really_run(
    repo_dir: &Path,
    //    repo_dir: &RepoRoot<&'static Path>,
    repo_dir: &RepoRoot<PathBuf>,
    wd: &Path,
    r: &Path,
    mut files: Vec<PathBuf>,
    recursive: bool,
    op: Operation,
    extra_space: u64,
) -> Result<(), Error> {
    debug!("files {:?}", files);
    let mut rng = rand::thread_rng();
    let repo = Repository::open(&repo_dir, Some(extra_space))?;
    let repo = repo_dir.open_repo(Some(extra_space))?;
    let mut txn = repo.mut_txn_begin(&mut rng)?;
    match op {
        Operation::Add => {
            for file_ in files.drain(..) {
                let p = canonicalize(wd.join(&file_))?;
                if recursive {
                    debug!("adding from {:?}", p);
                    for file in untracked_files(&txn, r.to_path_buf(), &p) {
                    let mut files = Vec::new();
                    for file in repo_dir.untracked_files(&txn, &p) {
                        debug!("untracked {:?}", file);

1
2
3
4
5
6

7
8
9
10
11
12
13
use libpijul::fs_representation::{PIJUL_DIR_NAME, RepoRoot};
use error::Error;
use libpijul::fs_representation::{RepoRoot, PIJUL_DIR_NAME};
use std::path::Path;
use std::process::Command;

pub fn run_hook(
    repo_root: &Path,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    hook: &'static str,
    additional_arg: Option<&String>,
) -> Result<(), Error> {
    let repo_root = &repo_root.repo_root;
    let mut cmd = repo_root.as_ref().to_path_buf();
    cmd.push(PIJUL_DIR_NAME);


1
2
3
4



5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
        let patch = settings.opts.repo_root.read_patch_nochanges(hash_ext)?;
            None => settings.opts.repo_root.read_patch_nochanges(hash_ext)?,
use clap::{Arg, ArgMatches, SubCommand};
use commands::patch::print_patch;
use commands::{ask, default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::{id_file, read_patch_nochanges};
use libpijul::fs_representation::{id_file, read_patch_nochanges};
use libpijul::fs_representation::{id_file, read_patch_nochanges};
use libpijul::fs_representation::RepoPath;
use libpijul::patch::Patch;
use libpijul::{Branch, PatchId, Txn};
use regex::Regex;
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::PathBuf;
use term;

/// Builds the clap subcommand definition for `pijul log`.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .help("Path to the repository to list.")
        .takes_value(true);
    let branch = Arg::with_name("branch")
        .long("branch")
        .help("The branch to list.")
        .takes_value(true);
    let internal_id = Arg::with_name("internal-id")
        .long("internal-id")
        .help("Display only patches with these internal identifiers.")
        .multiple(true)
        .takes_value(true);
    let hash_only = Arg::with_name("hash-only")
        .long("hash-only")
        .help("Only display the hash of each path.");
    let repository_id = Arg::with_name("repository-id")
        .long("repository-id")
        .help("display a header with the repository id");
    let path = Arg::with_name("path")
        .long("path")
        .multiple(true)
        .takes_value(true)
        .help("Only display patches that touch the given path.");
    let grep = Arg::with_name("grep")
        .long("grep")
        .multiple(true)
        .takes_value(true)
        .help("Search patch name and description with a regular expression.");
    let last = Arg::with_name("last")
        .long("last")
        .takes_value(true)
        .help("Show only the last n patches. If `--first m` is also used, then (a) if the command normally outputs the last patches first, this means the last n patches of the first m ones. (b) Else, it means the first m patches of the last n ones.");
    let first = Arg::with_name("first")
        .long("first")
        .takes_value(true)
        .help("Show only the last n patches. If `--last m` is also used, then (a) if the command normally outputs the last patches first, this means the last m patches of the first n ones. (b) Else, it means the first n patches of the last m ones.");
    let patch = Arg::with_name("patch")
        .long("patch")
        .short("p")
        .help("Show patches");

    SubCommand::with_name("log")
        .about("List the patches applied to the given branch")
        .arg(repository)
        .arg(branch)
        .arg(internal_id)
        .arg(hash_only)
        .arg(repository_id)
        .arg(path)
        .arg(grep)
        .arg(last)
        .arg(first)
        .arg(patch)
}

/// Options of a `pijul log` invocation, parsed from the command line.
struct Settings<'a> {
    // Print only patch hashes (one per line).
    hash_only: bool,
    // Print a header with the repository id first.
    show_repoid: bool,
    // Print the full contents of each patch.
    show_patches: bool,
    // Regexes from `--grep`; a patch is shown if any matches its name or description.
    regex: Vec<Regex>,
    // Common options (cwd, repo root, raw args).
    opts: BasicOptions<'a>,
    // Repo-relative paths from `--path`; only patches touching them are shown.
    path: Vec<RepoPath<PathBuf>>,
    // `--first n` / `--last n` limits (None when absent or unparsable).
    first: Option<usize>,
    last: Option<usize>,
}

impl<'a> Settings<'a> {
    /// Parses the `pijul log` command-line arguments into a `Settings`.
    ///
    /// `--path` arguments are joined onto the current directory,
    /// canonicalized when possible, and made relative to the repository root.
    /// Returns an error if the repository cannot be found or a `--grep`
    /// regex is invalid.
    fn parse(args: &'a ArgMatches) -> Result<Self, Error> {
        let basic_opts = BasicOptions::from_args(args)?;
        let hash_only = args.is_present("hash-only");
        let first = args.value_of("first").and_then(|x| x.parse().ok());
        let last = args.value_of("last").and_then(|x| x.parse().ok());
        let show_patches = args.is_present("patch");
        let show_repoid = args.is_present("repository-id");
        let mut regex = Vec::new();
        if let Some(regex_args) = args.values_of("grep") {
            for r in regex_args {
                debug!("regex: {:?}", r);
                regex.push(Regex::new(r)?)
            }
        }
        let path = match args.values_of("path") {
            Some(arg_paths) => {
                let mut paths = Vec::new();
                for path in arg_paths {
                    let p = basic_opts.cwd.join(path);
                    // Canonicalize when possible; fall back to the joined
                    // path as-is (e.g. when it does not exist on disk).
                    let p = std::fs::canonicalize(&p).unwrap_or(p);
                    paths.push(basic_opts.repo_root.relativize(&p)?.to_owned());
                }
                paths
            }
            None => Vec::new(),
        };
        Ok(Settings {
            hash_only,
            show_patches,
            show_repoid,
            regex,
            opts: basic_opts,
            path,
            first,
            last,
        })
    }
}

impl<'a> Settings<'a> {
    /// Prints one patch (number `nth` on the branch) according to the
    /// settings: hash-only line, or name/description header, optionally
    /// followed by the full patch contents.
    ///
    /// If `--grep` regexes are set and none matches the patch name or
    /// description, nothing is printed.
    fn display_patch_(
        &self,
        txn: &Txn,
        branch: &Branch,
        nth: u64,
        patchid: PatchId,
    ) -> Result<(), Error> {
        let hash_ext = txn.get_external(patchid).unwrap();
        debug!("hash: {:?}", hash_ext.to_base58());

        // When grepping, load the patch header once and remember it so the
        // display step below does not have to re-read it.
        let (matches_regex, o_patch) = if self.regex.is_empty() {
            (true, None)
        } else {
            let patch = self.opts.repo_root.read_patch_nochanges(hash_ext)?;
            let does_match = {
                let descr = match patch.description {
                    Some(ref d) => d,
                    None => "",
                };
                self.regex
                    .iter()
                    .any(|ref r| r.is_match(&patch.name) || r.is_match(descr))
            };
            (does_match, Some(patch))
        };
        if !matches_regex {
            return Ok(());
        };

        if self.hash_only {
            // `hash:nth` — nth is the patch's position on the branch.
            println!("{}:{}", hash_ext.to_base58(), nth);
        } else {
            // Reuse the patch loaded during grep, if any.
            let patch = match o_patch {
                None => self.opts.repo_root.read_patch_nochanges(hash_ext)?,
                Some(patch) => patch,
            };
            let mut term = term::stdout();
            ask::print_patch_descr(&mut term, &hash_ext.to_owned(), Some(patchid), &patch);
        }

        if self.show_patches {
            // Full patch contents live in `.pijul/patches/<base58>.gz`.
            let mut patch_path = self.opts.repo_root.patches_dir().join(hash_ext.to_base58());
            patch_path.set_extension("gz");
            let f = File::open(&patch_path)?;

            let mut f = BufReader::new(f);
            let (hash, _, patch) = Patch::from_reader_compressed(&mut f)?;

            print_patch(&hash, &patch, txn, branch)?;
            println!();
        }

        Ok(())
    }

    /// Prints the patch if no `--path` filter is set, or if it touches at
    /// least one of the filtered paths.
    fn display_patch(
        &self,
        txn: &Txn,
        branch: &Branch,
        n: u64,
        patchid: PatchId,
    ) -> Result<(), Error> {
        if self.path.is_empty() {
            self.display_patch_(txn, branch, n, patchid)?;
        } else {
            for path in self.path.iter() {
                let inode = txn.find_inode(&path)?;
                let key = if let Some(key) = txn.get_inodes(inode) {
                    key.key
                } else {
                    // Path not tracked in the pristine: skip this filter.
                    continue;
                };
                if txn.get_touched(key, patchid) {
                    self.display_patch_(txn, branch, n, patchid)?;
                    break;
                }
            }
        }
        Ok(())
    }

    /// Returns true if no `--path` filter is set, or if the patch touches
    /// any of the filtered paths (untracked/unresolvable paths count as
    /// untouched).
    fn is_touched(&self, txn: &Txn, patchid: PatchId) -> bool {
        self.path.is_empty()
            || self.path.iter().any(|path| {
                if let Ok(inode) = txn.find_inode(&path) {
                    if let Some(key) = txn.get_inodes(inode) {
                        return txn.get_touched(key.key, patchid);
                    }
                }
                false
            })
    }
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let settings = Settings::parse(args)?;
    let repo = settings.opts.open_repo()?;
    let txn = repo.txn_begin()?;
    let branch = match txn.get_branch(&settings.opts.branch()) {
        Some(b) => b,
        None => return Err(Error::NoSuchBranch),
    };

    if settings.show_repoid {
        let id_file = settings.opts.repo_root.id_file();
        let mut f = File::open(&id_file)?;





1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125



126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159

160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188

189
190
191
192
193
194
195
196

197
198
199
200


202


205


208
209
210



213


216
217
218

220
221
222

223
224
225
226

227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292

293
294
295
296
297
298
299
300
use libpijul::fs_representation::RepoRoot;
        self.repo_root.repo_dir()
        self.repo_root.open_repo(Some(increase)).map_err(|e| e.into())
pub fn relative_repo_path(repo_root: &RepoRoot<PathBuf>, base: &PathBuf, dir: &str) -> Result<PathBuf, Error> {
    Ok(dir.strip_prefix(&repo_root.repo_root)?.to_owned())
use clap;
use clap::ArgMatches;
pub type StaticSubcommand = clap::App<'static, 'static>;

mod ask;
mod fs_operation;
pub mod remote;
mod ssh_auth_attempts;

pub mod add;
pub mod apply;
pub mod branches;
pub mod checkout;
pub mod clone;
pub mod credit;
pub mod dependencies;
pub mod diff;
pub mod dist;
pub mod fork;
pub mod generate_completions;
pub mod grep;
pub mod hooks;
pub mod info;
pub mod init;
pub mod key;
pub mod log;
pub mod ls;
pub mod mv;
pub mod patch;
pub mod prune;
pub mod pull;
pub mod push;
pub mod record;
pub mod remove;
pub mod revert;
pub mod rollback;
pub mod sign;
pub mod status;
pub mod tag;
pub mod unrecord;

mod fold_until;

use error::Error;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::Hash;
use libpijul::{fs_representation, Inode, Repository, Txn, DEFAULT_BRANCH};
use rand;
use std::borrow::Cow;
use std::env::current_dir;
use std::env::var;
use std::fs::{canonicalize, create_dir, metadata};
use std::io::{stderr, Write};
use std::path::{Path, PathBuf};
use std::process::exit;

pub fn all_command_invocations() -> Vec<StaticSubcommand> {
    return vec![
        log::invocation(),
        info::invocation(),
        init::invocation(),
        record::invocation(),
        unrecord::invocation(),
        add::invocation(),
        pull::invocation(),
        push::invocation(),
        apply::invocation(),
        clone::invocation(),
        remove::invocation(),
        mv::invocation(),
        ls::invocation(),
        revert::invocation(),
        patch::invocation(),
        fork::invocation(),
        branches::invocation(),
        prune::invocation(),
        checkout::invocation(),
        diff::invocation(),
        credit::invocation(),
        dist::invocation(),
        key::invocation(),
        rollback::invocation(),
        status::invocation(),
        dependencies::invocation(),
        tag::invocation(),
        sign::invocation(),
        generate_completions::invocation(),
        grep::invocation(),
    ];
}

/// Computes the canonical working directory: the current directory when no
/// `--repository` path is given, otherwise that path (resolved against the
/// current directory when relative).
pub fn get_wd(repository_path: Option<&Path>) -> Result<PathBuf, Error> {
    debug!("get_wd: {:?}", repository_path);
    let wd = match repository_path {
        None => canonicalize(current_dir()?)?,
        Some(p) if p.is_relative() => canonicalize(current_dir()?.join(p))?,
        Some(p) => canonicalize(p)?,
    };
    Ok(wd)
}

/// Returns an error if the `dir` is contained in a repository.
pub fn assert_no_containing_repo(dir: &Path) -> Result<(), Error> {
    if metadata(dir).is_ok() {
        if fs_representation::find_repo_root(&canonicalize(dir)?).is_some() {
            return Err(Error::InARepository {
                path: dir.to_owned(),
            });
        }
    }
    Ok(())
}

/// Creates an empty pijul repository in the given directory.
pub fn create_repo(dir: &Path) -> Result<(), Error> {
    // Check that a repository does not already exist.
    if metadata(dir).is_err() {
        create_dir(dir)?;
    }
    let dir = canonicalize(dir)?;
    if fs_representation::find_repo_root(&dir).is_some() {
        return Err(Error::InARepository {
            path: dir.to_owned(),
        });
    }

    fs_representation::create(&dir, rand::thread_rng())?;
    let pristine_dir = fs_representation::pristine_dir(&dir);
    let repo = Repository::open(&pristine_dir, None)?;
    let repo_root = fs_representation::create(&dir, rand::thread_rng())?;
    let repo = repo_root.open_repo(None)?;
    repo.mut_txn_begin(rand::thread_rng())?.commit()?;
    Ok(())
}

/// Default error handling for a subcommand: print the error to stderr and
/// exit with status 1; do nothing on success.
fn default_explain<R>(command_result: Result<R, Error>) {
    debug!("default_explain");
    if let Err(e) = command_result {
        writeln!(stderr(), "error: {}", e).unwrap();
        exit(1)
    }
}

/// clap validator: accepts the argument iff it is a valid base58 hash.
fn validate_base58(x: String) -> ::std::result::Result<(), String> {
    match Hash::from_base58(&x) {
        Some(_) => Ok(()),
        None => Err(format!("\"{}\" is invalid base58", x)),
    }
}

/// Almost all commands want to know the current directory and the repository root.  This struct
/// fills that need, and also provides methods for other commonly-used tasks.
pub struct BasicOptions<'a> {
    /// This isn't 100% the same as the actual current working directory, so pay attention: this
    /// will be the current directory, unless the user specifies `--repository`, in which case
    /// `cwd` will actually be the path of the repository root. In other words, specifying
    /// `--repository` has the same effect as changing directory to the repository root before
    /// running `pijul`.
    pub cwd: PathBuf,
    pub repo_root: PathBuf,
    pub repo_root: RepoRoot<PathBuf>,
    args: &'a ArgMatches<'a>,
}

/// Which part of the working copy a command should scan.
pub enum ScanScope {
    // Scan the whole repository.
    FromRoot,
    // Scan only under the given repo-relative prefix; the `String` is the
    // prefix exactly as the user typed it (for display/matching).
    WithPrefix(RepoPath<PathBuf>, String),
}

impl<'a> BasicOptions<'a> {
    /// Reads the options from command line arguments.
    pub fn from_args(args: &'a ArgMatches<'a>) -> Result<BasicOptions<'a>, Error> {
        let wd = get_wd(args.value_of("repository").map(Path::new))?;
        let repo_root = if let Some(r) = fs_representation::find_repo_root(&canonicalize(&wd)?) {
            r
        } else {
            return Err(Error::NotInARepository);
        };
        Ok(BasicOptions {
            cwd: wd,
            repo_root: repo_root,
            args: args,
        })
    }

    /// Gets the name of the desired branch.
    pub fn branch(&self) -> String {
        if let Some(b) = self.args.value_of("branch") {
            b.to_string()
        } else if let Ok(b) = get_current_branch(&self.repo_root) {
        } else if let Ok(b) = self.repo_root.get_current_branch() {
            b
        } else {
            DEFAULT_BRANCH.to_string()
        }
    }

    pub fn repo_root(&self) -> PathBuf {
        self.repo_root.clone()
        self.repo_root.repo_root.clone()
    }

    pub fn open_repo(&self) -> Result<Repository, Error> {
        fs_representation::open_repo(&self.repo_root, None).map_err(|e| e.into())

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        self.repo_root.open_repo(None).map_err(|e| e.into())

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        fs_representation::open_repo(&self.repo_root, None).map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    }

    pub fn open_and_grow_repo(&self, increase: u64) -> Result<Repository, Error> {
        fs_representation::open_repo(&self.repo_root, Some(increase)).map_err(|e| e.into())

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        fs_representation::open_repo(&self.repo_root, Some(increase)).map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
        self.repo_root
            .open_repo(Some(increase))
            .map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    }

    pub fn pristine_dir(&self) -> PathBuf {
        fs_representation::pristine_dir(&self.repo_root)
        self.repo_root.pristine_dir()
    }

    pub fn patches_dir(&self) -> PathBuf {
        fs_representation::patches_dir(&self.repo_root)
        self.repo_root.patches_dir()
    }

    pub fn scan_scope(&self) -> Result<ScanScope, Error> {
        if let Some(prefix) = self.args.value_of("dir") {
            let root = self
                .args
                .value_of("repository")
                .map(|root| Path::new(root).to_path_buf())
                .unwrap_or(current_dir()?);

            Ok(ScanScope::WithPrefix(
                relative_repo_path(&self.repo_root, &root, prefix)?,
                prefix.into(),
            ))
        } else {
            Ok(ScanScope::FromRoot)
        }
    }

    fn dir_inode(&self, txn: &Txn) -> Result<Inode, Error> {
        use libpijul::ROOT_INODE;
        if let Some(dir) = self.args.value_of("dir") {
            let dir = if Path::new(dir).is_relative() {
                let root = if let Some(root) = self.args.value_of("repository") {
                    Path::new(root).to_path_buf()
                } else {
                    current_dir()?
                };
                root.join(&dir).canonicalize()?
            } else {
                Path::new(dir).canonicalize()?
            };
            let dir = self.repo_root.relativize(&dir)?;
            debug!("{:?}", dir);
            let inode = txn.find_inode(&dir)?;
            debug!("{:?}", inode);
            Ok(inode)
        } else {
            Ok(ROOT_INODE)
        }
    }
}

/// Name of the `pijul` binary to invoke on remotes: the `REMOTE_PIJUL`
/// environment variable when set, `"pijul"` otherwise.
fn remote_pijul_cmd() -> Cow<'static, str> {
    match var("REMOTE_PIJUL") {
        Ok(cmd) => Cow::Owned(cmd),
        Err(_) => Cow::Borrowed("pijul"),
    }
}

/// Resolves `dir` (relative to `base` when not absolute), canonicalizes it,
/// and expresses it relative to the repository root.
pub fn relative_repo_path(
    repo_root: &RepoRoot<PathBuf>,
    base: &PathBuf,
    dir: &str,
) -> Result<RepoPath<PathBuf>, Error> {
    let requested = Path::new(dir);
    let absolute = if requested.is_relative() {
        base.join(requested).canonicalize()?
    } else {
        requested.canonicalize()?
    };

    Ok(repo_root.relativize(&absolute)?.to_owned())
}

    Ok(dir.strip_prefix(&repo_root)?.to_owned())
/// Formats a repo-relative path for display: absolute under the repo root,
/// then re-expressed relative to `cwd` when a relative form exists.
pub fn pretty_repo_path(
    repo_root: &RepoRoot<impl AsRef<Path>>,
    path: &RepoPath<impl AsRef<Path>>,
    cwd: &Path,
) -> PathBuf {
    let abs_path = repo_root.absolutize(path);
    match pathdiff::diff_paths(&abs_path, cwd) {
        Some(relative) => relative,
        None => abs_path,
    }
}







1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58

59
60
61
62
63
64
65
66

67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102


103
104
105
106


107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127

128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147


148
149
150
        let path = path.strip_prefix(&opts.repo_root.repo_root).unwrap();
    let target_path = opts.repo_root.repo_root.join(&dest);
                let is_dir = metadata(&opts.repo_root.repo_root.join(f))?.is_dir();
                    opts.repo_root.repo_root.join(f),
                    opts.repo_root.repo_root.join(full_target_name.as_path())
                    &opts.repo_root.repo_root.join(f),
                    opts.repo_root.repo_root.join(full_target_name.as_path()),
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::RepoPath;
use std::fs::{metadata, rename};
use std::path::PathBuf;

use rand;
use std;
/// Builds the clap subcommand definition for `pijul mv`.
pub fn invocation() -> StaticSubcommand {
    SubCommand::with_name("mv")
        .about("Change file names")
        .arg(
            Arg::with_name("files")
                .multiple(true)
                .help("Files to move.")
                .required(true)
                .min_values(2),
        )
        .arg(
            // Fixed: `.takes_value(true)` was specified twice.
            Arg::with_name("repository")
                .long("repository")
                .help("Repository where the files are.")
                .takes_value(true),
        )
}

/// The two shapes a `pijul mv` invocation can take.
#[derive(Debug)]
pub enum Movement {
    // `mv a b … dir`: move several files into an existing directory.
    IntoDir {
        from: Vec<RepoPath<PathBuf>>,
        to: RepoPath<PathBuf>,
    },
    // `mv a b`: rename a single file.
    FileToFile {
        from: RepoPath<PathBuf>,
        to: RepoPath<PathBuf>,
    },
}

/// Interprets the `files` arguments of `pijul mv` as a `Movement`.
///
/// Each argument is resolved against the current directory, canonicalized
/// (falling back to canonicalizing the parent when the file does not exist
/// yet), and made repo-relative. The last argument is the destination: a
/// directory yields `IntoDir`, a single source with a non-directory target
/// yields `FileToFile`.
///
/// Panics when several sources are given and the target is not a directory.
fn get_movement(opts: &BasicOptions, args: &ArgMatches) -> Movement {
    debug!("wd = {:?}", opts.cwd);
    debug!("repo_root = {:?}", opts.repo_root);
    let mut repo_paths = Vec::new();
    for fname in args.values_of("files").unwrap() {
        debug!("fname: {:?}", fname);
        // if fname is absolute, erases current_dir.
        let mut path = std::env::current_dir().unwrap();
        path.push(fname);
        debug!("path = {:?}", path);
        let path = if let Ok(f) = std::fs::canonicalize(&path) {
            f
        } else {
            // Destination may not exist yet: canonicalize the parent and
            // re-append the file name.
            std::fs::canonicalize(&path.parent().unwrap())
                .unwrap()
                .join(&path.file_name().unwrap())
        };
        debug!("path = {:?}", path);
        // Patch-residue fix: relativize via RepoRoot instead of strip_prefix.
        let path = opts.repo_root.relativize(&path).unwrap();
        debug!("path = {:?}", path);

        repo_paths.push(path.to_owned());
    }
    debug!("parse_args: done");
    let repo_paths = repo_paths;
    let (dest, origs) = repo_paths.split_last().unwrap();
    // Patch-residue fix: absolutize via RepoRoot instead of join.
    let target_path = opts.repo_root.absolutize(&dest);
    let to_dir = target_path.exists() && target_path.is_dir();

    if to_dir {
        Movement::IntoDir {
            from: Vec::from(origs),
            to: dest.clone(),
        }
    } else {
        if origs.len() == 1 {
            Movement::FileToFile {
                from: origs[0].clone(),
                to: dest.clone(),
            }
        } else {
            panic!(
                "Cannot move files into {}: it is not a valid directory",
                dest.display()
            );
        }
    }
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let movement = get_movement(&opts, args);
    let repo = opts.open_repo()?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    match movement {
        Movement::FileToFile {
            from: ref orig_path,
            to: ref dest_path,
        } => {
            txn.move_file(orig_path, dest_path, false)?;
            debug!(
                "1 renaming {:?} into {:?}",
                opts.repo_root.join(orig_path.as_path()),
                opts.repo_root.join(dest_path.as_path())
                opts.repo_root.repo_root.join(orig_path.as_path()),
                opts.repo_root.repo_root.join(dest_path.as_path())
            );
            rename(
                opts.repo_root.join(orig_path.as_path()),
                opts.repo_root.join(dest_path.as_path()),
                opts.repo_root.repo_root.join(orig_path.as_path()),
                opts.repo_root.repo_root.join(dest_path.as_path()),
            )?;
            txn.commit()?;
            Ok(())
        }
        Movement::IntoDir {
            from: ref orig_paths,
            to: ref dest_dir,
        } => {
            for file in orig_paths {
                let repo_target_name = {
                    let target_basename = if let Some(f) = file.file_name() {
                        f
                    } else {
                        return Err(Error::InvalidPath {
                            path: file.to_path_buf(),
                        });
                    };
                    dest_dir.join(std::path::Path::new(target_basename))
                };
                let is_dir = metadata(&opts.repo_root.join(f))?.is_dir();
                let is_dir = metadata(&opts.repo_root.absolutize(file))?.is_dir();
                txn.move_file(&file, &repo_target_name, is_dir)?;
            }
            for file in orig_paths {
                let full_target_name = {
                    let target_basename = if let Some(f) = file.file_name() {
                        f
                    } else {
                        return Err(Error::InvalidPath {
                            path: file.to_path_buf(),
                        });
                    };
                    dest_dir.join(std::path::Path::new(target_basename))
                };
                debug!(
                    "2 renaming {} into {}",
                    file.display(),
                    full_target_name.display()
                );
                rename(
                    &opts.repo_root.join(f),
                    opts.repo_root.join(full_target_name.as_path()),
                    opts.repo_root.absolutize(&file),
                    opts.repo_root.absolutize(&full_target_name),
                )?;
79
80

81
82
    let opts = BasicOptions::from_args(args)?;
    let patch = Hash::from_base58(args.value_of("patch").unwrap()).unwrap();
    let mut patch_path = patches_dir(&opts.repo_root).join(&patch.to_base58());
    let mut patch_path = opts.repo_root.patches_dir().join(&patch.to_base58());
    patch_path.set_extension("gz");
3
4

5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27

28
29
use rand;

use super::{default_explain, get_current_branch, BasicOptions, StaticSubcommand};
use super::{default_explain, BasicOptions, StaticSubcommand};

/// Builds the clap subcommand definition for `pijul prune`.
pub fn invocation() -> StaticSubcommand {
    // Idiomatic tail expression instead of `return …;`.
    SubCommand::with_name("prune")
        .about("Delete a branch in the local repository")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .help("Branch to delete.")
                .takes_value(true)
                .required(true),
        )
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    debug!("args {:?}", args);
    let opts = BasicOptions::from_args(args)?;
    let branch = args.value_of("branch").unwrap();
    let current_branch = get_current_branch(&opts.repo_root)?;
    let current_branch = opts.repo_root.get_current_branch()?;
    if current_branch == branch {

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112

113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141

142
use libpijul::fs_representation::RepoRoot;
use clap::{Arg, ArgMatches, SubCommand};

use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use std::fs::File;
use std::path::Path;

use commands::ask::{ask_patches, Command};
use commands::remote;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::patch::Patch;
use libpijul::{ApplyTimestamp, Hash, PatchId, DEFAULT_BRANCH};
use meta::{Meta, Repository, DEFAULT_REMOTE};
use progrs;
use std::env::current_dir;
use std::io::BufReader;

/// Builds the clap subcommand definition for `pijul pull`.
pub fn invocation() -> StaticSubcommand {
    // Idiomatic tail expression instead of `return …;`.
    SubCommand::with_name("pull")
        .about("Pull from a remote repository")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Repository to list.")
                .takes_value(true),
        )
        .arg(Arg::with_name("remote").help("Repository from which to pull."))
        .arg(
            Arg::with_name("remote_branch")
                .long("from-branch")
                .help("The branch to pull from. Defaults to master.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("local_branch")
                .long("to-branch")
                .help("The branch to pull into. Defaults to the current branch.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("all")
                .short("a")
                .long("all")
                .help("Answer 'y' to all questions")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("set-default")
                .long("set-default")
                .help("Used with --set-remote, sets this remote as the default pull remote."),
        )
        .arg(
            Arg::with_name("set-remote")
                .long("set-remote")
                .help("Name this remote destination.")
                .takes_value(true),
        )
        .arg(
            // Fixed patch residue: a single `.help` call (two were left in,
            // the later silently overriding the earlier).
            Arg::with_name("remote_path")
                .long("path")
                .help("Only pull patches affecting the part of the repo under that path (relative to the root of the repo).")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("port")
                .short("p")
                .long("port")
                .help("Port of the remote ssh server.")
                .takes_value(true)
                .validator(|val| {
                    let x: Result<u16, _> = val.parse();
                    match x {
                        Ok(_) => Ok(()),
                        Err(_) => Err(val),
                    }
                }),
        )
}

/// Parsed command-line parameters of `pijul pull`.
#[derive(Debug)]
pub struct Params<'a> {
    // `remote` positional argument, if given.
    pub remote_id: Option<&'a str>,
    // `--set-remote` name, if given.
    pub set_remote: Option<&'a str>,
    // `--all`: answer 'y' to all questions.
    pub yes_to_all: bool,
    // `--set-default`: make this remote the default pull remote.
    pub set_default: bool,
    // `--port` of the remote ssh server.
    pub port: Option<u16>,
    // `--to-branch`; None means the current branch.
    pub local_branch: Option<&'a str>,
    // `--from-branch`, defaulting to the default branch name.
    pub remote_branch: &'a str,
    // `--path` filters: only pull patches touching these repo-relative paths.
    pub remote_paths: Vec<RepoPath<&'a Path>>,
}

/// Extracts `pijul pull` parameters from the clap matches.
///
/// Note: `--port` is already validated by clap (see `invocation`), so the
/// `unwrap` on `parse` cannot fail for arguments clap accepted.
fn parse_args<'a>(args: &'a ArgMatches) -> Params<'a> {
    Params {
        remote_id: args.value_of("remote"),
        set_remote: args.value_of("set-remote"),
        yes_to_all: args.is_present("all"),
        set_default: args.is_present("set-default"),
        // Idiom: `.map` instead of `.and_then(|x| Some(…))`.
        port: args.value_of("port").map(|x| x.parse().unwrap()),
        local_branch: args.value_of("local_branch"),
        remote_branch: args.value_of("remote_branch").unwrap_or(DEFAULT_BRANCH),
        remote_paths: if let Some(rem) = args.values_of("remote_path") {
            rem.map(|p| RepoPath(Path::new(p))).collect()
        } else {
            Vec::new()
        },
    }
}

/// Download every patch listed in `pullable` from the remote `session`
/// into the repository rooted at `r`, parsing each one.
///
/// Returns the parsed patches in the order of `pullable`, each paired with
/// `None` as the (not yet assigned) internal patch id.
fn fetch_pullable_patches(
    session: &mut remote::Session,
    pullable: &[(Hash, ApplyTimestamp)],
    // Duplicated `r: &Path` parameter (stale pre-RepoRoot signature) removed.
    r: &RepoRoot<impl AsRef<Path>>,
) -> Result<Vec<(Hash, Option<PatchId>, Patch)>, Error> {
    let mut patches = Vec::new();

    let (mut p, mut n) = (progrs::start("Pulling patches", pullable.len() as u64), 0);
    for &(ref i, _) in pullable {
        let (hash, _, patch) = {
            // The session writes the patch to a local file and returns its path.
            let filename = session.download_patch(r, i)?;
            debug!("filename {:?}", filename);
            let file = File::open(&filename)?;
            let mut file = BufReader::new(file);
            Patch::from_reader_compressed(&mut file)?
        };
        p.display({
            n += 1;
            n
        });
        // The downloaded patch must hash to the id we asked for.
        assert_eq!(&hash, i);
        patches.push((hash, None, patch));
    }
    p.stop("done");
    Ok(patches)
}

pub fn select_patches(
    interactive: bool,
    session: &mut remote::Session,
    remote_branch: &str,
    local_branch: &str,
    r: &Path,
    r: &RepoRoot<impl AsRef<Path>>,
150
151

152
153
                    i.clone(),
                    internal.clone(),
                    read_patch(&opts.repo_root, i.as_ref())?,
                    opts.repo_root.read_patch(i.as_ref())?,
                ))







1
2
3
4
5
6
7
8

9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102

103
104

105


108


111


114

116
117
118



119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369

370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433


434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467

468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502

503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524

526
527

528
use libpijul::fs_representation::RepoRoot;
        let file = file.strip_prefix(&repo_root.repo_root)?;
fn append_to_ignore_file(repo_root: &RepoRoot<impl AsRef<Path>>, lines: &Vec<String>) -> Result<(), Error> {
        let patches_dir = opts.repo_root.patches_dir();
                let file = p.strip_prefix(&opts.repo_root.repo_root)?;
    repo_root: &RepoRoot<impl AsRef<Path>>,
    let repo_root = &repo_root.repo_root;
use super::ask::{ask_changes, ChangesDirection};
use super::default_explain;
use chrono;
use clap::{Arg, ArgMatches, SubCommand};
use commands::hooks::run_hook;
use commands::{ask, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul;
use libpijul::fs_representation::{ignore_file, patches_dir, untracked_files};
use libpijul::fs_representation::{in_repo_root, RepoPath, RepoRoot};
use libpijul::patch::{PatchFlags, Record};
use libpijul::{Hash, InodeUpdate, Key, MutTxn, Patch, PatchId, RecordState, Repository};
use meta::{load_signing_key, Global, Meta};
use rand;
use std::collections::HashSet;
use std::fs::canonicalize;
use std::fs::{metadata, OpenOptions};
use std::io::Write;
use std::mem::drop;
use std::path::{Path, PathBuf};
use std::str::FromStr;

/// Attach the arguments shared by record-like commands to the given
/// subcommand. The registration order determines the order in `--help`.
pub fn record_args(sub: StaticSubcommand) -> StaticSubcommand {
    sub.arg(Arg::with_name("repository")
            .long("repository")
            .help("The repository where to record, defaults to the current directory.")
            .takes_value(true)
            .required(false))
        .arg(Arg::with_name("branch")
             .long("branch")
             .help("The branch where to record, defaults to the current branch.")
             .takes_value(true)
             .required(false))
        .arg(Arg::with_name("date")
             .long("date")
             .help("The date to use to record the patch, default is now.")
             .takes_value(true)
             .required(false))
        .arg(Arg::with_name("message")
             .short("m")
             .long("message")
             .help("The name of the patch to record")
             .takes_value(true))
        .arg(Arg::with_name("description")
             .short("d")
             .long("description")
             .help("The description of the patch to record")
             .takes_value(true))
        .arg(Arg::with_name("no-editor")
             .long("no-editor")
             .help("Do not use an editor to write the patch name and description, even if the variable is set in the configuration file")
             .takes_value(false))
        .arg(Arg::with_name("author")
             .short("A")
             .long("author")
             .help("Author of this patch (multiple occurrences allowed)")
             .takes_value(true))
        // The two diff-algorithm flags are mutually exclusive.
        .arg(Arg::with_name("patience")
             .long("patience")
             .help("Use patience diff instead of the default (Myers diff)")
             .conflicts_with("myers")
             .takes_value(false))
        .arg(Arg::with_name("myers")
             .long("myers")
             .help("Use Myers diff")
             .conflicts_with("patience")
             .takes_value(false))
}

/// The clap definition of the `record` subcommand: the record-specific
/// flags plus the shared arguments from `record_args`.
pub fn invocation() -> StaticSubcommand {
    // Tail expression instead of a redundant trailing `return …;`.
    record_args(
        SubCommand::with_name("record")
            .about("Record changes in the repository")
            .arg(
                Arg::with_name("all")
                    .short("a")
                    .long("all")
                    .help("Answer 'y' to all questions")
                    .takes_value(false),
            )
            .arg(
                Arg::with_name("add-new-files")
                    .short("n")
                    .long("add-new-files")
                    .help("Offer to add files that have been created since the last record")
                    .takes_value(false),
            )
            .arg(
                Arg::with_name("depends-on")
                    .help("Add a dependency to this patch (internal id or hash accepted)")
                    .long("depends-on")
                    .takes_value(true)
                    .multiple(true),
            )
            .arg(
                Arg::with_name("prefix")
                    .help("Prefix to start from")
                    .takes_value(true)
                    .multiple(true),
            ),
    )
}

fn add_untracked_files<T: rand::Rng>(
fn add_untracked_files<T: rand::Rng, P: AsRef<Path> + 'static>(
    txn: &mut MutTxn<T>,
    repo_root: &Path,
    repo_root: &RepoRoot<P>,

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        if let Err(e) = txn.add_file(&file, m.is_dir()) {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    Ok(untracked)
}

fn append_to_ignore_file(repo_root: &Path, lines: &Vec<String>) -> Result<(), Error> {
    let ignore_file = ignore_file(repo_root);
    let ignore_file = ignore_file(repo_root);
fn append_to_ignore_file(
    repo_root: &RepoRoot<impl AsRef<Path>>,
    lines: &Vec<String>,
) -> Result<(), Error> {
    let ignore_file = repo_root.local_ignore_file();
    let mut file = OpenOptions::new()
        .append(true)
        .create(true)
        .open(ignore_file)?;
    for line in lines {
        file.write_all(line.as_ref())?;
        file.write_all(b"\n")?
    }
    Ok(())
}

/// Record the pending working-copy changes and let the user pick which
/// ones to include in the patch.
///
/// When `add_new_files` is set, untracked files are first offered for
/// addition; unless `yes_to_all`, every change is then confirmed
/// interactively. Returns the selected changes together with the inode
/// updates (`syncs`) needed after the patch is applied.
fn select_changes(
    algo: libpijul::DiffAlgorithm,
    opts: &BasicOptions,
    add_new_files: bool,
    branch_name: &str,
    yes_to_all: bool,
    prefix: Option<Vec<RepoPath<PathBuf>>>,
) -> Result<(Vec<Record<Vec<Key<Option<Hash>>>>>, HashSet<InodeUpdate>), Error> {
    // Increase by 100 pages. The most things record can write is one
    // write in the branches table, affecting at most O(log n) blocks.
    let repo = opts.open_and_grow_repo(409600)?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let mut to_unadd = if add_new_files {
        add_untracked_files(&mut txn, &opts.repo_root)?
    } else {
        HashSet::new()
    };
    let (changes, syncs) = changes_from_prefixes(
        algo,
        &opts.repo_root,
        &mut txn,
        &branch_name,
        prefix.as_ref(),
    )?;
    let changes: Vec<_> = changes
        .into_iter()
        .map(|x| txn.globalize_record(x))
        .collect();
    if !yes_to_all {
        // `to_unadd` is passed mutably to the dialog; `i` is the list of
        // lines to add to the ignore file (presumably patterns the user
        // chose to ignore -- confirm in `ask_changes`).
        let (c, i) = ask_changes(
            &txn,
            &opts.repo_root,
            &opts.cwd,
            &changes,
            ChangesDirection::Record,
            &mut to_unadd,
        )?;
        // Keep only the changes answered 'y' (absent => false).
        let selected = changes
            .into_iter()
            .enumerate()
            .filter(|&(i, _)| *(c.get(&i).unwrap_or(&false)))
            .map(|(_, x)| x)
            .collect();
        // Un-add whatever is still in `to_unadd` after the dialog.
        for file in to_unadd {
            txn.remove_file(&file)?
        }
        txn.commit()?;
        append_to_ignore_file(&opts.repo_root, &i)?;
        Ok((selected, syncs))
    } else {
        txn.commit()?;
        Ok((changes, syncs))
    }
}

/// Run the `record` command: select changes, gather patch metadata
/// (authors, name, description, date), build the patch, save it (signed
/// when a key is configured) and apply it to the local pristine.
pub fn run(args: &ArgMatches) -> Result<Option<Hash>, Error> {
    let opts = BasicOptions::from_args(args)?;
    let yes_to_all = args.is_present("all");
    let patch_name_arg = args.value_of("message");
    let patch_descr_arg = args.value_of("description");
    let authors_arg = args.values_of("author").map(|x| x.collect::<Vec<_>>());
    let branch_name = opts.branch();
    let add_new_files = args.is_present("add-new-files");

    // `--date` is parsed with `chrono::DateTime::from_str`; defaults to now.
    let patch_date = args.value_of("date").map_or(Ok(chrono::Utc::now()), |x| {
        chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate {
            date: String::from(x),
        })
    })?;

    let mut save_meta = false;

    // A fresh global config is created (and flagged for saving) when none
    // can be loaded.
    let (mut global, save_global) = Global::load().map(|c| (c, false)).unwrap_or_else(|e| {
        info!("loading global key, error {:?}", e);
        (Global::new(), true)
    });

    let mut meta = match Meta::load(&opts.repo_root) {
        Ok(m) => m,
        Err(_) => {
            save_meta = true;
            Meta::new()
        }
    };

    run_hook(&opts.repo_root, "pre-record", None)?;

    debug!("prefix {:?}", args.value_of("prefix"));
    let prefix = prefix(args, &opts)?;

    let (changes, syncs) = select_changes(
        if args.is_present("patience") {
            libpijul::DiffAlgorithm::Patience
        } else {
            libpijul::DiffAlgorithm::Myers
        },
        &opts,
        add_new_files,
        &branch_name,
        yes_to_all,
        prefix,
    )?;

    if changes.is_empty() {
        println!("Nothing to record");
        Ok(None)
    } else {
        let template = prepare_changes_template(patch_name_arg.unwrap_or(""), &changes);

        let repo = opts.open_repo()?;
        let patch = {
            let txn = repo.txn_begin()?;
            debug!("meta:{:?}", meta);

            let authors = decide_authors(authors_arg, &meta, &global)?;

            if authors.is_empty() {
                return Err(Error::NoAuthor);
            }

            // The first recorded authors become the repo defaults.
            if meta.authors.is_empty() {
                meta.authors = authors.clone();
                save_meta = true;
            }

            if global.author.is_none() {
                global.author = Some(authors[0].clone());
            }

            debug!("authors:{:?}", authors);

            let (patch_name, description) = decide_patch_message(
                patch_name_arg,
                patch_descr_arg,
                template,
                !args.is_present("no-editor"),
                &opts.repo_root,
                &meta,
                &global,
            )?;

            run_hook(&opts.repo_root, "patch-name", Some(&patch_name))?;

            debug!("patch_name:{:?}", patch_name);
            if save_meta {
                meta.save(&opts.repo_root)?
            }
            if save_global {
                global.save().unwrap_or(())
            }
            debug!("new");
            let changes = changes.into_iter().flat_map(|x| x.into_iter()).collect();
            let branch = txn.get_branch(&branch_name).unwrap();

            // `--depends-on` accepts either an external hash or an internal
            // (base58) patch id; either way the dependency must already be
            // on the target branch.
            let mut extra_deps = Vec::new();
            if let Some(deps) = args.values_of("depends-on") {
                for dep in deps {
                    if let Some(hash) = Hash::from_base58(dep) {
                        if let Some(internal) = txn.get_internal(hash.as_ref()) {
                            if txn.get_patch(&branch.patches, internal).is_some() {
                                extra_deps.push(hash)
                            } else {
                                return Err(Error::ExtraDepNotOnBranch { hash });
                            }
                        } else {
                            return Err(Error::PatchNotFound {
                                repo_root: opts.repo_root().to_string_lossy().into_owned(),
                                patch_hash: hash,
                            });
                        }
                    } else if let Some(internal) = PatchId::from_base58(dep) {
                        if let Some(hash) = txn.get_external(internal) {
                            if txn.get_patch(&branch.patches, internal).is_some() {
                                extra_deps.push(hash.to_owned())
                            } else {
                                return Err(Error::ExtraDepNotOnBranch {
                                    hash: hash.to_owned(),
                                });
                            }
                        }
                    } else {
                        return Err(Error::WrongHash);
                    }
                }
            }
            txn.new_patch(
                &branch,
                authors,
                patch_name,
                description,
                patch_date,
                changes,
                extra_deps.into_iter(),
                PatchFlags::empty(),
            )
        };
        drop(repo);

        let patches_dir = opts.repo_root.patches_dir();
        // Sign when a key is configured; the repo-local key takes
        // precedence over the global one.
        let mut key = meta
            .signing_key
            .or(global.signing_key)
            .and_then(|s| load_signing_key(s).ok());
        let hash = if let Some(ref mut key) = key {
            key.check_author(&patch.header().authors)?;
            patch.save(&patches_dir, key.keys.get_mut(0))?
        } else {
            patch.save(&patches_dir, None)?
        };

        let pristine_dir = opts.pristine_dir();
        let mut increase = 409600;
        // Retry with a doubled size increase while the txn lacks space.
        let res = loop {
            match record_no_resize(
                &pristine_dir,
                &opts.repo_root,
                &branch_name,
                &hash,
                &patch,
                &syncs,
                increase,
            ) {
                Err(ref e) if e.lacks_space() => increase *= 2,
                e => break e,
            }
        };

        run_hook(&opts.repo_root, "post-record", None)?;

        res
    }
}

/// Apply a freshly recorded `patch` (already saved under `hash`) to branch
/// `branch_name` of the pristine at `pristine_dir`, without growing the
/// environment on failure: callers loop, doubling `increase`, while the
/// returned error `lacks_space()`.
pub fn record_no_resize(
    pristine_dir: &Path,
    // Duplicated `r: &Path` parameter (stale pre-RepoRoot signature) removed.
    r: &RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    hash: &Hash,
    patch: &Patch,
    syncs: &HashSet<InodeUpdate>,
    increase: u64,
) -> Result<Option<Hash>, Error> {
    // Reserve room for the patch itself on top of the requested increase.
    let size_increase = increase + patch.size_upper_bound() as u64;
    let repo = match Repository::open(&pristine_dir, Some(size_increase)) {
        Ok(repo) => repo,
        Err(x) => return Err(Error::Repository(x)),
    };
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    // save patch
    debug!("syncs: {:?}", syncs);
    let mut branch = txn.open_branch(branch_name)?;
    txn.apply_local_patch(&mut branch, r, &hash, &patch, &syncs, false)?;
    txn.commit_branch(branch)?;
    txn.commit()?;
    println!("Recorded patch {}", hash.to_base58());
    Ok(Some(hash.clone()))
}

/// Report the outcome of `run` to the user via the shared explainer.
pub fn explain(res: Result<Option<Hash>, Error>) {
    default_explain(res)
}

/// Record working-copy changes on `branch_name`, restricted to the given
/// `prefix` paths when present, or over the whole repository otherwise.
///
/// Returns the recorded changes plus the inode updates to apply afterwards.
pub fn changes_from_prefixes<T: rand::Rng, P: AsRef<Path>>(
    algo: libpijul::DiffAlgorithm,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    txn: &mut MutTxn<T>,
    branch_name: &str,
    prefix: Option<&Vec<RepoPath<P>>>,
) -> Result<
    (
        Vec<libpijul::patch::Record<libpijul::patch::ChangeContext<PatchId>>>,
        HashSet<libpijul::InodeUpdate>,
    ),
    Error,
> {
    let mut record = RecordState::new();
    let branch = txn.open_branch(branch_name)?;
    if let Some(prefixes) = prefix {
        // One record pass per requested prefix, accumulated in `record`.
        for prefix in prefixes {
            txn.record(algo, &mut record, &branch, repo_root, prefix)?;
        }
    } else {
        txn.record(algo, &mut record, &branch, repo_root, &in_repo_root())?;
    }
    txn.commit_branch(branch)?;
    let (changes, updates) = record.finish();
    // let changes = changes.into_iter().map(|x| txn.globalize_change(x)).collect();
    Ok((changes, updates))
}

/// Turn the `--prefix` command-line values (paths relative to the current
/// working directory) into repository-relative paths.
// The two stale `strip_prefix` lines (pre-RepoRoot diff residue) are removed;
// `relativize` is the RepoRoot replacement.
pub fn prefix(
    args: &ArgMatches,
    opts: &BasicOptions,
) -> Result<Option<Vec<RepoPath<PathBuf>>>, Error> {
    if let Some(prefixes) = args.values_of("prefix") {
        let prefixes: Result<Vec<_>, Error> = prefixes
            .map(|prefix| {
                let p = opts.cwd.join(prefix);
                // Canonicalization fails for paths that don't exist yet;
                // fall back to the joined path in that case.
                let p = if let Ok(p) = canonicalize(&p) { p } else { p };
                let file = opts.repo_root.relativize(&p)?;
                Ok(file.to_owned())
            })
            .collect();
        Ok(Some(prefixes?))
    } else {
        Ok(None)
    }
}

/// Decide the patch author list: explicit `--author` values win, then the
/// repository meta file, then the global author, and finally an
/// interactive prompt.
pub fn decide_authors(
    authors_args: Option<Vec<&str>>,
    meta: &Meta,
    global: &Global,
) -> Result<Vec<String>, Error> {
    Ok(match authors_args {
        Some(authors) => authors.iter().map(|x| x.to_string()).collect(),
        _ => {
            // `!is_empty()` instead of `len() > 0` (clippy: len_zero).
            if !meta.authors.is_empty() {
                meta.authors.clone()
            } else if let Some(ref auth) = global.author {
                vec![auth.clone()]
            } else {
                ask::ask_authors()?
            }
        }
    })
}

/// Decide the patch name and optional description: use `-m`/`-d` when
/// given, otherwise ask the user, through an editor when allowed.
pub fn decide_patch_message(
    name_arg: Option<&str>,
    descr_arg: Option<&str>,
    template: String,
    use_editor: bool,
    // Duplicated `repo_root: &PathBuf` parameter (stale signature) removed.
    repo_root: &RepoRoot<impl AsRef<Path>>,
    meta: &Meta,
    global: &Global,
) -> Result<(String, Option<String>), Error> {
    Ok(match name_arg {
        Some(m) => (m.to_string(), descr_arg.map(|x| String::from(x.trim()))),
        _ => {
            // Repository-local editor setting wins over the global one.
            let maybe_editor = if use_editor {
                if meta.editor.is_some() {
                    meta.editor.as_ref()
                } else {
                    global.editor.as_ref()
                }
            } else {
                None
            };

            ask::ask_patch_name(repo_root, maybe_editor, template)?
        }
    })
}

/// Build the text shown in the editor when recording: the (possibly empty)
/// description followed by a commented-out summary of the changes about to
/// be recorded.
fn prepare_changes_template(descr: &str, changes: &[Record<Vec<Key<Option<Hash>>>>]) -> String {
    let mut res = format!(
        r#"
{}
# Please enter a patch title, and consider writing a description too. Lines
# starting with '#' will be ignored. Besides, an empty patch title aborts the
# patch recording.
#
# Here is a summary of the changes you are about to record:
#"#,
        descr
    );
    // Several change records may touch the same file; list each file once.
    let mut known_files = Vec::new();

    for change in changes.iter() {
        // Append in place: `res = format!("{}…", res, …)` re-copied the
        // whole accumulator on every iteration (accidentally quadratic).
        match *change {
            Record::Change { ref file, .. } => {
                if !known_files.contains(&file) {
                    res.push_str(&format!("\n#\tmodified:  {}", file.display()));
                    known_files.push(file);
                }
            }
            Record::FileAdd { ref name, .. } => {
                res.push_str(&format!("\n#\tnew file:  {}", name.display()));
            }
            Record::FileDel { ref name, .. } => {
                res.push_str(&format!("\n#\tdeleted:  {}", name.display()));
            }
            Record::FileMove { ref new_name, .. } => {
                res.push_str(&format!("\n#\t   moved:  to {}", new_name.display()));
            }
        }
    }

    res
}

================================
) -> Result<HashSet<RepoPath<PathBuf>>, Error> {
    let mut untracked = HashSet::new();
    for file in untracked_files(txn, repo_root.to_path_buf(), repo_root) {
    for file in repo_root.untracked_files(txn, &repo_root.repo_root) {













1

2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40

41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78

79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527

528
529
530
531
532
533
534
535
536
537
538
539
540
541


544


547


550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621

623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842

843
844
845
846
847
848
849
850
851
852
853
854
855

856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892

893
894
895
896
897


898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931




932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962

963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983



984
985
986
987
988
989
990
991
992
993
994

995
996
997
998
999
1000
1001
1002
1003
1004

1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024

1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043

1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084

1085
1086
1087
1088

1089
1090
1091
1092
1093
1094
1095
1096
1097
1098


1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123

1124


1127


1130


1133

1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160


1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187

1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252


1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419

1420
1421
1422
1423
1424

1425
1426
1427
1428
    RepoRoot, branch_changes_base_path, patch_file_name, PIJUL_DIR_NAME,
//use super::get_current_branch;
        path: RepoRoot<PathBuf>,
        let remote_file = self.root.patches_dir().join(&patch_file_name(patch_hash.as_ref()));
                apply_resize_no_output(&self.root, &remote_branch, patch_hashes.iter(), |_, _| {})
    pub fn download_patch(&mut self, repo_root: &RepoRoot<impl AsRef<Path>>, patch_hash: &Hash) -> Result<PathBuf, Error> {
            let repo = source.open_repo(None)?;
            Remote::Local { path: RepoRoot { repo_root: ref path} } => Ok(Session::Local(LocalSession {
                root: RepoRoot { repo_root : path },
                Remote::Local { path: {RepoRoot { repo_root: path } } }
                Remote::Local { path: {RepoRoot { repo_root: path } } }
            Remote::Local { path: RepoRoot {repo_root: path } }
            Remote::Local { path: RepoRoot {repo_root: path } }
use libpijul::fs_representation::{
    branch_changes_base_path, patch_file_name, patches_dir, pristine_dir, PIJUL_DIR_NAME,
    branch_changes_base_path, patch_file_name, RepoRoot, PIJUL_DIR_NAME,
};
use libpijul::patch::read_changes;
use libpijul::{
    apply_resize, apply_resize_no_output, apply_resize_patches, apply_resize_patches_no_output,
    ApplyTimestamp, ConflictingFile, Hash, Patch, PatchId, RepoPath, Repository,
};
use regex::Regex;
use reqwest;
use reqwest::async as reqwest_async;

use error::Error;
use std;
use std::collections::hash_set::HashSet;
use std::collections::HashMap;
use std::fs::{copy, hard_link, metadata, rename, File};
use std::path::{Path, PathBuf};
use std::sync::Arc;

use commands::{ask, assert_no_containing_repo, create_repo};
use cryptovec;
use dirs;
use futures;
use futures::{Async, Future, Poll, Stream};
use meta;
use progrs;
use sequoia_openpgp::serialize::Serialize;
use shell_escape::unix::escape;
use std::borrow::Cow;
use std::io::prelude::*;
use std::io::BufReader;
use std::net::ToSocketAddrs;
use tempfile::tempdir_in;
use thrussh;
use thrussh_config;
use thrussh_keys;
use tokio;
use username;

use super::get_current_branch;
/// Description of an SSH remote, as parsed from the command line or
/// configuration.
#[derive(Debug)]
pub struct SshRemote<'a> {
    // Optional user part of `user@host`.
    user: Option<&'a str>,
    host: &'a str,
    // Falls back to 22 in `Client::new` when `None`.
    port: Option<u16>,
    // Path of the repository on the remote host.
    path: &'a str,
    id: &'a str,
    local_repo_root: Option<&'a Path>,
    // Command used to invoke pijul on the remote -- presumably "pijul"
    // unless overridden; confirm at the construction site.
    pijul_cmd: Cow<'static, str>,
}

/// A remote repository location: over SSH, over a plain URI (HTTP), or on
/// the local filesystem.
#[derive(Debug)]
pub enum Remote<'a> {
    Ssh(SshRemote<'a>),
    Uri { uri: &'a str },
    Local { path: RepoRoot<PathBuf> },
}

/// An open session with a remote, one variant per transport in `Remote`.
pub enum Session<'a> {
    Ssh(SshSession<'a>),
    Uri(UriSession<'a>),
    Local(LocalSession<'a>),
}

/// An established SSH session, driven on a tokio runtime; the connection
/// is taken out and disconnected in the `Drop` impl below.
pub struct SshSession<'a> {
    pub l: tokio::runtime::Runtime,
    // Repository path on the remote host.
    path: &'a str,
    // Remote pijul command -- presumably "pijul"; confirm at construction.
    pijul_cmd: &'a str,
    pub session: Option<thrussh::client::Connection<thrussh_config::Stream, Client>>,
}

/// A session over a plain URI, using an async reqwest client on a tokio
/// runtime.
pub struct UriSession<'a> {
    l: tokio::runtime::Runtime,
    uri: &'a str,
    client: reqwest_async::Client,
}

/// A session with a repository on the local filesystem.
pub struct LocalSession<'a> {
    // NOTE(review): `path` and `root` appear to refer to the same
    // repository root -- confirm against the construction site.
    path: &'a Path,
    root: RepoRoot<&'a Path>,
}

impl<'a> Drop for SshSession<'a> {
    /// Disconnect the SSH connection (if still open) when the session is
    /// dropped; errors are only logged since `drop` cannot fail.
    fn drop(&mut self) {
        if let Some(mut session) = self.session.take() {
            debug!("disconnecting");
            session.disconnect(thrussh::Disconnect::ByApplication, "finished", "EN");
            // Drive the connection future to completion so the disconnect
            // is actually sent.
            if let Err(e) = self.l.block_on(session) {
                error!("While dropping SSH Session: {:?}", e);
            }
        }
    }
}

#[cfg(unix)]
use thrussh_keys::agent::client::AgentClient;
#[cfg(unix)]
use tokio_uds::UnixStream;

/// Client-side handler state for a single SSH connection.
pub struct Client {
    // Exit status of remote commands, keyed by channel id.
    pub exit_status: HashMap<thrussh::ChannelId, u32>,
    // What kind of remote reply we are currently processing (see `State`).
    state: State,
    host: String,
    port: u16,
    channel: Option<thrussh::ChannelId>,
    // SSH agent connection (Unix only; `()` placeholder on Windows).
    #[cfg(unix)]
    pub agent: Option<AgentClient<UnixStream>>,
    #[cfg(windows)]
    pub agent: Option<()>,
}

impl Client {
    /// Build a client; on Unix, also connect to the SSH agent advertised
    /// by `SSH_AUTH_SOCK` when that variable is set (failures leave
    /// `agent` as `None`).
    #[cfg(unix)]
    fn new(port: Option<u16>, host: &str, l: &mut tokio::runtime::Runtime) -> Self {
        let agent = if let Ok(path) = std::env::var("SSH_AUTH_SOCK") {
            l.block_on(
                UnixStream::connect(path).map(thrussh_keys::agent::client::AgentClient::connect),
            )
            .ok()
        } else {
            None
        };
        debug!("Client::new(), agent: {:?}", agent.is_some());
        Client {
            exit_status: HashMap::new(),
            state: State::None,
            // Default SSH port when none was given.
            port: port.unwrap_or(22),
            host: host.to_string(),
            channel: None,
            agent,
        }
    }

    /// Windows variant: no agent support; the runtime handle is unused.
    #[cfg(windows)]
    fn new(port: Option<u16>, host: &str, _: &mut tokio::runtime::Runtime) -> Self {
        Client {
            exit_status: HashMap::new(),
            state: State::None,
            port: port.unwrap_or(22),
            host: host.to_string(),
            channel: None,
            agent: None,
        }
    }
}

/// What `Client::data` should do with bytes arriving on the session
/// channel.
enum State {
    /// No structured command in flight: forward remote output to stdout.
    None,
    /// Parsing the output of a remote `pijul log --hash-only`:
    /// one `hash:timestamp` pair per line.
    Changes {
        changes: Vec<(Hash, ApplyTimestamp)>,
    },
    /// Streaming a binary patch into a local file.
    DownloadPatch {
        file: File,
    },
    /*SendKey {
        key_pair: meta::SigningKeys,
    },*/
}

/// Current phase of the `SendFile` future: either the connection is
/// idle and the next chunk can be read from the file, or a chunk write
/// is in flight on the SSH channel.
enum SendFileState {
    /// Connection available: read the next chunk from the file.
    Read(thrussh::client::Connection<thrussh_config::Stream, Client>),
    /// A `data` write is pending; poll it until the connection comes back.
    Wait(thrussh::client::Data<thrussh_config::Stream, Client, Vec<u8>>),
}

/// Future streaming the contents of a local file `f` over SSH channel
/// `chan`, chunk by chunk (driven by the `Future` impl below).
struct SendFile {
    f: File,
    // Reusable chunk buffer; `None` only while a write is in flight
    // (the buffer travels inside the `Data` future).
    buf: Option<Vec<u8>>,
    chan: thrussh::ChannelId,
    // Read/write cycle position; `None` only transiently inside `poll`.
    state: Option<SendFileState>,
}

/// Alternates between reading a chunk (up to `BUFFER_SIZE` bytes) from
/// the file and waiting for the previous chunk to be written to the
/// channel. Resolves to the connection plus the (reusable) buffer once
/// the file reaches EOF.
impl Future for SendFile {
    type Item = (
        thrussh::client::Connection<thrussh_config::Stream, Client>,
        Vec<u8>,
    );
    type Error = Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        debug!("SendFile loop starting");
        loop {
            debug!("sendfile loop");
            match self.state.take() {
                Some(SendFileState::Read(c)) => {
                    debug!("read");
                    let mut buf = self.buf.take().unwrap();
                    buf.resize(BUFFER_SIZE, 0);
                    let len = self.f.read(&mut buf)?;
                    if len == 0 {
                        // If nothing has been read, return.
                        return Ok(Async::Ready((c, buf)));
                    }
                    buf.truncate(len);
                    debug!("sending {:?} bytes, {:?}", len, buf.len());
                    // Hand the buffer to the write future; it comes back
                    // with the connection when the write completes.
                    self.state = Some(SendFileState::Wait(c.data(self.chan, None, buf)));
                }
                Some(SendFileState::Wait(mut c)) => {
                    debug!("wait");
                    match c.poll()? {
                        // Write finished: loop back to read more.
                        Async::Ready((c, buf)) => {
                            self.buf = Some(buf);
                            self.state = Some(SendFileState::Read(c))
                        }
                        Async::NotReady => {
                            self.state = Some(SendFileState::Wait(c));
                            return Ok(Async::NotReady);
                        }
                    }
                }
                // `state` is restored before every return above, so this
                // can only happen if `poll` is called after completion.
                None => unreachable!(),
            }
        }
    }
}

impl thrussh::client::Handler for Client {
    type Error = Error;
    type FutureUnit = futures::Finished<Client, Error>;
    type SessionUnit = futures::Finished<(Client, thrussh::client::Session), Error>;
    type FutureBool = futures::future::FutureResult<(Client, bool), Error>;
    type FutureSign =
        Box<futures::Future<Item = (Self, cryptovec::CryptoVec), Error = Self::Error>>;

    #[cfg(unix)]
    fn auth_publickey_sign(
        mut self,
        key: &thrussh_keys::key::PublicKey,
        mut to_sign: cryptovec::CryptoVec,
    ) -> Self::FutureSign {
        debug!("auth_publickey_sign");
        if let Some(agent) = self.agent.take() {
            use thrussh_keys::encoding::Encoding;
            debug!("using agent");
            Box::new(
                agent
                    .sign_request(key, &to_sign)
                    .then(move |result| match result {
                        Ok((client, sig)) => {
                            debug!("sig = {:?}", sig);
                            if let Some(sig) = sig {
                                to_sign.extend_ssh_string(&sig[..]);
                            }
                            self.agent = Some(client);
                            futures::finished::<_, Error>((self, to_sign))
                        }
                        Err(e) => {
                            error!("SSH agent error: {:?}", e);
                            futures::finished((self, to_sign))
                        }
                    })
                    .from_err(),
            )
        } else {
            debug!("no agent");
            Box::new(futures::finished((self, to_sign)))
        }
    }

    fn data(
        mut self,
        channel: thrussh::ChannelId,
        stream: Option<u32>,
        data: &[u8],
        session: thrussh::client::Session,
    ) -> Self::SessionUnit {
        debug!(
            "data ({:?}): {:?}",
            channel,
            &data[..std::cmp::min(data.len(), 100)]
        );
        if stream == Some(1) {
            std::io::stderr().write(data).unwrap();
        } else if stream == None {
            match self.state {
                State::None => {
                    std::io::stdout().write(data).unwrap();
                }
                State::Changes { ref mut changes } => {
                    let data = std::str::from_utf8(data).unwrap();
                    for l in data.lines() {
                        let mut spl = l.split(':');
                        if let (Some(h), Some(s)) = (spl.next(), spl.next()) {
                            if let (Some(h), Ok(s)) = (Hash::from_base58(h), s.parse()) {
                                changes.push((h, s));
                            }
                        }
                    }
                }
                State::DownloadPatch { ref mut file, .. } => {
                    file.write_all(data).unwrap();
                }
            }
        } else {
            debug!(
                "SSH data received on channel {:?}: {:?} {:?}",
                channel, stream, data
            );
        }
        futures::finished((self, session))
    }
    fn exit_status(
        mut self,
        channel: thrussh::ChannelId,
        exit_status: u32,
        session: thrussh::client::Session,
    ) -> Self::SessionUnit {
        debug!(
            "exit_status received on channel {:?}: {:?}:",
            channel, exit_status
        );
        debug!("self.channel = {:?}", self.channel);
        if let Some(c) = self.channel {
            if channel == c {
                self.exit_status.insert(channel, exit_status);
            }
        }
        debug!("self.exit_status = {:?}", self.exit_status);
        futures::finished((self, session))
    }

    fn check_server_key(
        self,
        server_public_key: &thrussh_keys::key::PublicKey,
    ) -> Self::FutureBool {
        let path = dirs::home_dir().unwrap().join(".ssh").join("known_hosts");
        match thrussh_keys::check_known_hosts_path(&self.host, self.port, server_public_key, &path)
        {
            Ok(true) => futures::done(Ok((self, true))),
            Ok(false) => {
                if let Ok(false) = ask::ask_learn_ssh(&self.host, self.port, "") {
                    // TODO
                    // &server_public_key.fingerprint()) {

                    futures::done(Ok((self, false)))
                } else {
                    thrussh_keys::learn_known_hosts_path(
                        &self.host,
                        self.port,
                        server_public_key,
                        &path,
                    )
                    .unwrap();
                    futures::done(Ok((self, true)))
                }
            }
            Err(e) => {
                if let thrussh_keys::Error::KeyChanged(line) = e {
                    println!(
                        "Host key changed! Someone might be eavesdropping this communication, \
                         refusing to continue. Previous key found line {}",
                        line
                    );
                    futures::done(Ok((self, false)))
                } else {
                    futures::done(Err(From::from(e)))
                }
            }
        }
    }
}

const BUFFER_SIZE: usize = 1 << 14; // 16 kb.

impl<'a> SshSession<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let mut cmd = format!(
            "{} log --repository {} --branch {:?} --hash-only",
            self.pijul_cmd, esc_path, branch
        );
        for p in path {
            cmd.push_str(&format!(" --path {}", p.display()))
        }

        if let Some(ref mut session) = self.session {
            session.handler_mut().state = State::Changes {
                changes: Vec::new(),
            }
        }
        let mut channel = None;
        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut connection, chan)| {
                            debug!("exec: {:?}", cmd);
                            channel = Some(chan);
                            connection.handler_mut().exit_status.remove(&chan);
                            connection.handler_mut().channel = Some(chan);
                            connection.exec(chan, false, &cmd);
                            connection.channel_eof(chan);
                            // Wait until channel close.
                            debug!("waiting channel close");
                            connection
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref session) = self.session {
            if let Some(channel) = channel {
                if let Some(&exit_code) = session.handler().exit_status.get(&channel) {
                    debug!("exit_code = {:?}", exit_code);
                    if exit_code != 0 {
                        return Ok(Vec::new());
                    }
                }
            }
        }
        if let Some(ref mut session) = self.session {
            match std::mem::replace(&mut session.handler_mut().state, State::None) {
                State::Changes { changes } => {
                    debug!("changes: {:?}", changes);
                    Ok(changes)
                }
                _ => unreachable!(),
            }
        } else {
            unreachable!()
        }
    }

    pub fn send_key(&mut self, key_pair: meta::SigningKeys) -> Result<(), Error> {
        if let Some(ref mut session) = self.session {
            // session.handler_mut().state = State::SendKey { };
            session.handler_mut().channel = None;
        }
        let challenge_cmd = format!("{} key register", self.pijul_cmd);
        let mut data = Vec::new();
        key_pair.tsk.tpk().serialize(&mut data)?;
        self.session = Some(
            self.l.block_on(
                self.session
                    .take()
                    .unwrap()
                    .channel_open_session()
                    .and_then(move |(mut session, channelid)| {
                        session.exec(channelid, false, &challenge_cmd);
                        session
                            .data(channelid, None, data)
                            .and_then(move |(mut session, _)| {
                                session.channel_eof(channelid);
                                session.handler_mut().channel = Some(channelid);
                                session.wait(move |session| {
                                    session.handler().exit_status.get(&channelid).is_some()
                                })
                            })
                    }),
            )?,
        );
        Ok(())
    }

    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
        local_tmp_file: PathBuf,
    ) -> Result<PathBuf, Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let cmd = format!(
            "{} patch --repository {} --bin {}",
            self.pijul_cmd,
            esc_path,
            patch_hash.to_base58()
        );
        debug!("cmd {:?} {:?}", cmd, local_file);
        if let Some(ref mut session) = self.session {
            session.handler_mut().state = State::DownloadPatch {
                file: File::create(&local_tmp_file)?,
            };
            session.handler_mut().channel = None;
        }
        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut connection, chan)| {
                            connection.handler_mut().exit_status.remove(&chan);
                            connection.handler_mut().channel = Some(chan);
                            connection.exec(chan, false, &cmd);
                            connection.channel_eof(chan);
                            connection
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref mut session) = self.session {
            if let State::DownloadPatch { mut file, .. } =
                std::mem::replace(&mut session.handler_mut().state, State::None)
            {
                file.flush()?;
                rename(&local_tmp_file, &local_file)?;
            }
        }
        Ok(local_file)
    }

    pub fn remote_apply(
        &mut self,
        repo_root: &Path,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: Vec<Hash>,
    ) -> Result<(), Error> {
        let pdir = repo_root.patches_dir();
        let mut exit_status = None;
        let esc_path = escape(Cow::Borrowed(&self.path));
        let apply_cmd = format!(
            "{} apply --repository {} --branch {:?}",
            self.pijul_cmd, esc_path, remote_branch
        );
        let sign_cmd = format!("{} sign --repository {}", self.pijul_cmd, esc_path);

        let session = self.session.take().unwrap();

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                        .fold((session, Vec::new()), move |(session, buf), hash| {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                                futures::future::Either::A((SendFile {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
        self.session = Some(
            self.l
                .block_on(
                    session
                        .channel_open_session()
                        .and_then(move |(session, chan0)| {
                            session
                                .channel_open_session()
                                .and_then(move |(mut session, chan1)| {
                                    session.handler_mut().exit_status.remove(&chan0);
                                    session.handler_mut().channel = Some(chan0);
                                    debug!("exec {:?}", apply_cmd);
                                    session.exec(chan0, false, &apply_cmd);
                                    debug!("exec {:?}", sign_cmd);
                                    session.exec(chan1, false, &sign_cmd);
                                    futures::stream::iter_ok(patch_hashes.into_iter())
                                        .fold((session, Vec::new()), move |(session, buf), hash| {
                                            let mut pdir = pdir.clone();
                                            pdir.push(hash.to_base58());
                                            pdir.set_extension("gz");
                                            let f = std::fs::File::open(&pdir).unwrap();
                                            pdir.set_extension("sig");
                                            if let Ok(sig) = std::fs::File::open(&pdir) {
                                                futures::future::Either::A(
                                                    (SendFile {
                                                        f: f,
                                                        buf: Some(buf),
                                                        chan: chan0,
                                                        state: Some(SendFileState::Read(session)),
                                                    })
                                                    .and_then(move |(session, mut buf)| {
                                                        buf.clear();
                                                        SendFile {
                                                            f: sig,
                                                            buf: Some(buf),
                                                            chan: chan1,
                                                            state: Some(SendFileState::Read(
                                                                session,
                                                            )),
                                                        }
                                                    }),
                                                )
                                            } else {
                                                futures::future::Either::B(SendFile {
                                                    f: f,
                                                    buf: Some(buf),
                                                    chan: chan0,
                                                    state: Some(SendFileState::Read(session)),
                                                })
                                            }
                                        })
                                        .and_then(move |(mut session, _)| {
                                            session.channel_eof(chan0);
                                            session
                                                .wait(move |session| {
                                                    session
                                                        .handler()
                                                        .exit_status
                                                        .get(&chan0)
                                                        .is_some()
                                                })
                                                .map(move |mut session| {
                                                    exit_status = session
                                                        .handler()
                                                        .exit_status
                                                        .get(&chan0)
                                                        .map(|x| *x);
                                                    session.channel_close(chan0);
                                                    session
                                                })
                                        })
                                        .map_err(From::from)

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref session) = self.session {
            debug!("exit status = {:?}", session.handler().exit_status);
        }
        Ok(())
    }

    pub fn remote_init(&mut self) -> Result<(), Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let cmd = format!("{} init {}", self.pijul_cmd, esc_path);
        debug!("command line:{:?}", cmd);

        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut session, chan)| {
                            debug!("chan = {:?}", chan);
                            session.handler_mut().exit_status.remove(&chan);
                            session.handler_mut().channel = Some(chan);
                            session.exec(chan, false, &cmd);
                            session.channel_eof(chan);
                            // Wait until channel close.
                            session
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );
        Ok(())
    }
}

impl<'a> UriSession<'a> {
    /// List the patches on `branch` of the remote repository by
    /// downloading the branch's changes file over HTTP. Path-restricted
    /// (partial) queries are not supported over HTTP and return an error.
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        if !path.is_empty() {
            return Err(Error::PartialPullOverHttp);
        }
        let mut uri = self.uri.to_string();
        uri = uri + "/" + PIJUL_DIR_NAME + "/" + &branch_changes_base_path(branch);
        let mut req = reqwest_async::Request::new(reqwest::Method::GET, uri.parse().unwrap());
        req.headers_mut().insert(
            reqwest::header::CONNECTION,
            reqwest::header::HeaderValue::from_static("close"),
        );
        // Accumulate the whole response body in memory.
        let res: Vec<u8> = self.l.block_on(self.client.execute(req).and_then(
            |resp: reqwest_async::Response| {
                let res = Vec::new();
                let body = resp.into_body();
                body.fold(res, |mut res, x| {
                    res.extend(x.iter());
                    futures::finished::<_, reqwest::Error>(res)
                })
            },
        ))?;
        // An unparsable changes file is treated as an empty branch.
        let changes = read_changes(&mut &res[..]).unwrap_or(Vec::new());
        debug!("http: {:?}", changes);
        Ok(changes)
    }

    /// Download patch `patch_hash` (and, best effort, its detached
    /// `.sig` signature) into `local_tmp_file`, renaming to
    /// `local_file` on success. The two HTTP requests run concurrently
    /// via `join`; a missing signature is not an error, a missing
    /// patch is `Error::PatchNotFound`.
    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
        local_tmp_file: PathBuf,
    ) -> Result<PathBuf, Error> {
        let ref mut l = self.l;
        let ref mut client = self.client;
        let uri = self.uri.to_string()
            + "/"
            + PIJUL_DIR_NAME
            + "/patches/"
            + &patch_hash.to_base58()
            + ".gz";
        debug!("downloading uri {:?}", uri);

        let req = reqwest_async::Request::new(reqwest::Method::GET, uri.parse().unwrap());
        let uri_sig = self.uri.to_string()
            + "/"
            + PIJUL_DIR_NAME
            + "/patches/"
            + &patch_hash.to_base58()
            + ".sig";
        debug!("{:?}", uri_sig);
        let req_sig = reqwest_async::Request::new(reqwest::Method::GET, uri_sig.parse().unwrap());
        // Signature files live next to the patch, with a .sig extension.
        let mut local_sig_file = local_file.clone();
        let mut local_tmp_sig_file = local_tmp_file.clone();
        local_sig_file.set_extension("sig");
        local_tmp_sig_file.set_extension("sig");

        // Each half resolves to Some((tmp, final)) on HTTP 200, None
        // otherwise; the whole body is buffered before writing to disk.
        let res = l
            .block_on(
                client
                    .execute(req)
                    .and_then(move |resp| {
                        if resp.status() == reqwest::StatusCode::OK {
                            let res = Vec::new();
                            futures::future::Either::A(
                                resp.into_body()
                                    .fold(res, |mut res, x| {
                                        res.extend(x.iter());
                                        futures::finished::<_, reqwest::Error>(res)
                                    })
                                    .map(|body| {
                                        // debug!("response={:?}", body);
                                        let mut f = File::create(&local_tmp_file).unwrap();
                                        f.write_all(&body).unwrap();
                                        // debug!("patch downloaded through http: {:?}", body);
                                        Some((local_tmp_file, local_file))
                                    }),
                            )
                        } else {
                            futures::future::Either::B(futures::finished(None))
                        }
                    })
                    .join(client.execute(req_sig).then(move |resp| {
                        // A failed signature request is swallowed: the
                        // signature is optional.
                        let resp = if let Ok(resp) = resp {
                            resp
                        } else {
                            return futures::future::Either::B(futures::finished(None));
                        };
                        debug!("sig status {:?}", resp.status());
                        if resp.status() == reqwest::StatusCode::OK {
                            let res = Vec::new();
                            futures::future::Either::A(
                                resp.into_body()
                                    .fold(res, |mut res, x| {
                                        res.extend(x.iter());
                                        futures::finished::<_, reqwest::Error>(res)
                                    })
                                    .map(|body| {
                                        // debug!("response={:?}", body);
                                        let mut f = File::create(&local_tmp_sig_file).unwrap();
                                        f.write_all(&body).unwrap();
                                        // debug!("patch downloaded through http: {:?}", body);
                                        Some((local_tmp_sig_file, local_sig_file))
                                    }),
                            )
                        } else {
                            futures::future::Either::B(futures::finished(None))
                        }
                    })),
            )
            .unwrap();
        if let Some((local_tmp_file, local_file)) = res.0 {
            debug!("renaming {:?} to {:?}", local_tmp_file, local_file);
            rename(&local_tmp_file, &local_file)?;
            if let Some((local_tmp_sig_file, local_sig_file)) = res.1 {
                debug!("renaming {:?} to {:?}", local_tmp_sig_file, local_sig_file);
                // Best effort: ignore a failed signature rename.
                rename(&local_tmp_sig_file, &local_sig_file).unwrap_or(());
            }
            Ok(local_file)
        } else {
            Err(Error::PatchNotFound {
                repo_root: self.uri.into(),
                patch_hash: patch_hash.to_owned(),
            })
        }
    }
}

impl<'a> LocalSession<'a> {
    /// List the patches on `branch` of the local "remote" repository,
    /// optionally restricted to patches touching one of the `path`s.
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        let repo = self.root.open_repo(None)?;
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch) {
            if !path.is_empty() {
                // Keep only the patches that touch at least one of the
                // requested paths.
                let mut patches = Vec::new();
                for (hash, s) in txn.iter_patches(&branch, None) {
                    for path in path {
                        let inode = txn.find_inode(path).unwrap();
                        let key = txn.get_inodes(inode).unwrap().key;
                        if txn.get_touched(key, hash) {
                            patches.push((txn.get_external(hash).unwrap().to_owned(), s));
                            break;
                        }
                    }
                }
                Ok(patches)
            } else {
                Ok(txn
                    .iter_patches(&branch, None)
                    .map(|(hash, s)| (txn.get_external(hash).unwrap().to_owned(), s))
                    .collect())
            }
        } else {
            // Unknown branch: no patches.
            Ok(Vec::new())
        }
    }

    /// Make `patch_hash` available at `local_file` by hard-linking it
    /// from the remote repository's patches directory, falling back to
    /// a plain copy (e.g. across filesystems).
    ///
    /// (Resolved from the RepoRoot migration patch: the leftover
    /// `patches_dir(self.path)` line is gone; the directory now comes
    /// from `self.root`.)
    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
    ) -> Result<PathBuf, Error> {
        debug!("local downloading {:?}", patch_hash);
        let remote_file = self
            .root
            .patches_dir()
            .join(&patch_file_name(patch_hash.as_ref()));
        debug!("hard linking {:?} to {:?}", remote_file, local_file);
        if hard_link(&remote_file, &local_file).is_err() {
            copy(&remote_file, &local_file)?;
        }
        Ok(local_file)
    }

    /// Copy `patch_hashes` (and their signatures, when present) from
    /// `repo_root` into the remote repository's patches directory, then
    /// apply them to `remote_branch`, retrying while the patch
    /// database reports a lack of space (the `resize` variants grow it).
    ///
    /// (Resolved from the RepoRoot migration patch: the duplicate
    /// `repo_root: &Path` parameter and duplicated `&self.path` /
    /// `&self.root` call lines are gone.)
    pub fn remote_apply(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: &Vec<Hash>,
    ) -> Result<Vec<ConflictingFile>, Error> {
        let mut remote_path = self.root.patches_dir();
        let mut local_path = repo_root.patches_dir();
        let remote_current_branch = self.root.get_current_branch()?;

        for hash in patch_hashes {
            remote_path.push(&hash.to_base58());
            remote_path.set_extension("gz");

            local_path.push(&hash.to_base58());
            local_path.set_extension("gz");

            debug!("hard linking {:?} to {:?}", local_path, remote_path);
            if metadata(&remote_path).is_err() {
                if hard_link(&local_path, &remote_path).is_err() {
                    copy(&local_path, &remote_path)?;
                }
            }

            remote_path.set_extension("sig");
            local_path.set_extension("sig");

            // Signatures are optional: copy only when present locally
            // and absent remotely.
            if metadata(&remote_path).is_err() && metadata(&local_path).is_ok() {
                if hard_link(&local_path, &remote_path).is_err() {
                    copy(&local_path, &remote_path)?;
                }
            }

            local_path.pop();
            remote_path.pop();
        }

        loop {
            // Applying to a branch that is not checked out does not
            // need to touch the working copy.
            let app = if remote_current_branch != remote_branch {
                apply_resize_no_output(&self.root, &remote_branch, patch_hashes.iter(), |_, _| {})
                    .map(|_| Vec::new())
            } else {
                apply_resize(
                    libpijul::DiffAlgorithm::default(),
                    &self.root,
                    &remote_branch,
                    patch_hashes.iter(),
                    &[] as &[RepoPath<&Path>],
                    |_, _| {},
                )
            };
            match app {
                Err(ref e) if e.lacks_space() => debug!("lacks space"),
                Ok(v) => return Ok(v),
                Err(e) => return Err(From::from(e)),
            }
        }
    }
}

/// Result of computing which local patches can be pushed to a remote.
#[derive(Debug, Clone)]
pub struct PushablePatches {
    /// Patches to push, with their local internal id (if known) and
    /// application timestamp.
    pub pushable: Vec<(Hash, Option<PatchId>, ApplyTimestamp)>,
    // NOTE(review): presumably the remote patches that make the push
    // non-fast-forward — confirm against the caller that fills this.
    pub non_fast_forward: Vec<Hash>,
}

impl<'a> Session<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        remote_path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => ssh_session.changes(branch, remote_path),
            Session::Local(ref mut local_session) => local_session.changes(branch, remote_path),
            Session::Uri(ref mut uri_session) => uri_session.changes(branch, remote_path),
        }
    }
    pub fn download_patch(&mut self, repo_root: &Path, patch_hash: &Hash) -> Result<PathBuf, Error> {
        let patches_dir_ = patches_dir(repo_root);
        let patches_dir_ = patches_dir(repo_root);
        let patches_dir_ = patches_dir(repo_root);
    pub fn download_patch(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        patch_hash: &Hash,
    ) -> Result<PathBuf, Error> {
        let patches_dir_ = repo_root.patches_dir();
        let local_file = patches_dir_.join(&patch_file_name(patch_hash.as_ref()));

        if !metadata(&local_file).is_ok() {
            match *self {
                Session::Local(ref mut local_session) => {
                    local_session.fetch_patch(patch_hash, local_file)
                }
                Session::Ssh(ref mut ssh_session) => {
                    let tmp_dir = tempdir_in(&patches_dir_)?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    ssh_session.fetch_patch(patch_hash, local_file, local_tmp_file)
                }
                Session::Uri(ref mut uri_session) => {
                    let tmp_dir = tempdir_in(&patches_dir_)?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    uri_session.fetch_patch(patch_hash, local_file, local_tmp_file)
                }
            }
        } else {
            Ok(local_file)
        }
    }

    fn remote_apply(
        &mut self,
        repo_root: &Path,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: Vec<Hash>,
    ) -> Result<(), Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => {
                ssh_session.remote_apply(repo_root, remote_branch, patch_hashes)
            }

            Session::Local(ref mut local_session) => local_session
                .remote_apply(repo_root, remote_branch, &patch_hashes)
                .map(|_| ()),

            _ => panic!("upload to URI impossible"),
        }
    }

    pub fn remote_init(&mut self) -> Result<(), Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => ssh_session.remote_init(),
            Session::Local(ref mut local_session) => {
                assert_no_containing_repo(local_session.path)?;
                create_repo(local_session.path)
                create_repo(local_session.path)
                assert_no_containing_repo(local_session.root.repo_root)?;
                create_repo(local_session.root.repo_root)
            }
            _ => panic!("remote init not possible"),
        }
    }

    pub fn pullable_patches(
        &mut self,
        remote_branch: &str,
        local_branch: &str,
        target: &Path,
        target: &RepoRoot<impl AsRef<Path>>,
        remote_path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Pullable, Error> {
        let mut remote_patches: Vec<(Hash, ApplyTimestamp)> = self
            .changes(remote_branch, remote_path)?
            .into_iter()
            .map(|(h, s)| (h.to_owned(), s))
            .collect();
        remote_patches.sort_by(|&(_, ref a), &(_, ref b)| a.cmp(&b));
        let local_patches: HashMap<Hash, ApplyTimestamp> = {
            let repo_dir = pristine_dir(&target);
            let repo_dir = target.pristine_dir();
            let repo = Repository::open(&repo_dir, None)?;
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&local_branch) {
                txn.iter_patches(&branch, None)
                    .map(|(hash, s)| (txn.get_external(hash).unwrap().to_owned(), s))
                    .collect()
            } else {
                HashMap::new()
            }
        };
        debug!("pullable done: {:?}", remote_patches);
        Ok(Pullable {
            local: local_patches.iter().map(|(h, _)| h.to_owned()).collect(),
            remote: remote_patches.into_iter().collect(),
        })
    }

    pub fn pull(
        &mut self,
        target: &Path,
        target: &RepoRoot<impl AsRef<Path>>,
        to_branch: &str,
        pullable: &mut Vec<(Hash, ApplyTimestamp)>,
        partial_paths: &[RepoPath<impl AsRef<Path>>],
        display_progress: bool,
    ) -> Result<Vec<ConflictingFile>, Error> {
        let mut p = if display_progress && !pullable.is_empty() {
            Some((progrs::start("Pulling patches", pullable.len() as u64), 0))
        } else {
            None
        };
        let mut pullable_plus_deps = Vec::new();
        let mut pulled = HashSet::new();

        while let Some((hash, _)) = pullable.pop() {
            if pulled.contains(&hash) {
                continue;
            }
            debug!("hash = {:?}", hash);
            let path = self.download_patch(&target, &hash)?;
            let path = self.download_patch(target, &hash)?;

            let patch = {
                let file = File::open(&path)?;
                let mut file = BufReader::new(file);
                Patch::from_reader_compressed(&mut file)?.2
            };
            pulled.insert(hash.clone());

            // If the apply is partial, we might not have all the
            // dependencies. Add them to this list.
            if !partial_paths.is_empty() {
                for dep in patch.dependencies() {
                    if !pulled.contains(dep) {
                        pullable.push((dep.to_owned(), 0));
                    }
                }
            }

            pullable_plus_deps.push((hash.to_owned(), patch));

            p.as_mut().map(|&mut (ref mut p, ref mut n)| {
                p.display({
                    *n = *n + 1;
                    *n
                })
            });
        }

        // Because we've been popping the stack of pullable patches in
        // reverse order, we need to reverse the result.
        pullable_plus_deps.reverse();

        p.map(|(p, _)| p.stop("done"));
        debug!("patches downloaded");

        let p = std::cell::RefCell::new(progrs::start(
            "Applying patches",
            pullable_plus_deps.len() as u64,
        ));
        let mut size_increase = 4096;
        let current_branch = get_current_branch(target)?;
        let current_branch = target.get_current_branch()?;
        let conflicts = loop {
            let app = if current_branch != to_branch {
                apply_resize_patches_no_output(
                    target,
                    &target,
                    &to_branch,
                    &pullable_plus_deps,
                    size_increase,
                    |c, _| p.borrow_mut().display(c as u64),
                )
                .map(|_| Vec::new())
            } else {
                apply_resize_patches(
                    libpijul::DiffAlgorithm::default(),
                    target,
                    target,
                    &target,
                    &to_branch,
                    &pullable_plus_deps,
                    size_increase,
                    partial_paths,
                    |c, _| p.borrow_mut().display(c as u64),
                )
            };
            match app {
                Ok(conflicts) => break conflicts,
                Err(ref e) if e.lacks_space() => size_increase *= 2,
                Err(e) => return Err(e.into()),
            }
        };
        p.into_inner().stop("done");
        Ok(conflicts)
    }

    /// Returns a vector of pushable patches, and a vector of changes
    /// present on the remote branch but not on the local one (to
    /// identify fast-forward pushes).
    pub fn pushable_patches(
        &mut self,
        from_branch: &str,
        to_branch: &str,
        source: &Path,
        source: &RepoRoot<impl AsRef<Path> + std::fmt::Debug>,

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        remote_paths: &[&str],

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    ) -> Result<Vec<(Hash, Option<PatchId>, ApplyTimestamp)>, Error> {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
        remote_paths: &[RepoPath<impl AsRef<Path>>],

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    ) -> Result<PushablePatches, Error> {
        debug!("source: {:?}", source);

        let mut non_fast_forward = Vec::new();

        let to_changes_ = self.changes(to_branch, remote_paths)?;

        let repo = source.open_repo(None)?;
        let txn = repo.txn_begin()?;

        let mut to_changes = HashSet::new();
        let branch = txn.get_branch(&from_branch);
        for (h, _) in to_changes_.iter() {
            if let Some(ref branch) = branch {
                if let Some(patchid) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, patchid).is_none() {
                        non_fast_forward.push(h.clone())
                    }
                } else {
                    non_fast_forward.push(h.clone())
                }
            }
            to_changes.insert(h.as_ref());
        }
        debug!("to_changes: {:?}", to_changes);
        let from_changes: Vec<_> = {
            let repo_dir = pristine_dir(&source);
            let repo = Repository::open(&repo_dir, None)?;
            if let Some(branch) = txn.get_branch(&from_branch) {
                txn.iter_applied(&branch, None)
                    .filter_map(|(s, patchid)| {
                        if let Some(hash) = txn.get_external(patchid) {
                            if to_changes.contains(&hash) {
                                None
                            } else {
                                Some((hash.to_owned(), Some(patchid), s))
                            }
                        } else {
                            None
                        }
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        debug!("pushing: {:?}", from_changes);
        Ok(PushablePatches {
            pushable: from_changes,
            non_fast_forward,
        })
    }

    pub fn push(
        &mut self,
        source: &Path,
        source: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        pushable: Vec<Hash>,
    ) -> Result<(), Error> {
        debug!("push, remote_applying");
        debug!("pushable: {:?}", pushable);
        if pushable.len() > 0 {
            self.remote_apply(source, remote_branch, pushable)?;
        }
        Ok(())
    }
}

/// Resolve SSH connection settings for `host` and open a transport
/// stream, honouring the user's ssh_config where present.
///
/// Explicit `port` and `user` arguments override ssh_config values;
/// missing values fall back to 22 and the current OS user name.
/// Returns the resolved config together with a connect future (a
/// ProxyCommand subprocess when configured, a TCP connection
/// otherwise).
pub fn ssh_connect(
    user: &Option<&str>,
    host: &str,
    port: Option<u16>,
) -> Result<(thrussh_config::Config, thrussh_config::ConnectFuture), Error> {
    // Start from ~/.ssh/config for this host; fall back to defaults
    // when parsing fails or no entry matches.
    let mut ssh_config =
        thrussh_config::parse_home(host).unwrap_or(thrussh_config::Config::default());
    debug!("ssh_config = {:?}", ssh_config);

    if ssh_config.host_name.is_none() {
        ssh_config.host_name = Some(host.to_string())
    }

    // Caller-supplied port wins over ssh_config; default to 22.
    if let Some(port) = port {
        ssh_config.port = Some(port)
    } else if ssh_config.port.is_none() {
        ssh_config.port = Some(22)
    }

    // Caller-supplied user wins over ssh_config; default to the
    // local user name.
    if let Some(ref user) = *user {
        ssh_config.user = Some(user.to_string())
    } else if ssh_config.user.is_none() {
        ssh_config.user = Some(username::get_user_name().unwrap())
    }

    // Expand %h/%p style tokens in the ProxyCommand, if any.
    ssh_config.update_proxy_command();
    let stream = if let Some(ref proxycmd) = ssh_config.proxy_command {
        debug!("{:?}", proxycmd);
        thrussh_config::Stream::proxy_command("sh", &["-c", proxycmd.as_str()])
    } else {
        // No proxy: resolve host:port and connect directly over TCP.
        let addr = if let Some(addrs) = (
            ssh_config.host_name.as_ref().unwrap().as_str(),
            ssh_config.port.unwrap(),
        )
            .to_socket_addrs()?
            .next()
        {
            addrs
        } else {
            return Err(Error::UnknownHost {
                host: host.to_string(),
            });
        };
        debug!("addr = {:?}", addr);
        thrussh_config::Stream::tcp_connect(&addr)
    };
    Ok((ssh_config, stream))
}

impl<'a> Remote<'a> {
    pub fn session(&'a self) -> Result<Session<'a>, Error> {
        match *self {
            Remote::Local { ref path } => Ok(Session::Local(LocalSession {
                path: path.as_path(),
            Remote::Local {
                path: RepoRoot {
                    repo_root: ref path,
                },
            } => Ok(Session::Local(LocalSession {
                root: RepoRoot { repo_root: path },
            })),
            Remote::Uri { uri } => {
                let l = tokio::runtime::Runtime::new().unwrap();
                let proxy_url = std::env::var("http_proxy");
                let c = match proxy_url {
                    Err(std::env::VarError::NotPresent) => reqwest_async::Client::new(),
                    Ok(p_url) => reqwest_async::Client::builder()
                        .proxy(reqwest::Proxy::all(reqwest::Url::parse(&p_url).unwrap())?)
                        .build()?,
                    Err(std::env::VarError::NotUnicode(s)) => {
                        panic!("invalid http_proxy value: {:?}", s)
                    }
                };
                Ok(Session::Uri(UriSession {
                    l,
                    uri: uri,
                    client: c,
                }))
            }
            Remote::Ssh(ref remote) => Ok(Session::Ssh(remote.session()?)),
        }
    }
}

impl<'a> SshRemote<'a> {
    /// Connect and authenticate to this SSH remote, returning a ready
    /// session. Blocks on a dedicated tokio runtime until the SSH
    /// handshake and authentication attempts complete.
    pub fn session(&'a self) -> Result<SshSession<'a>, Error> {
        let mut l = tokio::runtime::Runtime::new().unwrap();

        // Resolve ssh_config settings and open the transport stream.
        let (ssh_config, stream) = ssh_connect(&self.user, self.host, self.port)?;
        let config = Arc::new(thrussh::client::Config::default());

        // Owned copies moved into the connection future below.
        let local_repo_root = self.local_repo_root.map(|x| x.to_path_buf());
        let host = self.host.to_string();
        let handler = Client::new(
            ssh_config.port,
            ssh_config.host_name.as_ref().unwrap().as_str(),
            &mut l,
        );
        // Drive the handshake + authentication future to completion.
        let session: thrussh::client::Connection<_, _> =
            l.block_on(stream.map_err(Error::from).and_then(move |socket| {
                // Whether an SSH agent was found; enables agent auth.
                let use_agent = handler.agent.is_some();
                futures::future::result(thrussh::client::Connection::new(
                    config.clone(),
                    socket,
                    handler,
                    None,
                ))
                .from_err()
                .and_then(move |connection| {
                    debug!("connection done");
                    use super::ssh_auth_attempts::{auth_attempt_future, AuthAttempts};
                    // ssh_connect guarantees `user` is set.
                    let user = ssh_config.user.unwrap();
                    // Try the available auth methods in sequence.
                    auth_attempt_future(
                        connection,
                        AuthAttempts::new(host, local_repo_root, use_agent),
                        user,
                        ssh_config.add_keys_to_agent,
                    )
                })
            }))?;
        debug!("session ready");
        Ok(SshSession {
            l,
            session: Some(session),
            path: self.path,
            pijul_cmd: &self.pijul_cmd,
        })
    }
}

/// Parse a remote id of the form `[user@]host:path` into an
/// `SshRemote`; returns `None` when the id has no `host:path` shape.
pub fn parse_ssh_remote<'a>(
    remote_id: &'a str,
    port: Option<u16>,
    local_repo_root: Option<&'a Path>,
) -> Option<SshRemote<'a>> {
    // The id must look like "something:path" to be an SSH remote.
    let ssh = Regex::new(r"^([^:]*):(.+)$").unwrap();
    let cap = ssh.captures(remote_id)?;
    let user_host = cap.get(1).unwrap().as_str();

    // Split an optional "user@" prefix off the host part.
    let ssh_user_host = Regex::new(r"^([^@]*)@(.*)$").unwrap();
    let (user, host) = match ssh_user_host.captures(user_host) {
        Some(uh) => (
            Some(uh.get(1).unwrap().as_str()),
            uh.get(2).unwrap().as_str(),
        ),
        None => (None, user_host),
    };

    Some(SshRemote {
        user,
        host,
        port,
        id: remote_id,
        path: cap.get(2).unwrap().as_str(),
        local_repo_root,
        pijul_cmd: super::remote_pijul_cmd(),
    })
}

/// Parse a remote id of the form `[user@]host` (no repository path)
/// into an `SshRemote` with an empty path.
pub fn parse_ssh_remote_nopath<'a>(remote_id: &'a str, port: Option<u16>) -> Option<SshRemote<'a>> {
    // Split an optional "user@" prefix; the rest is the host.
    let ssh_user_host = Regex::new(r"^([^@]*)@(.*)$").unwrap();
    let (user, host) = match ssh_user_host.captures(remote_id) {
        Some(cap) => (
            Some(cap.get(1).unwrap().as_str()),
            cap.get(2).unwrap().as_str(),
        ),
        None => (None, remote_id),
    };
    Some(SshRemote {
        user,
        host,
        port,
        id: remote_id,
        path: "",
        local_repo_root: None,
        pijul_cmd: super::remote_pijul_cmd(),
    })
}

pub fn parse_remote<'a>(
    remote_id: &'a str,
    port: Option<u16>,
    base_path: Option<&'a Path>,
    local_repo_root: Option<&'a Path>,
) -> Remote<'a> {
    let uri = Regex::new(r"^([a-zA-Z]*)://(.*)$").unwrap();
    if uri.is_match(remote_id) {
        let cap = uri.captures(remote_id).unwrap();
        if &cap[1] == "file" {
            if let Some(a) = base_path {
                let path = a.join(&cap[2]);
                Remote::Local {
                    path: { RepoRoot { repo_root: path } },
                }
            } else {
                let path = Path::new(&cap[2]).to_path_buf();
                Remote::Local {
                    path: { RepoRoot { repo_root: path } },
                }
            }
        } else {
            Remote::Uri { uri: remote_id }
        }
    } else if let Some(rem) = parse_ssh_remote(remote_id, port, local_repo_root) {
        Remote::Ssh(rem)
    } else {
        if let Some(a) = base_path {
            let path = a.join(remote_id);
            Remote::Local { path: path }
            Remote::Local {
                path: RepoRoot { repo_root: path },
            }
        } else {
            let path = Path::new(remote_id).to_path_buf();
            Remote::Local { path: path }
            Remote::Local {
                path: RepoRoot { repo_root: path },
            }
        }

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144

145
146
use libpijul::fs_representation::RepoRoot;
use super::ask::{ask_changes, ChangesDirection};
use super::record;
use chrono;
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::patch::{Patch, PatchFlags, UnsignedPatch};
use libpijul::{Inode, InodeUpdate, Repository, ToPrefixes};
use rand;
use std;
use std::collections::HashSet;
use std::path::{Path, PathBuf};

/// Build the clap definition of the `revert` subcommand.
pub fn invocation() -> StaticSubcommand {
    // Declare each flag separately, then assemble the subcommand in
    // the same order as they are displayed in `--help`.
    let repository = Arg::with_name("repository")
        .long("repository")
        .takes_value(true)
        .help("Local repository.");
    let all = Arg::with_name("all")
        .short("a")
        .long("all")
        .help("Answer 'y' to all questions")
        .takes_value(false);
    let branch = Arg::with_name("branch")
        .help("Branch to revert to.")
        .long("branch")
        .takes_value(true);
    let prefix = Arg::with_name("prefix")
        .help("Prefix to start from")
        .takes_value(true)
        .multiple(true);
    let patience = Arg::with_name("patience")
        .long("patience")
        .help("Use patience diff instead of the default (Myers diff)")
        .conflicts_with("myers")
        .takes_value(false);
    let myers = Arg::with_name("myers")
        .long("myers")
        .help("Use Myers diff")
        .conflicts_with("patience")
        .takes_value(false);

    SubCommand::with_name("revert")
        .about("Rewrite the working copy from the pristine")
        .arg(repository)
        .arg(all)
        .arg(branch)
        .arg(prefix)
        .arg(patience)
        .arg(myers)
}

/// Run `pijul revert`: record the unrecorded changes as a throwaway
/// "pending" patch (optionally letting the user pick which changes to
/// keep), then rewrite the working copy from the pristine.
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let yes_to_all = args.is_present("all");
    let branch_name = opts.branch();
    let prefix = record::prefix(args, &opts)?;
    // Generate the pending patch.
    let (pending, pending_syncs): (_, HashSet<_>) = if !yes_to_all || prefix.is_some() {
        let repo = opts.open_and_grow_repo(409600)?;
        let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
        let (changes, syncs) = {
            // Diff the working copy against the pristine, restricted
            // to `prefix` when given.
            let (changes, syncs) = record::changes_from_prefixes(
                if args.is_present("patience") {
                    libpijul::DiffAlgorithm::Patience
                } else {
                    libpijul::DiffAlgorithm::Myers
                },
                &opts.repo_root,
                &mut txn,
                &branch_name,
                prefix.as_ref(),
            )?;
            let changes: Vec<_> = changes
                .into_iter()
                .map(|x| txn.globalize_record(x))
                .collect();
            if yes_to_all {
                // Keep nothing: revert everything under the prefix.
                (Vec::new(), HashSet::new())
            } else {
                // Ask the user which changes should be kept.
                let (c, _empty_vec) = ask_changes(
                    &txn,
                    &opts.repo_root,
                    &opts.cwd,
                    &changes,
                    ChangesDirection::Revert,
                    &mut HashSet::new(),
                )?;
                // Keep only the changes answered 'y'; unanswered
                // indices default to false (reverted).
                let selected = changes
                    .into_iter()
                    .enumerate()
                    .filter(|&(i, _)| *(c.get(&i).unwrap_or(&false)))
                    .map(|(_, x)| x)
                    .collect();
                (selected, syncs)
            }
        };
        debug!("changes {:?}", changes);
        debug!("syncs {:?}", syncs);
        let branch = txn.get_branch(&branch_name).unwrap();
        let changes = changes.into_iter().flat_map(|x| x.into_iter()).collect();
        // An unnamed, unsigned patch holding the kept changes.
        let patch = txn.new_patch(
            &branch,
            Vec::new(),
            String::new(),
            None,
            chrono::Utc::now(),
            changes,
            std::iter::empty(),
            PatchFlags::empty(),
        );
        txn.commit()?;
        (patch, syncs)
    } else {
        // Nothing to keep: use an empty pending patch.
        (UnsignedPatch::empty().leave_unsigned(), HashSet::new())
    };

    let mut size_increase = None;
    let pristine = opts.pristine_dir();
    // Retry the output with a larger pristine when space runs out.
    loop {
        match output_repository(
            &opts.repo_root,
            &pristine,
            &branch_name,
            size_increase,
            prefix.as_ref(),
            &pending,
            &pending_syncs,
        ) {
            Err(ref e) if e.lacks_space() => {
                size_increase = Some(Repository::repository_size(&pristine).unwrap())
            }
            e => return e,
        }
    }
}

fn output_repository(
    r: &Path,
    r: &RepoRoot<impl AsRef<Path>>,
    pristine_dir: &Path,


1
2

3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162

163
164
165
166
167
168
169
170
171
172
173
174
175
176

177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210

211
212

213
214
use super::{ask, default_explain, BasicOptions, StaticSubcommand,
use libpijul::fs_representation::{RepoRoot, patch_file_name};
use clap::{Arg, ArgMatches, SubCommand};

use super::{ask, default_explain, get_current_branch, BasicOptions, StaticSubcommand,
use super::{ask, default_explain, validate_base58, BasicOptions, StaticSubcommand};
use meta::{load_signing_key, Global, Meta};
use std::collections::HashSet;
use std::path::Path;

use chrono;
use libpijul::fs_representation::{patch_file_name, RepoPath, RepoRoot};
use libpijul::patch::{Patch, PatchFlags};
use libpijul::{apply_resize, apply_resize_no_output, Hash, HashRef, PatchId};
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::iter;
use std::mem::drop;
use std::str::FromStr;

use commands::record::{decide_authors, decide_patch_message, record_args};
use error::Error;

/// Build the clap definition of the `rollback` subcommand, sharing
/// the common record-style arguments.
pub fn invocation() -> StaticSubcommand {
    // The patch argument accepts any number of base58 patch hashes.
    let patch_arg = Arg::with_name("patch")
        .help("Patch to roll back.")
        .takes_value(true)
        .multiple(true)
        .validator(validate_base58);
    let cmd = SubCommand::with_name("rollback").arg(patch_arg);
    record_args(cmd)
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let patches: Option<HashSet<Hash>> = args
        .values_of("patch")
        .map(|ps| ps.map(|x| Hash::from_base58(x).unwrap()).collect());

    let mut increase = 409600;
    let repo = opts.open_and_grow_repo(increase)?;
    let branch_name = opts.branch();

    let mut patches: HashMap<_, _> = if let Some(ref patches) = patches {
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch_name) {
            let mut patches_ = HashMap::new();
            for h in patches.iter() {
                debug!("unrecording {:?}", h);

                if let Some(internal) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, internal).is_some() {
                        let patch = load_patch(&opts.repo_root, h.as_ref());
                        patches_.insert(h.to_owned(), patch);
                        continue;
                    }
                }
                return Err(Error::BranchDoesNotHavePatch {
                    branch_name: branch.name.as_str().to_string(),
                    patch: h.to_owned(),
                });
            }
            patches_
        } else {
            HashMap::new()
        }
    } else {
        let mut patches: Vec<_> = {
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&branch_name) {
                txn.rev_iter_applied(&branch, None)
                    .map(|(t, h)| {
                        let ext = txn.get_external(h).unwrap();
                        let patch = load_patch(&opts.repo_root, ext);
                        (ext.to_owned(), Some(h.to_owned()), patch, t)
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        patches.sort_by(|&(_, _, _, a), &(_, _, _, b)| b.cmp(&a));
        let patches: Vec<(Hash, Option<PatchId>, Patch)> =
            patches.into_iter().map(|(a, b, c, _)| (a, b, c)).collect();
        // debug!("patches: {:?}", patches);
        let to_unrecord = ask::ask_patches(ask::Command::Unrecord, &patches).unwrap();
        debug!("to_unrecord: {:?}", to_unrecord);
        let patches: HashMap<_, _> = patches
            .into_iter()
            .filter(|&(ref k, _, _)| to_unrecord.contains(&k))
            .map(|(k, _, p)| (k, p))
            .collect();
        patches
    };

    let mut selected = Vec::new();
    loop {
        let hash = if let Some((hash, patch)) = patches.iter().next() {
            increase += patch.size_upper_bound() as u64;
            hash.to_owned()
        } else {
            break;
        };
        deps_dfs(&mut selected, &mut patches, &hash)
    }

    // Create the inverse changes.
    let mut changes = Vec::new();
    for &(ref hash, ref patch) in selected.iter() {
        debug!("inverting {:?}", patch);
        patch.inverse(hash, &mut changes)
    }

    let meta = Meta::load(&opts.repo_root).unwrap_or_else(|_| Meta::new());
    let (global, save_global) = Global::load()
        .map(|g| (g, false))
        .unwrap_or_else(|_| (Global::new(), true));

    if save_global {
        global.save().unwrap_or(())
    }

    // Create the inverse patch, and save it.
    let patch = {
        let authors_arg = args.values_of("author").map(|x| x.collect::<Vec<_>>());
        let patch_name_arg = args.value_of("message");
        let patch_descr_arg = args.value_of("description");

        let txn = repo.txn_begin()?;
        let authors = decide_authors(authors_arg, &meta, &global)?;

        let patch_date = args.value_of("date").map_or(Ok(chrono::Utc::now()), |x| {
            chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate {
                date: String::from(x),
            })
        })?;

        let (name, description) = decide_patch_message(
            patch_name_arg,
            patch_descr_arg,
            String::from(""),
            !args.is_present("no-editor"),
            &opts.repo_root,
            &meta,
            &global,
        )?;

        if let Some(branch) = txn.get_branch(&branch_name) {
            txn.new_patch(
                &branch,
                authors,
                name,
                description,
                patch_date,
                changes,
                iter::empty(),
                PatchFlags::empty(),
            )
        } else {
            unimplemented!()
        }
    };
    let patches_dir = patches_dir(&opts.repo_root);
    let patches_dir = opts.repo_root.patches_dir();
    let mut key = meta
        .signing_key
        .or(global.signing_key)
        .and_then(|s| load_signing_key(s).ok());
    let hash = if let Some(ref mut key) = key {
        key.check_author(&patch.header().authors)?;
        patch.save(&patches_dir, key.keys.get_mut(0))?
    } else {
        patch.save(&patches_dir, None)?
    };
    drop(repo);
    println!("Recorded patch {}", hash.to_base58());

    let is_current_branch = if let Ok(br) = get_current_branch(&opts.repo_root) {
    let is_current_branch = if let Ok(br) = opts.repo_root.get_current_branch() {
        br == opts.branch()
    } else {
        false
    };

    // Apply the inverse patch.
    loop {
        let app = if !is_current_branch {
            apply_resize_no_output(
                &opts.repo_root,
                &opts.branch(),
                iter::once(&hash),
                |_, _| (),
            )
            .map(|_| Vec::new())
        } else {
            apply_resize(
                libpijul::DiffAlgorithm::default(),
                &opts.repo_root,
                &opts.branch(),
                iter::once(&hash),
                &[] as &[RepoPath<&Path>],
                |_, _| (),
            )
        };
        match app {
            Err(ref e) if e.lacks_space() => {}
            Ok(_) => return Ok(()),
            Err(e) => return Err(From::from(e)),
        }
    }
}

fn load_patch(repo_root: &Path, ext: HashRef) -> Patch {
fn load_patch(repo_root: &RepoRoot<impl AsRef<Path>>, ext: HashRef) -> Patch {
    let base = patch_file_name(ext);
    let filename = patches_dir(repo_root).join(&base);
    let filename = repo_root.patches_dir().join(&base);
    debug!("filename: {:?}", filename);



1
2
3




6


9

11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81

82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114

115
116
117
118
119
120
121
122
123

124
125
            opts.repo_root.untracked_files(&txn, &opts.repo_root.repo_root).collect();
    repo_root: &RepoRoot<impl AsRef<Path>>,
                pathdiff::diff_paths(&repo_root.repo_root.as_ref().join(f.as_path()), &cwd) // TODO: clean up!
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::untracked_files;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::status::{unrecorded_changes, ChangeType};
use rand;

use std::collections::HashSet;
use std::path::{Path, PathBuf};
use std::rc::Rc;

const UNRECORDED_FILES: &'static str = r#"
Changes not yet recorded:
  (use "pijul record ..." to record a new patch)
"#;

const UNTRACKED_FILES: &'static str = r#"
Untracked files:
  (use "pijul add <file>..." to track them)
"#;

const CONFLICTED_FILES: &'static str = r#"
Unresolved conflicts:
  (fix conflicts and record the resolution with "pijul record ...")
"#;

/// Describe the `status` subcommand and its flags for clap.
pub fn invocation() -> StaticSubcommand {
    let repository = Arg::with_name("repository")
        .long("repository")
        .takes_value(true)
        .help("Local repository.");
    let short = Arg::with_name("short")
        .long("short")
        .short("s")
        .help("Output in short format");
    SubCommand::with_name("status")
        .about("Show working tree status")
        .arg(repository)
        .arg(short)
}

/// Report the outcome of `run` to the user by delegating to the shared
/// `default_explain` handler from the `commands` module.
pub fn explain(r: Result<(), Error>) {
    default_explain(r)
}

/// Entry point of `pijul status`: collect unrecorded changes, untracked
/// files and unresolved conflict files, then print them in either the
/// long or the short (`-s`) format.
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let branch_name = opts.branch();
    let repo = opts.open_and_grow_repo(409600)?;
    let wants_short = args.is_present("short");

    // Gather everything inside one scope so the transaction is dropped
    // before any printing happens.
    let (unrecorded, untracked, conflicts) = {
        let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
        let unrecorded = unrecorded_changes(&mut txn, &opts.repo_root, &branch_name)?;
        let untracked = opts
            .repo_root
            .untracked_files(&txn, &opts.repo_root.repo_root)
            .collect();
        let conflicts = txn.list_conflict_files(&branch_name, &[])?;
        (unrecorded, untracked, conflicts)
    };

    if wants_short {
        print_shortstatus(&opts.cwd, &opts.repo_root, unrecorded, untracked, conflicts);
    } else {
        print_longstatus(&branch_name, unrecorded, untracked, conflicts);
    }
    Ok(())
}

/// Print the human-readable (long) status report: unresolved conflicts,
/// then unrecorded changes, then untracked files, each section preceded
/// by a hint telling the user what to do about it.
///
/// NOTE(review): the stale `repo_root: &PathBuf` parameter left over from
/// the patch rendering has been removed — the call site passes four
/// arguments and the body never used it.
fn print_longstatus(
    branch: &str,
    changed: Vec<(Rc<RepoPath<PathBuf>>, ChangeType)>,
    untracked: HashSet<RepoPath<PathBuf>>,
    conflicts: Vec<RepoPath<PathBuf>>,
) {
    println!("On branch {}", branch);
    if changed.is_empty() && untracked.is_empty() && conflicts.is_empty() {
        println!("Nothing to record, working tree clean");
    }

    if !conflicts.is_empty() {
        println!("{}", CONFLICTED_FILES);
        for f in conflicts {
            println!("        {}", f.display());
        }
    }

    if !changed.is_empty() {
        println!("{}", UNRECORDED_FILES);
        for (f, t) in changed {
            // `{:10}` left-pads the change-type label to align file names.
            println!("        {:10} {}", t.long(), f.display());
        }
    }

    if !untracked.is_empty() {
        println!("{}", UNTRACKED_FILES);
        for f in untracked {
            println!("        {}", f.display());
        }
    }
}

fn print_shortstatus(
    cwd: &Path,
    repo_root: &PathBuf,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    changed: Vec<(Rc<RepoPath<PathBuf>>, ChangeType)>,
    untracked: HashSet<RepoPath<PathBuf>>,
    conflicts: Vec<RepoPath<PathBuf>>,
) {
    for f in conflicts {
        debug!("{:?} {:?}", repo_root.repo_root.as_ref(), f.as_path());
        println!(
            "C {}",
            pathdiff::diff_paths(&repo_root.as_path().join(f.as_path()), &cwd)
            pathdiff::diff_paths(&repo_root.repo_root.as_ref().join(f.as_path()), &cwd)
                .unwrap()

    let patches_dir = opts.repo_root.patches_dir();
5
6

7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148

149
150

151
152
use std::path::Path;

use error::Error;
use libpijul::fs_representation::{patch_file_name, RepoRoot};
use libpijul::patch::Patch;
use libpijul::{unrecord_no_resize, Hash, HashRef, PatchId};
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::mem::drop;

/// Describe the `unrecord` subcommand and its flags for clap.
///
/// The `return …;` form on the tail expression was unidiomatic; the
/// builder chain is now the function's final expression.
pub fn invocation() -> StaticSubcommand {
    SubCommand::with_name("unrecord")
        .about("Unrecord some patches (remove them without reverting them)")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help("Branch.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("patch")
                .help("Patch to unrecord.")
                .takes_value(true)
                .multiple(true)
                // Reject arguments that are not valid base58 hashes early.
                .validator(validate_base58),
        )
}

/// Entry point of `pijul unrecord`: determine which patches to remove
/// (from the command line, or interactively if none were given), pull in
/// their dependencies, and unrecord them from the branch.
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    // Patches explicitly named on the command line, parsed from base58.
    // `unwrap` is safe here: the clap validator (`validate_base58`)
    // already rejected non-base58 arguments.
    let patches: Option<HashSet<Hash>> = args
        .values_of("patch")
        .map(|ps| ps.map(|x| Hash::from_base58(x).unwrap()).collect());
    // Initial pristine-size budget; grown below by each patch's upper
    // bound, and doubled on "lacks space" errors.
    let mut increase = 409600;
    let repo = opts.open_and_grow_repo(increase)?;
    let branch_name = opts.branch();

    // Map from external hash to loaded patch for everything the user
    // asked to unrecord.
    let mut patches: HashMap<_, _> = if let Some(ref patches) = patches {
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch_name) {
            let mut patches_ = HashMap::new();
            for h in patches.iter() {
                debug!("unrecording {:?}", h);

                if let Some(internal) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, internal).is_some() {
                        let patch = load_patch(&opts.repo_root, h.as_ref());
                        patches_.insert(h.to_owned(), patch);

                        // Walk the reverse-dependency entries of this
                        // patch (the iterator is keyed, so stop once the
                        // key no longer matches `internal`).
                        for (_, revdep) in txn
                            .iter_revdep(Some((internal, None)))
                            .take_while(|&(q, _)| q == internal)
                        {
                            // If the branch has patch revdep, and
                            // revdep is not also to be unrecorded.
                            if patches
                                .iter()
                                .any(|p| txn.get_internal(p.as_ref()).unwrap() == revdep)
                            {
                                continue;
                            }
                            if txn.get_patch(&branch.patches, revdep).is_some() {
                                // A patch still on the branch depends on
                                // `h`: refuse to unrecord it.
                                let ext = txn.get_external(revdep).unwrap();
                                return Err(Error::PatchIsDependedUpon {
                                    hash: h.to_owned(),
                                    dependent: ext.to_owned(),
                                });
                            }
                        }
                        continue;
                    }
                }
                // Either the hash is unknown or the patch is not applied
                // on this branch.
                return Err(Error::BranchDoesNotHavePatch {
                    branch_name: branch.name.as_str().to_string(),
                    patch: h.to_owned(),
                });
            }
            patches_
        } else {
            HashMap::new()
        }
    } else {
        // No patches given: list the branch's applied patches (most
        // recent first) and ask the user which ones to unrecord.
        let mut patches: Vec<_> = {
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&branch_name) {
                txn.rev_iter_applied(&branch, None)
                    .map(|(t, h)| {
                        let ext = txn.get_external(h).unwrap();
                        let patch = load_patch(&opts.repo_root, ext);
                        (ext.to_owned(), Some(h.to_owned()), patch, t)
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        // Sort by application time, newest first.
        patches.sort_by(|&(_, _, _, a), &(_, _, _, b)| b.cmp(&a));
        let patches: Vec<(Hash, Option<PatchId>, Patch)> =
            patches.into_iter().map(|(a, b, c, _)| (a, b, c)).collect();
        // debug!("patches: {:?}", patches);
        let to_unrecord = ask::ask_patches(ask::Command::Unrecord, &patches).unwrap();
        debug!("to_unrecord: {:?}", to_unrecord);
        let patches: HashMap<_, _> = patches
            .into_iter()
            .filter(|&(ref k, _, _)| to_unrecord.contains(&k))
            .map(|(k, _, p)| (k, p))
            .collect();
        patches
    };

    // Drain `patches` in dependency order into `selected`, while growing
    // the size budget by each patch's upper bound.
    let mut selected = Vec::new();
    loop {
        let hash = if let Some((hash, patch)) = patches.iter().next() {
            increase += patch.size_upper_bound() as u64;
            hash.to_owned()
        } else {
            break;
        };
        deps_dfs(&mut selected, &mut patches, &hash)
    }
    // Close the repository handle before reopening via unrecord_no_resize.
    drop(repo);

    let repo_dir = opts.pristine_dir();
    // Retry with a doubled budget for as long as the operation fails for
    // lack of space.
    loop {
        match unrecord_no_resize(
            &repo_dir,
            &opts.repo_root,
            &branch_name,
            &mut selected,
            increase,
        ) {
            Err(ref e) if e.lacks_space() => increase *= 2,
            e => return e.map_err(|x| Error::Repository(x)),
        }
    }
}

fn load_patch(repo_root: &Path, ext: HashRef) -> Patch {
fn load_patch(repo_root: &RepoRoot<impl AsRef<Path>>, ext: HashRef) -> Patch {
    let base = patch_file_name(ext);
    let filename = patches_dir(repo_root).join(&base);
    let filename = repo_root.patches_dir().join(&base);
    debug!("filename: {:?}", filename);
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37

38
39
40

41
42
43
44
45
46
47
48
49
50
51
52
53
54
55



56
57
58
use error::Error;
use libpijul::fs_representation::RepoRoot;
use sequoia_openpgp::parse::Parse;
use sequoia_openpgp::serialize::Serialize;
use sequoia_openpgp::tpk::{CipherSuite, TPKBuilder};
use sequoia_openpgp::TSK;
use std;
use std::collections::BTreeMap;
use std::fs::{create_dir_all, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use thrussh_keys::key::KeyPair;
use toml;

pub const DEFAULT_REMOTE: &'static str = "remote";

/// A named remote repository entry, (de)serialized as part of `Meta`.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Repository {
    /// Address of the remote (host or URL) — format depends on the caller.
    pub address: String,
    /// Optional port number; semantics of `None` are decided by callers.
    pub port: Option<u16>,
}

/// Per-repository configuration, stored as TOML in the repository's
/// meta file (see `Meta::load` / `Meta::save`).
#[derive(Debug, Serialize, Deserialize)]
pub struct Meta {
    /// Default patch authors; empty if absent from the file.
    #[serde(default)]
    pub authors: Vec<String>,
    // Presumably the editor command used to compose patch metadata —
    // TODO(review): confirm against the code that reads it.
    pub editor: Option<String>,
    // Defaults used by the pull/push commands — confirm with callers.
    pub pull: Option<String>,
    pub push: Option<String>,
    /// Named remotes; empty map if absent from the file.
    #[serde(default)]
    pub remote: BTreeMap<String, Repository>,
    /// Signing key identifier, resolved via `load_signing_key` elsewhere.
    pub signing_key: Option<String>,
}

impl Meta {
    pub fn load(r: &Path) -> Result<Meta, Error> {
    pub fn load(r: &RepoRoot<impl AsRef<Path>>) -> Result<Meta, Error> {
        let mut str = String::new();
        {
            let mut f = File::open(meta_file(r))?;
            let mut f = File::open(r.meta_file())?;
            f.read_to_string(&mut str)?;
        }
        Ok(toml::from_str(&str)?)
    }
    /// Build an empty configuration: no authors, no remotes, and no
    /// editor, pull/push defaults, or signing key.
    pub fn new() -> Meta {
        Meta {
            editor: None,
            pull: None,
            push: None,
            signing_key: None,
            authors: Vec::new(),
            remote: BTreeMap::new(),
        }
    }
    pub fn save(&self, r: &Path) -> Result<(), Error> {
        let mut f = File::create(meta_file(r))?;
        let mut f = File::create(meta_file(r))?;
    pub fn save(&self, r: &RepoRoot<impl AsRef<Path>>) -> Result<(), Error> {
        let mut f = File::create(r.meta_file())?;
        let s: String = toml::to_string(&self)?;