pijul_org / pijul

Getting rid of error-chain in pijul (error descriptions were incompatible with libpijul)

By blabla on November 15, 2018
This patch is not signed.
6r4WMGeRiuk4g9pWLX3T8j2Q25gU9omEUvLENG1Zs16kdNyxjWhgwqbCyHjz52WhYmXLxwPpwH8bWiuYnnbdHjH8
This patch is in the following branches:
latest
master
testing

flate2 = "1.0"
ignore = "0.4"
error-chain = "0.12"
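The `error-chain = "0.12"` dependency above is what this patch drops from `Cargo.toml`; in its place, `error.rs` gets a hand-written `Error` enum, and each command's `run` now returns `Result<(), Error>` instead of error-chain's `Result<T>` alias. Below is a minimal sketch of the shape that type takes: the variant names are the ones used in the hunks that follow, but the `IO` wrapper, the `Display` messages and the trimmed-down variant list are illustrative assumptions, not the actual `error.rs` from this patch.

// error.rs (sketch): a hand-written error type replacing the error_chain! macro.
// The variant names below appear in the hunks of this patch; everything else is illustrative.
use std::fmt;
use std::io;
use std::path::PathBuf;

#[derive(Debug)]
pub enum Error {
    IO(io::Error),
    FileNotInRepository { path: String },
    CannotSpawnEditor { editor: String, cause: String },
    EmptyPatchName,
    InvalidPatchName,
    NoSuchBranch,
    PendingChanges,
    CannotDeleteCurrentBranch,
    InARepository { path: PathBuf },
    InvalidPath { path: PathBuf },
    IsDirectory,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::IO(ref e) => write!(f, "{}", e),
            Error::FileNotInRepository { ref path } => {
                write!(f, "File {} is not tracked by the repository", path)
            }
            Error::CannotSpawnEditor { ref editor, ref cause } => {
                write!(f, "Cannot start the editor {:?}: {}", editor, cause)
            }
            Error::EmptyPatchName => write!(f, "Empty patch name"),
            Error::InvalidPatchName => write!(f, "Patch names must fit on a single line"),
            Error::NoSuchBranch => write!(f, "Branch not found"),
            Error::PendingChanges => write!(f, "There are pending changes in the working copy"),
            Error::CannotDeleteCurrentBranch => write!(f, "Cannot delete the current branch"),
            Error::InARepository { ref path } => write!(f, "Already in a repository: {:?}", path),
            Error::InvalidPath { ref path } => write!(f, "Invalid path: {:?}", path),
            Error::IsDirectory => write!(f, "Expected a file, found a directory"),
        }
    }
}

// `?` keeps working in the commands because the wrapped errors convert into `Error`.
impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::IO(e)
    }
}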
use super::{get_current_branch, validate_base58, BasicOptions, ErrorKind};
use super::{get_current_branch, validate_base58, BasicOptions};
            Arg::with_name("json")
                .long("json")
                .help(
                    "Accept patch in JSON format (for debugging only).",
                )
                return Err(Error::FileNotInRepository { path: rec.to_string() });
        if args.is_present("json") {
            let patch:Patch = serde_json::from_reader(&buf[..]).unwrap();
            let path = opts.patches_dir();
            let h = patch.save(&path, None)?;
            remote.insert(h);
        } else {
            while let Ok((h, _, patch)) = Patch::from_reader_compressed(&mut buf_) {
                debug!("{:?}", patch);
                {
                    let mut path = opts.patches_dir();
                    path.push(h.to_base58());
                    path.set_extension("gz");
                    let mut f = File::create(&path)?;
                    f.write_all(&buf[i..(buf.len() - buf_.len())])?;
                    i = buf.len() - buf_.len();
                }
                remote.insert(h);
            }
use super::{validate_base58, BasicOptions};
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, StaticSubcommand};
use error::Error;
use libpijul::patch::Patch;
use libpijul::{apply_resize, apply_resize_no_output, Hash, RepoPath};
use std::collections::HashSet;
use std::fs::File;
use std::io::{stdin, Read, Write};
use std::path::Path;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("apply")
        .about("Apply a patch")
        .arg(
            Arg::with_name("patch")
                .help(
                    "Hash of the patch to apply, in base58. If no patch is given, patches are \
                     read from the standard input.",
                )
                .takes_value(true)
                .multiple(true)
                .validator(validate_base58),
        )
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help(
                    "Path to the repository where the patches will be applied. Defaults to the \
                     repository containing the current directory.",
                )
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help(
                    "The branch to which the patches will be applied. Defaults to the current \
                     branch.",
                )
                .takes_value(true),
        )
        .arg(
            Arg::with_name("no-output")
                .long("no-output")
                .help("Only apply the patch, don't output it to the repository."),
        );
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    debug!("applying");
    let mut remote = HashSet::new();

    // let remote: HashSet<Hash> =
    let mut has_patches = false;
    if let Some(hashes) = args.values_of("patch") {
        remote.extend(hashes.map(|h| Hash::from_base58(&h).unwrap()));
        has_patches = true
    }

    if !has_patches {
        // Read patches in gz format from stdin.
        let mut buf = Vec::new();
        stdin().read_to_end(&mut buf)?;

        let mut buf_ = &buf[..];
        let mut i = 0;
        while let Ok((h, _, patch)) = Patch::from_reader_compressed(&mut buf_) {
            debug!("{:?}", patch);

            {
                let mut path = opts.patches_dir();
                path.push(h.to_base58());
                path.set_extension("gz");
                let mut f = File::create(&path)?;
                f.write_all(&buf[i..(buf.len() - buf_.len())])?;
                i = buf.len() - buf_.len();
            }

            remote.insert(h);
        }
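As an aside, the loop above works because `Patch::from_reader_compressed` advances the `buf_` slice as it reads, so `buf.len() - buf_.len()` is always the offset of the first unconsumed byte, and `&buf[i..(buf.len() - buf_.len())]` is exactly the compressed bytes of the patch that was just parsed, which is what gets written back out to the patches directory. A standalone illustration of that slicing pattern (plain `std::io::Read` on a byte slice, not pijul code):

use std::io::Read;

fn main() {
    let buf: Vec<u8> = (0u8..10).collect();
    let mut buf_: &[u8] = &buf[..];
    let mut i = 0;

    // Consume two "records" of 4 and 6 bytes, standing in for what
    // Patch::from_reader_compressed does to `buf_` above.
    for record_len in &[4usize, 6] {
        let mut scratch = vec![0u8; *record_len];
        buf_.read_exact(&mut scratch).unwrap();

        // buf.len() - buf_.len() is the position of the first unread byte,
        // so this slice is exactly what was consumed since the mark `i`.
        let consumed = &buf[i..(buf.len() - buf_.len())];
        assert_eq!(consumed, &scratch[..]);
        i = buf.len() - buf_.len();
    }
}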


use error::Error;
            .map_err(|e| Error::CannotSpawnEditor { editor: editor.to_owned(), cause: e.to_string() })?;
use getch;
use libpijul::patch::{Change, ChangeContext, Patch, PatchHeader, Record};
use std::io::prelude::*;

use std::collections::{HashMap, HashSet};
use std::ffi::OsString;
use std::io::stdout;
use std::path::{Path, PathBuf};
use std::rc::Rc;

use regex::Regex;

use commands::pretty_repo_path;
use libpijul::fs_representation::{RepoPath, RepoRoot, PIJUL_DIR_NAME};

use error::{Error, ErrorKind};
use atty;
use error::Error;
use libpijul::{EdgeFlags, Hash, LineId, MutTxn, PatchId};
use rand;
use std;
use std::char::from_u32;
use std::fs::{remove_file, File};
use std::process;
use std::str;
use term;
use term::{Attr, StdoutTerminal};

use ignore::gitignore::GitignoreBuilder;
use line;

const BINARY_CONTENTS: &'static str = "<binary contents>";
#[derive(Clone, Copy)]
pub enum Command {
    Pull,
    Push,
    Unrecord,
}

impl Command {
    fn verb(&self) -> &'static str {
        match *self {
            Command::Push => "push",
            Command::Pull => "pull",
            Command::Unrecord => "unrecord",
        }
    }
}

fn print_section(term: &mut Option<Box<StdoutTerminal>>, title: &str, contents: &str) {
    if let Some(ref mut term) = *term {
        term.attr(Attr::Bold).unwrap_or(());
    }
    let mut stdout = std::io::stdout();
    write!(stdout, "{}", title).unwrap_or(());
    if let Some(ref mut term) = *term {
        term.reset().unwrap_or(());
    }
    writeln!(stdout, "{}", contents).unwrap_or(());
}

fn remove_escape_codes(f: &str) -> std::borrow::Cow<str> {
    if f.as_bytes().contains(&27) {
        std::borrow::Cow::Owned(f.chars().filter(|&c| c != 27 as char).collect())
    } else {
        std::borrow::Cow::Borrowed(f)
    }
}

pub fn print_patch_descr(
    term: &mut Option<Box<StdoutTerminal>>,
    hash: &Hash,
    internal: Option<PatchId>,
    patch: &PatchHeader,
) {
    print_section(term, "Hash:", &format!(" {}", &hash.to_base58()));
    if let Some(internal) = internal {
        print_section(term, "Internal id:", &format!(" {}", &internal.to_base58()));
    }

    print_section(
        term,
        "Authors:",
        &format!(" {}", remove_escape_codes(&patch.authors.join(", "))),
    );
    print_section(term, "Timestamp:", &format!(" {}", patch.timestamp));

    let is_tag = if !patch.flag.is_empty() { "TAG: " } else { "" };

    let mut stdout = std::io::stdout();
    writeln!(
        stdout,
        "\n    {}{}",
        is_tag,
        remove_escape_codes(&patch.name)
    )
    .unwrap_or(());
    if let Some(ref d) = patch.description {
        writeln!(stdout, "").unwrap_or(());
        let d = remove_escape_codes(d);
        for descr_line in d.lines() {
            writeln!(stdout, "    {}", descr_line).unwrap_or(());
        }
    }
    writeln!(stdout, "").unwrap_or(());
}

fn check_forced_decision(
    command: Command,
    choices: &HashMap<&Hash, bool>,
    rev_dependencies: &HashMap<&Hash, Vec<&Hash>>,
    a: &Hash,
    b: &Patch,
) -> Option<bool> {
    let covariant = match command {
        Command::Pull | Command::Push => true,
        Command::Unrecord => false,
    };
    // If we've selected patches that depend on a, and this is a pull
    // or a push, select a.
    if let Some(x) = rev_dependencies.get(a) {
        for y in x {
            // Here, y depends on a.
            //
            // If this command is covariant, and we've selected y, select a.
            // If this command is covariant, and we've unselected y, don't do anything.
            //
            // If this command is contravariant, and we've selected y, don't do anything.
            // If this command is contravariant, and we've unselected y, unselect a.
            if let Some(&choice) = choices.get(y) {
                if choice == covariant {
                    return Some(covariant);
                }
            }
        }
    };

    // If we've unselected dependencies of a, unselect a.
    for y in b.dependencies().iter() {
        // Here, a depends on y.
        //
        // If this command is covariant, and we've selected y, don't do anything.
        // If this command is covariant, and we've unselected y, unselect a.
        //
        // If this command is contravariant, and we've selected y, select a.
        // If this command is contravariant, and we've unselected y, don't do anything.

        if let Some(&choice) = choices.get(&y) {
            if choice != covariant {
                return Some(!covariant);
            }
        }
    }

    None
}

fn interactive_ask(
    getch: &getch::Getch,
    a: &Hash,
    patchid: Option<PatchId>,
    b: &Patch,
    command_name: Command,
    show_help: bool,
) -> Result<(char, Option<bool>), Error> {
    let mut term = if atty::is(atty::Stream::Stdout) {
        term::stdout()
    } else {
        None
    };
    print_patch_descr(&mut term, a, patchid, b);

    if show_help {
        display_help(command_name);
        print!("Shall I {} this patch? ", command_name.verb());
    } else {
        print!("Shall I {} this patch? [ynkad?] ", command_name.verb());
    }

    stdout().flush()?;
    match getch.getch().ok().and_then(|x| from_u32(x as u32)) {
        Some(e) => {
            println!("{}", e);
            let e = e.to_uppercase().next().unwrap_or('\0');
            match e {
                'A' => Ok(('Y', Some(true))),
                'D' => Ok(('N', Some(false))),
                e => Ok((e, None)),
            }
        }
        _ => Ok(('\0', None)),
    }
}

fn display_help(c: Command) {
    println!("Available options: ynkad?");
    println!("y: {} this patch", c.verb());
    println!("n: don't {} this patch", c.verb());
    println!("k: go bacK to the previous patch");
    println!("a: {} all remaining patches", c.verb());
    println!("d: finish, skipping all remaining patches");
    println!("")
}

/// Patches might have a dummy "changes" field here.
pub fn ask_patches(
    command: Command,
    patches: &[(Hash, Option<PatchId>, Patch)],
) -> Result<Vec<Hash>, Error> {
    let getch = getch::Getch::new();
    let mut i = 0;

    // Record of the user's choices.
    let mut choices: HashMap<&Hash, bool> = HashMap::new();

    // For each patch, the list of patches that depend on it.
    let mut rev_dependencies: HashMap<&Hash, Vec<&Hash>> = HashMap::new();

    // Decision for the remaining patches ('a' or 'd'), if any.
    let mut final_decision = None;
    let mut show_help = false;

    while i < patches.len() {
        let (ref a, patchid, ref b) = patches[i];
        let forced_decision = check_forced_decision(command, &choices, &rev_dependencies, a, b);

        // Is the decision already forced by a previous choice?
        let e = match final_decision.or(forced_decision) {
            Some(true) => 'Y',
            Some(false) => 'N',
            None => {
                debug!("decision not forced");
                let (current, remaining) =
                    interactive_ask(&getch, a, patchid, b, command, show_help)?;
                final_decision = remaining;
                current
            }
        };

        show_help = false;

        debug!("decision: {:?}", e);
        match e {
            'Y' => {
                choices.insert(a, true);
                match command {
                    Command::Pull | Command::Push => {
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
                    Command::Unrecord => {}
                }
                i += 1
            }
            'N' => {
                choices.insert(a, false);
                match command {
                    Command::Unrecord => {
                        for ref dep in b.dependencies().iter() {
                            let d = rev_dependencies.entry(dep).or_insert(vec![]);
                            d.push(a)
                        }
                    }
                    Command::Pull | Command::Push => {}
                }
                i += 1
            }
            'K' if i > 0 => {
                let (ref a, _, _) = patches[i];
                choices.remove(a);
                i -= 1
            }
            '?' => {
                show_help = true;
            }
            _ => {}
        }
    }
    Ok(patches
        .into_iter()
        .filter_map(|&(ref hash, _, _)| {
            if let Some(true) = choices.get(hash) {
                Some(hash.to_owned())
            } else {
                None
            }
        })
        .collect())
}

/// Compute the dependencies of this change.
fn change_deps(
    id: usize,
    c: &Record<ChangeContext<Hash>>,
    provided_by: &mut HashMap<LineId, usize>,
) -> HashSet<LineId> {
    let mut s = HashSet::new();
    for c in c.iter() {
        match *c {
            Change::NewNodes {
                ref up_context,
                ref down_context,
                ref line_num,
                ref nodes,
                ..
            } => {
                for cont in up_context.iter().chain(down_context) {
                    if cont.patch.is_none() && !cont.line.is_root() {
                        s.insert(cont.line.clone());
                    }
                }
                for i in 0..nodes.len() {
                    provided_by.insert(*line_num + i, id);
                }
            }
            Change::NewEdges { ref edges, .. } => {
                for e in edges {
                    if e.from.patch.is_none() && !e.from.line.is_root() {
                        s.insert(e.from.line.clone());
                    }
                    if e.to.patch.is_none() && !e.from.line.is_root() {
                        s.insert(e.to.line.clone());
                    }
                }
            }
        }
    }
    s
}


fn print_record<T: rand::Rng>(
    repo_root: &RepoRoot<impl AsRef<Path>>,
fn print_change<T: rand::Rng>(
    term: &mut Option<Box<StdoutTerminal>>,
    cwd: &Path,
    repo: &MutTxn<T>,
    current_file: &mut Option<Rc<RepoPath<PathBuf>>>,
    c: &Record<ChangeContext<Hash>>,
) -> Result<(), Error> {
    match *c {
        Record::FileAdd {
            ref name,
            ref contents,
            ..
        } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::CYAN).unwrap_or(());
            }
            print!("added file ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, name, cwd).display());
            if let Some(ref change) = contents {
                print_change(term, repo, 0, 0, change)?;
            }
            Ok(())
        }
        Record::FileDel {
            ref name,
            ref contents,
            ..
        } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::MAGENTA).unwrap_or(());
            }
            print!("deleted file: ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, name, cwd).display());
            if let Some(ref change) = contents {
                print_change(term, repo, 0, 0, change)?;
            }
            Ok(())
        }
        Record::FileMove { ref new_name, .. } => {
            if let Some(ref mut term) = *term {
                term.fg(term::color::YELLOW).unwrap_or(());
            }
            print!("file moved to: ");
            if let Some(ref mut term) = *term {
                term.reset().unwrap_or(());
            }
            println!("{}", pretty_repo_path(repo_root, new_name, cwd).display());
            Ok(())
        }
        Record::Change {
            ref change,
            ref replacement,
            ref file,
            old_line,
            new_line,
            ..
        } => {

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                            if let Some(ref mut term) = *term {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
            let mut file_changed = true;
            if let Some(ref cur_file) = *current_file {
                if file == cur_file {
                    file_changed = false;
                }
            }
            if file_changed {
                if let Some(ref mut term) = *term {
                    term.attr(Attr::Bold).unwrap_or(());
                    term.attr(Attr::Underline(true)).unwrap_or(());
                }
                println!("{}", pretty_repo_path(repo_root, file, cwd).display());
                if let Some(ref mut term) = *term {
                    term.reset().unwrap_or(());
                }
                *current_file = Some(file.clone())
            }

            print_change(term, repo, old_line, new_line, change)?;
            if let Some(ref c) = *replacement {
                print_change(term, repo, old_line, new_line, c)?
            }
            Ok(())
        }
    }
}

fn print_change<T: rand::Rng>(
    term: &mut Option<Box<StdoutTerminal>>,
    repo: &MutTxn<T>,
    old_line: usize,
    new_line: usize,
    change: &Change<ChangeContext<Hash>>,
) -> Result<(), Error> {
    match *change {
        Change::NewNodes {
            // ref up_context,ref down_context,ref line_num,
            ref flag,
            ref nodes,
            ..
        } => {
            if flag.contains(EdgeFlags::FOLDER_EDGE) {
                for n in nodes {
                    if n.len() >= 2 {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::CYAN).unwrap_or(());
                        }
                        print!("new file ");
                        if let Some(ref mut term) = *term {
                            term.reset().unwrap_or(());
                        }
                        println!("{}", str::from_utf8(&n[2..]).unwrap_or(""));
                    }
                }
            } else {
                if new_line > 0 {
                    println!("From line {}\n", new_line);
                }

                for n in nodes {
                    let s = str::from_utf8(n).unwrap_or(BINARY_CONTENTS);
                    if let Some(ref mut term) = *term {
                        term.fg(term::color::GREEN).unwrap_or(());
                    }
                    print!("+ ");
                    if let Some(ref mut term) = *term {
                        term.reset().unwrap_or(());
                    }
                    if s.ends_with("\n") {
                        print!("{}", s);
                    } else {
                        println!("{}", s);
                    }
                }
            }
            Ok(())
        }
        Change::NewEdges {
            ref edges, flag, ..
        } => {
            let mut h_targets = HashSet::with_capacity(edges.len());

            if old_line > 0 {
                println!("From line {}\n", old_line);
            }
            for e in edges {
                let (target, flag) = if !flag.contains(EdgeFlags::PARENT_EDGE) {
                    if h_targets.insert(&e.to) {
                        (Some(&e.to), flag)
                    } else {
                        (None, flag)
                    }
                } else {
                    if h_targets.insert(&e.from) {
                        (Some(&e.from), flag)
                    } else {
                        (None, flag)
                    }
                };
                if let Some(target) = target {
                    let internal = repo.internal_key_unwrap(target);
                    let l = repo.get_contents(internal).unwrap();
                    let l = l.into_cow();
                    let s = str::from_utf8(&l).unwrap_or(BINARY_CONTENTS);

                    if flag.contains(EdgeFlags::DELETED_EDGE) {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::RED).unwrap_or(());
                        }
                        print!("- ");
                    } else {
                        if let Some(ref mut term) = *term {
                            term.fg(term::color::GREEN).unwrap_or(());
                        }
                        print!("+ ");
                    }
                    if let Some(ref mut term) = *term {
                        term.reset().unwrap_or(());
                    }
                    if s.ends_with("\n") {
                        print!("{}", s)
                    } else {
                        println!("{}", s)
                    }
                }
            }
            Ok(())
        }
    }
}

#[derive(Clone, Copy, Debug)]
pub enum ChangesDirection {
    Record,
    Revert,
}

impl ChangesDirection {
    fn is_record(&self) -> bool {
        match *self {
            ChangesDirection::Record => true,
            _ => false,
        }
    }
    fn verb(&self) -> &str {
        match *self {
            ChangesDirection::Record => "record",
            ChangesDirection::Revert => "revert",
        }
    }
}

fn display_help_changes(
    potential_new_ignore: Option<&RepoPath<impl AsRef<Path>>>,
    direction: ChangesDirection,
) {
    println!("Available options:");
    println!("y: {} this change", direction.verb());
    println!("n: don't {} this change", direction.verb());
    println!(
        "f: {} the rest of the changes to this file",
        direction.verb()
    );
    println!(
        "s: don't {} the rest of the changes to this file",
        direction.verb()
    );
    println!("k: go back to the previous change");
    println!("a: {} all remaining changes", direction.verb());
    println!("d: skip all remaining changes");
    match potential_new_ignore {
        Some(filename) => println!("i: ignore file {}", filename.display()),
        None => (),
    }
    println!("")
}

fn prompt_one_change<T: rand::Rng>(
    repository: &MutTxn<T>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    change: &Record<ChangeContext<Hash>>,
    current_file: &mut Option<Rc<RepoPath<PathBuf>>>,
    n_changes: usize,
    i: usize,
    direction: ChangesDirection,
    potential_new_ignore: Option<&RepoPath<impl AsRef<Path>>>,
    terminal: &mut Option<Box<StdoutTerminal>>,
    getch: &getch::Getch,
    cwd: &Path,
    show_help: bool,
) -> Result<(char, Option<char>, Option<char>), Error> {
    debug!("changes: {:?}", change);
    print_record(repo_root, terminal, cwd, repository, current_file, &change)?;
    println!("");
    let choices = if potential_new_ignore.is_some() {
        "[ynsfkadi?]"
    } else {
        "[ynsfkad?]"
    };
    if show_help {
        display_help_changes(potential_new_ignore, direction);
        print!(
            "Shall I {} this change? ({}/{}) ",
            direction.verb(),
            i + 1,
            n_changes
        );
    } else {
        print!(
            "Shall I {} this change? ({}/{}) {} ",
            direction.verb(),
            i + 1,
            n_changes,
            choices
        );
    }
    stdout().flush()?;
    match getch.getch().ok().and_then(|x| from_u32(x as u32)) {
        Some(e) => {
            println!("{}\n", e);
            let e = e.to_uppercase().next().unwrap_or('\0');
            match e {
                'A' => Ok(('Y', Some('Y'), None)),
                'D' => Ok(('N', Some('N'), None)),
                'F' => Ok(('Y', None, Some('Y'))),
                'S' => Ok(('N', None, Some('N'))),
                e => Ok((e, None, None)),
            }
        }
        _ => Ok(('\0', None, None)),
    }
}

fn add_to_ignore_file(
    file: &RepoPath<impl AsRef<Path>>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    new_ignored_patterns: &mut Vec<String>,
    new_ignore_builder: &mut GitignoreBuilder,
) {
    loop {
        let pat = read_line_with_suggestion(
            "Pattern to add to ignore file (relative to repository root, empty to add nothing)? ",
            &file.as_path().to_string_lossy(),
        );
        if pat.is_empty() {
            return;
        };

        let mut ignore_builder = GitignoreBuilder::new(&repo_root.repo_root);
        let add_ok = match ignore_builder.add_line(None, &pat) {
            Ok(i) => match i.build() {
                Ok(i) => i
                    .matched_path_or_any_parents(file.as_path(), false)
                    .is_ignore(),
                Err(e) => {
                    println!("could not match pattern {}: {}", &pat, e);
                    false
                }
            },
            Err(e) => {
                println!("did not understand pattern {}: {}", &pat, e);
                false
            }
        };
        if add_ok {
            new_ignore_builder.add_line(None, &pat).unwrap();
            new_ignored_patterns.push(pat);
            return;
        }
        println!(
            "pattern {} is incorrect or does not match {}",
            pat,
            &file.display()
        );
    }
}

pub fn ask_changes<T: rand::Rng>(
    repository: &MutTxn<T>,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    cwd: &Path,
    changes: &[Record<ChangeContext<Hash>>],
    direction: ChangesDirection,
    to_unadd: &mut HashSet<RepoPath<PathBuf>>,
) -> Result<(HashMap<usize, bool>, Vec<String>), Error> {
    debug!("changes: {:?}", changes);
    let mut terminal = if atty::is(atty::Stream::Stdout) {
        term::stdout()
    } else {
        None
    };
    let getch = getch::Getch::new();
    let mut i = 0;
    let mut choices: HashMap<usize, bool> = HashMap::new();
    let mut new_ignored_patterns: Vec<String> = Vec::new();
    let mut new_ignore_builder = GitignoreBuilder::new(&repo_root.repo_root);
    let mut final_decision = None;
    // Determines what decision to make on the remaining changes in the same file.
    let mut file_decision: Option<char> = None;
    let mut provided_by = HashMap::new();
    let mut line_deps = Vec::with_capacity(changes.len());
    for i in 0..changes.len() {
        line_deps.push(change_deps(i, &changes[i], &mut provided_by));
    }
    let mut deps: HashMap<usize, Vec<usize>> = HashMap::new();
    let mut rev_deps: HashMap<usize, Vec<usize>> = HashMap::new();
    for i in 0..changes.len() {
        for dep in line_deps[i].iter() {
            debug!("provided: i {}, dep {:?}", i, dep);
            let p = provided_by.get(dep).unwrap();
            debug!("provided: p= {}", p);

            let e = deps.entry(i).or_insert(Vec::new());
            e.push(*p);

            let e = rev_deps.entry(*p).or_insert(Vec::new());
            e.push(i);
        }
    }

    let empty_deps = Vec::new();
    let mut current_file = None;
    let mut show_help = false;

    while i < changes.len() {
        let decision=
            // If one of our dependencies has been unselected (with "n")
            if deps.get(&i)
            .unwrap_or(&empty_deps)
            .iter()
            .any(|x| { ! *(choices.get(x).unwrap_or(&true)) }) {
                Some(false)
            } else if rev_deps.get(&i).unwrap_or(&empty_deps)
            .iter().any(|x| { *(choices.get(x).unwrap_or(&false)) }) {
                // If we are a dependency of someone selected (with "y").
                Some(true)
            } else {
                None
            };

        let decision = match changes[i] {
            Record::FileAdd { ref name, .. } => {
                let i = new_ignore_builder.build().unwrap();
                if i.matched_path_or_any_parents(name.as_path(), false)
                    .is_ignore()
                {
                    Some(false)
                } else {
                    None
                }
            }
            _ => decision,
        };
        let potential_new_ignore: Option<&RepoPath<PathBuf>> = match direction {
            ChangesDirection::Revert => None,
            ChangesDirection::Record => match changes[i] {
                Record::FileAdd { ref name, .. } => Some(&name),
                _ => None,
            },
        };
        let (e, f, file_d) = match decision {
            Some(true) => ('Y', final_decision, file_decision),
            Some(false) => ('N', final_decision, file_decision),
            None => {
                if let Some(d) = final_decision {
                    (d, Some(d), file_decision)
                } else {
                    let command_decisions = if let Some(ref f) = current_file {
                        file_decision.and_then(|d| match changes[i] {
                            Record::Change { ref file, .. } => {
                                if f == file {
                                    Some((d, final_decision, Some(d)))
                                } else {
                                    None
                                }
                            }
                            _ => None,
                        })
                    } else {
                        None
                    };

                    if let Some(res) = command_decisions {
                        res
                    } else {
                        prompt_one_change(
                            repository,
                            repo_root,
                            &changes[i],
                            &mut current_file,
                            changes.len(),
                            i,
                            direction,
                            potential_new_ignore,
                            &mut terminal,
                            &getch,
                            cwd,
                            show_help,
                        )?
                    }
                }
            }
        };

        show_help = false;

        final_decision = f;
        file_decision = file_d;
        match e {
            'Y' => {
                choices.insert(i, direction.is_record());
                match changes[i] {
                    Record::FileAdd { ref name, .. } => {
                        to_unadd.remove(&name);
                    }
                    _ => (),
                }
                i += 1
            }
            'N' => {
                choices.insert(i, !direction.is_record());
                i += 1
            }
            'K' if i > 0 => {
                choices.remove(&i);
                i -= 1
            }
            'I' => match potential_new_ignore {
                Some(file) => {
                    add_to_ignore_file(
                        file,
                        repo_root,
                        &mut new_ignored_patterns,
                        &mut new_ignore_builder,
                    );
                    choices.insert(i, !direction.is_record());
                    i += 1;
                }
                _ => {}
            },
            '?' => {
                show_help = true;
            }
            _ => {}
        }
    }
    Ok((choices, new_ignored_patterns))
}

fn read_line(s: &str) -> String {
    print!("{}", s);
    if let Some(mut term) = line::Terminal::new() {
        term.read_line().unwrap()
    } else {
        let stdin = std::io::stdin();
        let mut stdin = stdin.lock().lines();
        if let Some(Ok(x)) = stdin.next() {
            x
        } else {
            String::new()
        }
    }
}

pub fn read_line_with_suggestion(prompt: &str, _suggestion: &str) -> String {
    read_line(prompt)
}

pub fn ask_authors() -> Result<Vec<String>, Error> {
    std::io::stdout().flush()?;
    Ok(vec![read_line("What is your name <and email address>? ")])
}

pub fn ask_patch_name(
    repo_root: &RepoRoot<impl AsRef<Path>>,
    maybe_editor: Option<&String>,
    template: String,
) -> Result<(String, Option<String>), Error> {
    let repo_root = repo_root.repo_root.as_ref();
    if let Some(editor) = maybe_editor {
        let mut patch_name_file = repo_root.to_path_buf();
        patch_name_file.push(PIJUL_DIR_NAME);
        patch_name_file.push("PATCH_NAME");

        debug!("patch name file: {:?}", patch_name_file);

        // Initialize the PATCH_NAME file with the template given as an argument to
        // this function. `File::create` truncates the file if it already exists.
        // FIXME: should we ask users if they want to use the previous content
        // instead?
        let _ =
            File::create(patch_name_file.as_path())?.write_all(template.into_bytes().as_slice())?;

        let mut editor_cmd = editor
            .trim()
            .split(" ")
            .map(OsString::from)
            .collect::<Vec<_>>();

        editor_cmd.push(patch_name_file.clone().into_os_string());

        process::Command::new(&editor_cmd[0])
            .args(&editor_cmd[1..])
            .current_dir(repo_root)
            .status()
            .map_err(|e| ErrorKind::CannotSpawnEditor(editor.to_owned(), e.to_string()))?;
            .map_err(|e| Error::CannotSpawnEditor {
                editor: editor.to_owned(),
                cause: e.to_string(),
            })?;
        // if we are here, it means the editor must have stopped and we can read
        // the content of PATCH_NAME.

        // In case of error, we assume the file has not been created and treat
        // it as empty.
        let mut patch_name =
            File::open(patch_name_file.as_path()).map_err(|_| ErrorKind::EmptyPatchName)?;
            File::open(patch_name_file.as_path()).map_err(|_| Error::EmptyPatchName)?;
        let mut patch_name_content = String::new();
        patch_name.read_to_string(&mut patch_name_content)?;

        // we are done with PATCH_NAME, so delete it
        remove_file(patch_name_file)?;

        // Now we parse the file. `(?s:.)` is the regex crate's syntax for making
        // `.` also match `\n`, so `.(?s:.)*` means at least one character that is
        // not a newline, followed by the rest, newlines included.
        let re_with_desc = Regex::new(r"^([^\n]+)\n\s*(.(?s:.)*)$").unwrap();
        let re_without_desc = Regex::new(r"^([^\n]+)\s*$").unwrap();

        if let Some(capt) = re_without_desc.captures(patch_name_content.as_ref()) {
            debug!("patch name without description");
            if capt[1].chars().any(|x| x == '\n' || x == '\r') {
                return Err(Error::InvalidPatchName);
            }
            Ok((String::from(&capt[1]), None))
        } else if let Some(capt) = re_with_desc.captures(patch_name_content.as_ref()) {
            debug!("patch name with description");

            // In the description, we ignore lines starting with `#`, and we
            // remove trailing and leading space. The `map()` call is necessary
            // because the elements yielded by `lines()` do not contain the
            // newline character, so without it `collect` would return a String
            // with a single line.
            //
            // Note that, in the current implementation, it remains possible to
            // start the patch name with `#`.
            let descr: String = capt[2]
                .lines()
                .filter(|l| !l.starts_with("#"))
                .map(|x| format!("{}\n", x))
                .collect::<String>()
                .trim()
                .into();

            // If, once cleaned up, the description is empty, then we prefer
            // using `None` rather than `Some("")`.
            if descr.is_empty() {
                Ok((String::from(&capt[1]), None))
            } else {
                Ok((String::from(&capt[1]), Some(String::from(descr.trim()))))
            }
        } else {
            debug!("couldn't get a valid patch name");
            debug!("the content was:");
            debug!("=======================");
            debug!("{}", patch_name_content);
            debug!("=======================");
            Err(ErrorKind::EmptyPatchName.into())
            Err(Error::EmptyPatchName)
        }
    } else {
        std::io::stdout().flush()?;

        let res = read_line("What is the name of this patch? ");
        debug!("res = {:?}", res);
        if res.trim().is_empty() {
            Err(ErrorKind::EmptyPatchName.into())
            Err(Error::EmptyPatchName)
        } else {

use error::Error;
use clap::{Arg, ArgMatches, SubCommand};

use super::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::{in_repo_root, RepoPath};
use libpijul::patch::UnsignedPatch;
use libpijul::{FileStatus, RecordState, ToPrefixes};
use rand;
use std::collections::HashSet;
use std::path::Path;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("checkout")
        .about("Change the current branch")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .help("Branch to switch to.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("path")
                .long("path")
                .help("Partial path to check out.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("force")
                .short("f")
                .long("force")
                .takes_value(false)
                .help("Only verify that there are no unrecorded files moves, deletions or additions (ignores unrecorded changes in files). Much faster on large repositories."),
        );
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    if let Some(branch) = args.value_of("branch") {
        checkout(
            &opts,
            branch,
            args.is_present("force"),
            args.value_of("path").map(|p| RepoPath(Path::new(p))),
        )
    } else {
        Err(ErrorKind::NoSuchBranch.into())
        Err(Error::NoSuchBranch)
    }
}

pub fn checkout(
    opts: &BasicOptions,
    branch: &str,
    force: bool,
    partial_path: Option<RepoPath<&Path>>,
) -> Result<(), Error> {
    let mut force = force;
    let mut provision = 409600;

    loop {
        match try_checkout(opts, branch, force, provision, partial_path) {
            Err(ref e) if e.lacks_space() => {
                provision = provision * 2;
                force = true;
            }
            x => return x,
        }
    }
}

pub fn try_checkout(
    opts: &BasicOptions,
    branch_name: &str,
    force: bool,
    provision: u64,
    partial_path: Option<RepoPath<&Path>>,
) -> Result<(), Error> {
    let repo = opts.open_and_grow_repo(provision)?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let current_branch = opts.repo_root.get_current_branch()?;
    // We need to check at least that there are no file
    // moves/additions/deletions, because these would be
    // overwritten by the checkout, sometimes causing Pijul to
    // panic.
    if force {
        // Check whether there are file moves.
        if txn
            .iter_inodes(None)
            .any(|(_, ch)| ch.status != FileStatus::Ok)
        {
            return Err(ErrorKind::PendingChanges.into());
            return Err(Error::PendingChanges);
        }
    } else {
        // Check whether there are more general changes.
        let mut record = RecordState::new();
        let current_branch = txn.open_branch(&current_branch)?;
        txn.record(
            libpijul::DiffAlgorithm::default(),
            &mut record,
            &current_branch,
            &opts.repo_root,
            &in_repo_root(),
        )?;
        txn.commit_branch(current_branch)?;
        let (changes, _) = record.finish();

        if !changes.is_empty() {
            return Err(ErrorKind::PendingChanges.into());
            return Err(Error::PendingChanges);
        }
    }

    debug!("output repository");

    let mut branch = if let Some(branch) = txn.get_branch(branch_name) {
        branch
    } else {
        return Err(ErrorKind::NoSuchBranch.into());
        return Err(Error::NoSuchBranch);
    };
    let pref = if let Some(partial) = partial_path {
        (&[partial][..]).to_prefixes(&txn, &branch)
    } else {
        (&[][..] as &[RepoPath<&Path>]).to_prefixes(&txn, &branch)
    };
    txn.output_repository(
        &mut branch,
        &opts.repo_root,
        &pref,
        &UnsignedPatch::empty().leave_unsigned(),
        &HashSet::new(),
    )?;
    txn.commit_branch(branch)?;

    txn.commit()?;

    opts.repo_root.set_current_branch(branch_name)?;

    println!("Current branch: {:?}", opts.repo_root.get_current_branch()?);
    Ok(())
}

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)

        ).unwrap();
use clap::{Arg, ArgMatches, SubCommand};
use commands::remote::{parse_remote, Remote};
use commands::{assert_no_containing_repo, create_repo, default_explain, StaticSubcommand};
use error::{Error, ErrorKind};
use error::Error;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::{Hash, DEFAULT_BRANCH};
use regex::Regex;
use std::io::{stderr, Write};
use std::path::Path;
use std::process::exit;
use tempfile::tempdir_in;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("clone")
        .about("Clone a remote branch")
        .arg(
            Arg::with_name("from")
                .help("Repository to clone.")
                .required(true),
        )
        .arg(
            Arg::with_name("from_branch")
                .long("from-branch")
                .help("The branch to pull from")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("to_branch")
                .long("to-branch")
                .help("The branch to pull into")
                .takes_value(true),
        )
        .arg(Arg::with_name("to").help("Target."))
        .arg(
            Arg::with_name("from_path")
                .long("path")
                .help("Only pull patches relative to that path.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("patch")
                .long("patch")
                .help("Pull a patch and its dependencies.")
                .takes_value(true)
                .multiple(true),
        )
        .arg(
            Arg::with_name("port")
                .short("p")
                .long("port")
                .help("Port of the remote ssh server.")
                .takes_value(true)
                .validator(|val| {
                    let x: Result<u16, _> = val.parse();
                    match x {
                        Ok(_) => Ok(()),
                        Err(_) => Err(val),
                    }
                }),
        );
}
#[derive(Debug)]
pub struct Params<'a> {
    pub from: Remote<'a>,
    pub from_branch: &'a str,
    pub from_path: Vec<RepoPath<&'a Path>>,
    pub to: Remote<'a>,
    pub to_branch: &'a str,
}

pub fn parse_args<'a>(args: &'a ArgMatches) -> Params<'a> {
    // At least one must not use its "port" argument
    let from = parse_remote(
        args.value_of("from").unwrap(),
        args.value_of("port").and_then(|x| Some(x.parse().unwrap())),
        None,
        None,
    );
    let to = if let Some(to) = args.value_of("to") {
        parse_remote(
            to,
            args.value_of("port").and_then(|x| Some(x.parse().unwrap())),
            None,
            None,
        )
    } else {
        let basename = Regex::new(r"([^/:]+)").unwrap();
        let from = args.value_of("from").unwrap();
        if let Some(to) = basename.captures_iter(from).last().and_then(|to| to.get(1)) {
            parse_remote(
                to.as_str(),
                args.value_of("port").and_then(|x| Some(x.parse().unwrap())),
                None,
                None,
            )
        } else {
            panic!("Could not parse target")
        }
    };
    let from_branch = args.value_of("from_branch").unwrap_or(DEFAULT_BRANCH);
    let from_path = args
        .values_of("from_path")
        .map(|x| x.map(|p| RepoPath(Path::new(p))).collect())
        .unwrap_or(Vec::new());
    let to_branch = args.value_of("to_branch").unwrap_or(from_branch);
    Params {
        from,
        from_branch,
        from_path,
        to,
        to_branch,
    }
}

pub fn run(args_: &ArgMatches) -> Result<(), Error> {
    let args = parse_args(args_);
    debug!("{:?}", args);
    match args.to {
        Remote::Local { path: repo_root } => {
            assert_no_containing_repo(&repo_root.repo_root)?;

            let parent = repo_root.repo_root.parent().unwrap();
            let tmp_dir = tempdir_in(parent)?;
            {
                create_repo(tmp_dir.path())?;
                let tmp_root = RepoRoot {
                    repo_root: tmp_dir.path(),
                };
                let mut session = args.from.session()?;
                let mut pullable: Vec<_> = if let Some(patches) = args_.values_of("patch") {
                    let mut p = Vec::new();
                    for x in patches {
                        p.push((Hash::from_base58(x).unwrap(), 0))
                    }
                    p
                } else {
                    session.changes(args.from_branch, &args.from_path[..])?
                };
                session.pull(
                    &tmp_root,
                    args.to_branch,
                    &mut pullable,
                    &args.from_path,
                    true,
                )?;
                tmp_root.set_current_branch(args.to_branch)?;
            }
            let path = tmp_dir.into_path();
            std::fs::rename(&path, &repo_root.repo_root)?;
            Ok(())
        }
        _ => {
            // Clone between remote repositories.
            match args.from {
                Remote::Local { path } => {
                    let mut to_session = args.to.session()?;
                    debug!("remote init");
                    to_session.remote_init()?;
                    debug!("pushable?");
                    let pushable = to_session.pushable_patches(
                        args.from_branch,
                        args.to_branch,
                        &path,
                        &args.from_path,
                    )?;
                    debug!("pushable = {:?}", pushable);
                    let pushable = pushable.pushable.into_iter().map(|(h, _, _)| h).collect();
                    to_session.push(&path, args.to_branch, pushable)?;
                    path.set_current_branch(args.to_branch)
                        .map_err(|x| x.into())
                }
                _ => unimplemented!(),
            }
        }
    }
}

pub fn explain(res: Result<(), Error>) {
    if let Err(Error(ref kind, _)) = res {
        if let ErrorKind::InARepository(ref p) = *kind {
            writeln!(
                stderr(),
                "error: Cannot clone onto / into existing repository {}",
                p.display()
            ).unwrap();
            exit(1)
            exit(1)
        }
    if let Err(Error::InARepository { ref path }) = res {
        writeln!(
            stderr(),
            "error: Cannot clone onto / into existing repository {:?}",
            path
        )
        .unwrap();
        exit(1)
    }
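The `explain` hunk above shows the other half of the migration: with error-chain, errors were built with `ErrorKind::PendingChanges.into()` (as in the checkout hunks) and inspected by destructuring the generated `Error(ref kind, _)` tuple; with the hand-written enum, construction and matching both name the variant directly. A minimal sketch of that calling convention, assuming the `Error` enum sketched near the top of this patch:

use error::Error; // as in the command modules above
use std::process::exit;

pub fn check_pending(pending: bool) -> Result<(), Error> {
    if pending {
        // error-chain style was: return Err(ErrorKind::PendingChanges.into());
        return Err(Error::PendingChanges);
    }
    Ok(())
}

pub fn explain_sketch(res: Result<(), Error>) {
    // error-chain style was:
    //     if let Err(Error(ref kind, _)) = res {
    //         if let ErrorKind::PendingChanges = *kind { ... }
    //     }
    if let Err(Error::PendingChanges) = res {
        eprintln!("error: there are pending changes in the working copy");
        exit(1)
    }
}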

                return Err(Error::IsDirectory);
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::{Error, ErrorKind};
use error::Error;
use clap::{Arg, ArgMatches, SubCommand};
use error::Error;
use rand;

use super::{default_explain, BasicOptions, StaticSubcommand};

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("prune")
        .about("Delete a branch in the local repository")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .help("Branch to delete.")
                .takes_value(true)
                .required(true),
        );
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    debug!("args {:?}", args);
    let opts = BasicOptions::from_args(args)?;
    let branch = args.value_of("branch").unwrap();
    let current_branch = opts.repo_root.get_current_branch()?;
    if current_branch == branch {
        return Err(ErrorKind::CannotDeleteCurrentBranch.into());
        return Err(Error::CannotDeleteCurrentBranch);
    }
    let repo = opts.open_repo()?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let at_least_two_branches = {
        let mut it = txn.iter_branches(None);
        it.next();
        it.next().is_some()
    };
    if at_least_two_branches {
        if !txn.drop_branch(&branch)? {
            return Err(ErrorKind::NoSuchBranch.into());
            return Err(Error::NoSuchBranch);
        };
        txn.commit()?;
        Ok(())
    } else {
        if txn.get_branch(&branch).is_none() {
            Err(ErrorKind::NoSuchBranch.into())
            Err(Error::NoSuchBranch)
        } else {
            Err(ErrorKind::CannotDeleteCurrentBranch.into())
            Err(Error::CannotDeleteCurrentBranch)
        }
    }
}

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)


        .ok_or(Error::NoSuchBranch)?;
                .ok_or(Error::InvalidPath { path: user_input })?
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, ScanScope, StaticSubcommand};
use error::Error;
use flate2::write::GzEncoder;
use flate2::Compression;
use libpijul::{graph, Branch, Edge, Key, PatchId, Repository, Txn, ROOT_KEY};
use std::fs::{remove_file, File};
use std::io::{stdout, Write};
use std::path::{Path, PathBuf};
use tar::{Builder, Header};

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("dist")
        .about("Produces a tar.gz archive of the repository")
        .arg(
            Arg::with_name("archive")
                .short("d")
                .takes_value(true)
                .required(true)
                .help("File name of the output archive."),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help("The branch from which to make the archive, defaults to the current branch.")
                .takes_value(true)
                .required(false),
        )
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Repository where to work.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("stdout")
                .long("stdout")
                .short("s")
                .help("Prints the resulting archive to stdout")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("dir")
                .help("Directory (or file) to archive, defaults to the whole repository.")
                .takes_value(true),
        );
}

pub fn dist<W: Write>(
    repo: Repository,
    branch_name: &str,
    scope: ScanScope,
    archive_name: &str,
    encoder: GzEncoder<W>,
) -> Result<()> {
) -> Result<(), Error> {
    let txn = repo.txn_begin()?;
    let branch = txn.get_branch(branch_name).ok_or(Error::NoSuchBranch)?;
    let mut current_path = Path::new(archive_name).to_path_buf();
    let mut archive = Builder::new(encoder);
    let mut buffer = graph::Writer::new(Vec::new());
    let mut forward = Vec::new();

    let key = match scope {
        ScanScope::FromRoot => ROOT_KEY,
        ScanScope::WithPrefix(prefix, user_input) => {
            let inode = txn.find_inode(&prefix)?;
            txn.get_inodes(inode)
                .map(|key| key.key.to_owned())
                .ok_or(error::ErrorKind::InvalidPath(user_input))?
                .ok_or(Error::InvalidPath {
                    path: PathBuf::from(user_input),
                })?
        }
    };
    archive_rec(
        &txn,
        &branch,
        key,
        &mut archive,
        &mut buffer,
        &mut forward,
        &mut current_path,
    )?;

    archive
        .into_inner()?
        .finish()?
        .flush()
        .map_err(|x| x.into())
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;

    let archive_name = args.value_of("archive").unwrap();

    let repo = opts.open_repo()?;
    let scan = opts.scan_scope()?;

    if args.is_present("stdout") {
        let encoder = GzEncoder::new(stdout(), Compression::best());

        dist(repo, &opts.branch(), scan, archive_name, encoder)
    } else {
        let archive_path = PathBuf::from(archive_name.to_string() + ".tar.gz");

        let encoder = GzEncoder::new(File::create(&archive_path)?, Compression::best());

        dist(repo, &opts.branch(), scan, archive_name, encoder).map_err(|err| {
            // The creation of the archive failed, so try to remove the
            // partial file, ignoring any error from the removal itself.
            // Removal should not fail in practice: either the file was never
            // created at all, or we have permission to remove it, since we
            // are its creator.
            let _ = remove_file(archive_path);
            err
        })
    }
}
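
// Aside (not part of this patch): a minimal, self-contained sketch of the
// clean-up-on-failure pattern used in `run` above. If producing an output
// file fails midway, the partial file is removed on a best-effort basis and
// the original error is propagated. The file name and the failing step are
// hypothetical.
fn write_or_cleanup(path: &std::path::Path) -> std::io::Result<()> {
    use std::fs::{remove_file, File};
    use std::io::Write;
    let mut f = File::create(path)?;
    f.write_all(b"partial output").map_err(|err| {
        // Ignore a failed removal: the error reported stays the original one.
        let _ = remove_file(path);
        err
    })
}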

fn archive_rec<W: Write>(
    txn: &Txn,
    branch: &Branch,
    key: Key<PatchId>,
    builder: &mut Builder<W>,
    buffer: &mut graph::Writer<Vec<u8>>,
    forward: &mut Vec<(Key<PatchId>, Edge)>,
    current_path: &mut PathBuf,
) -> Result<()> {
) -> Result<(), Error> {
    let files = txn.list_files_under_node(branch, key);

    for (key, names) in files {
        debug!("archive_rec: {:?} {:?}", key, names);
        if names.len() > 1 {
            error!("file has several names: {:?}", names);
        }
        current_path.push(names[0].1);
        if names[0].0.is_dir() {
            archive_rec(txn, branch, key, builder, buffer, forward, current_path)?;
        } else {
            buffer.clear();
            let mut graph = txn.retrieve(&branch, key);
            txn.output_file(branch, buffer, &mut graph, forward)?;
            let mut header = Header::new_gnu();
            header.set_path(&current_path)?;
            header.set_size(buffer.len() as u64);
            header.set_mode(names[0].0.permissions() as u32);
            header.set_cksum();
            builder.append(&header, &buffer[..])?;
        }
        current_path.pop();
    }
    Ok(())
}
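
// Aside (not part of this patch): a self-contained sketch of the tar-building
// calls `archive_rec` relies on, writing a single in-memory buffer into an
// archive under a given path. The archive goes into a Vec<u8> here instead of
// a GzEncoder, purely to keep the example small.
fn tar_one_file(name: &str, contents: &[u8]) -> std::io::Result<Vec<u8>> {
    use tar::{Builder, Header};
    let mut builder = Builder::new(Vec::new());
    let mut header = Header::new_gnu();
    header.set_path(name)?;
    header.set_size(contents.len() as u64);
    header.set_mode(0o644);
    header.set_cksum();
    builder.append(&header, contents)?;
    // `into_inner` finishes the archive and hands back the underlying writer.
    builder.into_inner()
}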

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)


fn patch_dependencies(hash_str: &str, repo_root: &PathBuf) -> Result<Vec<Hash>, Error> {
        Hash::from_base58(hash_str).ok_or::<Error>(Error::WrongHash)?,
use clap::{Arg, ArgGroup, ArgMatches, SubCommand};
use commands::checkout::checkout;
use libpijul::fs_representation::RepoRoot;
use libpijul::{apply_resize_no_output, Hash};
use rand;
use std::mem;
use std::path::Path; // PathBuf;

use super::{default_explain, BasicOptions, StaticSubcommand};
use error::{Error, ErrorKind, Result};
use error::Error;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("fork")
        .about("Create a new branch")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help("Branch.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("patch")
                .long("patch")
                .help("A patch hash, preferably a tag.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("to")
                .help("Name of the new branch.")
                .takes_value(true)
                .required(true),
        )
        .group(
            ArgGroup::with_name("source")
                .required(false)
                .args(&["branch", "patch"]),
        );
}

fn patch_dependencies(hash_str: &str, repo_root: &PathBuf) -> Result<Vec<Hash>> {
pub fn run(args: &ArgMatches) -> Result<()> {
fn patch_dependencies(
    hash_str: &str,
    repo_root: &RepoRoot<impl AsRef<Path>>,
) -> Result<Vec<Hash>, Error> {
    let mut deps = Vec::new();
    let mut current = vec![Hash::from_base58(hash_str).ok_or::<Error>(Error::WrongHash)?];
    let mut next = Vec::new();

    while !current.is_empty() {
        for hash in current.drain(..) {
            deps.push(hash.clone());
            let patch = repo_root.read_patch(hash.as_ref())?;

            for hash_dep in patch.dependencies().iter() {
                let h = hash_dep.to_owned();

                if !deps.contains(&h) {
                    next.push(h);
                }
            }
        }

        mem::swap(&mut next, &mut current);
    }

    deps.reverse();

    Ok(deps)
}
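
// Aside (not part of this patch): a small sketch of the traversal
// `patch_dependencies` performs, i.e. collecting the transitive dependencies
// of a starting patch breadth-first. The in-memory `deps` map stands in for
// reading each patch's dependency list from disk.
fn transitive_deps(
    start: &str,
    deps: &std::collections::HashMap<&str, Vec<&str>>,
) -> Vec<String> {
    let mut seen: Vec<String> = Vec::new();
    let mut current = vec![start.to_string()];
    let mut next = Vec::new();
    while !current.is_empty() {
        for node in current.drain(..) {
            if seen.contains(&node) {
                continue;
            }
            for dep in deps.get(node.as_str()).into_iter().flatten() {
                next.push(dep.to_string());
            }
            seen.push(node);
        }
        std::mem::swap(&mut current, &mut next);
    }
    // Deepest dependencies first, as `patch_dependencies` does with `reverse`.
    seen.reverse();
    seen
}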

pub fn has_branch(opts: &BasicOptions, branch_name: &str) -> Result<bool> {
pub fn has_branch(opts: &BasicOptions, branch_name: &str) -> Result<bool, Error> {
    let repo = opts.open_repo()?;
    let txn = repo.txn_begin()?;

    Ok(txn.has_branch(branch_name))
}

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let to = args.value_of("to").unwrap();

    if !has_branch(&opts, to)? {
        if let Some(ref hash) = args.value_of("patch") {
            debug!(
                "Creating a new branch {:?} with dependencies of {:?}",
                to, hash
            );

            let deps = patch_dependencies(hash, &opts.repo_root)?;

            apply_resize_no_output(&opts.repo_root, to, deps.iter(), |_, _| ())?;

            println!("Branch {:?} has been created.", to);

            checkout(&opts, to, false, None)
        } else {
            let repo = opts.open_repo()?;
            let mut txn = repo.mut_txn_begin(rand::thread_rng())?;

            let br = opts.branch();
            let branch = txn.open_branch(&br)?;
            let new_branch = txn.fork(&branch, to)?;

            txn.commit_branch(branch)?;
            txn.commit_branch(new_branch)?;

            let partials = txn
                .iter_partials(&br)
                .take_while(|&(k, _)| k.as_str() == &br)
                .map(|(_, v)| v)
                .collect::<Vec<_>>();
            for &key in partials.iter() {
                txn.put_partials(to, key)?;
            }
            txn.commit()?;

            opts.repo_root.set_current_branch(to)?;

            Ok(())
        }
    } else {
        Err(ErrorKind::BranchAlreadyExists.into())
        Err(Error::BranchAlreadyExists)
    }
}

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)



use error::Error;
                        return Err(Error::InvalidPath { path: err });
                    return Err(Error::InvalidPath { path: err });
use clap::ArgMatches;
use commands::BasicOptions;
use error::{ErrorKind, Result};
use error::Error;
use libpijul;
use libpijul::fs_representation::RepoRoot;
use rand;
use std::fs::{canonicalize, metadata};
use std::path::{Path, PathBuf};

#[derive(Debug, Clone, Copy)]
pub enum Operation {
    Add,
    Remove,
}

pub fn run(args: &ArgMatches, op: Operation) -> Result<()> {
pub fn run(args: &ArgMatches, op: Operation) -> Result<(), Error> {
    debug!("fs_operation {:?}", op);
    let opts = BasicOptions::from_args(args)?;

    debug!("repo {:?}", &opts.repo_root);
    let mut extra_space = 409600;
    let recursive = args.is_present("recursive");
    loop {
        let touched_files = match args.values_of("files") {
            Some(l) => l.map(|p| Path::new(p).to_owned()).collect(),
            None => vec![],
        };
        match really_run(
            &opts.repo_root,
            &opts.cwd,
            touched_files,
            recursive,
            op,
            extra_space,
        ) {
            Err(ref e) if e.lacks_space() => extra_space *= 2,
            e => return e,
        }
    }
}
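
// Aside (not part of this patch): a generic sketch of the grow-and-retry
// pattern used in `run` above. Start from an initial guess for the extra
// space and double it whenever the operation reports running out of room.
// `do_work` and `is_out_of_space` are hypothetical stand-ins for `really_run`
// and `Error::lacks_space`.
fn retry_with_more_space<E>(
    mut do_work: impl FnMut(u64) -> Result<(), E>,
    is_out_of_space: impl Fn(&E) -> bool,
) -> Result<(), E> {
    let mut extra_space = 409_600;
    loop {
        match do_work(extra_space) {
            Err(ref e) if is_out_of_space(e) => extra_space *= 2,
            res => return res,
        }
    }
}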

fn really_run(
    //    repo_dir: &RepoRoot<&'static Path>,
    repo_dir: &RepoRoot<PathBuf>,
    wd: &Path,
    mut files: Vec<PathBuf>,
    recursive: bool,
    op: Operation,
    extra_space: u64,
) -> Result<()> {
) -> Result<(), Error> {
    debug!("files {:?}", files);
    let mut rng = rand::thread_rng();
    let repo = repo_dir.open_repo(Some(extra_space))?;
    let mut txn = repo.mut_txn_begin(&mut rng)?;
    match op {
        Operation::Add => {
            for file_ in files.drain(..) {
                let p = canonicalize(wd.join(&file_))?;
                if recursive {
                    debug!("adding from {:?}", p);
                    let mut files = Vec::new();
                    for file in repo_dir.untracked_files(&txn, &p) {
                        debug!("untracked {:?}", file);

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                        if let Ok(file) = file.strip_prefix(r) {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
                        let m = metadata(repo_dir.absolutize(&file))?;
                        files.push((file, m.is_dir()));

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                    }
                    for (file, is_dir) in files {
                        match txn.add_file(&file, is_dir) {
                            Ok(()) => {}
                            Err(libpijul::Error::AlreadyAdded) => {
                                eprintln!("{:?} is already in the repository", file_)
                            }
                            Err(e) => return Err(e.into()),
                        }
                    }
                    continue;
                } else {
                    let m = metadata(&p)?;
                    if let Ok(file) = repo_dir.relativize(&p) {
                        match txn.add_file(&file, m.is_dir()) {
                            Ok(()) => {}
                            Err(libpijul::Error::AlreadyAdded) => {
                                eprintln!("{:?} is already in the repository", file_)
                            }
                            Err(e) => return Err(e.into()),
                        }
                        continue;
                    }
                }
                return Err(Error::InvalidPath { path: file_ });
            }
        }
        Operation::Remove => {
            for file in &files[..] {
                debug!("file: {:?} {:?}", file, wd.join(file));
                let p = wd.join(file).canonicalize()?;
                debug!("p: {:?}", p);
                if let Ok(file) = repo_dir.relativize(&p) {
                    debug!("remove_file {:?}", file);
                    txn.remove_file(&file)?
                } else {
                    return Err(Error::InvalidPath {
                        path: file.to_path_buf(),
                    });
                }


use error::Error;
            return Err(Error::HookFailed { cmd: String::from(hook) });
use error::Error;
use libpijul::fs_representation::{RepoRoot, PIJUL_DIR_NAME};
use std::path::Path;
use std::process::Command;

use error::{ErrorKind, Result};


pub fn run_hook(
    repo_root: &RepoRoot<impl AsRef<Path>>,
    hook: &'static str,
    additional_arg: Option<&String>,
) -> Result<()> {
) -> Result<(), Error> {
    let repo_root = &repo_root.repo_root;
    let mut cmd = repo_root.as_ref().to_path_buf();
    cmd.push(PIJUL_DIR_NAME);
    cmd.push("hooks");
    cmd.push(hook);

    if cmd.is_file() {
        println!("Running hook: {}", hook);

        let arg = match additional_arg {
            Some(ref arg) => vec![*arg],
            None => vec![],
        };

        let output = Command::new(cmd.as_path())
            .args(arg)
            .current_dir(repo_root)
            .output()?;

        if !output.status.success() {
            if let Ok(err) = String::from_utf8(output.stderr) {
                print!("{}", err);
            }
            return Err(ErrorKind::HookFailed(String::from(hook)).into());
            return Err(Error::HookFailed {
                cmd: String::from(hook),
            });
        }
use clap::{Arg, ArgMatches, SubCommand};
use commands::{create_repo, default_explain, StaticSubcommand};
use error::{Error, ErrorKind, Result};
use error::Error;
use std::env::current_dir;
use std::io::{stderr, Write};
use std::path::Path;
use std::process::exit;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("init")
        .about("Create a new repository")
        .arg(
            Arg::with_name("directory")
                .index(1)
                .help("Where to create the repository, defaults to the current directory.")
                .required(false),
        );
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    // Since the location may not exist yet, we cannot always canonicalize
    // here. That does not matter, because `find_repo_root` will explore the
    // whole path anyway.
    let wd = match args.value_of("directory").map(Path::new) {
        Some(r) if r.is_relative() => current_dir()?.join(r),
        Some(r) => r.to_path_buf(),
        None => current_dir()?,
    };
    create_repo(&wd)
}

pub fn explain(r: Result<()>) {
    if let Err(Error(ref kind, _)) = r {
        if let ErrorKind::InARepository(ref p) = *kind {
            writeln!(stderr(), "Repository {} already exists", p.display()).unwrap();
            exit(1)
        }
pub fn explain(r: Result<(), Error>) {
    if let Err(Error::InARepository { ref path }) = r {
        writeln!(stderr(), "Repository {:?} already exists", path).unwrap();
        exit(1)
    }
use commands::StaticSubcommand;
use error::Error;
use meta;
use meta::{load_signing_key, Meta};
use std::io::{stderr, Write};
use std::process::exit;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("key")
        .about("Manage signing and SSH keys")
        .subcommand(
            SubCommand::with_name("upload")
                .about("Upload keys to a remote server")
                .arg(Arg::with_name("port")
                     .long("port")
                     .short("p")
                     .help("Port of the SSH server.")
                     .takes_value(true)
                     .required(false))
                .arg(Arg::with_name("local")
                     .long("local")
                     .help("Save keys for the local repository only")
                     .takes_value(false)
                     .required(false))
                .arg(Arg::with_name("address")
                     .help("Address to use, for instance pijul_org@nest.pijul.com.")
                     .takes_value(true)
                     .required(true))
        )
        .subcommand(
            SubCommand::with_name("gen")
                .about("Generate keys. This command generates an SSH key if --signing-id is not given")
                .arg(Arg::with_name("signing-id")
                     .long("signing-id")
                     .help("Generate a signing key for this user id (user ids are email addresses)")
                     .takes_value(true))
                .arg(Arg::with_name("repository")
                     .long("for-repository")
                     .help("Save keys for the given repository only")
                     .takes_value(true)
                     .required(false))
        )
        .subcommand(
            SubCommand::with_name("register")
                .setting(AppSettings::Hidden)
                .about("Register a signature key given in binary on the standard input")
        );
}

pub enum Params<'a> {
    Upload { address: &'a str, port: Option<u16> },
    Gen { signing: Option<&'a str> },
    Register,
    None,
}

pub fn parse_args<'a>(args: &'a ArgMatches) -> Result<Params<'a>, Error> {
    match args.subcommand() {
        ("upload", Some(args)) => Ok(Params::Upload {
            address: args.value_of("address").unwrap(),
            port: args.value_of("port").and_then(|x| x.parse().ok()),
        }),
        ("gen", Some(args)) => Ok(Params::Gen {
            signing: args.value_of("signing-id"),
        }),
        ("register", _) => Ok(Params::Register),
        _ => Ok(Params::None),
    }
}

pub fn run(arg_matches: &ArgMatches) -> Result<(), Error> {
    let mut global = meta::Global::load().unwrap_or_else(|_| meta::Global::new());
    match parse_args(arg_matches)? {
        Params::Upload { address, port } => {
            let local_key = BasicOptions::from_args(arg_matches).ok().and_then(|opts| {
                Meta::load(&opts.repo_root)
                    .ok()
                    .and_then(|meta| meta.signing_key)
            });
            let key = local_key
                .or(global.signing_key)
                .map(|s| load_signing_key(s));
            match key {
                Some(Ok(mut keys)) => {
                    if keys.keys.is_empty() {
                        return Ok(());
                    }
                    if let Some(remote) = super::remote::parse_ssh_remote_nopath(address, port) {
                        debug!("sending key");
                        remote.session()?.send_key(keys)?
                    }
                }
                Some(Err(e)) => return Err(e),
                None => return Ok(()),
            }
        }
        Params::Gen { signing } => {
            if let Some(identity) = signing {
                global.generate_global_signing_key(identity, None)?
            } else {
                meta::generate_global_ssh_key()?
            }
        }
        Params::Register => unimplemented!(),
        Params::None => {}
    }
    Ok(())
}

pub fn explain(r: Result<(), Error>) {
    if let Err(Error(kind, _)) = r {
        if let ErrorKind::InARepository(p) = kind {
            writeln!(stderr(), "Repository {} already exists", p.display()).unwrap();
    if let Err(e) = r {
        if let Error::InARepository { path } = e {
            writeln!(stderr(), "Repository {:?} already exists", path).unwrap();
        } else {
            writeln!(stderr(), "error: {}", kind).unwrap();
            writeln!(stderr(), "error: {}", e).unwrap();
        }

        None => return Err(Error::NoSuchBranch),
use clap::{Arg, ArgMatches, SubCommand};
use commands::patch::print_patch;
use commands::{ask, default_explain, BasicOptions, StaticSubcommand};
use error::Error;



            return Err(Error::InARepository { path: dir.to_owned() });
        return Err(Error::InARepository { path: dir.to_owned() });
pub fn relative_repo_path(repo_root: &PathBuf, base: &PathBuf, dir: &str) -> Result<PathBuf, Error> {
use clap;
use clap::ArgMatches;
pub type StaticSubcommand = clap::App<'static, 'static>;

mod ask;
mod fs_operation;
pub mod remote;
mod ssh_auth_attempts;

pub mod add;
pub mod apply;
pub mod branches;
pub mod checkout;
pub mod clone;
pub mod credit;
pub mod dependencies;
pub mod diff;
pub mod dist;
pub mod fork;
pub mod generate_completions;
pub mod grep;
pub mod hooks;
pub mod info;
pub mod init;
pub mod key;
pub mod log;
pub mod ls;
pub mod mv;
pub mod patch;
pub mod prune;
pub mod pull;
pub mod push;
pub mod record;
pub mod remove;
pub mod revert;
pub mod rollback;
pub mod sign;
pub mod status;
pub mod tag;
pub mod unrecord;

mod fold_until;

use error::Error;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::Hash;
use libpijul::{fs_representation, Inode, Repository, Txn, DEFAULT_BRANCH};
use rand;
use std::borrow::Cow;
use std::env::current_dir;
use std::env::var;
use std::fs::{canonicalize, create_dir, metadata};
use std::io::{stderr, Write};
use std::path::{Path, PathBuf};
use std::process::exit;

pub fn all_command_invocations() -> Vec<StaticSubcommand> {
    return vec![
        log::invocation(),
        info::invocation(),
        init::invocation(),
        record::invocation(),
        unrecord::invocation(),
        add::invocation(),
        pull::invocation(),
        push::invocation(),
        apply::invocation(),
        clone::invocation(),
        remove::invocation(),
        mv::invocation(),
        ls::invocation(),
        revert::invocation(),
        patch::invocation(),
        fork::invocation(),
        branches::invocation(),
        prune::invocation(),
        checkout::invocation(),
        diff::invocation(),
        credit::invocation(),
        dist::invocation(),
        key::invocation(),
        rollback::invocation(),
        status::invocation(),
        dependencies::invocation(),
        tag::invocation(),
        sign::invocation(),
        generate_completions::invocation(),
        grep::invocation(),
    ];
}

pub fn get_wd(repository_path: Option<&Path>) -> Result<PathBuf> {
pub fn get_wd(repository_path: Option<&Path>) -> Result<PathBuf, Error> {
    debug!("get_wd: {:?}", repository_path);
    match repository_path {
        None => Ok(canonicalize(current_dir()?)?),
        Some(a) if a.is_relative() => Ok(canonicalize(current_dir()?.join(a))?),
        Some(a) => Ok(canonicalize(a)?),
    }
}

/// Returns an error if the `dir` is contained in a repository.
pub fn assert_no_containing_repo(dir: &Path) -> Result<()> {
pub fn assert_no_containing_repo(dir: &Path) -> Result<(), Error> {
    if metadata(dir).is_ok() {
        if fs_representation::find_repo_root(&canonicalize(dir)?).is_some() {
            return Err(ErrorKind::InARepository(dir.to_owned()).into());
            return Err(Error::InARepository {
                path: dir.to_owned(),
            });
        }
    }
    Ok(())
}

/// Creates an empty pijul repository in the given directory.
pub fn create_repo(dir: &Path) -> Result<()> {
pub fn create_repo(dir: &Path) -> Result<(), Error> {
    // Check that a repository does not already exist.
    if metadata(dir).is_err() {
        create_dir(dir)?;
    }
    let dir = canonicalize(dir)?;
    if fs_representation::find_repo_root(&dir).is_some() {
        return Err(ErrorKind::InARepository(dir.to_owned()).into());
        return Err(Error::InARepository {
            path: dir.to_owned(),
        });
    }

    let repo_root = fs_representation::create(&dir, rand::thread_rng())?;
    let repo = repo_root.open_repo(None)?;
    repo.mut_txn_begin(rand::thread_rng())?.commit()?;
    Ok(())
}
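
// Aside (not part of this patch): an illustrative sketch of what a
// repository-root search such as `fs_representation::find_repo_root`
// conceptually does, namely walking up the ancestors of a directory until one
// of them contains the pijul metadata directory. The ".pijul" directory name
// is assumed here purely for illustration.
fn find_root_like(dir: &std::path::Path) -> Option<std::path::PathBuf> {
    dir.ancestors()
        .find(|ancestor| ancestor.join(".pijul").is_dir())
        .map(|ancestor| ancestor.to_path_buf())
}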

fn default_explain<R>(command_result: Result<R>) {
fn default_explain<R>(command_result: Result<R, Error>) {
    debug!("default_explain");
    match command_result {
        Ok(_) => (),
        Err(e) => {
            writeln!(stderr(), "error: {}", e).unwrap();
            exit(1)
        }
    }
}

fn validate_base58(x: String) -> ::std::result::Result<(), String> {
    if Hash::from_base58(&x).is_some() {
        Ok(())
    } else {
        Err(format!("\"{}\" is invalid base58", x))
    }
}

/// Almost all commands want to know the current directory and the repository root.  This struct
/// fills that need, and also provides methods for other commonly-used tasks.
pub struct BasicOptions<'a> {
    /// This isn't 100% the same as the actual current working directory, so pay attention: this
    /// will be the current directory, unless the user specifies `--repository`, in which case
    /// `cwd` will actually be the path of the repository root. In other words, specifying
    /// `--repository` has the same effect as changing directory to the repository root before
    /// running `pijul`.
    pub cwd: PathBuf,
    pub repo_root: RepoRoot<PathBuf>,
    args: &'a ArgMatches<'a>,
}

pub enum ScanScope {
    FromRoot,
    WithPrefix(RepoPath<PathBuf>, String),
}

impl<'a> BasicOptions<'a> {
    /// Reads the options from command line arguments.
    pub fn from_args(args: &'a ArgMatches<'a>) -> Result<BasicOptions<'a>> {
    pub fn from_args(args: &'a ArgMatches<'a>) -> Result<BasicOptions<'a>, Error> {
        let wd = get_wd(args.value_of("repository").map(Path::new))?;
        let repo_root = if let Some(r) = fs_representation::find_repo_root(&canonicalize(&wd)?) {
            r
        } else {
            return Err(ErrorKind::NotInARepository.into());
            return Err(Error::NotInARepository);
        };
        Ok(BasicOptions {
            cwd: wd,
            repo_root: repo_root,
            args: args,
        })
    }

    /// Gets the name of the desired branch.
    pub fn branch(&self) -> String {
        if let Some(b) = self.args.value_of("branch") {
            b.to_string()
        } else if let Ok(b) = self.repo_root.get_current_branch() {
            b
        } else {
            DEFAULT_BRANCH.to_string()
        }
    }

    pub fn repo_root(&self) -> PathBuf {
        self.repo_root.repo_root.clone()
    }

    pub fn open_repo(&self) -> Result<Repository> {
    pub fn open_repo(&self) -> Result<Repository, Error> {

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        self.repo_root.open_repo(None).map_err(|e| e.into())

================================

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        fs_representation::open_repo(&self.repo_root, None).map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    }

    pub fn open_and_grow_repo(&self, increase: u64) -> Result<Repository> {
    pub fn open_and_grow_repo(&self, increase: u64) -> Result<Repository, Error> {

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        fs_representation::open_repo(&self.repo_root, Some(increase)).map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
        self.repo_root
            .open_repo(Some(increase))
            .map_err(|e| e.into())

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    }

    pub fn pristine_dir(&self) -> PathBuf {
        self.repo_root.pristine_dir()
    }

    pub fn patches_dir(&self) -> PathBuf {
        self.repo_root.patches_dir()
    }

    pub fn scan_scope(&self) -> Result<ScanScope> {
    fn dir_inode(&self, txn: &Txn) -> Result<Inode> {
    pub fn scan_scope(&self) -> Result<ScanScope, Error> {
        if let Some(prefix) = self.args.value_of("dir") {
            let root = self
                .args
                .value_of("repository")
                .map(|root| Path::new(root).to_path_buf())
                .unwrap_or(current_dir()?);

            Ok(ScanScope::WithPrefix(
                relative_repo_path(&self.repo_root, &root, prefix)?,
                prefix.into(),
            ))
        } else {
            Ok(ScanScope::FromRoot)
        }
    }

    fn dir_inode(&self, txn: &Txn) -> Result<Inode, Error> {
        use libpijul::ROOT_INODE;
        if let Some(dir) = self.args.value_of("dir") {
            let dir = if Path::new(dir).is_relative() {
                let root = if let Some(root) = self.args.value_of("repository") {
                    Path::new(root).to_path_buf()
                } else {
                    current_dir()?
                };
                root.join(&dir).canonicalize()?
            } else {
                Path::new(dir).canonicalize()?
            };
            let dir = self.repo_root.relativize(&dir)?;
            debug!("{:?}", dir);
            let inode = txn.find_inode(&dir)?;
            debug!("{:?}", inode);
            Ok(inode)
        } else {
            Ok(ROOT_INODE)
        }
    }
}

fn remote_pijul_cmd() -> Cow<'static, str> {
    if let Ok(cmd) = var("REMOTE_PIJUL") {
        Cow::Owned(cmd)
    } else {
        Cow::Borrowed("pijul")
    }
}

pub fn relative_repo_path(
    repo_root: &RepoRoot<PathBuf>,
    base: &PathBuf,
    dir: &str,
) -> Result<RepoPath<PathBuf>, Error> {
    let dir = if Path::new(dir).is_relative() {


                        return Err(Error::InvalidPath { path: f.to_string_lossy().into_owned() });
                        return Err(Error::InvalidPath { path: f.to_string_lossy().into_owned() });
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::fs_representation::RepoPath;
use std::fs::{metadata, rename};
use std::path::PathBuf;

use rand;
use std;
pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("mv")
        .about("Change file names")
        .arg(
            Arg::with_name("files")
                .multiple(true)
                .help("Files to move.")
                .required(true)
                .min_values(2),
        )
        .arg(
            Arg::with_name("repository")
                .takes_value(true)
                .long("repository")
                .help("Repository where the files are.")
                .takes_value(true),
        );
}

#[derive(Debug)]
pub enum Movement {
    IntoDir {
        from: Vec<RepoPath<PathBuf>>,
        to: RepoPath<PathBuf>,
    },
    FileToFile {
        from: RepoPath<PathBuf>,
        to: RepoPath<PathBuf>,
    },
}

fn get_movement(opts: &BasicOptions, args: &ArgMatches) -> Movement {
    debug!("wd = {:?}", opts.cwd);
    debug!("repo_root = {:?}", opts.repo_root);
    let mut repo_paths = Vec::new();
    for fname in args.values_of("files").unwrap() {
        debug!("fname: {:?}", fname);
        // If fname is absolute, pushing it replaces current_dir entirely.
        let mut path = std::env::current_dir().unwrap();
        path.push(fname);
        debug!("path = {:?}", path);
        let path = if let Ok(f) = std::fs::canonicalize(&path) {
            f
        } else {
            std::fs::canonicalize(&path.parent().unwrap())
                .unwrap()
                .join(&path.file_name().unwrap())
        };
        debug!("path = {:?}", path);
        let path = opts.repo_root.relativize(&path).unwrap();
        debug!("path = {:?}", path);

        repo_paths.push(path.to_owned());
    }
    debug!("parse_args: done");
    let repo_paths = repo_paths;
    let (dest, origs) = repo_paths.split_last().unwrap();
    let target_path = opts.repo_root.absolutize(&dest);
    let to_dir = target_path.exists() && target_path.is_dir();

    if to_dir {
        Movement::IntoDir {
            from: Vec::from(origs),
            to: dest.clone(),
        }
    } else {
        if origs.len() == 1 {
            Movement::FileToFile {
                from: origs[0].clone(),
                to: dest.clone(),
            }
        } else {
            panic!(
                "Cannot move files into {}: it is not a valid directory",
                dest.display()
            );
        }
    }
}
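
// Aside (not part of this patch): a tiny sketch of the decision `get_movement`
// makes above. The last argument is the destination; if it is an existing
// directory, every other argument moves into it, otherwise exactly one source
// may be renamed. `is_dir` is a hypothetical stand-in for the metadata check.
fn classify(paths: &[&str], is_dir: impl Fn(&str) -> bool) -> String {
    let (dest, origs) = paths.split_last().expect("need at least two paths");
    if is_dir(dest) {
        format!("move {} file(s) into {}", origs.len(), dest)
    } else if origs.len() == 1 {
        format!("rename {} to {}", origs[0], dest)
    } else {
        format!("cannot move several files into {}: not a directory", dest)
    }
}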

pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let movement = get_movement(&opts, args);
    let repo = opts.open_repo()?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    match movement {
        Movement::FileToFile {
            from: ref orig_path,
            to: ref dest_path,
        } => {
            txn.move_file(orig_path, dest_path, false)?;
            debug!(
                "1 renaming {:?} into {:?}",
                opts.repo_root.repo_root.join(orig_path.as_path()),
                opts.repo_root.repo_root.join(dest_path.as_path())
            );
            rename(
                opts.repo_root.repo_root.join(orig_path.as_path()),
                opts.repo_root.repo_root.join(dest_path.as_path()),
            )?;
            txn.commit()?;
            Ok(())
        }
        Movement::IntoDir {
            from: ref orig_paths,
            to: ref dest_dir,
        } => {
            for file in orig_paths {
                let repo_target_name = {
                    let target_basename = if let Some(f) = file.file_name() {
                        f
                    } else {
                        return Err(ErrorKind::InvalidPath(f.to_string_lossy().into_owned()).into());
                        return Err(Error::InvalidPath {
                            path: file.to_path_buf(),
                        });
                    };
                    dest_dir.join(std::path::Path::new(target_basename))
                };
                let is_dir = metadata(&opts.repo_root.absolutize(file))?.is_dir();
                txn.move_file(&file, &repo_target_name, is_dir)?;
            }
            for file in orig_paths {
                let full_target_name = {
                    let target_basename = if let Some(f) = file.file_name() {
                        f
                    } else {
                        return Err(ErrorKind::InvalidPath(f.to_string_lossy().into_owned()).into());
                        return Err(Error::InvalidPath {
                            path: file.to_path_buf(),
                        });
                    };

use error::Error;
use super::validate_base58;
use atty;
use clap::{Arg, ArgGroup, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Result;
use error::Error;
use libpijul;
use libpijul::graph::LineBuffer;
use libpijul::patch::{Change, NewEdge, Patch};
use libpijul::{Branch, EdgeFlags, Hash, Key, LineId, PatchId, Transaction, Txn, Value, ROOT_KEY};
use std::cmp::max;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{copy, stdout, BufReader};
use std::str::from_utf8;
use term;
use term::StdoutTerminal;

pub fn invocation() -> StaticSubcommand {
    SubCommand::with_name("patch")
        .about("Output a patch")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Path to the repository where the patches will be applied.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("patch")
                .help("The hash of the patch to be printed.")
                .takes_value(true)
                .required(true)
                .validator(validate_base58),
        )
        .arg(
            Arg::with_name("bin")
                .long("bin")
                .help("Output the patch in binary."),
        )
        .arg(
            Arg::with_name("name")
                .long("name")
                .help("Output the patch name."),
        )
        .arg(
            Arg::with_name("description")
                .long("description")
                .help("Output the patch description."),
        )
        .arg(
            Arg::with_name("authors")
                .long("authors")
                .help("Output the patch authors."),
        )
        .arg(
            Arg::with_name("date")
                .long("date")
                .help("Output the patch date."),
        )
        .group(ArgGroup::with_name("details").required(false).args(&[
            "bin",
            "name",
            "description",
            "date",
            "authors",
        ]))
}

#[derive(PartialEq, Eq)]
enum View {
    Normal,
    Bin,
    NameOnly,
    DescrOnly,
    DateOnly,
    AuthorsOnly,
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let patch = Hash::from_base58(args.value_of("patch").unwrap()).unwrap();
    let mut patch_path = opts.repo_root.patches_dir().join(&patch.to_base58());
    patch_path.set_extension("gz");
    let mut f = File::open(&patch_path)?;

    let v: View = match (
        args.is_present("bin"),
        args.is_present("name"),
        args.is_present("description"),
        args.is_present("date"),
        args.is_present("authors"),
    ) {
        (true, _, _, _, _) => View::Bin,
        (_, true, _, _, _) => View::NameOnly,
        (_, _, true, _, _) => View::DescrOnly,
        (_, _, _, true, _) => View::DateOnly,
        (_, _, _, _, true) => View::AuthorsOnly,
        (_, _, _, _, _) => View::Normal,
    };

    if v == View::Bin {
        let mut stdout = stdout();
        copy(&mut f, &mut stdout)?;
    } else {
        // Write the patch in text.
        let mut f = BufReader::new(f);
        let (hash, _, patch) = Patch::from_reader_compressed(&mut f)?;

        match v {
            View::AuthorsOnly => print!("{:?}", patch.authors),
            View::DescrOnly => print!("{}", patch.description.clone().unwrap_or("".into())),
            View::DateOnly => print!("{:?}", patch.timestamp),
            View::NameOnly => print!("{}", patch.name),
            _ => {
                // it cannot be View::Bin, so it has to be View::Normal
                let repo = opts.open_repo()?;
                let branch_name = opts.branch();
                let txn = repo.txn_begin()?;
                let branch = txn.get_branch(&branch_name).expect("Branch not found");
                print_patch(&hash, &patch, &txn, &branch)?
            }
        }
    }
    Ok(())
}

pub fn print_patch(hash: &Hash, patch: &Patch, txn: &Txn, branch: &Branch) -> Result<(), Error> {
    let internal = txn
        .get_internal(hash.as_ref())
        .expect("Patch not in repository")
        .to_owned();

    let mut buf = LineNumbers {
        n: 0, // The graph will start from a file base name.
        patch: internal.clone(),
        current_file: ROOT_KEY,
        numbers: HashMap::new(),
    };

    let mut terminal = if atty::is(atty::Stream::Stdout) {
        term::stdout()
    } else {
        None
    };
    for c in patch.changes() {
        match *c {
            Change::NewNodes {
                ref up_context,
                ref flag,
                ref nodes,
                ref line_num,
                ..
            } => {
                if flag.contains(EdgeFlags::FOLDER_EDGE) {
                    /*render_new_folder(&txn, branch, deleted_files,
                    internal, up_context, down_context, nodes)?*/
                } else {
                    render_new_change(
                        &mut terminal,
                        &txn,
                        &branch,
                        &mut buf,
                        internal,
                        up_context,
                        line_num,
                        nodes,
                    )?
                }
            }
            Change::NewEdges {
                ref edges, flag, ..
            } => render_new_edges(
                &mut terminal,
                &txn,
                &branch,
                &mut buf,
                internal,
                edges,
                flag,
            )?,
        }
    }
    Ok(())
}

fn file_names(txn: &Txn, branch: &Branch, files: &[Key<PatchId>]) -> Result<()> {
fn file_names(txn: &Txn, branch: &Branch, files: &[Key<PatchId>]) -> Result<(), Error> {
    let file_names: Vec<_> = files
        .iter()
        .flat_map(|x| {
            debug!("file_names {:?}", x);
            txn.get_file_names(branch, x.clone())
                .into_iter()
                .map(|(_, name)| name)
        })
        .collect();

    debug!("file_names = {:?}", file_names);
    // assert_eq!(file_names.len(), 1);

    print!("In \"{:?}\"", file_names[0]);
    if file_names.len() > 1 {
        print!("(also known as {:?}", file_names[1]);
        for name in file_names.iter().skip(2) {
            print!(", {:?}", name);
        }
        println!("):");
    } else {
        println!(":")
    }
    Ok(())
}

const INVALID_UTF8: &'static str = "(Invalid UTF-8)";

fn render_new_change(
    term: &mut Option<Box<StdoutTerminal>>,
    txn: &Txn,
    branch: &Branch,
    buf: &mut LineNumbers,
    internal: PatchId,
    up_context: &[Key<Option<Hash>>],
    line_num: &LineId,
    nodes: &[Vec<u8>],
) -> Result<()> {
) -> Result<(), Error> {
    // Find the file
    let mut find_alive = libpijul::apply::find_alive::FindAlive::new();
    let mut alive = HashSet::new();
    let files = if up_context.is_empty() {
        panic!("up context is empty")
    } else {
        let up = txn.internal_key(&up_context[0], internal);
        let mut file = None;
        txn.find_alive_nonfolder_ancestors(
            branch,
            &mut find_alive,
            &mut alive,
            &mut file,
            up.clone(),
        );
        if let Some(file) = file {
            vec![file]
        } else {
            txn.get_file(branch, *alive.iter().next().unwrap())
        }
    };
    debug!("render_new_change, files = {:?}", files);
    file_names(txn, branch, &files)?;

    let mut ret = txn.retrieve(branch, files[0]);
    let mut v = Vec::new();
    let mut key = Key {
        patch: internal.clone(),
        line: line_num.clone(),
    };
    if buf.numbers.get(&key).is_none() {
        buf.n = 0;
        txn.output_file(branch, buf, &mut ret, &mut v)?;
    }
    let mut current: isize = -1;
    debug!("numbers: {:?}", buf.numbers);
    for n in nodes.iter() {
        debug!("key: {:?}", key);
        if let Some(&(_, line_num)) = buf.numbers.get(&key) {
            if line_num != current + 1 {
                println!("From line {}:", line_num);
            }
            current = line_num as isize;
        } else {
            println!("Deleted in a subsequent patch:");
        }
        if let Some(ref mut term) = *term {
            term.fg(term::color::GREEN).unwrap_or(());
        }
        print!("+ ");
        if let Some(ref mut term) = *term {
            term.reset().unwrap_or(());
        }

        if let Ok(n) = from_utf8(&n) {
            print!("{}", n);
            if !n.ends_with("\n") {
                println!("");
            }
        } else {
            println!("{}", INVALID_UTF8)
        }
        key.line += 1
    }
    Ok(())
}

#[derive(Debug)]
struct LineNumbers {
    n: isize,
    patch: PatchId,
    current_file: Key<PatchId>,
    numbers: HashMap<Key<PatchId>, (Key<PatchId>, isize)>,
}

impl<'a, T: 'a + Transaction> LineBuffer<'a, T> for LineNumbers {
    fn output_line(&mut self, key: &Key<PatchId>, _: Value<'a, T>) -> libpijul::Result<()> {
        self.numbers
            .insert(key.clone(), (self.current_file.clone(), self.n));
        self.n += 1;
        Ok(())
    }
    fn output_conflict_marker(&mut self, _: &str) -> libpijul::Result<()> {
        self.n += 1;
        Ok(())
    }
}

fn render_new_edges(
    term: &mut Option<Box<StdoutTerminal>>,
    txn: &Txn,
    branch: &Branch,
    buf: &mut LineNumbers,
    internal: PatchId,
    edges: &[NewEdge],
    flag: EdgeFlags,
) -> Result<(), Error> {
    let mut find_alive = libpijul::apply::find_alive::FindAlive::new();
    let mut alive = HashSet::new();
    let mut redundant = Vec::new();
    if !flag.contains(EdgeFlags::DELETED_EDGE) {
        // Looks like a conflict resolution, I don't know how to print
        // those edges.
        return Ok(());
    }
    let mut fnames = Vec::new();
    let mut current_node = ROOT_KEY;
    let mut current_line_num = -1;
    for e in edges {
        let (from, to) = if flag.contains(EdgeFlags::PARENT_EDGE) {
            (
                txn.internal_key(&e.to, internal),
                txn.internal_key(&e.from, internal),
            )
        } else {
            (
                txn.internal_key(&e.from, internal),
                txn.internal_key(&e.to, internal),
            )
        };
        debug!("from {:?} to {:?}", from, to);

        // Find the last alive ancestor(s) to the deleted lines.
        let mut file = None;
        alive.clear();
        txn.find_alive_nonfolder_ancestors(
            branch,
            &mut find_alive,
            &mut alive,
            &mut file,
            to.clone(),
        );
        debug!("{:?}", alive);
        let mut last_num = -1;
        for &key in alive.iter() {
            if buf.numbers.get(&key).is_none() {
                debug!("starting key {:?}", key);
                let files = txn.get_file(branch, key);
                fnames.extend(files.iter().flat_map(|x| {
                    txn.get_file_names(branch, x.clone())
                        .into_iter()
                        .map(|(_, name)| name)
                }));
                let mut ret = txn.retrieve(branch, files[0]);
                redundant.clear();
                buf.current_file = files[0].clone();
                buf.n = 0;
                txn.output_file(branch, buf, &mut ret, &mut redundant)?;
            }
            debug!("buf {:?}", buf);
            if let Some(&(ref file, num)) = buf.numbers.get(&key) {
                last_num = max(num, last_num);
                debug!("{:?} {:?}", file, num)
            }
        }
        // New lines may have been inserted by subsequent patches (for
        // instance, just before the lines deleted by this patch) without
        // being deleted themselves.
        //
        // This can make this hunk's line numbers non-contiguous.
        if last_num != current_line_num {
            println!("After line {}:", last_num + 1);
            current_line_num = last_num
        }
        if to != current_node {
            if let Some(contents) = txn.get_contents(to) {
                if let Some(ref mut term) = *term {
                    term.fg(term::color::RED).unwrap_or(());
                }
                print!("- ");
                if let Some(ref mut term) = *term {
                    term.reset().unwrap_or(());
                }

                let mut is_valid = true;
                let mut cont = String::new();
                for chunk in contents {
                    let c = from_utf8(chunk);
                    if let Ok(c) = c {
                        cont.push_str(c)
                    } else {
                        is_valid = false;
                        break;
                    }
                }
                if is_valid {
                    print!("{}", cont);
                    if !cont.ends_with("\n") {
                        println!("");
                    }
                } else {
                    println!("{}", INVALID_UTF8)
                }
            }
        }
        current_node = to
    }
    Ok(())
}

pub fn explain(r: Result<()>) {
pub fn explain(r: Result<(), Error>) {
    default_explain(r)








use error::Error;
) -> Result<HashSet<PathBuf>, Error> {
fn append_to_ignore_file(repo_root: &Path, lines: &Vec<String>) -> Result<(), Error> {
        chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate { date: String::from(x) })
                return Err(Error::NoAuthor)
                                return Err(Error::ExtraDepNotOnBranch { hash: hash.to_owned() });
), Error> {
pub fn prefix(args: &ArgMatches, opts: &BasicOptions) -> Result<Option<Vec<PathBuf>>, Error> {
use super::ask::{ask_changes, ChangesDirection};
use super::default_explain;
use chrono;
use clap::{Arg, ArgMatches, SubCommand};
use commands::hooks::run_hook;
use commands::{ask, BasicOptions, StaticSubcommand};
use error::{ErrorKind, Result};
use error::Error;
use libpijul;
use libpijul::fs_representation::{in_repo_root, RepoPath, RepoRoot};
use libpijul::patch::{PatchFlags, Record};
use libpijul::{Hash, InodeUpdate, Key, MutTxn, Patch, PatchId, RecordState, Repository};
use meta::{load_signing_key, Global, Meta};
use rand;
use std::collections::HashSet;
use std::fs::canonicalize;
use std::fs::{metadata, OpenOptions};
use std::io::Write;
use std::mem::drop;
use std::path::{Path, PathBuf};
use std::str::FromStr;

pub fn record_args(sub: StaticSubcommand) -> StaticSubcommand {
    sub.arg(Arg::with_name("repository")
            .long("repository")
            .help("The repository where to record, defaults to the current directory.")
            .takes_value(true)
            .required(false))
        .arg(Arg::with_name("branch")
             .long("branch")
             .help("The branch where to record, defaults to the current branch.")
             .takes_value(true)
             .required(false))
        .arg(Arg::with_name("date")
             .long("date")
             .help("The date to use to record the patch, default is now.")
             .takes_value(true)
             .required(false))
        .arg(Arg::with_name("message")
             .short("m")
             .long("message")
             .help("The name of the patch to record")
             .takes_value(true))
        .arg(Arg::with_name("description")
             .short("d")
             .long("description")
             .help("The description of the patch to record")
             .takes_value(true))
        .arg(Arg::with_name("no-editor")
             .long("no-editor")
             .help("Do not use an editor to write the patch name and description, even if the variable is set in the configuration file")
             .takes_value(false))
        .arg(Arg::with_name("author")
             .short("A")
             .long("author")
             .help("Author of this patch (multiple occurrences allowed)")
             .takes_value(true))
        .arg(Arg::with_name("patience")
             .long("patience")
             .help("Use patience diff instead of the default (Myers diff)")
             .conflicts_with("myers")
             .takes_value(false))
        .arg(Arg::with_name("myers")
             .long("myers")
             .help("Use Myers diff")
             .conflicts_with("patience")
             .takes_value(false))
}

pub fn invocation() -> StaticSubcommand {
    return record_args(
        SubCommand::with_name("record")
            .about("Record changes in the repository")
            .arg(
                Arg::with_name("all")
                    .short("a")
                    .long("all")
                    .help("Answer 'y' to all questions")
                    .takes_value(false),
            )
            .arg(
                Arg::with_name("add-new-files")
                    .short("n")
                    .long("add-new-files")
                    .help("Offer to add files that have been created since the last record")
                    .takes_value(false),
            )
            .arg(
                Arg::with_name("depends-on")
                    .help("Add a dependency to this patch (internal id or hash accepted)")
                    .long("depends-on")
                    .takes_value(true)
                    .multiple(true),
            )
            .arg(
                Arg::with_name("prefix")
                    .help("Prefix to start from")
                    .takes_value(true)
                    .multiple(true),
            ),
    );
}
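
For reference, the flags declared above combine into command lines like these; the message, author, hash and prefix values are made up for illustration:

    pijul record --all -m "Replace error-chain with a custom Error type" -A "An Author <author@example.org>"
    pijul record -n --patience --depends-on <HASH_IN_BASE58> src/commands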

fn add_untracked_files<T: rand::Rng, P: AsRef<Path> + 'static>(
    txn: &mut MutTxn<T>,
    repo_root: &RepoRoot<P>,

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        if let Err(e) = txn.add_file(&file, m.is_dir()) {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
        }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    }

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
    Ok(untracked)
}

fn append_to_ignore_file(repo_root: &Path, lines: &Vec<String>) -> Result<()> {
fn append_to_ignore_file(
    repo_root: &RepoRoot<impl AsRef<Path>>,
    lines: &Vec<String>,
) -> Result<(), Error> {
    let ignore_file = repo_root.local_ignore_file();
    let mut file = OpenOptions::new()
        .append(true)
        .create(true)
        .open(ignore_file)?;
    for line in lines {
        file.write_all(line.as_ref())?;
        file.write_all(b"\n")?
    }
    Ok(())
}

pub fn run(args: &ArgMatches) -> Result<Option<Hash>> {
fn select_changes(
    algo: libpijul::DiffAlgorithm,
    opts: &BasicOptions,
    add_new_files: bool,
    branch_name: &str,
    yes_to_all: bool,
    prefix: Option<Vec<RepoPath<PathBuf>>>,
) -> Result<(Vec<Record<Vec<Key<Option<Hash>>>>>, HashSet<InodeUpdate>), Error> {
    // Increase by 100 pages. The most record itself can write is a single
    // write in the branches table, affecting at most O(log n) blocks.
    let repo = opts.open_and_grow_repo(409600)?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    let mut to_unadd = if add_new_files {
        add_untracked_files(&mut txn, &opts.repo_root)?
    } else {
        HashSet::new()
    };
    let (changes, syncs) = changes_from_prefixes(
        algo,
        &opts.repo_root,
        &mut txn,
        &branch_name,
        prefix.as_ref(),
    )?;
    let changes: Vec<_> = changes
        .into_iter()
        .map(|x| txn.globalize_record(x))
        .collect();
    if !yes_to_all {
        let (c, i) = ask_changes(
            &txn,
            &opts.repo_root,
            &opts.cwd,
            &changes,
            ChangesDirection::Record,
            &mut to_unadd,
        )?;
        let selected = changes
            .into_iter()
            .enumerate()
            .filter(|&(i, _)| *(c.get(&i).unwrap_or(&false)))
            .map(|(_, x)| x)
            .collect();
        for file in to_unadd {
            txn.remove_file(&file)?
        }
        txn.commit()?;
        append_to_ignore_file(&opts.repo_root, &i)?;
        Ok((selected, syncs))
    } else {
        txn.commit()?;
        Ok((changes, syncs))
    }
}

pub fn run(args: &ArgMatches) -> Result<Option<Hash>, Error> {
    let opts = BasicOptions::from_args(args)?;
    let yes_to_all = args.is_present("all");
    let patch_name_arg = args.value_of("message");
    let patch_descr_arg = args.value_of("description");
    let authors_arg = args.values_of("author").map(|x| x.collect::<Vec<_>>());
    let branch_name = opts.branch();
    let add_new_files = args.is_present("add-new-files");

    let patch_date = args.value_of("date").map_or(Ok(chrono::Utc::now()), |x| {
        chrono::DateTime::from_str(x).map_err(|_| ErrorKind::InvalidDate(String::from(x)))
        chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate {
            date: String::from(x),
        })
    })?;

    let mut save_meta = false;

    let (mut global, save_global) = Global::load().map(|c| (c, false)).unwrap_or_else(|e| {
        info!("loading global key, error {:?}", e);
        (Global::new(), true)
    });

    let mut meta = match Meta::load(&opts.repo_root) {
        Ok(m) => m,
        Err(_) => {
            save_meta = true;
            Meta::new()
        }
    };

    run_hook(&opts.repo_root, "pre-record", None)?;

    debug!("prefix {:?}", args.value_of("prefix"));
    let prefix = prefix(args, &opts)?;

    let (changes, syncs) = select_changes(
        if args.is_present("patience") {
            libpijul::DiffAlgorithm::Patience
        } else {
            libpijul::DiffAlgorithm::Myers
        },
        &opts,
        add_new_files,
        &branch_name,
        yes_to_all,
        prefix,
    )?;

    if changes.is_empty() {
        println!("Nothing to record");
        Ok(None)
    } else {
        let template = prepare_changes_template(patch_name_arg.unwrap_or(""), &changes);

        let repo = opts.open_repo()?;
        let patch = {
            let txn = repo.txn_begin()?;
            debug!("meta:{:?}", meta);

            let authors = decide_authors(authors_arg, &meta, &global)?;

            if authors.is_empty() {
                return Err(ErrorKind::NoAuthor.into())
                return Err(Error::NoAuthor);
            }

            if meta.authors.is_empty() {
                meta.authors = authors.clone();
                save_meta = true;
            }

            if global.author.is_none() {
                global.author = Some(authors[0].clone());
            }

            debug!("authors:{:?}", authors);

            let (patch_name, description) = decide_patch_message(
                patch_name_arg,
                patch_descr_arg,
                template,
                !args.is_present("no-editor"),
                &opts.repo_root,
                &meta,
                &global,
            )?;

            run_hook(&opts.repo_root, "patch-name", Some(&patch_name))?;

            debug!("patch_name:{:?}", patch_name);
            if save_meta {
                meta.save(&opts.repo_root)?
            }
            if save_global {
                global.save().unwrap_or(())
            }
            debug!("new");
            let changes = changes.into_iter().flat_map(|x| x.into_iter()).collect();
            let branch = txn.get_branch(&branch_name).unwrap();

            let mut extra_deps = Vec::new();
            if let Some(deps) = args.values_of("depends-on") {
                for dep in deps {
                    if let Some(hash) = Hash::from_base58(dep) {
                        if let Some(internal) = txn.get_internal(hash.as_ref()) {
                            if txn.get_patch(&branch.patches, internal).is_some() {
                                extra_deps.push(hash)
                            } else {
                                return Err(ErrorKind::ExtraDepNotOnBranch(hash).into());
                                return Err(Error::ExtraDepNotOnBranch { hash });
                            }
                        } else {
                            return Err(ErrorKind::PatchNotFound(
                                opts.repo_root().to_string_lossy().into_owned(),
                                hash,
                            ).into());
                            return Err(Error::PatchNotFound {
                                repo_root: opts.repo_root().to_string_lossy().into_owned(),
                                patch_hash: hash,
                            });
                        }
                    } else if let Some(internal) = PatchId::from_base58(dep) {
                        if let Some(hash) = txn.get_external(internal) {
                            if txn.get_patch(&branch.patches, internal).is_some() {
                                extra_deps.push(hash.to_owned())
                            } else {
                                return Err(ErrorKind::ExtraDepNotOnBranch(hash.to_owned()).into());
                                return Err(Error::ExtraDepNotOnBranch {
                                    hash: hash.to_owned(),
                                });
                            }
                        }
                    } else {
                        return Err(ErrorKind::WrongHash.into());
                        return Err(Error::WrongHash);
                    }
                }
            }
            txn.new_patch(
                &branch,
                authors,
                patch_name,
                description,
                patch_date,
                changes,
                extra_deps.into_iter(),
                PatchFlags::empty(),
            )
        };
        drop(repo);

        let patches_dir = opts.repo_root.patches_dir();
        let mut key = meta
            .signing_key
            .or(global.signing_key)
            .and_then(|s| load_signing_key(s).ok());
        let hash = if let Some(ref mut key) = key {
            key.check_author(&patch.header().authors)?;
            patch.save(&patches_dir, key.keys.get_mut(0))?
        } else {
            patch.save(&patches_dir, None)?
        };

        let pristine_dir = opts.pristine_dir();
        let mut increase = 409600;
        let res = loop {
            match record_no_resize(
                &pristine_dir,
                &opts.repo_root,
                &branch_name,
                &hash,
                &patch,
                &syncs,
                increase,
            ) {
                Err(ref e) if e.lacks_space() => increase *= 2,
                e => break e,
            }
        };

        run_hook(&opts.repo_root, "post-record", None)?;

        res
    }
}

pub fn record_no_resize(
    pristine_dir: &Path,
    r: &RepoRoot<impl AsRef<Path>>,
    branch_name: &str,
    hash: &Hash,
    patch: &Patch,
    syncs: &HashSet<InodeUpdate>,
    increase: u64,
) -> Result<Option<Hash>> {
) -> Result<Option<Hash>, Error> {
    let size_increase = increase + patch.size_upper_bound() as u64;
    let repo = match Repository::open(&pristine_dir, Some(size_increase)) {
        Ok(repo) => repo,
        Err(x) => return Err(ErrorKind::Repository(x).into()),
        Err(x) => return Err(Error::Repository(x)),
    };
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
    // save patch
    debug!("syncs: {:?}", syncs);
    let mut branch = txn.open_branch(branch_name)?;
    txn.apply_local_patch(&mut branch, r, &hash, &patch, &syncs, false)?;
    txn.commit_branch(branch)?;
    txn.commit()?;
    println!("Recorded patch {}", hash.to_base58());
    Ok(Some(hash.clone()))
}

pub fn explain(res: Result<Option<Hash>>) {
pub fn explain(res: Result<Option<Hash>, Error>) {
    default_explain(res)
}
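
run and record_no_resize above retry whenever an operation fails with e.lacks_space(), doubling the size hint each time. Under error-chain that predicate lived on the generated error type; the new enum needs an equivalent. A minimal sketch, assuming the repository error is wrapped in a Repository variant and that libpijul's own error can report the out-of-space condition:

    // Assumed helper on the new Error type: report whether the failure was the
    // memory map running out of space, so callers can grow the repository and retry.
    impl Error {
        pub fn lacks_space(&self) -> bool {
            match *self {
                Error::Repository(ref e) => e.lacks_space(),
                _ => false,
            }
        }
    }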

pub fn changes_from_prefixes<T: rand::Rng, P: AsRef<Path>>(
    algo: libpijul::DiffAlgorithm,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    txn: &mut MutTxn<T>,
    branch_name: &str,
    prefix: Option<&Vec<RepoPath<P>>>,
) -> Result<
    (
        Vec<libpijul::patch::Record<libpijul::patch::ChangeContext<PatchId>>>,
        HashSet<libpijul::InodeUpdate>,
    ),
    Error,
> {
    let mut record = RecordState::new();
    let branch = txn.open_branch(branch_name)?;
    if let Some(prefixes) = prefix {
        for prefix in prefixes {
            txn.record(algo, &mut record, &branch, repo_root, prefix)?;
        }
    } else {
        txn.record(algo, &mut record, &branch, repo_root, &in_repo_root())?;
    }
    txn.commit_branch(branch)?;
    let (changes, updates) = record.finish();
    // let changes = changes.into_iter().map(|x| txn.globalize_change(x)).collect();
    Ok((changes, updates))
}

pub fn prefix(args: &ArgMatches, opts: &BasicOptions) -> Result<Option<Vec<PathBuf>>> {
pub fn prefix(
    args: &ArgMatches,
    opts: &BasicOptions,
) -> Result<Option<Vec<RepoPath<PathBuf>>>, Error> {
    if let Some(prefixes) = args.values_of("prefix") {
        let prefixes: Result<Vec<_>> = prefixes
        let prefixes: Result<Vec<_>, Error> = prefixes
            .map(|prefix| {
                let p = opts.cwd.join(prefix);
                let p = if let Ok(p) = canonicalize(&p) { p } else { p };
                let file = opts.repo_root.relativize(&p)?;
                Ok(file.to_owned())
            })
            .collect();
        Ok(Some(prefixes?))
    } else {
        Ok(None)
    }
}

pub fn decide_authors(
    authors_args: Option<Vec<&str>>,
    meta: &Meta,
    global: &Global,
) -> Result<Vec<String>> {
) -> Result<Vec<String>, Error> {
    Ok(match authors_args {
        Some(authors) => authors.iter().map(|x| x.to_string()).collect(),
        _ => {
            if meta.authors.len() > 0 {
                meta.authors.clone()
            } else if let Some(ref auth) = global.author {
                vec![auth.clone()]
            } else {
                ask::ask_authors()?
            }
        }
    })
}

pub fn decide_patch_message(
    name_arg: Option<&str>,
    descr_arg: Option<&str>,
    template: String,
    use_editor: bool,
    repo_root: &RepoRoot<impl AsRef<Path>>,
    meta: &Meta,
    global: &Global,
) -> Result<(String, Option<String>)> {
) -> Result<(String, Option<String>), Error> {
    Ok(match name_arg {










    ) -> Result<HashMap<Hash, ApplyTimestamp>, Error> {
    ) -> Result<HashMap<Hash, ApplyTimestamp>, Error> {
    ) -> Result<HashMap<Hash, ApplyTimestamp>, Error> {
    pub fn fetch_patch(&mut self, patch_hash: &Hash, local_file: PathBuf) -> Result<PathBuf, Error> {
    ) -> Result<(), Error> {
    ) -> Result<HashMap<Hash, ApplyTimestamp>, Error> {
    pub fn download_patch(&mut self, repo_root: &Path, patch_hash: &Hash) -> Result<PathBuf, Error> {
    ) -> Result<(), Error> {
pub fn ssh_connect(user: &Option<&str>, host: &str, port: Option<u16>) -> Result<(thrussh_config::Config, thrussh_config::ConnectFuture), Error> {
                return Err(Error::UnknownHost { host: host.to_string() });
use libpijul::fs_representation::{
    branch_changes_base_path, patch_file_name, RepoRoot, PIJUL_DIR_NAME,
};
use libpijul::patch::read_changes;
use libpijul::{
    apply_resize, apply_resize_no_output, apply_resize_patches, apply_resize_patches_no_output,
    ApplyTimestamp, ConflictingFile, Hash, Patch, PatchId, RepoPath, Repository,
};
use regex::Regex;
use reqwest;
use reqwest::async as reqwest_async;

use error::Error;
use std;
use std::collections::hash_set::HashSet;
use std::collections::HashMap;
use std::fs::{copy, hard_link, metadata, rename, File};
use std::path::{Path, PathBuf};
use std::sync::Arc;

use commands::{ask, assert_no_containing_repo, create_repo};
use cryptovec;
use dirs;
use futures;
use futures::{Async, Future, Poll, Stream};
use meta;
use progrs;
use sequoia_openpgp::serialize::Serialize;
use shell_escape::unix::escape;
use std::borrow::Cow;
use std::io::prelude::*;
use std::io::BufReader;
use std::net::ToSocketAddrs;
use tempfile::tempdir_in;
use thrussh;
use thrussh_config;
use thrussh_keys;
use tokio;
use username;

#[derive(Debug)]
pub struct SshRemote<'a> {
    user: Option<&'a str>,
    host: &'a str,
    port: Option<u16>,
    path: &'a str,
    id: &'a str,
    local_repo_root: Option<&'a Path>,
    pijul_cmd: Cow<'static, str>,
}

#[derive(Debug)]
pub enum Remote<'a> {
    Ssh(SshRemote<'a>),
    Uri { uri: &'a str },
    Local { path: RepoRoot<PathBuf> },
}

pub enum Session<'a> {
    Ssh(SshSession<'a>),
    Uri(UriSession<'a>),
    Local(LocalSession<'a>),
}

pub struct SshSession<'a> {
    pub l: tokio::runtime::Runtime,
    path: &'a str,
    pijul_cmd: &'a str,
    pub session: Option<thrussh::client::Connection<thrussh_config::Stream, Client>>,
}

pub struct UriSession<'a> {
    l: tokio::runtime::Runtime,
    uri: &'a str,
    client: reqwest_async::Client,
}

pub struct LocalSession<'a> {
    root: RepoRoot<&'a Path>,
}

impl<'a> Drop for SshSession<'a> {
    fn drop(&mut self) {
        if let Some(mut session) = self.session.take() {
            debug!("disconnecting");
            session.disconnect(thrussh::Disconnect::ByApplication, "finished", "EN");
            if let Err(e) = self.l.block_on(session) {
                error!("While dropping SSH Session: {:?}", e);
            }
        }
    }
}

#[cfg(unix)]
use thrussh_keys::agent::client::AgentClient;
#[cfg(unix)]
use tokio_uds::UnixStream;

pub struct Client {
    pub exit_status: HashMap<thrussh::ChannelId, u32>,
    state: State,
    host: String,
    port: u16,
    channel: Option<thrussh::ChannelId>,
    #[cfg(unix)]
    pub agent: Option<AgentClient<UnixStream>>,
    #[cfg(windows)]
    pub agent: Option<()>,
}

impl Client {
    #[cfg(unix)]
    fn new(port: Option<u16>, host: &str, l: &mut tokio::runtime::Runtime) -> Self {
        let agent = if let Ok(path) = std::env::var("SSH_AUTH_SOCK") {
            l.block_on(
                UnixStream::connect(path).map(thrussh_keys::agent::client::AgentClient::connect),
            )
            .ok()
        } else {
            None
        };
        debug!("Client::new(), agent: {:?}", agent.is_some());
        Client {
            exit_status: HashMap::new(),
            state: State::None,
            port: port.unwrap_or(22),
            host: host.to_string(),
            channel: None,
            agent,
        }
    }

    #[cfg(windows)]
    fn new(port: Option<u16>, host: &str, _: &mut tokio::runtime::Runtime) -> Self {
        Client {
            exit_status: HashMap::new(),
            state: State::None,
            port: port.unwrap_or(22),
            host: host.to_string(),
            channel: None,
            agent: None,
        }
    }
}

enum State {
    None,
    Changes {
        changes: Vec<(Hash, ApplyTimestamp)>,
    },
    DownloadPatch {
        file: File,
    },
    /*SendKey {
        key_pair: meta::SigningKeys,
    },*/
}

enum SendFileState {
    Read(thrussh::client::Connection<thrussh_config::Stream, Client>),
    Wait(thrussh::client::Data<thrussh_config::Stream, Client, Vec<u8>>),
}

struct SendFile {
    f: File,
    buf: Option<Vec<u8>>,
    chan: thrussh::ChannelId,
    state: Option<SendFileState>,
}

impl Future for SendFile {
    type Item = (
        thrussh::client::Connection<thrussh_config::Stream, Client>,
        Vec<u8>,
    );
    type Error = Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        debug!("SendFile loop starting");
        loop {
            debug!("sendfile loop");
            match self.state.take() {
                Some(SendFileState::Read(c)) => {
                    debug!("read");
                    let mut buf = self.buf.take().unwrap();
                    buf.resize(BUFFER_SIZE, 0);
                    let len = self.f.read(&mut buf)?;
                    if len == 0 {
                        // If nothing has been read, return.
                        return Ok(Async::Ready((c, buf)));
                    }
                    buf.truncate(len);
                    debug!("sending {:?} bytes, {:?}", len, buf.len());
                    self.state = Some(SendFileState::Wait(c.data(self.chan, None, buf)));
                }
                Some(SendFileState::Wait(mut c)) => {
                    debug!("wait");
                    match c.poll()? {
                        Async::Ready((c, buf)) => {
                            self.buf = Some(buf);
                            self.state = Some(SendFileState::Read(c))
                        }
                        Async::NotReady => {
                            self.state = Some(SendFileState::Wait(c));
                            return Ok(Async::NotReady);
                        }
                    }
                }
                None => unreachable!(),
            }
        }
    }
}

impl thrussh::client::Handler for Client {
    type Error = Error;
    type FutureUnit = futures::Finished<Client, Error>;
    type SessionUnit = futures::Finished<(Client, thrussh::client::Session), Error>;
    type FutureBool = futures::future::FutureResult<(Client, bool), Error>;
    type FutureSign =
        Box<futures::Future<Item = (Self, cryptovec::CryptoVec), Error = Self::Error>>;

    #[cfg(unix)]
    fn auth_publickey_sign(
        mut self,
        key: &thrussh_keys::key::PublicKey,
        mut to_sign: cryptovec::CryptoVec,
    ) -> Self::FutureSign {
        debug!("auth_publickey_sign");
        if let Some(agent) = self.agent.take() {
            use thrussh_keys::encoding::Encoding;
            debug!("using agent");
            Box::new(
                agent
                    .sign_request(key, &to_sign)
                    .then(move |result| match result {
                        Ok((client, sig)) => {
                            debug!("sig = {:?}", sig);
                            if let Some(sig) = sig {
                                to_sign.extend_ssh_string(&sig[..]);
                            }
                            self.agent = Some(client);
                            futures::finished::<_, Error>((self, to_sign))
                        }
                        Err(e) => {
                            error!("SSH agent error: {:?}", e);
                            futures::finished((self, to_sign))
                        }
                    })
                    .from_err(),
            )
        } else {
            debug!("no agent");
            Box::new(futures::finished((self, to_sign)))
        }
    }

    fn data(
        mut self,
        channel: thrussh::ChannelId,
        stream: Option<u32>,
        data: &[u8],
        session: thrussh::client::Session,
    ) -> Self::SessionUnit {
        debug!(
            "data ({:?}): {:?}",
            channel,
            &data[..std::cmp::min(data.len(), 100)]
        );
        if stream == Some(1) {
            std::io::stderr().write(data).unwrap();
        } else if stream == None {
            match self.state {
                State::None => {
                    std::io::stdout().write(data).unwrap();
                }
                State::Changes { ref mut changes } => {
                    let data = std::str::from_utf8(data).unwrap();
                    for l in data.lines() {
                        let mut spl = l.split(':');
                        if let (Some(h), Some(s)) = (spl.next(), spl.next()) {
                            if let (Some(h), Ok(s)) = (Hash::from_base58(h), s.parse()) {
                                changes.push((h, s));
                            }
                        }
                    }
                }
                State::DownloadPatch { ref mut file, .. } => {
                    file.write_all(data).unwrap();
                }
            }
        } else {
            debug!(
                "SSH data received on channel {:?}: {:?} {:?}",
                channel, stream, data
            );
        }
        futures::finished((self, session))
    }
    fn exit_status(
        mut self,
        channel: thrussh::ChannelId,
        exit_status: u32,
        session: thrussh::client::Session,
    ) -> Self::SessionUnit {
        debug!(
            "exit_status received on channel {:?}: {:?}:",
            channel, exit_status
        );
        debug!("self.channel = {:?}", self.channel);
        if let Some(c) = self.channel {
            if channel == c {
                self.exit_status.insert(channel, exit_status);
            }
        }
        debug!("self.exit_status = {:?}", self.exit_status);
        futures::finished((self, session))
    }

    fn check_server_key(
        self,
        server_public_key: &thrussh_keys::key::PublicKey,
    ) -> Self::FutureBool {
        let path = dirs::home_dir().unwrap().join(".ssh").join("known_hosts");
        match thrussh_keys::check_known_hosts_path(&self.host, self.port, server_public_key, &path)
        {
            Ok(true) => futures::done(Ok((self, true))),
            Ok(false) => {
                if let Ok(false) = ask::ask_learn_ssh(&self.host, self.port, "") {
                    // TODO
                    // &server_public_key.fingerprint()) {

                    futures::done(Ok((self, false)))
                } else {
                    thrussh_keys::learn_known_hosts_path(
                        &self.host,
                        self.port,
                        server_public_key,
                        &path,
                    )
                    .unwrap();
                    futures::done(Ok((self, true)))
                }
            }
            Err(e) => {
                if let thrussh_keys::Error::KeyChanged(line) = e {
                    println!(
                        "Host key changed! Someone might be eavesdropping on this communication, \
                         refusing to continue. The previous key was found on line {}",
                        line
                    );
                    futures::done(Ok((self, false)))
                } else {
                    futures::done(Err(From::from(e)))
                }
            }
        }
    }
}
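
The handler above sets `type Error = Error` and converts transport failures with `.from_err()` and `From::from(e)`, so the new enum also has to absorb the thrussh, thrussh_keys and reqwest error types. A sketch of the conversions this implies; the variant names (Ssh, SshKeys, Http) are assumptions for illustration, not taken from the patch:

    // Hedged sketch: From impls the SSH/HTTP sessions rely on.
    impl From<thrussh::Error> for Error {
        fn from(e: thrussh::Error) -> Self {
            Error::Ssh(e)
        }
    }

    impl From<thrussh_keys::Error> for Error {
        fn from(e: thrussh_keys::Error) -> Self {
            Error::SshKeys(e)
        }
    }

    impl From<reqwest::Error> for Error {
        fn from(e: reqwest::Error) -> Self {
            Error::Http(e)
        }
    }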

const BUFFER_SIZE: usize = 1 << 14; // 16 KiB.

impl<'a> SshSession<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let mut cmd = format!(
            "{} log --repository {} --branch {:?} --hash-only",
            self.pijul_cmd, esc_path, branch
        );
        for p in path {
            cmd.push_str(&format!(" --path {}", p.display()))
        }

        if let Some(ref mut session) = self.session {
            session.handler_mut().state = State::Changes {
                changes: Vec::new(),
            }
        }
        let mut channel = None;
        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut connection, chan)| {
                            debug!("exec: {:?}", cmd);
                            channel = Some(chan);
                            connection.handler_mut().exit_status.remove(&chan);
                            connection.handler_mut().channel = Some(chan);
                            connection.exec(chan, false, &cmd);
                            connection.channel_eof(chan);
                            // Wait until channel close.
                            debug!("waiting channel close");
                            connection
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref session) = self.session {
            if let Some(channel) = channel {
                if let Some(&exit_code) = session.handler().exit_status.get(&channel) {
                    debug!("exit_code = {:?}", exit_code);
                    if exit_code != 0 {
                        return Ok(Vec::new());
                    }
                }
            }
        }
        if let Some(ref mut session) = self.session {
            match std::mem::replace(&mut session.handler_mut().state, State::None) {
                State::Changes { changes } => {
                    debug!("changes: {:?}", changes);
                    Ok(changes)
                }
                _ => unreachable!(),
            }
        } else {
            unreachable!()
        }
    }

    pub fn send_key(&mut self, key_pair: meta::SigningKeys) -> Result<(), Error> {
        if let Some(ref mut session) = self.session {
            // session.handler_mut().state = State::SendKey { };
            session.handler_mut().channel = None;
        }
        let challenge_cmd = format!("{} key register", self.pijul_cmd);
        let mut data = Vec::new();
        key_pair.tsk.tpk().serialize(&mut data)?;
        self.session = Some(
            self.l.block_on(
                self.session
                    .take()
                    .unwrap()
                    .channel_open_session()
                    .and_then(move |(mut session, channelid)| {
                        session.exec(channelid, false, &challenge_cmd);
                        session
                            .data(channelid, None, data)
                            .and_then(move |(mut session, _)| {
                                session.channel_eof(channelid);
                                session.handler_mut().channel = Some(channelid);
                                session.wait(move |session| {
                                    session.handler().exit_status.get(&channelid).is_some()
                                })
                            })
                    }),
            )?,
        );
        Ok(())
    }

    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
        local_tmp_file: PathBuf,
    ) -> Result<PathBuf> {
    ) -> Result<PathBuf, Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let cmd = format!(
            "{} patch --repository {} --bin {}",
            self.pijul_cmd,
            esc_path,
            patch_hash.to_base58()
        );
        debug!("cmd {:?} {:?}", cmd, local_file);
        if let Some(ref mut session) = self.session {
            session.handler_mut().state = State::DownloadPatch {
                file: File::create(&local_tmp_file)?,
            };
            session.handler_mut().channel = None;
        }
        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut connection, chan)| {
                            connection.handler_mut().exit_status.remove(&chan);
                            connection.handler_mut().channel = Some(chan);
                            connection.exec(chan, false, &cmd);
                            connection.channel_eof(chan);
                            connection
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref mut session) = self.session {
            if let State::DownloadPatch { mut file, .. } =
                std::mem::replace(&mut session.handler_mut().state, State::None)
            {
                file.flush()?;
                rename(&local_tmp_file, &local_file)?;
            }
        }
        Ok(local_file)
    }

    pub fn remote_apply(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: Vec<Hash>,
    ) -> Result<(), Error> {
        let pdir = repo_root.patches_dir();
        let mut exit_status = None;
        let esc_path = escape(Cow::Borrowed(&self.path));
        let apply_cmd = format!(
            "{} apply --repository {} --branch {:?}",
            self.pijul_cmd, esc_path, remote_branch
        );
        let sign_cmd = format!("{} sign --repository {}", self.pijul_cmd, esc_path);

        let session = self.session.take().unwrap();

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                        .fold((session, Vec::new()), move |(session, buf), hash| {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                                futures::future::Either::A((SendFile {

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<

================================
        self.session = Some(
            self.l
                .block_on(
                    session
                        .channel_open_session()
                        .and_then(move |(session, chan0)| {
                            session
                                .channel_open_session()
                                .and_then(move |(mut session, chan1)| {
                                    session.handler_mut().exit_status.remove(&chan0);
                                    session.handler_mut().channel = Some(chan0);
                                    debug!("exec {:?}", apply_cmd);
                                    session.exec(chan0, false, &apply_cmd);
                                    debug!("exec {:?}", sign_cmd);
                                    session.exec(chan1, false, &sign_cmd);
                                    futures::stream::iter_ok(patch_hashes.into_iter())
                                        .fold((session, Vec::new()), move |(session, buf), hash| {
                                            let mut pdir = pdir.clone();
                                            pdir.push(hash.to_base58());
                                            pdir.set_extension("gz");
                                            let f = std::fs::File::open(&pdir).unwrap();
                                            pdir.set_extension("sig");
                                            if let Ok(sig) = std::fs::File::open(&pdir) {
                                                futures::future::Either::A(
                                                    (SendFile {
                                                        f: f,
                                                        buf: Some(buf),
                                                        chan: chan0,
                                                        state: Some(SendFileState::Read(session)),
                                                    })
                                                    .and_then(move |(session, mut buf)| {
                                                        buf.clear();
                                                        SendFile {
                                                            f: sig,
                                                            buf: Some(buf),
                                                            chan: chan1,
                                                            state: Some(SendFileState::Read(
                                                                session,
                                                            )),
                                                        }
                                                    }),
                                                )
                                            } else {
                                                futures::future::Either::B(SendFile {
                                                    f: f,
                                                    buf: Some(buf),
                                                    chan: chan0,
                                                    state: Some(SendFileState::Read(session)),
                                                })
                                            }
                                        })
                                        .and_then(move |(mut session, _)| {
                                            session.channel_eof(chan0);
                                            session
                                                .wait(move |session| {
                                                    session
                                                        .handler()
                                                        .exit_status
                                                        .get(&chan0)
                                                        .is_some()
                                                })
                                                .map(move |mut session| {
                                                    exit_status = session
                                                        .handler()
                                                        .exit_status
                                                        .get(&chan0)
                                                        .map(|x| *x);
                                                    session.channel_close(chan0);
                                                    session
                                                })
                                        })
                                        .map_err(From::from)

<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                                })
                        }),
                )
                .unwrap(),
        );

        if let Some(ref session) = self.session {
            debug!("exit status = {:?}", session.handler().exit_status);
        }
        Ok(())
    }

    pub fn remote_init(&mut self) -> Result<()> {
    pub fn remote_init(&mut self) -> Result<(), Error> {
        let esc_path = escape(Cow::Borrowed(self.path));
        let cmd = format!("{} init {}", self.pijul_cmd, esc_path);
        debug!("command line:{:?}", cmd);

        self.session = Some(
            self.l
                .block_on(
                    self.session
                        .take()
                        .unwrap()
                        .channel_open_session()
                        .and_then(move |(mut session, chan)| {
                            debug!("chan = {:?}", chan);
                            session.handler_mut().exit_status.remove(&chan);
                            session.handler_mut().channel = Some(chan);
                            session.exec(chan, false, &cmd);
                            session.channel_eof(chan);
                            // Wait until channel close.
                            session
                                .wait(move |session| {
                                    session.handler().exit_status.get(&chan).is_some()
                                })
                                .and_then(move |mut session| {
                                    if session.is_channel_open(chan) {
                                        session.channel_close(chan);
                                    }
                                    session.wait(move |session| !session.is_channel_open(chan))
                                })
                        }),
                )
                .unwrap(),
        );
        Ok(())
    }
}

impl<'a> UriSession<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        if !path.is_empty() {
            return Err(ErrorKind::PartialPullOverHttp.into());
            return Err(Error::PartialPullOverHttp);
        }
        let mut uri = self.uri.to_string();
        uri = uri + "/" + PIJUL_DIR_NAME + "/" + &branch_changes_base_path(branch);
        let mut req = reqwest_async::Request::new(reqwest::Method::GET, uri.parse().unwrap());
        req.headers_mut().insert(
            reqwest::header::CONNECTION,
            reqwest::header::HeaderValue::from_static("close"),
        );
        let res: Vec<u8> = self.l.block_on(self.client.execute(req).and_then(
            |resp: reqwest_async::Response| {
                let res = Vec::new();
                let body = resp.into_body();
                body.fold(res, |mut res, x| {
                    res.extend(x.iter());
                    futures::finished::<_, reqwest::Error>(res)
                })
            },
        ))?;
        let changes = read_changes(&mut &res[..]).unwrap_or(Vec::new());
        debug!("http: {:?}", changes);
        Ok(changes)
    }

    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
        local_tmp_file: PathBuf,
    ) -> Result<PathBuf> {
    ) -> Result<PathBuf, Error> {
        let ref mut l = self.l;
        let ref mut client = self.client;
        let uri = self.uri.to_string()
            + "/"
            + PIJUL_DIR_NAME
            + "/patches/"
            + &patch_hash.to_base58()
            + ".gz";
        debug!("downloading uri {:?}", uri);

        let req = reqwest_async::Request::new(reqwest::Method::GET, uri.parse().unwrap());
        let uri_sig = self.uri.to_string()
            + "/"
            + PIJUL_DIR_NAME
            + "/patches/"
            + &patch_hash.to_base58()
            + ".sig";
        debug!("{:?}", uri_sig);
        let req_sig = reqwest_async::Request::new(reqwest::Method::GET, uri_sig.parse().unwrap());
        let mut local_sig_file = local_file.clone();
        let mut local_tmp_sig_file = local_tmp_file.clone();
        local_sig_file.set_extension("sig");
        local_tmp_sig_file.set_extension("sig");

        let res = l
            .block_on(
                client
                    .execute(req)
                    .and_then(move |resp| {
                        if resp.status() == reqwest::StatusCode::OK {
                            let res = Vec::new();
                            futures::future::Either::A(
                                resp.into_body()
                                    .fold(res, |mut res, x| {
                                        res.extend(x.iter());
                                        futures::finished::<_, reqwest::Error>(res)
                                    })
                                    .map(|body| {
                                        // debug!("response={:?}", body);
                                        let mut f = File::create(&local_tmp_file).unwrap();
                                        f.write_all(&body).unwrap();
                                        // debug!("patch downloaded through http: {:?}", body);
                                        Some((local_tmp_file, local_file))
                                    }),
                            )
                        } else {
                            futures::future::Either::B(futures::finished(None))
                        }
                    })
                    .join(client.execute(req_sig).then(move |resp| {
                        let resp = if let Ok(resp) = resp {
                            resp
                        } else {
                            return futures::future::Either::B(futures::finished(None));
                        };
                        debug!("sig status {:?}", resp.status());
                        if resp.status() == reqwest::StatusCode::OK {
                            let res = Vec::new();
                            futures::future::Either::A(
                                resp.into_body()
                                    .fold(res, |mut res, x| {
                                        res.extend(x.iter());
                                        futures::finished::<_, reqwest::Error>(res)
                                    })
                                    .map(|body| {
                                        // debug!("response={:?}", body);
                                        let mut f = File::create(&local_tmp_sig_file).unwrap();
                                        f.write_all(&body).unwrap();
                                        // debug!("patch downloaded through http: {:?}", body);
                                        Some((local_tmp_sig_file, local_sig_file))
                                    }),
                            )
                        } else {
                            futures::future::Either::B(futures::finished(None))
                        }
                    })),
            )
            .unwrap();
        if let Some((local_tmp_file, local_file)) = res.0 {
            debug!("renaming {:?} to {:?}", local_tmp_file, local_file);
            rename(&local_tmp_file, &local_file)?;
            if let Some((local_tmp_sig_file, local_sig_file)) = res.1 {
                debug!("renaming {:?} to {:?}", local_tmp_sig_file, local_sig_file);
                rename(&local_tmp_sig_file, &local_sig_file).unwrap_or(());
            }
            Ok(local_file)
        } else {
            Err(ErrorKind::PatchNotFound(
                self.uri.into(), // repo_root.to_path_buf(),
                patch_hash.to_owned(),
            ).into())
            Err(Error::PatchNotFound {
                repo_root: self.uri.into(),
                patch_hash: patch_hash.to_owned(),
            })
        }
    }
}

impl<'a> LocalSession<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        let repo = self.root.open_repo(None)?;
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch) {
            if !path.is_empty() {
                let mut patches = Vec::new();
                for (hash, s) in txn.iter_patches(&branch, None) {
                    for path in path {
                        let inode = txn.find_inode(path).unwrap();
                        let key = txn.get_inodes(inode).unwrap().key;
                        if txn.get_touched(key, hash) {
                            patches.push((txn.get_external(hash).unwrap().to_owned(), s));
                            break;
                        }
                    }
                }
                Ok(patches)
            } else {
                Ok(txn
                    .iter_patches(&branch, None)
                    .map(|(hash, s)| (txn.get_external(hash).unwrap().to_owned(), s))
                    .collect())
            }
        } else {
            Ok(Vec::new())
        }
    }

    pub fn fetch_patch(&mut self, patch_hash: &Hash, local_file: PathBuf) -> Result<PathBuf> {
    pub fn fetch_patch(
        &mut self,
        patch_hash: &Hash,
        local_file: PathBuf,
    ) -> Result<PathBuf, Error> {
        debug!("local downloading {:?}", patch_hash);
        let remote_file = self
            .root
            .patches_dir()
            .join(&patch_file_name(patch_hash.as_ref()));
        debug!("hard linking {:?} to {:?}", remote_file, local_file);
        if hard_link(&remote_file, &local_file).is_err() {
            copy(&remote_file, &local_file)?;
        }
        Ok(local_file)
    }

    pub fn remote_apply(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: &Vec<Hash>,
    ) -> Result<Vec<ConflictingFile>, Error> {
        let mut remote_path = self.root.patches_dir();
        let mut local_path = repo_root.patches_dir();
        let remote_current_branch = self.root.get_current_branch()?;

        for hash in patch_hashes {
            remote_path.push(&hash.to_base58());
            remote_path.set_extension("gz");

            local_path.push(&hash.to_base58());
            local_path.set_extension("gz");

            debug!("hard linking {:?} to {:?}", local_path, remote_path);
            if metadata(&remote_path).is_err() {
                if hard_link(&local_path, &remote_path).is_err() {
                    copy(&local_path, &remote_path)?;
                }
            }

            remote_path.set_extension("sig");
            local_path.set_extension("sig");

            if metadata(&remote_path).is_err() && metadata(&local_path).is_ok() {
                if hard_link(&local_path, &remote_path).is_err() {
                    copy(&local_path, &remote_path)?;
                }
            }

            local_path.pop();
            remote_path.pop();
        }

        loop {
            let app = if remote_current_branch != remote_branch {
                apply_resize_no_output(&self.root, &remote_branch, patch_hashes.iter(), |_, _| {})
                    .map(|_| Vec::new())
            } else {
                apply_resize(
                    libpijul::DiffAlgorithm::default(),
                    &self.root,
                    &remote_branch,
                    patch_hashes.iter(),
                    &[] as &[RepoPath<&Path>],
                    |_, _| {},
                )
            };
            match app {
                Err(ref e) if e.lacks_space() => debug!("lacks space"),
                Ok(v) => return Ok(v),
                Err(e) => return Err(From::from(e)),
            }
        }
    }
}

#[derive(Debug, Clone)]
pub struct PushablePatches {
    pub pushable: Vec<(Hash, Option<PatchId>, ApplyTimestamp)>,
    pub non_fast_forward: Vec<Hash>,
}

impl<'a> Session<'a> {
    pub fn changes(
        &mut self,
        branch: &str,
        remote_path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Vec<(Hash, ApplyTimestamp)>, Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => ssh_session.changes(branch, remote_path),
            Session::Local(ref mut local_session) => local_session.changes(branch, remote_path),
            Session::Uri(ref mut uri_session) => uri_session.changes(branch, remote_path),
        }
    }
    pub fn download_patch(&mut self, repo_root: &Path, patch_hash: &Hash) -> Result<PathBuf> {
    pub fn download_patch(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        patch_hash: &Hash,
    ) -> Result<PathBuf, Error> {
        let patches_dir_ = repo_root.patches_dir();
        let local_file = patches_dir_.join(&patch_file_name(patch_hash.as_ref()));

        if !metadata(&local_file).is_ok() {
            match *self {
                Session::Local(ref mut local_session) => {
                    local_session.fetch_patch(patch_hash, local_file)
                }
                Session::Ssh(ref mut ssh_session) => {
                    let tmp_dir = tempdir_in(&patches_dir_)?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    ssh_session.fetch_patch(patch_hash, local_file, local_tmp_file)
                }
                Session::Uri(ref mut uri_session) => {
                    let tmp_dir = tempdir_in(&patches_dir_)?;
                    let local_tmp_file = tmp_dir.path().join("patch");
                    uri_session.fetch_patch(patch_hash, local_file, local_tmp_file)
                }
            }
        } else {
            Ok(local_file)
        }
    }

    fn remote_apply(
        &mut self,
        repo_root: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        patch_hashes: Vec<Hash>,
    ) -> Result<(), Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => {
                ssh_session.remote_apply(repo_root, remote_branch, patch_hashes)
            }

            Session::Local(ref mut local_session) => local_session
                .remote_apply(repo_root, remote_branch, &patch_hashes)
                .map(|_| ()),

            _ => panic!("upload to URI impossible"),
        }
    }

    pub fn remote_init(&mut self) -> Result<(), Error> {
        match *self {
            Session::Ssh(ref mut ssh_session) => ssh_session.remote_init(),
            Session::Local(ref mut local_session) => {
                assert_no_containing_repo(local_session.root.repo_root)?;
                create_repo(local_session.root.repo_root)
            }
            _ => panic!("remote init not possible"),
        }
    }

    pub fn pullable_patches(
        &mut self,
        remote_branch: &str,
        local_branch: &str,
        target: &RepoRoot<impl AsRef<Path>>,
        remote_path: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<Pullable, Error> {
        let mut remote_patches: Vec<(Hash, ApplyTimestamp)> = self
            .changes(remote_branch, remote_path)?
            .into_iter()
            .map(|(h, s)| (h.to_owned(), s))
            .collect();
        remote_patches.sort_by(|&(_, ref a), &(_, ref b)| a.cmp(&b));
        let local_patches: HashMap<Hash, ApplyTimestamp> = {
            let repo_dir = target.pristine_dir();
            let repo = Repository::open(&repo_dir, None)?;
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&local_branch) {
                txn.iter_patches(&branch, None)
                    .map(|(hash, s)| (txn.get_external(hash).unwrap().to_owned(), s))
                    .collect()
            } else {
                HashMap::new()
            }
        };
        debug!("pullable done: {:?}", remote_patches);
        Ok(Pullable {
            local: local_patches.iter().map(|(h, _)| h.to_owned()).collect(),
            remote: remote_patches.into_iter().collect(),
        })
    }

    pub fn pull(
        &mut self,
        target: &RepoRoot<impl AsRef<Path>>,
        to_branch: &str,
        pullable: &mut Vec<(Hash, ApplyTimestamp)>,
        partial_paths: &[RepoPath<impl AsRef<Path>>],
        display_progress: bool,
    ) -> Result<()> {
    ) -> Result<Vec<ConflictingFile>, Error> {
        let mut p = if display_progress && !pullable.is_empty() {
            Some((progrs::start("Pulling patches", pullable.len() as u64), 0))
        } else {
            None
        };
        let mut pullable_plus_deps = Vec::new();
        let mut pulled = HashSet::new();

        while let Some((hash, _)) = pullable.pop() {
            if pulled.contains(&hash) {
                continue;
            }
            debug!("hash = {:?}", hash);
            let path = self.download_patch(target, &hash)?;

            let patch = {
                let file = File::open(&path)?;
                let mut file = BufReader::new(file);
                Patch::from_reader_compressed(&mut file)?.2
            };
            pulled.insert(hash.clone());

            // If the apply is partial, we might not have all the
            // dependencies. Add them to this list.
            if !partial_paths.is_empty() {
                for dep in patch.dependencies() {
                    if !pulled.contains(dep) {
                        pullable.push((dep.to_owned(), 0));
                    }
                }
            }

            pullable_plus_deps.push((hash.to_owned(), patch));

            p.as_mut().map(|&mut (ref mut p, ref mut n)| {
                p.display({
                    *n = *n + 1;
                    *n
                })
            });
        }

        // Because we've been popping the stack of pullable patches in
        // reverse order, we need to reverse the result.
        pullable_plus_deps.reverse();

        p.map(|(p, _)| p.stop("done"));
        debug!("patches downloaded");

        let p = std::cell::RefCell::new(progrs::start(
            "Applying patches",
            pullable_plus_deps.len() as u64,
        ));
        let mut size_increase = 4096;
        let current_branch = target.get_current_branch()?;
        let conflicts = loop {
            let app = if current_branch != to_branch {
                apply_resize_patches_no_output(
                    &target,
                    &to_branch,
                    &pullable_plus_deps,
                    size_increase,
                    |c, _| p.borrow_mut().display(c as u64),
                )
                .map(|_| Vec::new())
            } else {
                apply_resize_patches(
                    libpijul::DiffAlgorithm::default(),
                    &target,
                    &to_branch,
                    &pullable_plus_deps,
                    size_increase,
                    partial_paths,
                    |c, _| p.borrow_mut().display(c as u64),
                )
            };
            match app {
                Ok(conflicts) => break conflicts,
                Err(ref e) if e.lacks_space() => size_increase *= 2,
                Err(e) => return Err(e.into()),
            }
        };
        p.into_inner().stop("done");
        Ok(conflicts)
    }

    /// Returns a vector of pushable patches, and a vector of changes
    /// present on the remote branch but not on the local one (to
    /// identify fast-forward pushes).
    pub fn pushable_patches(
        &mut self,
        from_branch: &str,
        to_branch: &str,
        source: &RepoRoot<impl AsRef<Path> + std::fmt::Debug>,

        remote_paths: &[&str],
    ) -> Result<Vec<(Hash, Option<PatchId>, ApplyTimestamp)>> {
        remote_paths: &[RepoPath<impl AsRef<Path>>],
    ) -> Result<PushablePatches, Error> {
        debug!("source: {:?}", source);

        let mut non_fast_forward = Vec::new();

        let to_changes_ = self.changes(to_branch, remote_paths)?;

        let repo = source.open_repo(None)?;
        let txn = repo.txn_begin()?;

        let mut to_changes = HashSet::new();
        let branch = txn.get_branch(&from_branch);
        for (h, _) in to_changes_.iter() {
            if let Some(ref branch) = branch {
                if let Some(patchid) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, patchid).is_none() {
                        non_fast_forward.push(h.clone())
                    }
                } else {
                    non_fast_forward.push(h.clone())
                }
            }
            to_changes.insert(h.as_ref());
        }
        debug!("to_changes: {:?}", to_changes);
        let from_changes: Vec<_> = {
            if let Some(branch) = txn.get_branch(&from_branch) {
                txn.iter_applied(&branch, None)
                    .filter_map(|(s, patchid)| {
                        if let Some(hash) = txn.get_external(patchid) {
                            if to_changes.contains(&hash) {
                                None
                            } else {
                                Some((hash.to_owned(), Some(patchid), s))
                            }
                        } else {
                            None
                        }
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        debug!("pushing: {:?}", from_changes);
        Ok(PushablePatches {
            pushable: from_changes,
            non_fast_forward,
        })
    }

    pub fn push(
        &mut self,
        source: &RepoRoot<impl AsRef<Path>>,
        remote_branch: &str,
        pushable: Vec<Hash>,
    ) -> Result<(), Error> {
        debug!("push, remote_applying");
        debug!("pushable: {:?}", pushable);
        if pushable.len() > 0 {
            self.remote_apply(source, remote_branch, pushable)?;
        }
        Ok(())
    }
}
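
// Illustrative only, not part of this patch: `pull` above and several of the
// commands further down all retry in a loop whenever the error reports
// `lacks_space()`, usually doubling the requested size increase. Factored
// out, the pattern looks roughly like this (`retry_growing` and the
// `lacks_space` closure are hypothetical; the real call sites inline it):
fn retry_growing<T, E>(
    mut size: u64,
    lacks_space: impl Fn(&E) -> bool,
    mut attempt: impl FnMut(u64) -> std::result::Result<T, E>,
) -> std::result::Result<T, E> {
    loop {
        match attempt(size) {
            // The pristine was too small for this apply: grow and try again.
            Err(ref e) if lacks_space(e) => size *= 2,
            other => return other,
        }
    }
}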

pub fn ssh_connect(user: &Option<&str>, host: &str, port: Option<u16>) -> Result<(thrussh_config::Config, thrussh_config::ConnectFuture)> {
pub fn ssh_connect(
    user: &Option<&str>,
    host: &str,
    port: Option<u16>,
) -> Result<(thrussh_config::Config, thrussh_config::ConnectFuture), Error> {
    let mut ssh_config =
        thrussh_config::parse_home(host).unwrap_or(thrussh_config::Config::default());
    debug!("ssh_config = {:?}", ssh_config);

    if ssh_config.host_name.is_none() {
        ssh_config.host_name = Some(host.to_string())
    }

    if let Some(port) = port {
        ssh_config.port = Some(port)
    } else if ssh_config.port.is_none() {
        ssh_config.port = Some(22)
    }

    if let Some(ref user) = *user {
        ssh_config.user = Some(user.to_string())
    } else if ssh_config.user.is_none() {
        ssh_config.user = Some(username::get_user_name().unwrap())
    }

    ssh_config.update_proxy_command();
    let stream = if let Some(ref proxycmd) = ssh_config.proxy_command {
        debug!("{:?}", proxycmd);
        thrussh_config::Stream::proxy_command("sh", &["-c", proxycmd.as_str()])
    } else {
        let addr = if let Some(addrs) = (
            ssh_config.host_name.as_ref().unwrap().as_str(),
            ssh_config.port.unwrap(),
        )
            .to_socket_addrs()?
            .next()
        {
            addrs
        } else {
            return Err(Error::UnknownHost {
                host: host.to_string(),
            });
        };
        debug!("addr = {:?}", addr);
        thrussh_config::Stream::tcp_connect(&addr)
    };
    Ok((ssh_config, stream))
}

impl<'a> Remote<'a> {
    pub fn session(&'a self) -> Result<Session<'a>> {
    pub fn session(&'a self) -> Result<Session<'a>, Error> {
        match *self {

use error::Error;
use super::ask::{ask_changes, ChangesDirection};
use super::record;
use chrono;
use clap::{Arg, ArgMatches, SubCommand};
use commands::{default_explain, BasicOptions, StaticSubcommand};
use error::Result;
use error::Error;
use libpijul::fs_representation::{RepoPath, RepoRoot};
use libpijul::patch::{Patch, PatchFlags, UnsignedPatch};
use libpijul::{Inode, InodeUpdate, Repository, ToPrefixes};
use rand;
use std;
use std::collections::HashSet;
use std::path::{Path, PathBuf};

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("revert")
        .about("Rewrite the working copy from the pristine")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .takes_value(true)
                .help("Local repository."),
        )
        .arg(
            Arg::with_name("all")
                .short("a")
                .long("all")
                .help("Answer 'y' to all questions")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("branch")
                .help("Branch to revert to.")
                .long("branch")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("prefix")
                .help("Prefix to start from")
                .takes_value(true)
                .multiple(true),
        )
        .arg(
            Arg::with_name("patience")
                .long("patience")
                .help("Use patience diff instead of the default (Myers diff)")
                .conflicts_with("myers")
                .takes_value(false),
        )
        .arg(
            Arg::with_name("myers")
                .long("myers")
                .help("Use Myers diff")
                .conflicts_with("patience")
                .takes_value(false),
        );
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let yes_to_all = args.is_present("all");
    let branch_name = opts.branch();
    let prefix = record::prefix(args, &opts)?;
    // Generate the pending patch.
    let (pending, pending_syncs): (_, HashSet<_>) = if !yes_to_all || prefix.is_some() {
        let repo = opts.open_and_grow_repo(409600)?;
        let mut txn = repo.mut_txn_begin(rand::thread_rng())?;
        let (changes, syncs) = {
            let (changes, syncs) = record::changes_from_prefixes(
                if args.is_present("patience") {
                    libpijul::DiffAlgorithm::Patience
                } else {
                    libpijul::DiffAlgorithm::Myers
                },
                &opts.repo_root,
                &mut txn,
                &branch_name,
                prefix.as_ref(),
            )?;
            let changes: Vec<_> = changes
                .into_iter()
                .map(|x| txn.globalize_record(x))
                .collect();
            if yes_to_all {
                (Vec::new(), HashSet::new())
            } else {
                let (c, _empty_vec) = ask_changes(
                    &txn,
                    &opts.repo_root,
                    &opts.cwd,
                    &changes,
                    ChangesDirection::Revert,
                    &mut HashSet::new(),
                )?;
                let selected = changes
                    .into_iter()
                    .enumerate()
                    .filter(|&(i, _)| *(c.get(&i).unwrap_or(&false)))
                    .map(|(_, x)| x)
                    .collect();
                (selected, syncs)
            }
        };
        debug!("changes {:?}", changes);
        debug!("syncs {:?}", syncs);
        let branch = txn.get_branch(&branch_name).unwrap();
        let changes = changes.into_iter().flat_map(|x| x.into_iter()).collect();
        let patch = txn.new_patch(
            &branch,
            Vec::new(),
            String::new(),
            None,
            chrono::Utc::now(),
            changes,
            std::iter::empty(),
            PatchFlags::empty(),
        );
        txn.commit()?;
        (patch, syncs)
    } else {
        (UnsignedPatch::empty().leave_unsigned(), HashSet::new())
    };

    let mut size_increase = None;
    let pristine = opts.pristine_dir();
    loop {
        match output_repository(
            &opts.repo_root,
            &pristine,
            &branch_name,
            size_increase,
            prefix.as_ref(),
            &pending,
            &pending_syncs,
        ) {
            Err(ref e) if e.lacks_space() => {
                size_increase = Some(Repository::repository_size(&pristine).unwrap())
            }
            e => return e,
        }
    }
}

fn output_repository(
    r: &RepoRoot<impl AsRef<Path>>,
    pristine_dir: &Path,
    branch: &str,
    size_increase: Option<u64>,
    prefixes: Option<&Vec<RepoPath<PathBuf>>>,
    pending: &Patch,
    pending_syncs: &HashSet<InodeUpdate>,
) -> Result<()> {
) -> Result<(), Error> {
    let repo = Repository::open(&pristine_dir, size_increase)?;
    let mut txn = repo.mut_txn_begin(rand::thread_rng())?;

    let mut inode_prefixes = Vec::new();
    if let Some(prefixes) = prefixes {
        for pref in prefixes.iter() {
            inode_prefixes.push(txn.find_inode(pref).unwrap());
        }
    }
    for (_, key) in txn
        .iter_partials(branch)
        .take_while(|&(k, _)| k.as_str() == branch)
    {
        debug!("extra inode prefixes: {:?}", key);
        inode_prefixes.push(txn.get_revinodes(key).unwrap())
    }

    let mut branch = txn.open_branch(branch)?;
    let pref = (&inode_prefixes as &[Inode]).to_prefixes(&txn, &branch);
    debug!("{:?}", pref);
    txn.output_repository(&mut branch, &r, &pref, pending, pending_syncs)?;
    txn.commit_branch(branch)?;
    txn.commit()?;
    Ok(())
}

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)

            chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate { date: String::from(x) })
use clap::{Arg, ArgMatches, SubCommand};

use super::{ask, default_explain, validate_base58, BasicOptions, StaticSubcommand};
use meta::{load_signing_key, Global, Meta};
use std::collections::HashSet;
use std::path::Path;

use chrono;
use libpijul::fs_representation::{patch_file_name, RepoPath, RepoRoot};
use libpijul::patch::{Patch, PatchFlags};
use libpijul::{apply_resize, apply_resize_no_output, Hash, HashRef, PatchId};
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::iter;
use std::mem::drop;
use std::str::FromStr;

use commands::record::{decide_authors, decide_patch_message, record_args};
use error::Error;

pub fn invocation() -> StaticSubcommand {
    record_args(
        SubCommand::with_name("rollback").arg(
            Arg::with_name("patch")
                .help("Patch to roll back.")
                .takes_value(true)
                .multiple(true)
                .validator(validate_base58),
        ),
    )
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let patches: Option<HashSet<Hash>> = args
        .values_of("patch")
        .map(|ps| ps.map(|x| Hash::from_base58(x).unwrap()).collect());

    let mut increase = 409600;
    let repo = opts.open_and_grow_repo(increase)?;
    let branch_name = opts.branch();

    let mut patches: HashMap<_, _> = if let Some(ref patches) = patches {
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch_name) {
            let mut patches_ = HashMap::new();
            for h in patches.iter() {
                debug!("unrecording {:?}", h);

                if let Some(internal) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, internal).is_some() {
                        let patch = load_patch(&opts.repo_root, h.as_ref());
                        patches_.insert(h.to_owned(), patch);
                        continue;
                    }
                }
                return Err(ErrorKind::BranchDoesNotHavePatch(
                    branch.name.as_str().to_string(),
                    h.to_owned(),
                ).into());
                return Err(Error::BranchDoesNotHavePatch {
                    branch_name: branch.name.as_str().to_string(),
                    patch: h.to_owned(),
                });
            }
            patches_
        } else {
            HashMap::new()
        }
    } else {
        let mut patches: Vec<_> = {
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&branch_name) {
                txn.rev_iter_applied(&branch, None)
                    .map(|(t, h)| {
                        let ext = txn.get_external(h).unwrap();
                        let patch = load_patch(&opts.repo_root, ext);
                        (ext.to_owned(), Some(h.to_owned()), patch, t)
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        patches.sort_by(|&(_, _, _, a), &(_, _, _, b)| b.cmp(&a));
        let patches: Vec<(Hash, Option<PatchId>, Patch)> =
            patches.into_iter().map(|(a, b, c, _)| (a, b, c)).collect();
        // debug!("patches: {:?}", patches);
        let to_unrecord = ask::ask_patches(ask::Command::Unrecord, &patches).unwrap();
        debug!("to_unrecord: {:?}", to_unrecord);
        let patches: HashMap<_, _> = patches
            .into_iter()
            .filter(|&(ref k, _, _)| to_unrecord.contains(&k))
            .map(|(k, _, p)| (k, p))
            .collect();
        patches
    };

    let mut selected = Vec::new();
    loop {
        let hash = if let Some((hash, patch)) = patches.iter().next() {
            increase += patch.size_upper_bound() as u64;
            hash.to_owned()
        } else {
            break;
        };
        deps_dfs(&mut selected, &mut patches, &hash)
    }

    // Create the inverse changes.
    let mut changes = Vec::new();
    for &(ref hash, ref patch) in selected.iter() {
        debug!("inverting {:?}", patch);
        patch.inverse(hash, &mut changes)
    }

    let meta = Meta::load(&opts.repo_root).unwrap_or_else(|_| Meta::new());
    let (global, save_global) = Global::load()
        .map(|g| (g, false))
        .unwrap_or_else(|_| (Global::new(), true));

    if save_global {
        global.save().unwrap_or(())
    }

    // Create the inverse patch, and save it.
    let patch = {
        let authors_arg = args.values_of("author").map(|x| x.collect::<Vec<_>>());
        let patch_name_arg = args.value_of("message");
        let patch_descr_arg = args.value_of("description");

        let txn = repo.txn_begin()?;
        let authors = decide_authors(authors_arg, &meta, &global)?;

        let patch_date = args.value_of("date").map_or(Ok(chrono::Utc::now()), |x| {
            chrono::DateTime::from_str(x).map_err(|_| ErrorKind::InvalidDate(String::from(x)))
            chrono::DateTime::from_str(x).map_err(|_| Error::InvalidDate {
                date: String::from(x),
            })
        })?;

        let (name, description) = decide_patch_message(
            patch_name_arg,
            patch_descr_arg,
            String::from(""),
            !args.is_present("no-editor"),
            &opts.repo_root,
            &meta,
            &global,
        )?;

        if let Some(branch) = txn.get_branch(&branch_name) {
            txn.new_patch(
                &branch,
                authors,
                name,
                description,
                patch_date,
                changes,
                iter::empty(),
                PatchFlags::empty(),
            )
        } else {
            unimplemented!()
        }
    };
    let patches_dir = opts.repo_root.patches_dir();
    let mut key = meta
        .signing_key
        .or(global.signing_key)
        .and_then(|s| load_signing_key(s).ok());
    let hash = if let Some(ref mut key) = key {
        key.check_author(&patch.header().authors)?;
        patch.save(&patches_dir, key.keys.get_mut(0))?
    } else {
        patch.save(&patches_dir, None)?
    };
    drop(repo);
    println!("Recorded patch {}", hash.to_base58());

    let is_current_branch = if let Ok(br) = opts.repo_root.get_current_branch() {
        br == opts.branch()
    } else {
        false
    };

    // Apply the inverse patch.
    loop {
        let app = if !is_current_branch {
            apply_resize_no_output(
                &opts.repo_root,
                &opts.branch(),
                iter::once(&hash),
                |_, _| (),
            )
            .map(|_| Vec::new())
        } else {
            apply_resize(
                libpijul::DiffAlgorithm::default(),
                &opts.repo_root,
                &opts.branch(),
                iter::once(&hash),
                &[] as &[RepoPath<&Path>],
                |_, _| (),
            )
        };
        match app {
            Err(ref e) if e.lacks_space() => {}
            Ok(_) => return Ok(()),
            Err(e) => return Err(From::from(e)),
        }
    }
}

fn load_patch(repo_root: &RepoRoot<impl AsRef<Path>>, ext: HashRef) -> Patch {
    let base = patch_file_name(ext);
    let filename = repo_root.patches_dir().join(&base);
    debug!("filename: {:?}", filename);
    let file = File::open(&filename).unwrap();
    let mut file = BufReader::new(file);
    let (_, _, patch) = Patch::from_reader_compressed(&mut file).unwrap();
    patch
}

fn deps_dfs(selected: &mut Vec<(Hash, Patch)>, patches: &mut HashMap<Hash, Patch>, current: &Hash) {
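    // Post-order DFS: a patch is pushed onto `selected` only after all of its
    // dependencies, so the result comes out in dependency-first order.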
    if let Some(patch) = patches.remove(current) {
        for dep in patch.dependencies().iter() {
            deps_dfs(selected, patches, dep)
        }

        selected.push((current.to_owned(), patch))
    }
}

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)

use error::Error;
use super::{default_explain, get_wd, validate_base58, StaticSubcommand};
use clap::{Arg, ArgMatches, SubCommand};
use error::Error;
use libpijul::fs_representation::find_repo_root;
use libpijul::{Hash, DEFAULT_BRANCH};
use std::collections::HashSet;
use std::mem;
use std::path::Path;
use std::string::String;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("dependencies")
        .alias("deps")
        .about("Print the patch dependencies using the DOT syntax in stdout")
        .arg(
            Arg::with_name("hash")
                .help("Hash of a patch.")
                .takes_value(true)
                .required(false)
                .multiple(true)
                .validator(validate_base58),
        )
        .arg(
            Arg::with_name("depth")
                .long("depth")
                .help("The depth of the dependencies graph")
                .takes_value(true)
                .required(false)
                .validator(|x| {
                    if let Ok(x) = x.parse::<usize>() {
                        if x >= 1 {
                            return Ok(());
                        }
                    }
                    Err("The depth argument must be an integer, and at least 1".to_owned())
                }),
        )
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .multiple(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help("Branch.")
                .takes_value(true)
                .required(false),
        );
}

enum Target<'a> {
    Branch(Option<&'a str>),
    Hash(Vec<&'a str>, usize),
}

pub struct Params<'a> {
    pub repository: Option<&'a Path>,
    target: Target<'a>,
}

pub fn parse_args<'a>(args: &'a ArgMatches) -> Result<Params<'a>> {
pub fn parse_args<'a>(args: &'a ArgMatches) -> Result<Params<'a>, Error> {
    let target = if let Some(hash) = args.values_of("hash") {
        let depth = args
            .value_of("depth")
            .unwrap_or("1")
            .parse::<usize>()
            .unwrap();

        Target::Hash(hash.collect(), depth)
    } else {
        Target::Branch(args.value_of("branch"))
    };

    Ok(Params {
        repository: args.value_of("repository").map(|x| Path::new(x)),
        target: target,
    })
}

fn label_sanitize(str: String) -> String {
    // First, we escape the quotes, because otherwise it may interfere with dot
    // notation.
    let label = str.replace("\"", "\\\"");

    // Then, to get a more readable graph, we add line breaks every five words,
    // in order to avoid very wide nodes.
    let mut words = label.split_whitespace();

    let mut nth = 0;
    let mut res = String::from("");

    if let Some(first_word) = words.next() {
        res.push_str(first_word);

        for word in words {
            if nth >= 5 {
                res.push_str("\\n");
                nth = 0;
            } else {
                res.push_str(" ");
                nth += 1;
            }

            res.push_str(word);
        }
    }

    res
}
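
// Hypothetical test, not part of this patch: documents the behaviour of
// `label_sanitize` above -- double quotes are escaped for DOT, and a literal
// "\n" is inserted once the word counter reaches five, i.e. after every
// sixth word.
#[test]
fn label_sanitize_escapes_and_wraps() {
    assert_eq!(
        label_sanitize(String::from("a b c d e f g h")),
        "a b c d e f\\ng h"
    );
    assert_eq!(label_sanitize(String::from("say \"hi\"")), "say \\\"hi\\\"");
}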

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let args = parse_args(args)?;
    let wd = get_wd(args.repository)?;
    let target = if let Some(r) = find_repo_root(&wd) {
        r
    } else {
        return Err(ErrorKind::NotInARepository.into());
        return Err(Error::NotInARepository);
    };
    let repo = target.open_repo(None)?;
    let txn = repo.txn_begin()?;

    match args.target {
        Target::Branch(branch_arg) => {
            let branch_name = if let Some(b) = branch_arg {
                b.to_string()
            } else if let Ok(b) = target.get_current_branch() {
                b
            } else {
                DEFAULT_BRANCH.to_string()
            };

            if let Some(branch) = txn.get_branch(&branch_name) {
                println!("digraph dependencies {{");
                println!("  graph [rankdir=LR];");

                for (_, hash) in txn.rev_iter_applied(&branch, None) {
                    let hash_ext = txn.get_external(hash).unwrap();
                    let patch = target.read_patch(hash_ext)?;

                    patch_node(
                        hash_ext.to_base58(),
                        patch.header().name.clone(),
                        patch.is_tag(),
                    );

                    let deps = txn.minimize_deps(patch.dependencies());
                    for hash_dep in deps {
                        println!("  N{} -> N{}", hash_ext.to_base58(), hash_dep.to_base58());
                    }
                }
                println!("}}");
            }
        }
        Target::Hash(hashes, depth) => {
            let mut seen = HashSet::new();
            let mut vec: Vec<_> = hashes
                .iter()
                .map(|h| Hash::from_base58(h).unwrap())
                .collect();
            let mut next = Vec::new();

            println!("digraph dependencies {{");
            println!("  graph [rankdir=LR];");

            for _ in 0..depth {
                for hash in vec.drain(..) {
                    debug!("hash: {:?}", hash);
                    seen.insert(hash.clone());
                    let hash_ext = hash.as_ref();
                    let patch = target.read_patch(hash_ext)?;

                    patch_node(
                        hash_ext.to_base58(),
                        patch.header().name.clone(),
                        patch.is_tag(),
                    );

                    let deps = txn.minimize_deps(patch.dependencies());
                    for hash_dep in deps.iter() {
                        debug!("dep: {:?}", hash_dep);
                        println!("  N{} -> N{}", hash_ext.to_base58(), hash_dep.to_base58());

                        let h = hash_dep.to_owned();

                        if !seen.contains(&h) {
                            seen.insert(h.clone());
                            next.push(h);
                        }
                    }
                }

                // vec should be empty, as it has been consumed by drain;
                // next, on the other hand, contains all the dependencies
                // to walk into in the next loop iteration
                mem::swap(&mut next, &mut vec);
            }

            // one last pass to emit the nodes for the remaining dependencies
            for hash in vec.drain(..) {
                let hash_ext = hash.as_ref();
                let patch = target.read_patch(hash_ext)?;

                patch_node(
                    hash_ext.to_base58(),
                    patch.header().name.clone(),
                    patch.is_tag(),
                );
            }

            // and we are done
            println!("}}");
        }
    }

    Ok(())
}

fn patch_node(hash: String, name: String, is_tag: bool) {
    if is_tag {
        println!(
            "  N{} [label=\"TAG: {}\", shape=box]",
            hash,
            label_sanitize(name)
        );
    } else {
        println!("  N{} [label=\"{}\"]", hash, label_sanitize(name));
    }
}
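
// Illustrative only, not part of this patch: for a tag whose header name is
// "v1.0", `patch_node` above prints a DOT node of the form
//   N<base58 hash> [label="TAG: v1.0", shape=box]
// while a plain patch gets the same node without the TAG prefix or box shape.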

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)



use error::Error;
pub fn load_key_or_ask(path_sec: &Path) -> Result<thrussh_keys::key::KeyPair, Error> {
                    return Err(Error::SshKeyNotFound { path: path_sec.to_path_buf() })
use dirs;
use error::{ErrorKind, Result};
use error::Error;
use futures::future::Either;
use futures::Future;
use meta;
use rpassword;
use std;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use thrussh::client::{Authenticate, Connection, Handler};
use thrussh::Tcp;
use thrussh_keys;
use tokio::io::{AsyncRead, AsyncWrite};

pub enum AuthAttempt {
    Agent(thrussh_keys::key::PublicKey),
    Key(Arc<thrussh_keys::key::KeyPair>),
    Password(String),
}

#[derive(Debug)]
enum AuthState {
    Agent(KeyPath),
    Key(KeyPath),
    Password,
}

pub struct AuthAttempts {
    state: AuthState,
    local_repo_root: Option<PathBuf>,
    server_name: String,
}

impl AuthAttempts {
    pub fn new(server_name: String, local_repo_root: Option<PathBuf>, use_agent: bool) -> Self {
        AuthAttempts {
            state: if use_agent {
                AuthState::Agent(KeyPath::first())
            } else {
                AuthState::Key(KeyPath::first())
            },
            local_repo_root,
            server_name,
        }
    }
}

#[derive(Debug, Clone, Copy)]
enum KeyLocation {
    Local,
    Pijul,
    Ssh,
}

impl KeyLocation {
    fn next(&self) -> Option<Self> {
        match *self {
            KeyLocation::Local => Some(KeyLocation::Pijul),
            KeyLocation::Pijul => Some(KeyLocation::Ssh),
            KeyLocation::Ssh => None,
        }
    }
}

#[derive(Debug, Clone, Copy)]
enum KeyType {
    Ed25519,
    Rsa,
}

#[derive(Debug, Clone, Copy)]
struct KeyPath {
    location: KeyLocation,
    typ: KeyType,
}

impl KeyPath {
    fn first() -> Self {
        KeyPath {
            location: KeyLocation::Local,
            typ: KeyType::Ed25519,
        }
    }
    fn next(&self) -> Option<KeyPath> {
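        // Walks every candidate key, Ed25519 before RSA:
        //   Local/Ed25519 -> Pijul/Ed25519 -> Ssh/Ed25519
        //   -> Local/Rsa -> Pijul/Rsa -> Ssh/Rsa -> None.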
        match self.typ {
            KeyType::Ed25519 => {
                if let Some(location) = self.location.next() {
                    Some(KeyPath {
                        location,
                        typ: KeyType::Ed25519,
                    })
                } else {
                    Some(KeyPath {
                        location: KeyLocation::Local,
                        typ: KeyType::Rsa,
                    })
                }
            }
            KeyType::Rsa => {
                if let Some(location) = self.location.next() {
                    Some(KeyPath {
                        location,
                        typ: KeyType::Rsa,
                    })
                } else {
                    None
                }
            }
        }
    }
}

impl AuthAttempts {
    fn key_dir(&self, key: &KeyPath) -> Option<PathBuf> {
        match key.location {
            KeyLocation::Local => self.local_repo_root.clone(),
            KeyLocation::Pijul => meta::global_path().ok(),
            KeyLocation::Ssh => {
                if let Some(mut path) = dirs::home_dir() {
                    path.push(".ssh");
                    Some(path)
                } else {
                    None
                }
            }
        }
    }

    fn key(&self, key: &KeyPath) -> Option<PathBuf> {
        self.key_dir(key).map(|mut p| {
            p.push(match key.typ {
                KeyType::Ed25519 => "id_ed25519",
                KeyType::Rsa => "id_rsa",
            });
            p
        })
    }

    fn public_key(&self, key: &KeyPath) -> Option<PathBuf> {
        self.key(key).map(|mut p| {
            p.set_extension("pub");
            p
        })
    }
}

impl Iterator for AuthAttempts {
    type Item = AuthAttempt;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            debug!("state {:?}", self.state);
            match self.state {
                AuthState::Agent(key_path) => {
                    let path = self.public_key(&key_path);
                    debug!("agent path {:?}", path);
                    if let Some(key_path) = key_path.next() {
                        self.state = AuthState::Agent(key_path)
                    } else {
                        self.state = AuthState::Key(KeyPath::first())
                    }
                    if let Some(path) = path {
                        if let Ok(key) = thrussh_keys::load_public_key(&path) {
                            return Some(AuthAttempt::Agent(key));
                        }
                    }
                }
                AuthState::Key(key_path) => {
                    let path = self.key(&key_path);
                    debug!("path {:?}", path);
                    if let Some(path) = path {
                        if let Some(key_path) = key_path.next() {
                            self.state = AuthState::Key(key_path)
                        } else {
                            self.state = AuthState::Password
                        }
                        if let Ok(key) = load_key_or_ask(&path) {
                            return Some(AuthAttempt::Key(Arc::new(key)));
                        }
                    } else {
                        self.state = AuthState::Password
                    }
                }
                AuthState::Password => {
                    let password = rpassword::prompt_password_stdout(&format!(
                        "Password for {:?}: ",
                        self.server_name
                    ));
                    if let Ok(password) = password {
                        return Some(AuthAttempt::Password(password));
                    }
                }
            }
        }
    }
}

pub fn auth_attempt_future<R: Tcp + AsyncRead + AsyncWrite>(
    co: Connection<R, super::remote::Client>,
    auth_attempts: AuthAttempts,
    user: String,
    add_to_agent: thrussh_config::AddKeysToAgent,
) -> impl Future<Item = Connection<R, super::remote::Client>, Error = Error> {
    super::fold_until::new(
        futures::stream::iter_ok(auth_attempts),
        co,
        move |mut co, attempt| {
            if let AuthAttempt::Key(key) = attempt {
                debug!("not authenticated");
                let agent_constraints = match add_to_agent {
                    thrussh_config::AddKeysToAgent::Yes => Some(&[][..]),
                    thrussh_config::AddKeysToAgent::No => None,
                    thrussh_config::AddKeysToAgent::Confirm => {
                        Some(&[thrussh_keys::agent::Constraint::Confirm][..])
                    }
                    thrussh_config::AddKeysToAgent::Ask => None, // not yet implemented.
                };
                if let Some(cons) = agent_constraints {
                    if let Some(agent) = co.handler_mut().agent.take() {
                        let user = user.clone();
                        return Either::A(agent.add_identity(&key, cons).from_err().and_then(
                            move |(agent, _)| {
                                co.handler_mut().agent = Some(agent);
                                next_auth(co, &user, AuthAttempt::Key(key))
                            },
                        ));
                    }
                }
                Either::B(next_auth(co, &user, AuthAttempt::Key(key)))
            } else {
                Either::B(next_auth(co, &user, attempt))
            }
        },
        |co| futures::finished::<_, Error>((!co.is_authenticated(), co)),
    )
}

fn next_auth<R: AsyncRead + AsyncWrite + Tcp, H: Handler>(
    session: Connection<R, H>,
    user: &str,
    next: AuthAttempt,
) -> Authenticate<R, H> {
    debug!("next_auth");
    match next {
        AuthAttempt::Agent(pk) => {
            debug!("agent");
            session.authenticate_key_future(user, pk)
        }
        AuthAttempt::Key(k) => {
            debug!("key");
            session.authenticate_key(user, k)
        }
        AuthAttempt::Password(pass) => {
            debug!("password");
            session.authenticate_password(user, pass)
        }
    }
}

pub fn load_key_or_ask(path_sec: &Path) -> Result<thrussh_keys::key::KeyPair> {
pub fn load_key_or_ask(path_sec: &Path) -> Result<thrussh_keys::key::KeyPair, Error> {
    debug!("path_sec {:?}", path_sec);

use error::Error;
use super::default_explain;
use super::record;
use chrono;
use clap::{ArgMatches, SubCommand};
use commands::hooks::run_hook;
use commands::record::{decide_authors, decide_patch_message, record_args};
use commands::{BasicOptions, StaticSubcommand};
use error::Error;
use libpijul::patch::PatchFlags;
use libpijul::Hash;
use meta::{load_signing_key, Global, Meta};
use std::collections::HashSet;


use std::mem::drop;

pub fn invocation() -> StaticSubcommand {
    record_args(
        SubCommand::with_name("tag").about(
            "Create a tag, i.e. an empty patch with all patches on the branch as dependencies",
        ),
    )
}

pub fn run(args: &ArgMatches) -> Result<Option<Hash>> {
pub fn run(args: &ArgMatches) -> Result<Option<Hash>, Error> {
    let opts = BasicOptions::from_args(args)?;
    let patch_name_arg = args.value_of("message");
    let patch_descr_arg = args.value_of("description");
    let authors_arg = args.values_of("author").map(|x| x.collect::<Vec<_>>());
    let branch_name = opts.branch();

    let mut save_meta = false;

    let (mut global, save_global) = Global::load().map(|g| (g, false)).unwrap_or_else(|e| {
        info!("loading global key, error {:?}", e);
        (Global::new(), true)
    });

    let mut meta = match Meta::load(&opts.repo_root) {
        Ok(m) => m,
        Err(_) => {
            save_meta = true;
            Meta::new()
        }
    };

    let repo = opts.open_repo()?;
    let patch = {
        let txn = repo.txn_begin()?;
        debug!("meta:{:?}", meta);

        let authors = decide_authors(authors_arg, &meta, &global)?;

        if meta.authors.len() == 0 {
            meta.authors = authors.clone();
            save_meta = true;
        }

        if global.author.is_none() {
            global.author = Some(authors[0].clone());
        }

        debug!("authors:{:?}", authors);

        let (patch_name, description) = decide_patch_message(
            patch_name_arg,
            patch_descr_arg,
            String::from(""),
            !args.is_present("no-editor"),
            &opts.repo_root,
            &meta,
            &global,
        )?;

        run_hook(&opts.repo_root, "patch-name", Some(&patch_name))?;

        debug!("patch_name:{:?}", patch_name);
        if save_meta {
            meta.save(&opts.repo_root)?
        }
        if save_global {
            global.save()?
        }
        debug!("new");
        let branch = txn.get_branch(&branch_name).unwrap();

        let mut included = HashSet::new();
        let mut patches = Vec::new();
        for (_, patch) in txn.rev_iter_applied(&branch, None) {
            // `patch` is already implied if a patch on the branch
            // depends on `patch`. Let's look at all patches known to
            // the repository that depend on `patch`, and see if a
            // patch on the branch (i.e. all patches in `included`,
            // since we're considering patches in reverse order of
            // application) depends on `patch`.
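            // Illustration (not from this patch): if the branch contains p1
            // and p2 where p2 depends on p1, the reverse iteration sees p2
            // first and records it; when p1 comes up, p2 is already in
            // `included` and depends on it, so p1 is implied and skipped.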
            let mut already_in = false;
            for (p, revdep) in txn.iter_revdep(Some((patch, None))) {
                if p == patch {
                    if included.contains(&revdep) {
                        already_in = true
                    }
                } else {
                    break;
                }
            }
            if !already_in {
                let patch = txn.get_external(patch).unwrap();
                patches.push(patch.to_owned());
            }
            included.insert(patch.to_owned());
        }
        txn.new_patch(
            &branch,
            authors,
            patch_name,
            description,
            chrono::Utc::now(),
            Vec::new(),
            patches.into_iter(),
            PatchFlags::TAG,
        )
    };
    drop(repo);

    let patches_dir = opts.repo_root.patches_dir();
    let mut key = meta
        .signing_key
        .or(global.signing_key)
        .and_then(|s| load_signing_key(s).ok());
    let hash = if let Some(ref mut key) = key {
        key.check_author(&patch.header().authors)?;
        patch.save(&patches_dir, key.keys.get_mut(0))?
    } else {
        patch.save(&patches_dir, None)?
    };

    let pristine_dir = opts.pristine_dir();
    let mut increase = 40960;
    loop {
        match record::record_no_resize(
            &pristine_dir,
            &opts.repo_root,
            &branch_name,
            &hash,
            &patch,
            &HashSet::new(),
            increase,
        ) {
            Err(ref e) if e.lacks_space() => increase *= 2,
            _ => break,
        }
    }
    Ok(Some(hash))
}

pub fn explain(res: Result<Option<Hash>>) {
pub fn explain(res: Result<Option<Hash>, Error>) {
    default_explain(res)

use error::Error;
use clap::{Arg, ArgMatches, SubCommand};

use super::{ask, default_explain, validate_base58, BasicOptions, StaticSubcommand};
use std::collections::HashSet;
use std::path::Path;

use error::Error;
use libpijul::fs_representation::{patch_file_name, RepoRoot};
use libpijul::patch::Patch;
use libpijul::{unrecord_no_resize, Hash, HashRef, PatchId};
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::mem::drop;

pub fn invocation() -> StaticSubcommand {
    return SubCommand::with_name("unrecord")
        .about("Unrecord some patches (remove them without reverting them)")
        .arg(
            Arg::with_name("repository")
                .long("repository")
                .help("Local repository.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("branch")
                .long("branch")
                .help("Branch.")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("patch")
                .help("Patch to unrecord.")
                .takes_value(true)
                .multiple(true)
                .validator(validate_base58),
        );
}

pub fn run(args: &ArgMatches) -> Result<()> {
pub fn run(args: &ArgMatches) -> Result<(), Error> {
    let opts = BasicOptions::from_args(args)?;
    let patches: Option<HashSet<Hash>> = args
        .values_of("patch")
        .map(|ps| ps.map(|x| Hash::from_base58(x).unwrap()).collect());
    let mut increase = 409600;
    let repo = opts.open_and_grow_repo(increase)?;
    let branch_name = opts.branch();

    let mut patches: HashMap<_, _> = if let Some(ref patches) = patches {
        let txn = repo.txn_begin()?;
        if let Some(branch) = txn.get_branch(&branch_name) {
            let mut patches_ = HashMap::new();
            for h in patches.iter() {
                debug!("unrecording {:?}", h);

                if let Some(internal) = txn.get_internal(h.as_ref()) {
                    if txn.get_patch(&branch.patches, internal).is_some() {
                        let patch = load_patch(&opts.repo_root, h.as_ref());
                        patches_.insert(h.to_owned(), patch);

                        for (_, revdep) in txn
                            .iter_revdep(Some((internal, None)))
                            .take_while(|&(q, _)| q == internal)
                        {
                            // Error out below if the branch still has revdep
                            // and revdep is not also being unrecorded.
                            if patches
                                .iter()
                                .any(|p| txn.get_internal(p.as_ref()).unwrap() == revdep)
                            {
                                continue;
                            }
                            if txn.get_patch(&branch.patches, revdep).is_some() {
                                let ext = txn.get_external(revdep).unwrap();
                                return Err(Error::PatchIsDependedUpon {
                                    hash: h.to_owned(),
                                    dependent: ext.to_owned(),
                                });
                            }
                        }
                        continue;
                    }
                }
                return Err(ErrorKind::BranchDoesNotHavePatch(
                    branch.name.as_str().to_string(),
                    h.to_owned(),
                ).into());
                return Err(Error::BranchDoesNotHavePatch {
                    branch_name: branch.name.as_str().to_string(),
                    patch: h.to_owned(),
                });
            }
            patches_
        } else {
            HashMap::new()
        }
    } else {
        let mut patches: Vec<_> = {
            let txn = repo.txn_begin()?;
            if let Some(branch) = txn.get_branch(&branch_name) {
                txn.rev_iter_applied(&branch, None)
                    .map(|(t, h)| {
                        let ext = txn.get_external(h).unwrap();
                        let patch = load_patch(&opts.repo_root, ext);
                        (ext.to_owned(), Some(h.to_owned()), patch, t)
                    })
                    .collect()
            } else {
                Vec::new()
            }
        };
        patches.sort_by(|&(_, _, _, a), &(_, _, _, b)| b.cmp(&a));
        let patches: Vec<(Hash, Option<PatchId>, Patch)> =
            patches.into_iter().map(|(a, b, c, _)| (a, b, c)).collect();
        // debug!("patches: {:?}", patches);
        let to_unrecord = ask::ask_patches(ask::Command::Unrecord, &patches).unwrap();
        debug!("to_unrecord: {:?}", to_unrecord);
        let patches: HashMap<_, _> = patches
            .into_iter()
            .filter(|&(ref k, _, _)| to_unrecord.contains(&k))
            .map(|(k, _, p)| (k, p))
            .collect();
        patches
    };

    let mut selected = Vec::new();
    loop {
        let hash = if let Some((hash, patch)) = patches.iter().next() {
            increase += patch.size_upper_bound() as u64;
            hash.to_owned()
        } else {
            break;
        };
        deps_dfs(&mut selected, &mut patches, &hash)
    }
    drop(repo);

    let repo_dir = opts.pristine_dir();
    loop {
        match unrecord_no_resize(
            &repo_dir,
            &opts.repo_root,
            &branch_name,
            &mut selected,
            increase,
        ) {
            Err(ref e) if e.lacks_space() => increase *= 2,
            e => return e.map_err(|x| ErrorKind::Repository(x).into()),
            e => return e.map_err(|x| Error::Repository(x)),
        }
    }
}

fn load_patch(repo_root: &RepoRoot<impl AsRef<Path>>, ext: HashRef) -> Patch {
    let base = patch_file_name(ext);
    let filename = repo_root.patches_dir().join(&base);
    debug!("filename: {:?}", filename);
    let file = File::open(&filename).unwrap();
    let mut file = BufReader::new(file);
    let (_, _, patch) = Patch::from_reader_compressed(&mut file).unwrap();
    patch
}

fn deps_dfs(selected: &mut Vec<(Hash, Patch)>, patches: &mut HashMap<Hash, Patch>, current: &Hash) {
    if let Some(patch) = patches.remove(current) {
        for dep in patch.dependencies().iter() {
            deps_dfs(selected, patches, dep)
        }

        selected.push((current.to_owned(), patch))
    }
}
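
// Illustrative sketch (editor's note, not part of this patch): deps_dfs above is a
// post-order walk, so a patch is pushed only after all of its dependencies.
// `toy_dfs` and the string keys are hypothetical stand-ins for Hash/Patch.
#[test]
fn toy_deps_dfs_orders_dependencies_first() {
    use std::collections::HashMap;
    fn toy_dfs<'a>(
        selected: &mut Vec<&'a str>,
        deps: &mut HashMap<&'a str, Vec<&'a str>>,
        current: &'a str,
    ) {
        if let Some(ds) = deps.remove(current) {
            for d in ds {
                toy_dfs(selected, deps, d)
            }
            selected.push(current)
        }
    }
    let mut deps = HashMap::new();
    deps.insert("c", vec!["b"]);
    deps.insert("b", vec!["a"]);
    deps.insert("a", vec![]);
    let mut selected = Vec::new();
    toy_dfs(&mut selected, &mut deps, "c");
    assert_eq!(selected, ["a", "b", "c"]); // dependencies come before dependents
}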

pub fn explain(res: Result<()>) {
pub fn explain(res: Result<(), Error>) {
    default_explain(res)
}

    HookFailed { cmd: String },
    InARepository { path: std::path::PathBuf },
    InvalidPath { path: String },
    FileNotInRepository { path: String },
    WillNotOverwriteKeyFile { path: std::path::PathBuf },
    BranchDoesNotHavePatch { branch_name: String, patch: libpijul::Hash },
    PatchNotFound { repo_root: String, patch_hash: libpijul::Hash },
    SshKeyNotFound { path: PathBuf },
    ExtraDepNotOnBranch { hash: libpijul::Hash },
    CannotSpawnEditor { editor: String, cause: String },
    InvalidDate { date: String },
    UnknownHost { host: String },
            Error::CannotSpawnEditor { ref editor, ref cause } => write!(f, "Cannot spawn editor {:?} (error: {:?})", editor, cause),
            Error::CannotSpawnEditor { .. } => "Cannot spawn editor",
            _ => None

/*
*/
use std;
use std::path::PathBuf;
use {hex, libpijul, regex, reqwest, term, thrussh, thrussh_config, thrussh_keys, toml};

#[derive(Debug)]
pub enum Error {
    IO(std::io::Error),
    Term(term::Error),
    Repository(libpijul::Error),
    UTF8(std::string::FromUtf8Error),
    Hex(hex::FromHexError),
    SSH(thrussh::Error),
    SSHKeys(thrussh_keys::Error),
    Reqwest(reqwest::Error),
    TomlDe(toml::de::Error),
    TomlSer(toml::ser::Error),
    StripPrefix(std::path::StripPrefixError),
    Regex(regex::Error),
    ThrusshConfig(thrussh_config::Error),
    Failure(failure::Error),
    HookFailed {
        cmd: String,
    },
    InARepository {
        path: std::path::PathBuf,
    },
    NotInARepository,
    MissingRemoteRepository,
    InvalidPath {
        path: PathBuf,
    },
    FileNotInRepository {
        path: String,
    },
    WrongHash,
    BranchAlreadyExists,
    CannotDeleteCurrentBranch,
    NoSuchBranch,
    IsDirectory,
    CannotParseRemote,
    WillNotOverwriteKeyFile {
        path: std::path::PathBuf,
    },
    BranchDoesNotHavePatch {
        branch_name: String,
        patch: libpijul::Hash,
    },
    PatchNotFound {
        repo_root: String,
        patch_hash: libpijul::Hash,
    },
    PatchIsDependedUpon {
        hash: libpijul::Hash,
        dependent: libpijul::Hash,
    },
    SshKeyNotFound {
        path: PathBuf,
    },
    NoHomeDir,
    ExtraDepNotOnBranch {
        hash: libpijul::Hash,
    },
    PendingChanges,
    EmptyPatchName,
    InvalidPatchName,
    CannotSpawnEditor {
        editor: String,
        cause: String,
    },
    InvalidDate {
        date: String,
    },
    PartialPullOverHttp,
    UnknownHost {
        host: String,
    },
    NoAuthor,
    NonFastForwardPush,
    NotSigningAuthor,
    EmptyPassword,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            Error::IO(ref e) => e.fmt(f),
            Error::Term(ref e) => e.fmt(f),
            Error::Repository(ref e) => e.fmt(f),
            Error::UTF8(ref e) => e.fmt(f),
            Error::Hex(ref e) => e.fmt(f),
            Error::SSH(ref e) => e.fmt(f),
            Error::SSHKeys(ref e) => e.fmt(f),
            Error::Reqwest(ref e) => e.fmt(f),
            Error::TomlDe(ref e) => e.fmt(f),
            Error::TomlSer(ref e) => e.fmt(f),
            Error::StripPrefix(ref e) => e.fmt(f),
            Error::Regex(ref e) => e.fmt(f),
            Error::ThrusshConfig(ref e) => e.fmt(f),
            Error::Failure(ref e) => e.fmt(f),
            Error::HookFailed { ref cmd } => write!(f, "Hook failed: {}", cmd),
            Error::InARepository { ref path } => write!(f, "In a repository: {:?}", path),
            Error::NotInARepository => write!(f, "Not in a repository"),
            Error::MissingRemoteRepository => write!(f, "Missing remote repository"),
            Error::InvalidPath { ref path } => write!(f, "Invalid path: {:?}", path),
            Error::FileNotInRepository { ref path } => write!(f, "File not in repository: {:?}", path),
            Error::WrongHash => write!(f, "Wrong hash"),
            Error::BranchAlreadyExists => write!(f, "Branch already exists"),
            Error::CannotDeleteCurrentBranch => write!(f, "Cannot delete current branch"),
            Error::NoSuchBranch => write!(f, "No such branch"),
            Error::IsDirectory => write!(f, "Is a directory"),
            Error::CannotParseRemote => write!(f, "Cannot parse remote address"),
            Error::WillNotOverwriteKeyFile { ref path } => write!(f, "Will not overwrite key file {:?}", path),
            Error::BranchDoesNotHavePatch { ref branch_name, ref patch } => write!(f, "Branch {:?} does not have patch {}", branch_name, patch.to_base58()),
            Error::PatchNotFound { ref repo_root, ref patch_hash } => write!(f, "Patch {} not found in repository {:?}", patch_hash.to_base58(), repo_root),
            Error::PatchIsDependedUpon { ref dependent, ref hash } => write!(f, "Patch {} depends on {:?}", dependent.to_base58(), hash.to_base58()),
            Error::SshKeyNotFound { ref path } => write!(f, "SSH key not found in: {:?}", path),
            Error::NoHomeDir => write!(f, "No home dir"),
            Error::ExtraDepNotOnBranch { ref hash } => write!(f, "Extra dependencies can only be added if they are on the same branch as the current record: {:?}", hash),
            Error::PendingChanges => write!(f, "There are pending changes in the repository."),
            Error::EmptyPatchName => write!(f, "Empty patch name"),
            Error::InvalidPatchName => write!(f, "Invalid patch name"),
            Error::CannotSpawnEditor { ref editor, ref cause } => write!(f, "Cannot start editor {:?} ({:?})", editor, cause),
            Error::InvalidDate { ref date } => write!(f, "Invalid date: {:?}", date),
            Error::PartialPullOverHttp => write!(f, "Partial pull over HTTP is not (yet) supported"),
            Error::UnknownHost { ref host } => write!(f, "Unknown host: {}", host),
            Error::NoAuthor => write!(f, "No authors were given"),
            Error::NonFastForwardPush => write!(f, "This push is not fast-forward. If this is really what you mean, use --force."),
            Error::NotSigningAuthor => write!(f, "The user id for the signing key doesn't match the author. Authors must be of the form `rms@xyzcorp.com` or `Robert M. Smith <rms@xyzcorp.com>`"),
            Error::EmptyPassword => write!(f, "Empty password"),
        }
    }
}

impl std::error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::IO(ref e) => e.description(),
            Error::Term(ref e) => e.description(),
            Error::Repository(ref e) => e.description(),
            Error::UTF8(ref e) => e.description(),
            Error::Hex(ref e) => e.description(),
            Error::SSH(ref e) => e.description(),
            Error::SSHKeys(ref e) => e.description(),
            Error::Reqwest(ref e) => e.description(),
            Error::TomlDe(ref e) => e.description(),
            Error::TomlSer(ref e) => e.description(),
            Error::StripPrefix(ref e) => e.description(),
            Error::Regex(ref e) => e.description(),
            Error::ThrusshConfig(ref e) => e.description(),
            Error::Failure(ref e) => e.name().unwrap_or("Unknown failure"),
            Error::HookFailed { .. } => "Hook failed",
            Error::InARepository { .. } => "In a repository",
            Error::NotInARepository => "Not in a repository",
            Error::MissingRemoteRepository => "Missing remote repository",
            Error::InvalidPath { .. } => "Invalid path",
            Error::FileNotInRepository { .. } => "File not in repository",
            Error::WrongHash => "Wrong hash",
            Error::BranchAlreadyExists => "Branch already exists",
            Error::CannotDeleteCurrentBranch => "Cannot delete current branch",
            Error::NoSuchBranch => "No such branch",
            Error::IsDirectory => "Is a directory",
            Error::CannotParseRemote => "Cannot parse remote address",
            Error::WillNotOverwriteKeyFile { .. } => "Will not overwrite key file",
            Error::BranchDoesNotHavePatch { .. } => "Branch does not have patch",
            Error::PatchNotFound { .. } => "Patch not found in repository",
            Error::PatchIsDependedUpon { .. } => "Patch depended upon",
            Error::SshKeyNotFound { .. } => "SSH key not found",
            Error::NoHomeDir => "No home dir",
            Error::ExtraDepNotOnBranch { .. } => "Extra dependencies can only be added if they are on the same branch as the current record",
            Error::PendingChanges => "There are pending changes in the repository.",
            Error::EmptyPatchName => "Empty patch name",
            Error::InvalidPatchName => "Invalid patch name",
            Error::CannotSpawnEditor { .. } => "Cannot start editor",
            Error::InvalidDate { .. } => "Invalid date",
            Error::PartialPullOverHttp => "Partial pull over HTTP is not (yet) supported",
            Error::UnknownHost { .. } => "Unknown host",
            Error::NoAuthor => "No authors were given",
            Error::NonFastForwardPush => "This push is not fast-forward",
            Error::NotSigningAuthor => "The user id for the signing key doesn't match the author. Authors must be of the form `rms@xyzcorp.com` or `Robert M. Smith <rms@xyzcorp.com>`",
            Error::EmptyPassword => "Empty password",
        }
    }

    fn cause(&self) -> Option<&std::error::Error> {
        match *self {
            Error::IO(ref e) => Some(e),
            Error::Term(ref e) => Some(e),
            Error::Repository(ref e) => Some(e),
            Error::UTF8(ref e) => Some(e),
            Error::Hex(ref e) => Some(e),
            Error::SSH(ref e) => Some(e),
            Error::SSHKeys(ref e) => Some(e),
            Error::Reqwest(ref e) => Some(e),
            Error::TomlDe(ref e) => Some(e),
            Error::TomlSer(ref e) => Some(e),
            Error::StripPrefix(ref e) => Some(e),
            Error::Regex(ref e) => Some(e),
            Error::ThrusshConfig(ref e) => Some(e),
            _ => None,
        }
    }
}

impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Error {
        Error::IO(err)
    }
}

impl From<failure::Error> for Error {
    fn from(err: failure::Error) -> Error {
        Error::Failure(err)
    }
}

impl From<term::Error> for Error {
    fn from(err: term::Error) -> Error {
        Error::Term(err)
    }
}

impl From<libpijul::Error> for Error {
    fn from(err: libpijul::Error) -> Error {
        Error::Repository(err)
    }
}

impl From<std::string::FromUtf8Error> for Error {
    fn from(err: std::string::FromUtf8Error) -> Error {
        Error::UTF8(err)
    }
}

impl From<hex::FromHexError> for Error {
    fn from(err: hex::FromHexError) -> Error {
        Error::Hex(err)
    }
}

impl From<thrussh::Error> for Error {
    fn from(err: thrussh::Error) -> Error {
        Error::SSH(err)
    }
}

impl From<thrussh_keys::Error> for Error {
    fn from(err: thrussh_keys::Error) -> Error {
        Error::SSHKeys(err)
    }
}

impl From<reqwest::Error> for Error {
    fn from(err: reqwest::Error) -> Error {
        Error::Reqwest(err)
    }
}

impl From<toml::de::Error> for Error {
    fn from(err: toml::de::Error) -> Error {
        Error::TomlDe(err)
    }
}

impl From<toml::ser::Error> for Error {
    fn from(err: toml::ser::Error) -> Error {
        Error::TomlSer(err)
    }
}

impl From<std::path::StripPrefixError> for Error {
    fn from(err: std::path::StripPrefixError) -> Error {
        Error::StripPrefix(err)
    }
}

impl From<regex::Error> for Error {
    fn from(err: regex::Error) -> Error {
        Error::Regex(err)
    }
}

impl From<thrussh_config::Error> for Error {
    fn from(err: thrussh_config::Error) -> Error {
        Error::ThrusshConfig(err)
    }
}

impl Error {
    pub fn lacks_space(&self) -> bool {
        match *self {
            Error::Repository(ref r) => r.lacks_space(),
            _ => false,
        }
    }
}

impl From<thrussh::HandlerError<Error>> for Error {
    fn from(err: thrussh::HandlerError<Error>) -> Error {
        match err {
            thrussh::HandlerError::Handler(e) => e,
            thrussh::HandlerError::Error(e) => Error::SSH(e),
        }
    }
}

impl From<Error> for thrussh::HandlerError<Error> {
    fn from(e: Error) -> thrussh::HandlerError<Error> {
        thrussh::HandlerError::Handler(e)
    }
}
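
// Illustrative sketch (editor's note, not part of this patch): with the manual
// `From` impls above, the `?` operator converts foreign errors into `Error`,
// which is what error-chain's `foreign_links` used to generate.
// `read_author_file` and its argument are hypothetical.
fn read_author_file(path: &std::path::Path) -> Result<String, Error> {
    let bytes = std::fs::read(path)?;     // std::io::Error -> Error::IO
    let text = String::from_utf8(bytes)?; // FromUtf8Error  -> Error::UTF8
    Ok(text)
}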

use error::Error;
    pub fn load(r: &Path) -> Result<Meta, Error> {
    pub fn save(&self, r: &Path) -> Result<(), Error> {
    pub fn signing_key(&self) -> Result<Option<KeyPair>, Error> {
pub fn load_key<P: AsRef<Path>>(dot_pijul: P, keytype: KeyType) -> Result<KeyPair, Error> {
pub fn generate_global_key(keytype: KeyType) -> Result<(), Error> {
pub fn load_global_or_local_signing_key<P: AsRef<Path>>(dot_pijul: Option<P>) -> Result<KeyPair, Error> {
use commands::remote::{parse_remote, Remote};
use dirs;
use error::{ErrorKind, Result};
use error::Error;
use libpijul::fs_representation::RepoRoot;
use sequoia_openpgp::parse::Parse;
use sequoia_openpgp::serialize::Serialize;
use sequoia_openpgp::tpk::{CipherSuite, TPKBuilder};
use sequoia_openpgp::TSK;
use std;
use std::collections::BTreeMap;
use std::fs::{create_dir_all, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use thrussh_keys::key::KeyPair;
use toml;

pub const DEFAULT_REMOTE: &'static str = "remote";

#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Repository {
    pub address: String,
    pub port: Option<u16>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Meta {
    #[serde(default)]
    pub authors: Vec<String>,
    pub editor: Option<String>,
    pub pull: Option<String>,
    pub push: Option<String>,
    #[serde(default)]
    pub remote: BTreeMap<String, Repository>,
    pub signing_key: Option<String>,
}

impl Meta {
    pub fn load(r: &Path) -> Result<Meta> {
    pub fn load(r: &RepoRoot<impl AsRef<Path>>) -> Result<Meta, Error> {
        let mut str = String::new();
        {
            let mut f = File::open(r.meta_file())?;
            f.read_to_string(&mut str)?;
        }
        Ok(toml::from_str(&str)?)
    }
    pub fn new() -> Meta {
        Meta {
            authors: Vec::new(),
            editor: None,
            pull: None,
            push: None,
            remote: BTreeMap::new(),
            signing_key: None,
        }
    }
    pub fn save(&self, r: &Path) -> Result<()> {
    pub fn save(&self, r: &RepoRoot<impl AsRef<Path>>) -> Result<(), Error> {
        let mut f = File::create(r.meta_file())?;
        let s: String = toml::to_string(&self)?;
        f.write_all(s.as_bytes())?;
        Ok(())
    }

    fn parse_remote<'a>(
        &'a self,
        remote: &'a str,
        port: Option<u16>,
        base_path: Option<&'a Path>,
        local_repo_root: Option<&'a Path>,
    ) -> Remote<'a> {
        if let Some(repo) = self.remote.get(remote) {
            parse_remote(
                &repo.address,
                port.or(repo.port),
                base_path,
                local_repo_root,
            )
        } else {
            parse_remote(remote, port, base_path, local_repo_root)
        }
    }

    fn get_remote<'a>(
        &'a self,
        remote: Option<&'a str>,
        default_remote: Option<&'a String>,
        port: Option<u16>,
        base_path: Option<&'a Path>,
        local_repo_root: Option<&'a Path>,
    ) -> Result<Remote<'a>> {
    ) -> Result<Remote<'a>, Error> {
        if let Some(remote) = remote {
            Ok(self.parse_remote(remote, port, base_path, local_repo_root))
        } else if let Some(ref remote) = default_remote {
            Ok(self.parse_remote(remote, port, base_path, local_repo_root))
        } else if self.remote.len() == 1 {
            let remote = self.remote.keys().next().unwrap();
            Ok(self.parse_remote(remote, port, base_path, local_repo_root))
        } else {
            Err(ErrorKind::MissingRemoteRepository.into())
            Err(Error::MissingRemoteRepository)
        }
    }

    pub fn pull<'a>(
        &'a self,
        remote: Option<&'a str>,
        port: Option<u16>,
        base_path: Option<&'a Path>,
        local_repo_root: Option<&'a Path>,
    ) -> Result<Remote<'a>> {
    ) -> Result<Remote<'a>, Error> {
        self.get_remote(remote, self.pull.as_ref(), port, base_path, local_repo_root)
    }

    pub fn push<'a>(
        &'a self,
        remote: Option<&'a str>,
        port: Option<u16>,
        base_path: Option<&'a Path>,
        local_repo_root: Option<&'a Path>,
    ) -> Result<Remote<'a>> {
    ) -> Result<Remote<'a>, Error> {
        self.get_remote(remote, self.push.as_ref(), port, base_path, local_repo_root)
    }
}
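
// Illustrative sketch (editor's note, not part of this patch): how a command
// could use the RepoRoot-based load/save pair above. `set_default_editor` is
// hypothetical, and falling back to Meta::new() when no meta file exists yet
// is only one plausible policy.
fn set_default_editor(
    repo_root: &RepoRoot<std::path::PathBuf>,
    editor: &str,
) -> Result<(), Error> {
    let mut meta = Meta::load(repo_root).unwrap_or_else(|_| Meta::new());
    meta.editor = Some(editor.to_string());
    meta.save(repo_root)
}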

#[derive(Debug, Serialize, Deserialize)]
pub struct Global {
    pub author: Option<String>,
    pub editor: Option<String>,
    pub signing_key: Option<String>,
}

pub fn global_path() -> Result<PathBuf> {
pub fn global_path() -> Result<PathBuf, Error> {
    if let Ok(var) = std::env::var("PIJUL_CONFIG_DIR") {
        let mut path = PathBuf::new();
        path.push(var);
        Ok(path)
    } else if let Ok(var) = std::env::var("XDG_DATA_HOME") {
        let mut path = PathBuf::new();
        path.push(var);
        path.push("pijul");
        std::fs::create_dir_all(&path)?;
        path.push("config");
        Ok(path)
    } else {
        if let Some(mut path) = dirs::home_dir() {
            path.push(".pijulconfig");
            Ok(path)
        } else {
            Err(ErrorKind::NoHomeDir.into())
            Err(Error::NoHomeDir)
        }
    }
}
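
// Illustrative sketch (editor's note, not part of this patch): global_path()
// prefers PIJUL_CONFIG_DIR, then XDG_DATA_HOME/pijul, then ~/.pijulconfig.
// The directory name below is hypothetical.
#[test]
fn global_path_honours_pijul_config_dir() {
    std::env::set_var("PIJUL_CONFIG_DIR", "/tmp/pijul-config-example");
    assert_eq!(
        global_path().unwrap(),
        PathBuf::from("/tmp/pijul-config-example")
    );
}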

pub fn generate_ssh_key<P: AsRef<Path>>(
    dot_pijul: P,
    password: Option<(u32, &[u8])>,
) -> Result<(), Error> {
    use thrussh_keys::{encode_pkcs8_pem, encode_pkcs8_pem_encrypted, write_public_key_base64};
    let key = KeyPair::generate_ed25519().unwrap();
    create_dir_all(dot_pijul.as_ref())?;

    let mut f = dot_pijul.as_ref().join("id_ed25519");
    debug!("generate_key: {:?}", f);
    if std::fs::metadata(&f).is_err() {
        let mut f = File::create(&f)?;
        if let Some((rounds, pass)) = password {
            encode_pkcs8_pem_encrypted(&key, pass, rounds, &mut f)?
        } else {
            encode_pkcs8_pem(&key, &mut f)?
        }
        f.flush().unwrap();
        set_key_permissions(&f, "private key file");
    } else {
        return Err(ErrorKind::WillNotOverwriteKeyFile(f).into());
        return Err(Error::WillNotOverwriteKeyFile { path: f });
    }
    f.set_extension("pub");
    {
        let mut f = File::create(&f)?;
        let pk = key.clone_public_key();
        write_public_key_base64(&mut f, &pk)?;
        f.write(b"\n")?;
        f.flush()?;
        set_key_permissions(&f, "public key file");
    }
    Ok(())
}

pub fn load_key<P: AsRef<Path>>(dot_pijul: P, keytype: KeyType) -> Result<KeyPair> {
pub fn generate_signing_key<P: AsRef<Path>>(
    dot_pijul: P,
    identity: &str,
    password: Option<String>,
) -> Result<PathBuf, Error> {
    create_dir_all(dot_pijul.as_ref())?;

    let mut f = dot_pijul.as_ref().join("signing_secret_key");
    if std::fs::metadata(&f).is_err() {
        let (tpk, sig) = TPKBuilder::default()
            .set_cipher_suite(CipherSuite::Cv25519)
            .add_userid(identity)
            .add_signing_subkey()
            .set_password(password.map(From::from))
            .generate()
            .unwrap();

        let tsk = tpk.into_tsk();
        let mut keyfile = File::create(&f)?;
        tsk.serialize(&mut keyfile).unwrap();
        set_key_permissions(&keyfile, "signing key file");

        f.set_extension("revocation_cert");
        let mut revoc_cert = File::create(&f)?;
        sig.serialize(&mut revoc_cert).unwrap();
        set_key_permissions(&revoc_cert, "revocation certificate");

        f.set_extension("");
        Ok(f)
    } else {
        return Err(Error::WillNotOverwriteKeyFile { path: f });
    }
}

pub fn generate_global_key(keytype: KeyType) -> Result<()> {
pub struct SigningKeys {
    pub keys: Vec<sequoia_openpgp::crypto::KeyPair>,
    pub tsk: TSK,
    pub user_id: String,
}

pub fn load_global_or_local_signing_key<P: AsRef<Path>>(dot_pijul: Option<P>) -> Result<KeyPair> {
impl SigningKeys {
    pub fn check_author(&self, authors: &[String]) -> Result<(), Error> {
        let author_uid = regex::Regex::new("^[^<>]*<?([^<>]+)>?[^<>]*$").unwrap();
        if self.keys.is_empty() {
            if !authors.iter().any(|auth| {
                auth == &self.user_id
                    || (if let Some(cap) = author_uid.captures(&auth) {
                        cap[1] == self.user_id
                    } else {
                        false
                    })
            }) {
                return Err(Error::NotSigningAuthor);
            }
        }
        Ok(())
    }
}
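
// Illustrative sketch (editor's note, not part of this patch): the author
// regex in check_author extracts the address from a "Name <address>" author.
#[test]
fn author_regex_extracts_address() {
    let author_uid = regex::Regex::new("^[^<>]*<?([^<>]+)>?[^<>]*$").unwrap();
    let cap = author_uid
        .captures("Robert M. Smith <rms@xyzcorp.com>")
        .unwrap();
    assert_eq!(&cap[1], "rms@xyzcorp.com");
}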

pub fn load_signing_key<P: AsRef<Path>>(path: P) -> Result<SigningKeys, Error> {
    // let path: PathBuf = path.as_ref().join("signing_secret_key");
    debug!("load_signing_key: {:?}", path.as_ref());
    let tsk = sequoia_openpgp::TPK::from_reader(&std::fs::File::open(&path)?)?.into_tsk();
    use sequoia_openpgp::crypto::KeyPair;
    use sequoia_openpgp::packet::key::SecretKey;
    let mut keys = Vec::new();
    for key in tsk.select_signing_keys(None) {
        if let Some(mut secret) = key.secret() {
            debug!("secret");
            let secret_mpis = match secret {
                SecretKey::Encrypted { .. } => {
                    let password = rpassword::prompt_password_stderr(&format!(
                        "Please enter password to decrypt {:?}/{}: ",
                        path.as_ref(),
                        key
                    ))?;
                    secret.decrypt(key.pk_algo(), &password.into())?
                }
                SecretKey::Unencrypted { ref mpis } => mpis.clone(),
            };
            keys.push(KeyPair::new(key.clone(), secret_mpis)?)
        } else {
            debug!("no secret");
        }
    }
    debug!("found {:?} keys", keys.len());
    Ok(SigningKeys {
        keys,
        tsk,
        user_id: String::new(),
    })
}

pub fn generate_global_ssh_key() -> Result<(), Error> {
    generate_ssh_key(&global_path()?, None)
}

#[cfg(unix)]
fn set_key_permissions(f: &File, key_type: &str) {
    use std::os::unix::fs::PermissionsExt;
    match f.set_permissions(std::fs::Permissions::from_mode(0o600)) {
        Err(e) => eprintln!(
            "Warning: failed to set permissions on {}: {:?}",
            key_type, e
        ),
        Ok(()) => (),
    };
}

#[cfg(not(unix))]
fn set_key_permissions(_: &File, _: &str) {}

impl Global {
    pub fn new() -> Self {
        Global {
            author: None,
            editor: None,
            signing_key: None,
        }
    }

    pub fn load() -> Result<Self, Error> {
        let mut path = global_path()?;
        path.push("config.toml");
        let mut str = String::new();
        {
            let mut f = File::open(&path)?;
            f.read_to_string(&mut str)?;
        }
        Ok(toml::from_str(&str)?)
    }

    pub fn save(&self) -> Result<()> {
    pub fn save(&self) -> Result<(), Error> {
        let mut path = global_path()?;

    pijul pull -a ../a
#!/usr/bin/env bats

load ../test_helper

@test "Edit nested conflict" {
    mkdir a

    cd a
    pijul_uncovered init
    echo -e "0\n1\n2\n3" > file
    pijul_uncovered add file
    pijul_uncovered record -a -m "file" -A "Me"
    cd ..

    pijul_uncovered clone a b

    cd a
    echo -e "0\n1\na\nb\nc\n2\n3" > file
    pijul_uncovered record -a -m "abc" -A "Me"

    cd ../b
    echo -e "0\n1\nd\ne\nf\n2\n3" > file
    pijul_uncovered record -a -m "def" -A "Me"
    pijul_uncovered pull -a ../a

    cd ../a
    pijul_uncovered pull -a ../b
    sed -i -e "s/a/y/" file
    sed -i -e "s/d/v/" file
    pijul record -a -m "s/a/y" -A "Me"

    cd ../b
    cp file /tmp
    sed -i -e "s/a/x/" file
    sed -i -e "s/d/u/" file
    cp file /tmp/file_
    RUST_LOG="libpijul::diff=debug" pijul record -a -m "s/a/x" -A "Me" 2> /tmp/log
    pijul info --debug
    cp debug_master /tmp
    pijul revert -a
    echo "$status" > status
    cp status /tmp
    diff status $BATS_TEST_DIRNAME/../expected/conflicted-short-status
In file Cargo.lock