From f8c016a8ea3f7d28b4216ccd617c53ffbf362c65 Mon Sep 17 00:00:00 2001
From: Martin von Zweigbergk <martinvonz@google.com>
Date: Wed, 30 Jun 2021 08:57:49 -0700
Subject: [PATCH] files: make MergeHunk support any number of removes and adds

I think `files::merge()` will be a useful place to share code for
resolving conflicting hunks after all. We'll want `MergeHunk` to
support multi-way merges then.
---
 lib/src/conflicts.rs |  8 ++++----
 lib/src/files.rs     | 52 +++++++++++++++++++++++++++++-----------------------
 2 files changed, 33 insertions(+), 27 deletions(-)

diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs
index ab3bd4cc8..be09546ba 100644
--- a/lib/src/conflicts.rs
+++ b/lib/src/conflicts.rs
@@ -141,13 +141,13 @@ pub fn materialize_conflict(
             files::MergeHunk::Resolved(contents) => {
                 file.write_all(&contents).unwrap();
             }
-            files::MergeHunk::Conflict { base, left, right } => {
+            files::MergeHunk::Conflict { removes, adds } => {
                 file.write_all(b"<<<<<<<\n").unwrap();
-                file.write_all(&left).unwrap();
+                file.write_all(&adds[0]).unwrap();
                 file.write_all(b"|||||||\n").unwrap();
-                file.write_all(&base).unwrap();
+                file.write_all(&removes[0]).unwrap();
                 file.write_all(b"=======\n").unwrap();
-                file.write_all(&right).unwrap();
+                file.write_all(&adds[1]).unwrap();
                 file.write_all(b">>>>>>>\n").unwrap();
             }
         }
diff --git a/lib/src/files.rs b/lib/src/files.rs
index 591834f20..9167fe8ee 100644
--- a/lib/src/files.rs
+++ b/lib/src/files.rs
@@ -16,6 +16,8 @@
 use std::collections::VecDeque;
 use std::fmt::{Debug, Error, Formatter};
 use std::ops::Range;
 
+use itertools::Itertools;
+
 use crate::diff;
 use crate::diff::{Diff, DiffHunk};
@@ -145,9 +147,8 @@ impl<'a> Iterator for DiffLineIterator<'a> {
 pub enum MergeHunk {
     Resolved(Vec<u8>),
     Conflict {
-        base: Vec<u8>,
-        left: Vec<u8>,
-        right: Vec<u8>,
+        removes: Vec<Vec<u8>>,
+        adds: Vec<Vec<u8>>,
     },
 }
 
@@ -158,11 +159,22 @@ impl Debug for MergeHunk {
             MergeHunk::Resolved(data) => f
                 .debug_tuple("Resolved")
                 .field(&String::from_utf8_lossy(data))
                 .finish(),
-            MergeHunk::Conflict { base, left, right } => f
+            MergeHunk::Conflict { removes, adds } => f
                 .debug_struct("Conflict")
-                .field("base", &String::from_utf8_lossy(base))
-                .field("left", &String::from_utf8_lossy(left))
-                .field("right", &String::from_utf8_lossy(right))
+                .field(
+                    "removes",
+                    &removes
+                        .iter()
+                        .map(|part| String::from_utf8_lossy(part))
+                        .collect_vec(),
+                )
+                .field(
+                    "adds",
+                    &adds
+                        .iter()
+                        .map(|part| String::from_utf8_lossy(part))
+                        .collect_vec(),
+                )
                 .finish(),
         }
@@ -182,7 +194,6 @@ struct SyncRegion {
     right: Range<usize>,
 }
 
-// TODO: Update callers to use diff::Diff directly instead.
 pub fn merge(base: &[u8], left: &[u8], right: &[u8]) -> MergeResult {
     let diff = Diff::for_tokenizer(&[base, left, right], &diff::find_line_ranges);
     let mut resolved_hunk: Vec<u8> = vec![];
@@ -206,9 +217,8 @@ pub fn merge(base: &[u8], left: &[u8], right: &[u8]) -> MergeResult {
                     resolved_hunk = vec![];
                 }
                 merge_hunks.push(MergeHunk::Conflict {
-                    base: base_content.to_vec(),
-                    left: left_content.to_vec(),
-                    right: right_content.to_vec(),
+                    removes: vec![base_content.to_vec()],
+                    adds: vec![left_content.to_vec(), right_content.to_vec()],
                 });
             }
         }
@@ -252,9 +262,8 @@ mod tests {
             MergeResult::Conflict(vec![
                 MergeHunk::Resolved(b"a\n".to_vec()),
                 MergeHunk::Conflict {
-                    base: b"".to_vec(),
-                    left: b"b\n".to_vec(),
-                    right: b"c\n".to_vec()
+                    removes: vec![b"".to_vec()],
+                    adds: vec![b"b\n".to_vec(), b"c\n".to_vec()]
                 }
             ])
         );
@@ -269,25 +278,22 @@ mod tests {
         assert_eq!(
             merge(b"a", b"", b"b"),
             MergeResult::Conflict(vec![MergeHunk::Conflict {
-                base: b"a".to_vec(),
-                left: b"".to_vec(),
-                right: b"b".to_vec()
+                removes: vec![b"a".to_vec()],
+                adds: vec![b"".to_vec(), b"b".to_vec()]
             }])
         );
         assert_eq!(
             merge(b"a", b"b", b""),
             MergeResult::Conflict(vec![MergeHunk::Conflict {
-                base: b"a".to_vec(),
-                left: b"b".to_vec(),
-                right: b"".to_vec()
+                removes: vec![b"a".to_vec()],
+                adds: vec![b"b".to_vec(), b"".to_vec()]
             }])
         );
         assert_eq!(
             merge(b"a", b"b", b"c"),
             MergeResult::Conflict(vec![MergeHunk::Conflict {
-                base: b"a".to_vec(),
-                left: b"b".to_vec(),
-                right: b"c".to_vec()
+                removes: vec![b"a".to_vec()],
+                adds: vec![b"b".to_vec(), b"c".to_vec()]
             }])
         );
     }