From 5f2f13a876a3ed5d7ede0897e3a39b2c8815bbbd Mon Sep 17 00:00:00 2001
From: Yuya Nishihara
Date: Wed, 19 Jun 2024 16:48:38 +0900
Subject: [PATCH] diff: simply pass tokenizer Fn by value

---
 cli/src/diff_util.rs           |  2 +-
 cli/src/merge_tools/builtin.rs |  2 +-
 lib/src/conflicts.rs           |  4 ++--
 lib/src/diff.rs                | 16 ++++++++--------
 lib/src/files.rs               |  2 +-
 5 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/cli/src/diff_util.rs b/cli/src/diff_util.rs
index 587dc67a6..c73dddf02 100644
--- a/cli/src/diff_util.rs
+++ b/cli/src/diff_util.rs
@@ -726,7 +726,7 @@ fn unified_diff_hunks<'content>(
         lines: vec![],
     };
     let mut show_context_after = false;
-    let diff = Diff::for_tokenizer(&[left_content, right_content], &diff::find_line_ranges);
+    let diff = Diff::for_tokenizer(&[left_content, right_content], diff::find_line_ranges);
     for hunk in diff.hunks() {
         match hunk {
             DiffHunk::Matching(content) => {
diff --git a/cli/src/merge_tools/builtin.rs b/cli/src/merge_tools/builtin.rs
index a20df9561..e4c9b424e 100644
--- a/cli/src/merge_tools/builtin.rs
+++ b/cli/src/merge_tools/builtin.rs
@@ -227,7 +227,7 @@ fn make_diff_sections(
 ) -> Result<Vec<scm_record::Section<'static>>, BuiltinToolError> {
     let diff = Diff::for_tokenizer(
         &[left_contents.as_bytes(), right_contents.as_bytes()],
-        &find_line_ranges,
+        find_line_ranges,
     );
     let mut sections = Vec::new();
     for hunk in diff.hunks() {
diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs
index f28e1c51d..4da59c809 100644
--- a/lib/src/conflicts.rs
+++ b/lib/src/conflicts.rs
@@ -259,7 +259,7 @@ pub fn materialize_merge_result(
                     output.write_all(&left.0)?;
                     continue;
                 };
-                let diff1 = Diff::for_tokenizer(&[&left.0, &right1.0], &find_line_ranges)
+                let diff1 = Diff::for_tokenizer(&[&left.0, &right1.0], find_line_ranges)
                     .hunks()
                     .collect_vec();
                 // Check if the diff against the next positive term is better. Since
@@ -267,7 +267,7 @@ pub fn materialize_merge_result(
                 // any later positive terms.
                 if let Some(right2) = hunk.get_add(add_index + 1) {
                     let diff2 =
-                        Diff::for_tokenizer(&[&left.0, &right2.0], &find_line_ranges)
+                        Diff::for_tokenizer(&[&left.0, &right2.0], find_line_ranges)
                             .hunks()
                             .collect_vec();
                     if diff_size(&diff2) < diff_size(&diff1) {
diff --git a/lib/src/diff.rs b/lib/src/diff.rs
index d67dd535a..076f0557d 100644
--- a/lib/src/diff.rs
+++ b/lib/src/diff.rs
@@ -395,7 +395,7 @@ fn intersect_regions(
 impl<'input> Diff<'input> {
     pub fn for_tokenizer(
         inputs: &[&'input [u8]],
-        tokenizer: &impl Fn(&[u8]) -> Vec<Range<usize>>,
+        tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>,
     ) -> Self {
         assert!(!inputs.is_empty());
         let base_input = inputs[0];
@@ -444,7 +444,7 @@ impl<'input> Diff<'input> {
     }

     pub fn unrefined(inputs: &[&'input [u8]]) -> Self {
-        Diff::for_tokenizer(inputs, &|_| vec![])
+        Diff::for_tokenizer(inputs, |_| vec![])
     }

     // TODO: At least when merging, it's wasteful to refine the diff if e.g. if 2
@@ -454,9 +454,9 @@ impl<'input> Diff<'input> {
     // probably mean that many callers repeat the same code. Perhaps it
     // should be possible to refine a whole diff *or* individual hunks.
     pub fn default_refinement(inputs: &[&'input [u8]]) -> Self {
-        let mut diff = Diff::for_tokenizer(inputs, &find_line_ranges);
-        diff.refine_changed_regions(&find_word_ranges);
-        diff.refine_changed_regions(&find_nonword_ranges);
+        let mut diff = Diff::for_tokenizer(inputs, find_line_ranges);
+        diff.refine_changed_regions(find_word_ranges);
+        diff.refine_changed_regions(find_nonword_ranges);
         diff
     }

@@ -475,7 +475,7 @@ impl<'input> Diff<'input> {

     /// Uses the given tokenizer to split the changed regions into smaller
     /// regions. Then tries to find unchanged regions among them.
-    pub fn refine_changed_regions(&mut self, tokenizer: &impl Fn(&[u8]) -> Vec<Range<usize>>) {
+    pub fn refine_changed_regions(&mut self, tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>) {
         let mut previous = UnchangedRange {
             base_range: 0..0,
             offsets: vec![0; self.other_inputs.len()],
@@ -493,7 +493,7 @@ impl<'input> Diff<'input> {
                 slices.push(&self.other_inputs[i][changed_range]);
             }

-            let refined_diff = Diff::for_tokenizer(&slices, tokenizer);
+            let refined_diff = Diff::for_tokenizer(&slices, &tokenizer);

             for UnchangedRange {
                 base_range,
@@ -931,7 +931,7 @@ mod tests {
         // Tests that unchanged regions are compacted when using for_tokenizer()
         let diff = Diff::for_tokenizer(
             &[b"a\nb\nc\nd\ne\nf\ng", b"a\nb\nc\nX\ne\nf\ng"],
-            &find_line_ranges,
+            find_line_ranges,
         );
         assert_eq!(
             diff.hunks().collect_vec(),
diff --git a/lib/src/files.rs b/lib/src/files.rs
index a30d09787..d7ecd73e9 100644
--- a/lib/src/files.rs
+++ b/lib/src/files.rs
@@ -165,7 +165,7 @@ pub fn merge(slices: &Merge<&[u8]>) -> MergeResult {
     let num_diffs = slices.removes().len();
     let diff_inputs = slices.removes().chain(slices.adds()).copied().collect_vec();

-    let diff = Diff::for_tokenizer(&diff_inputs, &diff::find_line_ranges);
+    let diff = Diff::for_tokenizer(&diff_inputs, diff::find_line_ranges);
     let mut resolved_hunk = ContentHunk(vec![]);
     let mut merge_hunks: Vec<Merge<ContentHunk>> = vec![];
     for diff_hunk in diff.hunks() {
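
A note on why the signature change is safe: the standard library provides
blanket implementations so that `&F` implements `Fn(A) -> R` whenever `F`
does. Call sites can therefore pass a plain `fn` item such as
`find_line_ranges` (or a closure) by value without the extra `&`, while
`refine_changed_regions()` can still lend the tokenizer it now owns to the
inner `Diff::for_tokenizer(&slices, &tokenizer)` call, as the lib/src/diff.rs
hunks above show. The sketch below illustrates the pattern in isolation; the
`token_count` and `count_via_borrow` helpers are invented for this example
and are not part of jj.

    use std::ops::Range;

    // Stand-in tokenizer with the same shape as jj's find_line_ranges:
    // returns the byte range of each line in `text`.
    fn find_line_ranges(text: &[u8]) -> Vec<Range<usize>> {
        let mut ranges = vec![];
        let mut start = 0;
        for (pos, byte) in text.iter().enumerate() {
            if *byte == b'\n' {
                ranges.push(start..pos + 1);
                start = pos + 1;
            }
        }
        if start < text.len() {
            ranges.push(start..text.len());
        }
        ranges
    }

    // Takes the tokenizer by value, like the new for_tokenizer() signature.
    fn token_count(
        inputs: &[&[u8]],
        tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>,
    ) -> usize {
        // Because &F: Fn whenever F: Fn, the owned tokenizer can be lent out
        // by reference and then used again, mirroring how
        // refine_changed_regions() passes &tokenizer to Diff::for_tokenizer().
        let via_borrow = count_via_borrow(inputs, &tokenizer);
        let direct: usize = inputs.iter().map(|&input| tokenizer(input).len()).sum();
        assert_eq!(via_borrow, direct);
        direct
    }

    fn count_via_borrow(
        inputs: &[&[u8]],
        tokenizer: impl Fn(&[u8]) -> Vec<Range<usize>>,
    ) -> usize {
        inputs.iter().map(|&input| tokenizer(input).len()).sum()
    }

    fn main() {
        // Call sites drop the `&` and pass a fn item or a closure directly,
        // just like the call sites updated in this patch.
        let inputs: &[&[u8]] = &[b"a\nb\n", b"c"];
        assert_eq!(token_count(inputs, find_line_ranges), 3);
        assert_eq!(token_count(inputs, |_| vec![]), 0);
    }

The one call site that keeps a `&` is the recursive one inside
`refine_changed_regions()`, which borrows the tokenizer it received by value
so it can be reused for each changed region; every other caller gets simpler.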