cleanup: let Clippy fix a bunch of warnings

Martin von Zweigbergk 2021-06-14 00:18:38 -07:00
parent 134940d2bb
commit 4c416dd864
35 changed files with 151 additions and 151 deletions
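
Most hunks below are the same `clippy::needless_borrow` fix: an explicit `&` is dropped where the expression is already a reference (or where the compiler would auto-borrow), so the extra `&` only produced a `&&T` that had to be dereferenced again. A minimal, self-contained sketch of the pattern; the function and types here are illustrative stand-ins, not this crate's API:

// Hypothetical helper standing in for calls like `store.read_file(path, id)`.
fn describe(path: &str, id: &u32) -> String {
    format!("{}:{}", path, id)
}

fn caller(path: &str, id: &u32) -> String {
    // Before the cleanup this call looked like `describe(&path, &id)`:
    // `path` and `id` are already references, so the extra `&` created
    // `&&str` / `&&u32` values that only compiled via auto-deref, and
    // Clippy flagged each borrow as needless.
    describe(path, id)
}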


@ -49,17 +49,17 @@ pub fn materialize_conflict(
let mut base_contents: Vec<u8> = vec![];
let mut right_contents: Vec<u8> = vec![];
store
.read_file(&path, &left_id)
.read_file(path, &left_id)
.unwrap()
.read_to_end(&mut left_contents)
.unwrap();
store
.read_file(&path, &base_id)
.read_file(path, &base_id)
.unwrap()
.read_to_end(&mut base_contents)
.unwrap();
store
.read_file(&path, &right_id)
.read_file(path, &right_id)
.unwrap()
.read_to_end(&mut right_contents)
.unwrap();
@ -106,7 +106,7 @@ pub fn conflict_to_materialized_value(
conflict: &Conflict,
) -> TreeValue {
let mut buf = vec![];
materialize_conflict(store, &path, &conflict, &mut buf);
materialize_conflict(store, path, conflict, &mut buf);
let file_id = store.write_file(path, &mut Cursor::new(&buf)).unwrap();
TreeValue::Normal {
id: file_id,


@ -168,7 +168,7 @@ where
Box::new(|node| {
let neighbors: Vec<T> = neighbors_fn(node).into_iter().collect();
for neighbor in &neighbors {
reachable.remove(&neighbor);
reachable.remove(neighbor);
}
neighbors
}),
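
The hunk above is the loop-variable form of the same lint: iterating over `&neighbors` already yields `&T` items, so taking another `&` before calling `HashSet::remove` handed over a double reference. A small sketch with concrete stand-in types (the real code is generic over `T`):

use std::collections::HashSet;

// Drop every neighbor from the reachable set; `neighbors` is borrowed, so the
// loop variable is already a reference and needs no extra `&` at the call site.
fn prune_reachable(reachable: &mut HashSet<String>, neighbors: &[String]) {
    for neighbor in neighbors {
        reachable.remove(neighbor);
    }
}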


@ -414,11 +414,11 @@ fn refine_changed_ranges<'a>(
RangeDiff::Replaced(left_range, right_range) => {
let left_slice = &left[left_range.clone()];
let right_slice = &right[right_range.clone()];
let refined_left_ranges: Vec<Range<usize>> = tokenizer(&left_slice);
let refined_right_ranges: Vec<Range<usize>> = tokenizer(&right_slice);
let refined_left_ranges: Vec<Range<usize>> = tokenizer(left_slice);
let refined_right_ranges: Vec<Range<usize>> = tokenizer(right_slice);
let unchanged_refined_ranges = unchanged_ranges(
&left_slice,
&right_slice,
left_slice,
right_slice,
&refined_left_ranges,
&refined_right_ranges,
);


@ -528,7 +528,7 @@ impl<'settings> DivergenceResolver<'settings> {
let store = mut_repo.store().clone();
let commits = commit_ids
.iter()
.map(|id| store.get_commit(&id).unwrap())
.map(|id| store.get_commit(id).unwrap())
.collect();
evolve_divergent_change(self.user_settings, &store, mut_repo, &commits)
})
@ -688,7 +688,7 @@ fn evolve_two_divergent_commits(
// TODO: Merge commit description and other commit metadata. How do we deal with
// conflicts? It's probably best to interactively ask the caller (which
// might ask the user in interactive use).
CommitBuilder::for_rewrite_from(user_settings, store, &commit1)
CommitBuilder::for_rewrite_from(user_settings, store, commit1)
.set_tree(resolved_tree)
.set_predecessors(vec![commit1.id().clone(), commit2.id().clone()])
.write_to_repo(mut_repo)


@ -85,7 +85,7 @@ fn signature_to_git(signature: &Signature) -> git2::Signature {
(signature.timestamp.timestamp.0 / 1000) as i64,
signature.timestamp.tz_offset,
);
git2::Signature::new(&name, &email, &time).unwrap()
git2::Signature::new(name, email, &time).unwrap()
}
fn serialize_note(commit: &Commit) -> String {
@ -136,7 +136,7 @@ fn write_note(
// TODO: Report this to libgit2.
let notes_ref_lock = format!("{}.lock", notes_ref);
let mut try_write_note = || {
let note_status = git_repo.note(&committer, &committer, Some(notes_ref), oid, note, false);
let note_status = git_repo.note(committer, committer, Some(notes_ref), oid, note, false);
match note_status {
Err(err) if err.message().contains(&notes_ref_lock) => {
Err(backoff::Error::Transient(err))
@ -385,7 +385,7 @@ impl Store for GitStore {
Some(&create_no_gc_ref()),
&author,
&committer,
&message,
message,
&git_tree,
&parent_refs,
)?;


@ -50,7 +50,7 @@ impl GitIgnoreLine {
if !prev_was_space && non_space_seen {
trimmed_len = input.len();
}
&input.split_at(trimmed_len).0
input.split_at(trimmed_len).0
}
fn parse(prefix: &str, input: &str) -> Result<Option<GitIgnoreLine>, GitIgnoreParseError> {
@ -111,7 +111,7 @@ impl GitIgnoreLine {
} else if let Some(characters) = &mut character_class {
if c == ']' {
regex.push('[');
regex.push_str(&characters);
regex.push_str(characters);
regex.push(']');
character_class = None;
} else {


@ -482,7 +482,7 @@ impl MutableIndex {
if let Some(parent_file) = &self.parent_file {
buf.write_u32::<LittleEndian>(parent_file.name.len() as u32)
.unwrap();
buf.write_all(&parent_file.name.as_bytes()).unwrap();
buf.write_all(parent_file.name.as_bytes()).unwrap();
} else {
buf.write_u32::<LittleEndian>(0).unwrap();
}
@ -877,7 +877,7 @@ impl<'a> CompositeIndex<'a> {
while !(items1.is_empty() || items2.is_empty()) {
let entry1 = items1.last().unwrap();
let entry2 = items2.last().unwrap();
match entry1.cmp(&entry2) {
match entry1.cmp(entry2) {
Ordering::Greater => {
let entry1 = items1.pop_last().unwrap();
for parent_entry in entry1.0.parents() {
@ -1272,7 +1272,7 @@ impl IndexSegment for MutableIndex {
if !id.0.starts_with(&bytes_prefix.0) {
break;
}
if prefix.matches(&id) {
if prefix.matches(id) {
if first_match.is_some() {
return PrefixResolution::AmbiguousMatch;
}


@ -133,7 +133,7 @@ impl IndexStore {
let commits = topo_order_earlier_first(store, heads, maybe_parent_file);
for commit in &commits {
data.add_commit(&commit);
data.add_commit(commit);
}
let index_file = data.save_in(self.dir.clone())?;
@ -151,7 +151,7 @@ impl IndexStore {
) -> io::Result<()> {
let mut temp_file = NamedTempFile::new_in(&self.dir)?;
let file = temp_file.as_file_mut();
file.write_all(&index.name().as_bytes()).unwrap();
file.write_all(index.name().as_bytes()).unwrap();
persist_temp_file(temp_file, &self.dir.join("operations").join(op_id.hex()))?;
Ok(())
}


@ -115,7 +115,7 @@ impl Store for LocalStore {
}
fn read_file(&self, _path: &RepoPath, id: &FileId) -> StoreResult<Box<dyn Read>> {
let path = self.file_path(&id);
let path = self.file_path(id);
let file = File::open(path).map_err(not_found_to_store_error)?;
Ok(Box::new(zstd::Decoder::new(file)?))
}
@ -146,7 +146,7 @@ impl Store for LocalStore {
}
fn read_symlink(&self, _path: &RepoPath, id: &SymlinkId) -> Result<String, StoreError> {
let path = self.symlink_path(&id);
let path = self.symlink_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let mut target = String::new();
file.read_to_string(&mut target).unwrap();
@ -169,7 +169,7 @@ impl Store for LocalStore {
}
fn read_tree(&self, _path: &RepoPath, id: &TreeId) -> StoreResult<Tree> {
let path = self.tree_path(&id);
let path = self.tree_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let proto: crate::protos::store::Tree = Message::parse_from_reader(&mut file)?;
@ -192,7 +192,7 @@ impl Store for LocalStore {
}
fn read_commit(&self, id: &CommitId) -> StoreResult<Commit> {
let path = self.commit_path(&id);
let path = self.commit_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let proto: crate::protos::store::Commit = Message::parse_from_reader(&mut file)?;
@ -215,7 +215,7 @@ impl Store for LocalStore {
}
fn read_conflict(&self, id: &ConflictId) -> StoreResult<Conflict> {
let path = self.conflict_path(&id);
let path = self.conflict_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let proto: crate::protos::store::Conflict = Message::parse_from_reader(&mut file)?;


@ -138,11 +138,11 @@ impl Dirs {
}
fn get_dirs(&self, dir: &RepoPath) -> &HashSet<RepoPathComponent> {
self.dirs.get(&dir).unwrap_or(&self.empty_dirs)
self.dirs.get(dir).unwrap_or(&self.empty_dirs)
}
fn get_files(&self, dir: &RepoPath) -> &HashSet<RepoPathComponent> {
self.files.get(&dir).unwrap_or(&self.empty_files)
self.files.get(dir).unwrap_or(&self.empty_files)
}
}


@ -46,7 +46,7 @@ impl OpHeadsStore {
op_store: &Arc<dyn OpStore>,
root_view: &op_store::View,
) -> (Self, Operation) {
let root_view_id = op_store.write_view(&root_view).unwrap();
let root_view_id = op_store.write_view(root_view).unwrap();
let operation_metadata =
OperationMetadata::new("initialize repo".to_string(), Timestamp::now());
let init_operation = op_store::Operation {
@ -58,7 +58,7 @@ impl OpHeadsStore {
let init_operation = Operation::new(op_store.clone(), init_operation_id, init_operation);
let op_heads_store = OpHeadsStore { dir };
op_heads_store.add_op_head(&init_operation.id());
op_heads_store.add_op_head(init_operation.id());
(op_heads_store, init_operation)
}
@ -177,7 +177,7 @@ impl OpHeadsStore {
let op_heads = dag_walk::heads(op_heads, &neighbors_fn, &|op: &Operation| op.id().clone());
let op_head_ids_after: HashSet<_> = op_heads.iter().map(|op| op.id().clone()).collect();
for removed_op_head in op_head_ids_before.difference(&op_head_ids_after) {
self.remove_op_head(&removed_op_head);
self.remove_op_head(removed_op_head);
}
op_heads.into_iter().collect()
}
@ -201,7 +201,7 @@ fn merge_op_heads(
)
.unwrap();
let base_repo = repo_loader.load_at(&ancestor_op);
let other_repo = repo_loader.load_at(&other_op_head);
let other_repo = repo_loader.load_at(other_op_head);
merged_repo.merge(&base_repo, &other_repo);
}
let op_parent_ids = op_heads.iter().map(|op| op.id().clone()).collect();


@ -289,7 +289,7 @@ impl ReadonlyRepo {
}
pub fn as_repo_ref(&self) -> RepoRef {
RepoRef::Readonly(&self)
RepoRef::Readonly(self)
}
pub fn repo_path(&self) -> &PathBuf {
@ -457,7 +457,7 @@ impl RepoLoader {
}
pub fn load_at_head(&self) -> Arc<ReadonlyRepo> {
let op = self.op_heads_store.get_single_op_head(&self).unwrap();
let op = self.op_heads_store.get_single_op_head(self).unwrap();
let view = View::new(op.view().take_store_view());
self._finish_load(op, view)
}
@ -542,7 +542,7 @@ impl MutableRepo {
}
pub fn as_repo_ref(&self) -> RepoRef {
RepoRef::Mutable(&self)
RepoRef::Mutable(self)
}
pub fn base_repo(&self) -> &Arc<ReadonlyRepo> {
@ -750,8 +750,8 @@ impl MutableRepo {
// merging the view. Merging in base_repo's index isn't typically
// necessary, but it can be if base_repo is ahead of either self or other_repo
// (e.g. because we're undoing an operation that hasn't been published).
self.index.merge_in(&base_repo.index());
self.index.merge_in(&other_repo.index());
self.index.merge_in(base_repo.index());
self.index.merge_in(other_repo.index());
self.view.merge(&base_repo.view, &other_repo.view);
self.enforce_view_invariants();
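
In the `as_repo_ref` and `merge_in` changes above, the borrowed value is `self` itself or the result of a getter that already returns a reference, so the `&` was again redundant. A rough sketch with stand-in types (not the crate's real `RepoRef`/`ReadonlyRepo` definitions):

// Stand-ins for ReadonlyRepo and RepoRef, used only to show the shape of the fix.
struct Repo;

enum RepoRefSketch<'a> {
    Readonly(&'a Repo),
}

impl Repo {
    fn as_repo_ref(&self) -> RepoRefSketch<'_> {
        // `self` already has type `&Repo`; writing `Readonly(&self)` would wrap
        // a `&&Repo` and rely on auto-deref, which is what Clippy cleaned up.
        RepoRefSketch::Readonly(self)
    }
}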


@ -252,23 +252,23 @@ mod tests {
#[test]
fn test_to_fs_path() {
assert_eq!(
RepoPath::from_internal_string("").to_fs_path(&Path::new("base/dir")),
RepoPath::from_internal_string("").to_fs_path(Path::new("base/dir")),
Path::new("base/dir")
);
assert_eq!(
RepoPath::from_internal_string("").to_fs_path(&Path::new("")),
RepoPath::from_internal_string("").to_fs_path(Path::new("")),
Path::new("")
);
assert_eq!(
RepoPath::from_internal_string("file").to_fs_path(&Path::new("base/dir")),
RepoPath::from_internal_string("file").to_fs_path(Path::new("base/dir")),
Path::new("base/dir/file")
);
assert_eq!(
RepoPath::from_internal_string("some/deep/dir/file").to_fs_path(&Path::new("base/dir")),
RepoPath::from_internal_string("some/deep/dir/file").to_fs_path(Path::new("base/dir")),
Path::new("base/dir/some/deep/dir/file")
);
assert_eq!(
RepoPath::from_internal_string("dir/file").to_fs_path(&Path::new("")),
RepoPath::from_internal_string("dir/file").to_fs_path(Path::new("")),
Path::new("dir/file")
);
}


@ -836,7 +836,7 @@ pub fn evaluate_expression<'repo>(
index_entries: vec![],
})),
RevsetExpression::Symbol(symbol) => {
let commit_ids = resolve_symbol(repo, &symbol)?;
let commit_ids = resolve_symbol(repo, symbol)?;
let index = repo.index();
let mut index_entries = commit_ids
.iter()


@ -56,14 +56,14 @@ pub fn rebase_commit(
) -> Commit {
let store = mut_repo.store();
let old_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), &old_commit.parents());
let new_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), &new_parents);
let new_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), new_parents);
// TODO: pass in labels for the merge parts
let new_tree_id = merge_trees(&new_base_tree, &old_base_tree, &old_commit.tree()).unwrap();
let new_parent_ids = new_parents
.iter()
.map(|commit| commit.id().clone())
.collect();
CommitBuilder::for_rewrite_from(settings, store, &old_commit)
CommitBuilder::for_rewrite_from(settings, store, old_commit)
.set_parents(new_parent_ids)
.set_tree(new_tree_id)
.write_to_repo(mut_repo)
@ -77,7 +77,7 @@ pub fn back_out_commit(
) -> Commit {
let store = mut_repo.store();
let old_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), &old_commit.parents());
let new_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), &new_parents);
let new_base_tree = merge_commit_trees(mut_repo.as_repo_ref(), new_parents);
// TODO: pass in labels for the merge parts
let new_tree_id = merge_trees(&new_base_tree, &old_commit.tree(), &old_base_tree).unwrap();
let new_parent_ids = new_parents


@ -81,7 +81,7 @@ fn not_found_to_store_error(err: std::io::Error) -> OpStoreError {
impl OpStore for SimpleOpStore {
fn read_view(&self, id: &ViewId) -> OpStoreResult<View> {
let path = self.view_path(&id);
let path = self.view_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let proto: crate::protos::op_store::View = Message::parse_from_reader(&mut file)?;
@ -104,7 +104,7 @@ impl OpStore for SimpleOpStore {
}
fn read_operation(&self, id: &OperationId) -> OpStoreResult<Operation> {
let path = self.operation_path(&id);
let path = self.operation_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let proto: crate::protos::op_store::Operation = Message::parse_from_reader(&mut file)?;


@ -254,11 +254,11 @@ impl<'a> TreeEntry<'a> {
}
pub fn name(&self) -> &'a RepoPathComponent {
&self.name
self.name
}
pub fn value(&self) -> &'a TreeValue {
&self.value
self.value
}
}
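
The `TreeEntry` accessors above return fields that are themselves references, so `&self.name` produced a `&&'a _` where plain `self.name` already has the advertised return type. A minimal sketch under that assumption (struct and field names are illustrative):

// A struct whose fields are references, like TreeEntry's `name` and `value`.
struct EntrySketch<'a> {
    name: &'a str,
}

impl<'a> EntrySketch<'a> {
    fn name(&self) -> &'a str {
        // `self.name` is already `&'a str`; `&self.name` would be `&&'a str`
        // and only type-check through auto-deref.
        self.name
    }
}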


@ -54,9 +54,9 @@ pub fn init_repo(settings: &UserSettings, use_git: bool) -> (TempDir, Arc<Readon
let repo = if use_git {
let git_path = temp_dir.path().join("git-repo");
git2::Repository::init(&git_path).unwrap();
ReadonlyRepo::init_external_git(&settings, wc_path, git_path).unwrap()
ReadonlyRepo::init_external_git(settings, wc_path, git_path).unwrap()
} else {
ReadonlyRepo::init_local(&settings, wc_path).unwrap()
ReadonlyRepo::init_local(settings, wc_path).unwrap()
};
(temp_dir, repo)


@ -438,13 +438,13 @@ impl Iterator for TreeDiffIterator<'_> {
let subdir_path = self.dir.join(subdir);
let before_tree = match before {
Some(TreeValue::Tree(id_before)) => {
self.tree1.known_sub_tree(subdir, &id_before)
self.tree1.known_sub_tree(subdir, id_before)
}
_ => Tree::null(self.tree1.store().clone(), subdir_path.clone()),
};
let after_tree = match after {
Some(TreeValue::Tree(id_after)) => {
self.tree2.known_sub_tree(subdir, &id_after)
self.tree2.known_sub_tree(subdir, id_after)
}
_ => Tree::null(self.tree2.store().clone(), subdir_path.clone()),
};
@ -559,9 +559,9 @@ fn merge_tree_value(
) => {
let subdir = dir.join(basename);
let merged_tree_id = merge_trees(
&store.get_tree(&subdir, &side1).unwrap(),
&store.get_tree(&subdir, &base).unwrap(),
&store.get_tree(&subdir, &side2).unwrap(),
&store.get_tree(&subdir, side1).unwrap(),
&store.get_tree(&subdir, base).unwrap(),
&store.get_tree(&subdir, side2).unwrap(),
)?;
if &merged_tree_id == store.empty_tree_id() {
None
@ -594,18 +594,18 @@ fn merge_tree_value(
*side1_executable
};
let filename = dir.join(&basename);
let filename = dir.join(basename);
let mut base_content = vec![];
store
.read_file(&filename, &base_id)?
.read_file(&filename, base_id)?
.read_to_end(&mut base_content)?;
let mut side1_content = vec![];
store
.read_file(&filename, &side1_id)?
.read_file(&filename, side1_id)?
.read_to_end(&mut side1_content)?;
let mut side2_content = vec![];
store
.read_file(&filename, &side2_id)?
.read_file(&filename, side2_id)?
.read_to_end(&mut side2_content)?;
let merge_result = files::merge(&base_content, &side1_content, &side2_content);
@ -704,7 +704,7 @@ fn simplify_conflict(
for part in &conflict.adds {
match part.value {
TreeValue::Conflict(_) => {
let conflict = conflict_part_to_conflict(&store, part)?;
let conflict = conflict_part_to_conflict(store, part)?;
new_removes.extend_from_slice(&conflict.removes);
new_adds.extend_from_slice(&conflict.adds);
}
@ -716,7 +716,7 @@ fn simplify_conflict(
for part in &conflict.removes {
match part.value {
TreeValue::Conflict(_) => {
let conflict = conflict_part_to_conflict(&store, part)?;
let conflict = conflict_part_to_conflict(store, part)?;
new_removes.extend_from_slice(&conflict.adds);
new_adds.extend_from_slice(&conflict.removes);
}


@ -101,17 +101,17 @@ impl View {
// sides and emit a warning?
}
for removed_head in base.public_heads().difference(&other.public_heads()) {
for removed_head in base.public_heads().difference(other.public_heads()) {
self.remove_public_head(removed_head);
}
for added_head in other.public_heads().difference(&base.public_heads()) {
for added_head in other.public_heads().difference(base.public_heads()) {
self.add_public_head(added_head);
}
for removed_head in base.heads().difference(&other.heads()) {
for removed_head in base.heads().difference(other.heads()) {
self.remove_head(removed_head);
}
for added_head in other.heads().difference(&base.heads()) {
for added_head in other.heads().difference(base.heads()) {
self.add_head(added_head);
}
// TODO: Should it be considered a conflict if a commit-head is removed on one


@ -120,7 +120,7 @@ fn file_states_from_proto(
let mut file_states = BTreeMap::new();
for (path_str, proto_file_state) in &proto.file_states {
let path = RepoPath::from_internal_string(path_str.as_str());
file_states.insert(path, file_state_from_proto(&proto_file_state));
file_states.insert(path, file_state_from_proto(proto_file_state));
}
file_states
}
@ -442,7 +442,7 @@ impl TreeState {
// the file exists, and the stat information is most likely accurate,
// except for other processes modifying the file concurrently (The mtime is set
// at write time and won't change when we close the file.)
let mut file_state = self.file_state(&disk_path).unwrap();
let mut file_state = self.file_state(disk_path).unwrap();
// Make sure the state we record is what we tried to set above. This is mostly
// for Windows, since the executable bit is not reflected in the file system
// there.
@ -463,7 +463,7 @@ impl TreeState {
let target = PathBuf::from(&target);
symlink(target, disk_path).unwrap();
}
self.file_state(&disk_path).unwrap()
self.file_state(disk_path).unwrap()
}
#[cfg_attr(windows, allow(unused_variables))]


@ -27,7 +27,7 @@ fn count_non_merge_operations(repo: &ReadonlyRepo) -> usize {
for op_id in dag_walk::bfs(
vec![op_id],
Box::new(|op_id| op_id.clone()),
Box::new(|op_id| op_store.read_operation(&op_id).unwrap().parents),
Box::new(|op_id| op_store.read_operation(op_id).unwrap().parents),
) {
if op_store.read_operation(&op_id).unwrap().parents.len() <= 1 {
num_ops += 1;


@ -214,8 +214,8 @@ fn test_matcher_dir_file_transition(use_git: bool) {
tree2.diff_summary(&tree1, &matcher),
DiffSummary {
modified: vec![],
added: vec![a_path.clone()],
removed: vec![a_a_path.clone()]
added: vec![a_path],
removed: vec![a_a_path]
}
);
}
@ -258,9 +258,9 @@ fn test_matcher_normal_cases(use_git: bool) {
assert_eq!(
tree2.diff_summary(&tree1, &matcher),
DiffSummary {
modified: vec![a_path.clone()],
modified: vec![a_path],
added: vec![],
removed: vec![z_path.clone()]
removed: vec![z_path]
}
);
@ -276,9 +276,9 @@ fn test_matcher_normal_cases(use_git: bool) {
assert_eq!(
tree2.diff_summary(&tree1, &matcher),
DiffSummary {
modified: vec![dir1_a_path.clone()],
modified: vec![dir1_a_path],
added: vec![],
removed: vec![dir2_b_path.clone()]
removed: vec![dir2_b_path]
}
);
}
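
The test hunks above address a different warning: each `.clone()` was on the last use of the path value, so the clone copied data that was dropped immediately (Clippy's redundant-clone lint). A tiny illustration of why moving the value is enough:

// `a_path` is not used after this point, so it can be moved into the vec
// instead of being cloned and then dropped.
fn expected_added() -> Vec<String> {
    let a_path = String::from("a");
    vec![a_path]
}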


@ -28,7 +28,7 @@ use test_case::test_case;
#[must_use]
fn child_commit(settings: &UserSettings, repo: &ReadonlyRepo, commit: &Commit) -> CommitBuilder {
testutils::create_random_commit(&settings, repo).set_parents(vec![commit.id().clone()])
testutils::create_random_commit(settings, repo).set_parents(vec![commit.id().clone()])
}
#[test_case(false ; "local store")]
@ -545,7 +545,7 @@ fn test_evolve_orphan(use_git: bool) {
}) = resolution2
{
assert_eq!(orphan2, grandchild);
assert_eq!(new_commit2.parents(), vec![new_commit1.clone()]);
assert_eq!(new_commit2.parents(), vec![new_commit1]);
}
}
@ -634,7 +634,7 @@ fn test_evolve_multiple_orphans(use_git: bool) {
}) = resolution3
{
assert_eq!(orphan3, grandchild2);
assert_eq!(new_commit3.parents(), vec![new_commit1.clone()]);
assert_eq!(new_commit3.parents(), vec![new_commit1]);
}
}
}


@ -313,7 +313,7 @@ fn test_fetch_success() {
let source_git_repo = git2::Repository::init_bare(&source_repo_dir).unwrap();
let initial_git_commit = empty_git_commit(&source_git_repo, "refs/heads/main", &[]);
let clone_git_repo =
git2::Repository::clone(&source_repo_dir.to_str().unwrap(), &clone_repo_dir).unwrap();
git2::Repository::clone(source_repo_dir.to_str().unwrap(), &clone_repo_dir).unwrap();
std::fs::create_dir(&jj_repo_dir).unwrap();
ReadonlyRepo::init_external_git(&settings, jj_repo_dir.clone(), clone_repo_dir).unwrap();
@ -363,10 +363,10 @@ fn set_up_push_repos(settings: &UserSettings, temp_dir: &TempDir) -> PushTestSet
let source_repo = git2::Repository::init_bare(&source_repo_dir).unwrap();
let initial_git_commit = empty_git_commit(&source_repo, "refs/heads/main", &[]);
let initial_commit_id = commit_id(&initial_git_commit);
git2::Repository::clone(&source_repo_dir.to_str().unwrap(), &clone_repo_dir).unwrap();
git2::Repository::clone(source_repo_dir.to_str().unwrap(), &clone_repo_dir).unwrap();
std::fs::create_dir(&jj_repo_dir).unwrap();
let jj_repo = ReadonlyRepo::init_external_git(&settings, jj_repo_dir, clone_repo_dir).unwrap();
let new_commit = testutils::create_random_commit(&settings, &jj_repo)
let jj_repo = ReadonlyRepo::init_external_git(settings, jj_repo_dir, clone_repo_dir).unwrap();
let new_commit = testutils::create_random_commit(settings, &jj_repo)
.set_parents(vec![initial_commit_id])
.write_to_new_transaction(&jj_repo, "test");
let jj_repo = jj_repo.reload();


@ -26,7 +26,7 @@ use test_case::test_case;
#[must_use]
fn child_commit(settings: &UserSettings, repo: &ReadonlyRepo, commit: &Commit) -> CommitBuilder {
testutils::create_random_commit(&settings, repo).set_parents(vec![commit.id().clone()])
testutils::create_random_commit(settings, repo).set_parents(vec![commit.id().clone()])
}
// Helper just to reduce line wrapping


@ -64,6 +64,6 @@ fn test_load_at_operation(use_git: bool) {
// If we load the repo at the previous operation, we should see the commit since
// it has not been removed yet
let loader = RepoLoader::init(&settings, repo.working_copy_path().clone()).unwrap();
let old_repo = loader.load_at(&repo.operation());
let old_repo = loader.load_at(repo.operation());
assert!(old_repo.view().heads().contains(commit.id()));
}


@ -366,7 +366,7 @@ fn test_types(use_git: bool) {
.unwrap()
{
TreeValue::Conflict(id) => {
let conflict = store.read_conflict(&id).unwrap();
let conflict = store.read_conflict(id).unwrap();
assert_eq!(
conflict.removes,
vec![ConflictPart {
@ -453,7 +453,7 @@ fn test_simplify_conflict(use_git: bool) {
let upstream2_tree = write_tree("upstream2 contents");
let merge_trees = |base: &Tree, side1: &Tree, side2: &Tree| -> Tree {
let tree_id = tree::merge_trees(&side1, &base, &side2).unwrap();
let tree_id = tree::merge_trees(side1, base, side2).unwrap();
store.get_tree(&RepoPath::root(), &tree_id).unwrap()
};


@ -147,9 +147,9 @@ fn test_checkout_closed_with_conflict(use_git: bool) {
}
fn write_conflict(store: &Arc<StoreWrapper>, file_path: &RepoPath) -> ConflictId {
let file_id1 = testutils::write_file(store, &file_path, "a\n");
let file_id2 = testutils::write_file(store, &file_path, "b\n");
let file_id3 = testutils::write_file(store, &file_path, "c\n");
let file_id1 = testutils::write_file(store, file_path, "a\n");
let file_id2 = testutils::write_file(store, file_path, "b\n");
let file_id3 = testutils::write_file(store, file_path, "c\n");
let conflict = Conflict {
removes: vec![ConflictPart {
value: TreeValue::Normal {


@ -128,8 +128,8 @@ fn test_checkout_file_transitions(use_git: bool) {
TreeValue::Tree(id)
}
Kind::GitSubmodule => {
let id = testutils::create_random_commit(&settings, &repo)
.write_to_new_transaction(&repo, "test")
let id = testutils::create_random_commit(settings, repo)
.write_to_new_transaction(repo, "test")
.id()
.clone();
TreeValue::GitSubmodule(id)
@ -331,10 +331,10 @@ fn test_gitignores(use_git: bool) {
assert_eq!(
files2,
vec![
gitignore_path.clone(),
added_path.clone(),
subdir_modified_path.clone(),
modified_path.clone(),
gitignore_path,
added_path,
subdir_modified_path,
modified_path,
]
);
}


@ -203,7 +203,7 @@ impl RepoCommandHelper {
string_args: Vec<String>,
root_matches: &ArgMatches,
) -> Result<Self, CommandError> {
let repo = get_repo(ui, &root_matches)?;
let repo = get_repo(ui, root_matches)?;
let may_update_working_copy = root_matches.value_of("at_op").unwrap() == "@";
Ok(RepoCommandHelper {
string_args,
@ -435,7 +435,7 @@ fn resolve_single_op(repo: &ReadonlyRepo, op_str: &str) -> Result<Operation, Com
// was loaded at
Ok(repo.operation().clone())
} else {
resolve_single_op_from_store(&repo.op_store(), &repo.op_heads_store(), op_str)
resolve_single_op_from_store(repo.op_store(), repo.op_heads_store(), op_str)
}
}
@ -1220,13 +1220,13 @@ fn cmd_diff(
fn show_diff_summary(ui: &mut Ui, wc_path: &Path, summary: &DiffSummary) -> io::Result<()> {
for file in &summary.modified {
writeln!(ui, "M {}", ui.format_file_path(wc_path, &file))?;
writeln!(ui, "M {}", ui.format_file_path(wc_path, file))?;
}
for file in &summary.added {
writeln!(ui, "A {}", ui.format_file_path(wc_path, &file))?;
writeln!(ui, "A {}", ui.format_file_path(wc_path, file))?;
}
for file in &summary.removed {
writeln!(ui, "R {}", ui.format_file_path(wc_path, &file))?;
writeln!(ui, "R {}", ui.format_file_path(wc_path, file))?;
}
Ok(())
}
@ -1529,7 +1529,7 @@ fn cmd_describe(
} else if sub_matches.is_present("message") {
description = sub_matches.value_of("message").unwrap().to_owned()
} else {
description = edit_description(&repo, commit.description());
description = edit_description(repo, commit.description());
}
let mut tx = repo_command.start_transaction(&format!("describe commit {}", commit.id().hex()));
CommitBuilder::for_rewrite_from(ui.settings(), repo.store(), &commit)
@ -1571,7 +1571,7 @@ fn cmd_close(
if sub_matches.is_present("message") {
description = sub_matches.value_of("message").unwrap().to_string();
} else if commit.description().is_empty() {
description = edit_description(&repo, "\n\nJJ: Enter commit description.\n");
description = edit_description(repo, "\n\nJJ: Enter commit description.\n");
} else {
description = commit.description().to_string();
}
@ -1612,7 +1612,7 @@ fn cmd_prune(
let predecessors = repo_command.resolve_revset(sub_matches.value_of("revision").unwrap())?;
repo_command.check_non_empty(&predecessors)?;
for predecessor in &predecessors {
repo_command.check_rewriteable(&predecessor)?;
repo_command.check_rewriteable(predecessor)?;
}
let repo = repo_command.repo();
let transaction_description = if predecessors.len() == 1 {
@ -1691,7 +1691,7 @@ fn cmd_squash(
to the destination. If you don't make any changes, then all the changes\n\
from the source will be moved into the parent.\n",
short_commit_description(&commit),
short_commit_description(&parent)
short_commit_description(parent)
);
new_parent_tree_id =
crate::diff_edit::edit_diff(ui, &parent.tree(), &commit.tree(), &instructions)?;
@ -1704,7 +1704,7 @@ fn cmd_squash(
// Prune the child if the parent now has all the content from the child (always
// the case in the non-interactive case).
let prune_child = &new_parent_tree_id == commit.tree().id();
let new_parent = CommitBuilder::for_rewrite_from(ui.settings(), repo.store(), &parent)
let new_parent = CommitBuilder::for_rewrite_from(ui.settings(), repo.store(), parent)
.set_tree(new_parent_tree_id)
.set_predecessors(vec![parent.id().clone(), commit.id().clone()])
.write_to_repo(mut_repo);
@ -1733,7 +1733,7 @@ fn cmd_unsquash(
)));
}
let parent = &parents[0];
repo_command.check_rewriteable(&parent)?;
repo_command.check_rewriteable(parent)?;
let mut tx = repo_command.start_transaction(&format!("unsquash commit {}", commit.id().hex()));
let mut_repo = tx.mut_repo();
let parent_base_tree = merge_commit_trees(repo.as_repo_ref(), &parent.parents());
@ -1749,7 +1749,7 @@ fn cmd_unsquash(
the parent commit. The changes you edited out will be moved into the\n\
child commit. If you don't make any changes, then the operation will be\n\
aborted.\n",
short_commit_description(&parent),
short_commit_description(parent),
short_commit_description(&commit)
);
new_parent_tree_id =
@ -1763,7 +1763,7 @@ fn cmd_unsquash(
// Prune the parent if it is now empty (always the case in the non-interactive
// case).
let prune_parent = &new_parent_tree_id == parent_base_tree.id();
let new_parent = CommitBuilder::for_rewrite_from(ui.settings(), repo.store(), &parent)
let new_parent = CommitBuilder::for_rewrite_from(ui.settings(), repo.store(), parent)
.set_tree(new_parent_tree_id)
.set_predecessors(vec![parent.id().clone(), commit.id().clone()])
.set_pruned(prune_parent)
@ -1925,7 +1925,7 @@ fn cmd_split(
let mut tx = repo_command.start_transaction(&format!("split commit {}", commit.id().hex()));
let mut_repo = tx.mut_repo();
let first_description = edit_description(
&repo,
repo,
&("JJ: Enter commit description for the first part.\n".to_string()
+ commit.description()),
);
@ -1934,7 +1934,7 @@ fn cmd_split(
.set_description(first_description)
.write_to_repo(mut_repo);
let second_description = edit_description(
&repo,
repo,
&("JJ: Enter commit description for the second part.\n".to_string()
+ commit.description()),
);
@ -1979,7 +1979,7 @@ fn cmd_merge(
description = sub_matches.value_of("message").unwrap().to_string();
} else {
description = edit_description(
&repo,
repo,
"\n\nJJ: Enter commit description for the merge commit.\n",
);
}
@ -2334,7 +2334,7 @@ fn cmd_op_undo(
) -> Result<(), CommandError> {
let mut repo_command = command.repo_helper(ui)?;
let repo = repo_command.repo();
let bad_op = resolve_single_op(&repo, _cmd_matches.value_of("operation").unwrap())?;
let bad_op = resolve_single_op(repo, _cmd_matches.value_of("operation").unwrap())?;
let parent_ops = bad_op.parents();
if parent_ops.len() > 1 {
return Err(CommandError::UserError(
@ -2363,7 +2363,7 @@ fn cmd_op_restore(
) -> Result<(), CommandError> {
let mut repo_command = command.repo_helper(ui)?;
let repo = repo_command.repo();
let target_op = resolve_single_op(&repo, _cmd_matches.value_of("operation").unwrap())?;
let target_op = resolve_single_op(repo, _cmd_matches.value_of("operation").unwrap())?;
let mut tx =
repo_command.start_transaction(&format!("restore to operation {}", target_op.id().hex()));
tx.mut_repo().set_view(target_op.view().take_store_view());
@ -2561,59 +2561,59 @@ where
let matches = get_app().get_matches_from(&string_args);
let command_helper = CommandHelper::new(string_args, matches.clone());
let result = if let Some(sub_matches) = command_helper.root_matches.subcommand_matches("init") {
cmd_init(&mut ui, &command_helper, &sub_matches)
cmd_init(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("checkout") {
cmd_checkout(&mut ui, &command_helper, &sub_matches)
cmd_checkout(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("files") {
cmd_files(&mut ui, &command_helper, &sub_matches)
cmd_files(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("diff") {
cmd_diff(&mut ui, &command_helper, &sub_matches)
cmd_diff(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("status") {
cmd_status(&mut ui, &command_helper, &sub_matches)
cmd_status(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("log") {
cmd_log(&mut ui, &command_helper, &sub_matches)
cmd_log(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("obslog") {
cmd_obslog(&mut ui, &command_helper, &sub_matches)
cmd_obslog(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("describe") {
cmd_describe(&mut ui, &command_helper, &sub_matches)
cmd_describe(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("close") {
cmd_close(&mut ui, &command_helper, &sub_matches)
cmd_close(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("open") {
cmd_open(&mut ui, &command_helper, &sub_matches)
cmd_open(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("duplicate") {
cmd_duplicate(&mut ui, &command_helper, &sub_matches)
cmd_duplicate(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("prune") {
cmd_prune(&mut ui, &command_helper, &sub_matches)
cmd_prune(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("new") {
cmd_new(&mut ui, &command_helper, &sub_matches)
cmd_new(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("squash") {
cmd_squash(&mut ui, &command_helper, &sub_matches)
cmd_squash(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("unsquash") {
cmd_unsquash(&mut ui, &command_helper, &sub_matches)
cmd_unsquash(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("discard") {
cmd_discard(&mut ui, &command_helper, &sub_matches)
cmd_discard(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("restore") {
cmd_restore(&mut ui, &command_helper, &sub_matches)
cmd_restore(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("edit") {
cmd_edit(&mut ui, &command_helper, &sub_matches)
cmd_edit(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("split") {
cmd_split(&mut ui, &command_helper, &sub_matches)
cmd_split(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("merge") {
cmd_merge(&mut ui, &command_helper, &sub_matches)
cmd_merge(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("rebase") {
cmd_rebase(&mut ui, &command_helper, &sub_matches)
cmd_rebase(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("backout") {
cmd_backout(&mut ui, &command_helper, &sub_matches)
cmd_backout(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("evolve") {
cmd_evolve(&mut ui, &command_helper, &sub_matches)
cmd_evolve(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("operation") {
cmd_operation(&mut ui, &command_helper, &sub_matches)
cmd_operation(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("git") {
cmd_git(&mut ui, &command_helper, &sub_matches)
cmd_git(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("bench") {
cmd_bench(&mut ui, &command_helper, &sub_matches)
cmd_bench(&mut ui, &command_helper, sub_matches)
} else if let Some(sub_matches) = matches.subcommand_matches("debug") {
cmd_debug(&mut ui, &command_helper, &sub_matches)
cmd_debug(&mut ui, &command_helper, sub_matches)
} else {
panic!("unhandled command: {:#?}", matches);
};


@ -107,7 +107,7 @@ pub fn edit_diff(
let store = left_tree.store();
let mut left_tree_builder = store.tree_builder(store.empty_tree_id().clone());
let mut right_tree_builder = store.tree_builder(store.empty_tree_id().clone());
for (file_path, diff) in left_tree.diff(&right_tree, &EverythingMatcher) {
for (file_path, diff) in left_tree.diff(right_tree, &EverythingMatcher) {
let (left_value, right_value) = diff.as_options();
if let Some(value) = left_value {
add_to_tree(store, &mut left_tree_builder, &file_path, value).unwrap();
@ -171,7 +171,7 @@ pub fn edit_diff(
// Create a Tree based on the initial right tree, applying the changes made to
// that directory by the diff editor.
let new_right_partial_tree_id = right_tree_state.write_tree();
let new_right_partial_tree = store.get_tree(&RepoPath::root(), &new_right_partial_tree_id)?;
let new_right_partial_tree = store.get_tree(&RepoPath::root(), new_right_partial_tree_id)?;
let new_tree_id = merge_trees(right_tree, &right_partial_tree, &new_right_partial_tree)?;
Ok(new_tree_id)


@ -148,7 +148,7 @@ impl<'output> ColorFormatter<'output> {
}
}
let color = self.color_for_name(&best_match.1);
let color = self.color_for_name(best_match.1);
self.cached_colors
.insert(self.labels.clone(), color.clone());
color


@ -88,7 +88,7 @@ where
if edges.len() > 2 && edge_index < self.edges.len() - 1 {
for i in 2..edges.len() {
for edge in self.edges.iter().take(edge_index + 1) {
AsciiGraphDrawer::straight_edge(&mut self.writer, &edge)?;
AsciiGraphDrawer::straight_edge(&mut self.writer, edge)?;
}
for _ in 0..i - 2 {
self.writer.write_all(b" ")?;
@ -108,7 +108,7 @@ where
// Draw the edges to the left of the new node
for edge in self.edges.iter().take(edge_index) {
AsciiGraphDrawer::straight_edge(&mut self.writer, &edge)?;
AsciiGraphDrawer::straight_edge(&mut self.writer, edge)?;
}
// Draw the new node
self.writer.write_all(node_symbol)?;
@ -122,7 +122,7 @@ where
self.writer.write_all(b" ")?;
// Draw the edges to the right of the new node
for edge in self.edges.iter().skip(edge_index) {
AsciiGraphDrawer::straight_edge(&mut self.writer, &edge)?;
AsciiGraphDrawer::straight_edge(&mut self.writer, edge)?;
}
if edges.len() > 1 {
self.writer.write_all(b" ")?;
@ -138,7 +138,7 @@ where
// If it's a merge commit, insert a row of '\'.
if edges.len() >= 2 {
for edge in self.edges.iter().take(edge_index) {
AsciiGraphDrawer::straight_edge(&mut self.writer, &edge)?;
AsciiGraphDrawer::straight_edge(&mut self.writer, edge)?;
}
AsciiGraphDrawer::straight_edge_no_space(&mut self.writer, &self.edges[edge_index])?;
for _ in edge_index + 1..self.edges.len() {
@ -181,7 +181,7 @@ where
// Emit any remaining lines of text.
while !self.pending_text.is_empty() {
for edge in self.edges.iter() {
AsciiGraphDrawer::straight_edge(&mut self.writer, &edge)?;
AsciiGraphDrawer::straight_edge(&mut self.writer, edge)?;
}
for _ in self.edges.len()..pad_to_index {
self.writer.write_all(b" ")?;


@ -38,7 +38,7 @@ fn new_formatter<'output>(
output: Box<dyn Write + 'output>,
) -> Box<dyn Formatter + 'output> {
if color {
Box::new(ColorFormatter::new(output, &settings))
Box::new(ColorFormatter::new(output, settings))
} else {
Box::new(PlainTextFormatter::new(output))
}