cleanup: remove some Vec<_> annotations, mostly by using collect_vec()

Martin von Zweigbergk 2021-06-09 13:57:48 -07:00
parent 0ce50e137a
commit b593e552b8
21 changed files with 136 additions and 94 deletions
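Background on the pattern applied throughout this commit: Iterator::collect() is generic over the target collection, so a binding usually needs a Vec<_> annotation (or a turbofish) for type inference to succeed, whereas Itertools::collect_vec() always builds a Vec and lets the annotation be dropped. A minimal sketch of the before/after shapes, assuming the itertools 0.10 dependency added below; the names here are illustrative and not taken from the repository:

use itertools::Itertools;

fn main() {
    let words = ["a", "bb", "ccc"];

    // Before: collect() could build many collection types, so the binding
    // needs an explicit Vec<_> annotation for inference to succeed.
    let before: Vec<_> = words.iter().map(|w| w.len()).collect();

    // After: collect_vec() always produces a Vec, so no annotation is needed.
    let after = words.iter().map(|w| w.len()).collect_vec();

    assert_eq!(before, after);
}

collect_vec() lives in the itertools crate, which is why both Cargo.toml files below gain an itertools = "0.10.1" dependency.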

Cargo.lock (generated, 17 changed lines)

@@ -241,7 +241,7 @@ dependencies = [
"clap",
"criterion-plot",
"csv",
"itertools",
"itertools 0.9.0",
"lazy_static",
"num-traits 0.2.12",
"oorandom",
@@ -263,7 +263,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d"
dependencies = [
"cast",
"itertools",
"itertools 0.9.0",
]
[[package]]
@@ -516,6 +516,15 @@ dependencies = [
"either",
]
[[package]]
name = "itertools"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "0.4.6"
@@ -554,6 +563,7 @@ dependencies = [
"git2",
"hex",
"indoc",
"itertools 0.10.1",
"jujutsu-lib",
"pest",
"pest_derive",
@@ -582,6 +592,7 @@ dependencies = [
"dirs",
"git2",
"hex",
"itertools 0.10.1",
"maplit",
"num_cpus",
"pest",
@@ -1596,6 +1607,6 @@ checksum = "ec24a9273d24437afb8e71b16f3d9a5d569193cccdb7896213b59f552f387674"
dependencies = [
"cc",
"glob",
"itertools",
"itertools 0.9.0",
"libc",
]


@@ -30,6 +30,7 @@ dirs = "3.0.1"
git2 = "0.13.14"
hex = "0.4.2"
indoc = "1.0.3"
itertools = "0.10.1"
jujutsu-lib = { version = "=0.2.0", path = "lib"}
pest = "2.1.3"
pest_derive = "2.1.0"


@@ -25,6 +25,7 @@ config = "0.10.1"
dirs = "3.0.1"
git2 = "0.13.14"
hex = "0.4.2"
itertools = "0.10.1"
maplit = "1.0.2"
pest = "2.1.3"
pest_derive = "2.1.0"


@@ -83,7 +83,7 @@ where
let mut emitted = HashSet::new();
let mut result = vec![];
let mut start_nodes: Vec<_> = start.into_iter().collect();
let mut start_nodes: Vec<T> = start.into_iter().collect();
start_nodes.reverse();
for start_node in start_nodes {
@@ -384,14 +384,13 @@ mod tests {
'e' => vec!['b'],
'F' => vec!['C', 'e'],
};
let expected: HashSet<char> = vec!['D', 'F'].into_iter().collect();
let actual = heads(
vec!['A', 'C', 'D', 'F'],
&|node| neighbors[node].clone(),
&|node| *node,
);
assert_eq!(actual, expected);
assert_eq!(actual, hashset!['D', 'F']);
// Check with a different order in the start set
let actual = heads(
@@ -399,6 +398,6 @@ mod tests {
&|node| neighbors[node].clone(),
&|node| *node,
);
assert_eq!(actual, expected);
assert_eq!(actual, hashset!['D', 'F']);
}
}


@@ -15,6 +15,8 @@
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use itertools::Itertools;
use crate::commit::Commit;
use crate::commit_builder::CommitBuilder;
use crate::dag_walk::{bfs, closest_common_node, leaves};
@@ -49,7 +51,7 @@ impl State {
let view = repo.view();
let index = repo.index();
let mut state = State::default();
let head_ids: Vec<_> = view.heads().iter().cloned().collect();
let head_ids = view.heads().iter().cloned().collect_vec();
let mut change_to_commits = HashMap::new();
for head_id in &head_ids {
state.children.insert(head_id.clone(), HashSet::new());
@@ -95,7 +97,7 @@ impl State {
.insert(change_id, non_obsoletes);
}
// Find orphans by walking to the children of obsolete commits
let mut work: Vec<CommitId> = state.obsolete_commits.iter().cloned().collect();
let mut work = state.obsolete_commits.iter().cloned().collect_vec();
work.extend(state.pruned_commits.iter().cloned());
while !work.is_empty() {
let commit_id = work.pop().unwrap();
@@ -313,11 +315,11 @@ impl State {
}
// Filter out candidates that are ancestors of other candidates.
let all_candidates: Vec<CommitId> = repo
let all_candidates = repo
.index()
.heads(all_candidates.iter())
.into_iter()
.collect();
.collect_vec();
for candidate in all_candidates {
// TODO: Make this not recursive
@@ -507,14 +509,14 @@ pub struct DivergenceResolver<'settings> {
impl<'settings> DivergenceResolver<'settings> {
pub fn new(user_settings: &'settings UserSettings, mut_repo: &MutableRepo) -> Self {
// TODO: Put them in some defined order
let divergent_changes: Vec<_> = mut_repo
let divergent_changes = mut_repo
.evolution()
.state
.non_obsoletes_by_changeid
.values()
.filter(|non_obsoletes| non_obsoletes.len() > 1)
.cloned()
.collect();
.collect_vec();
DivergenceResolver {
user_settings,
remaining_changes: divergent_changes,
@@ -552,12 +554,12 @@ pub struct OrphanResolver<'settings> {
impl<'settings> OrphanResolver<'settings> {
pub fn new(user_settings: &'settings UserSettings, mut_repo: &MutableRepo) -> Self {
let mut orphans_topo_order: Vec<_> = mut_repo
let mut orphans_topo_order = mut_repo
.index()
.topo_order(mut_repo.evolution().state.orphan_commits.iter())
.iter()
.map(|entry| entry.position())
.collect();
.collect_vec();
// Reverse so we can pop then efficiently later
orphans_topo_order.reverse();
OrphanResolver {
@@ -610,7 +612,7 @@ fn evolve_divergent_change(
commits: &HashSet<Commit>,
) -> DivergenceResolution {
// Resolve divergence pair-wise, starting with the two oldest commits.
let mut commits: Vec<Commit> = commits.iter().cloned().collect();
let mut commits = commits.iter().cloned().collect_vec();
commits.sort_by(|a: &Commit, b: &Commit| a.committer().timestamp.cmp(&b.committer().timestamp));
commits.reverse();


@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use itertools::Itertools;
use thiserror::Error;
use crate::commit::Commit;
@@ -31,7 +32,7 @@ pub fn import_refs(
) -> Result<(), GitImportError> {
let store = mut_repo.store().clone();
let git_refs = git_repo.references()?;
let existing_git_refs: Vec<_> = mut_repo.view().git_refs().keys().cloned().collect();
let existing_git_refs = mut_repo.view().git_refs().keys().cloned().collect_vec();
// TODO: Store the id of the previous import and read it back here, so we can
// merge the views instead of overwriting.
for existing_git_ref in existing_git_refs {


@@ -21,6 +21,7 @@ use std::time::Duration;
use backoff::{ExponentialBackoff, Operation};
use git2::Oid;
use itertools::Itertools;
use protobuf::Message;
use uuid::Uuid;
@@ -333,10 +334,10 @@ impl Store for GitStore {
.map(|b| b.reverse_bits())
.collect(),
);
let parents: Vec<_> = commit
let parents = commit
.parent_ids()
.map(|oid| CommitId(oid.as_bytes().to_vec()))
.collect();
.collect_vec();
let tree_id = TreeId(commit.tree_id().as_bytes().to_vec());
let description = commit.message().unwrap_or("<no message>").to_owned();
let author = signature_from_git(commit.author());
@@ -379,7 +380,7 @@ impl Store for GitStore {
locked_repo.find_commit(Oid::from_bytes(parent_id.0.as_slice())?)?;
parents.push(parent_git_commit);
}
let parent_refs: Vec<_> = parents.iter().collect();
let parent_refs = parents.iter().collect_vec();
let git_id = locked_repo.commit(
Some(&create_no_gc_ref()),
&author,
@@ -642,11 +643,11 @@ mod tests {
is_pruned: false,
};
let commit_id = store.write_commit(&commit).unwrap();
let git_refs: Vec<_> = git_repo
let git_refs = git_repo
.references_glob("refs/jj/keep/*")
.unwrap()
.map(|git_ref| git_ref.unwrap().target().unwrap())
.collect();
.collect_vec();
assert_eq!(git_refs, vec![Oid::from_bytes(&commit_id.0).unwrap()]);
}


@@ -14,6 +14,7 @@
use std::sync::Arc;
use itertools::Itertools;
use regex::{escape as regex_escape, Regex};
pub enum GitIgnoreParseError {}
@@ -87,7 +88,7 @@ impl GitIgnoreLine {
regex.insert_str(0, "(^|/)");
}
let components: Vec<_> = input.split('/').collect();
let components = input.split('/').collect_vec();
for (i, component) in components.iter().enumerate() {
if *component == "**" {
if i == components.len() - 1 {


@@ -27,6 +27,7 @@ use std::sync::Arc;
use blake2::{Blake2b, Digest};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use itertools::Itertools;
use tempfile::NamedTempFile;
use crate::commit::Commit;
@@ -419,16 +420,16 @@ impl MutableIndex {
let other = CompositeIndex(other_segment);
for pos in other_segment.segment_num_parent_commits()..other.num_commits() {
let entry = other.entry_by_pos(IndexPosition(pos));
let parent_ids: Vec<_> = entry
let parent_ids = entry
.parents()
.iter()
.map(|entry| entry.commit_id())
.collect();
let predecessor_ids: Vec<_> = entry
.collect_vec();
let predecessor_ids = entry
.predecessors()
.iter()
.map(|entry| entry.commit_id())
.collect();
.collect_vec();
self.add_commit_data(
entry.commit_id(),
entry.change_id(),
@@ -843,14 +844,14 @@ impl<'a> CompositeIndex<'a> {
}
pub fn common_ancestors(&self, set1: &[CommitId], set2: &[CommitId]) -> Vec<CommitId> {
let pos1: Vec<_> = set1
let pos1 = set1
.iter()
.map(|id| self.commit_id_to_pos(id).unwrap())
.collect();
let pos2: Vec<_> = set2
.collect_vec();
let pos2 = set2
.iter()
.map(|id| self.commit_id_to_pos(id).unwrap())
.collect();
.collect_vec();
self.common_ancestors_pos(&pos1, &pos2)
.iter()
.map(|pos| self.entry_by_pos(*pos).commit_id())
@@ -965,13 +966,15 @@ impl<'a> CompositeIndex<'a> {
&self,
input: impl IntoIterator<Item = &'input CommitId>,
) -> Vec<IndexEntry<'a>> {
let mut entries_by_generation: Vec<_> = input
let mut entries_by_generation = input
.into_iter()
.map(|id| IndexEntryByPosition(self.entry_by_id(id).unwrap()))
.collect();
.collect_vec();
entries_by_generation.sort();
let entries: Vec<_> = entries_by_generation.into_iter().map(|key| key.0).collect();
entries
entries_by_generation
.into_iter()
.map(|key| key.0)
.collect_vec()
}
}
@@ -2303,7 +2306,7 @@ mod tests {
index
.walk_revs(wanted, unwanted)
.map(|entry| entry.commit_id())
.collect::<Vec<_>>()
.collect_vec()
};
// No wanted commits


@@ -19,6 +19,7 @@ use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::Arc;
use itertools::Itertools;
use tempfile::NamedTempFile;
use crate::commit::Commit;
@@ -126,7 +127,7 @@ impl IndexStore {
}
}
let mut heads: Vec<CommitId> = new_heads.into_iter().collect();
let mut heads = new_heads.into_iter().collect_vec();
heads.sort();
let commits = topo_order_earlier_first(store, heads, maybe_parent_file);


@@ -16,6 +16,7 @@ use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;
use itertools::Itertools;
use thiserror::Error;
use crate::lock::FileLock;
@@ -141,13 +142,13 @@ impl OpHeadsStore {
return Ok(Operation::new(op_store.clone(), op_head_id, op_head));
}
let op_heads: Vec<_> = op_head_ids
let op_heads = op_head_ids
.iter()
.map(|op_id: &OperationId| {
let data = op_store.read_operation(op_id).unwrap();
Operation::new(op_store.clone(), op_id.clone(), data)
})
.collect();
.collect_vec();
let mut op_heads = self.handle_ancestor_ops(op_heads);
// Return without creating a merge operation


@@ -15,6 +15,8 @@
use std::fmt::{Debug, Error, Formatter};
use std::path::{Path, PathBuf};
use itertools::Itertools;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
pub struct RepoPathComponent {
value: String,
@@ -86,11 +88,11 @@ impl RepoPath {
/// The full string form used internally, not for presenting to users (where
/// we may want to use the platform's separator).
pub fn to_internal_file_string(&self) -> String {
let strings: Vec<String> = self
let strings = self
.components
.iter()
.map(|component| component.value.clone())
.collect();
.collect_vec();
strings.join("/")
}


@@ -18,6 +18,7 @@ use std::iter::Peekable;
use std::ops::Range;
use std::rc::Rc;
use itertools::Itertools;
use pest::iterators::Pairs;
use pest::Parser;
use thiserror::Error;
@@ -837,18 +838,20 @@ pub fn evaluate_expression<'repo>(
RevsetExpression::Symbol(symbol) => {
let commit_ids = resolve_symbol(repo, &symbol)?;
let index = repo.index();
let mut index_entries: Vec<_> = commit_ids
let mut index_entries = commit_ids
.iter()
.map(|id| index.entry_by_id(id).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
Ok(Box::new(EagerRevset { index_entries }))
}
RevsetExpression::Parents(base_expression) => {
// TODO: Make this lazy
let base_set = base_expression.evaluate(repo)?;
let mut parent_entries: Vec<_> =
base_set.iter().flat_map(|entry| entry.parents()).collect();
let mut parent_entries = base_set
.iter()
.flat_map(|entry| entry.parents())
.collect_vec();
parent_entries.sort_by_key(|b| Reverse(b.position()));
parent_entries.dedup();
Ok(Box::new(EagerRevset {
@@ -871,9 +874,9 @@ pub fn evaluate_expression<'repo>(
.evaluate(repo),
RevsetExpression::Range { roots, heads } => {
let root_set = roots.evaluate(repo)?;
let root_ids: Vec<_> = root_set.iter().map(|entry| entry.commit_id()).collect();
let root_ids = root_set.iter().map(|entry| entry.commit_id()).collect_vec();
let head_set = heads.evaluate(repo)?;
let head_ids: Vec<_> = head_set.iter().map(|entry| entry.commit_id()).collect();
let head_ids = head_set.iter().map(|entry| entry.commit_id()).collect_vec();
let walk = repo.index().walk_revs(&head_ids, &root_ids);
Ok(Box::new(RevWalkRevset { walk }))
}
@@ -885,7 +888,7 @@ pub fn evaluate_expression<'repo>(
let candidate_set = RevsetExpression::Ancestors(heads.clone()).evaluate(repo)?;
let mut reachable: HashSet<_> = root_set.iter().map(|entry| entry.position()).collect();
let mut result = vec![];
let candidates: Vec<_> = candidate_set.iter().collect();
let candidates = candidate_set.iter().collect_vec();
for candidate in candidates.into_iter().rev() {
if reachable.contains(&candidate.position())
|| candidate
@@ -905,10 +908,10 @@ pub fn evaluate_expression<'repo>(
RevsetExpression::AllHeads => {
let index = repo.index();
let heads = repo.view().heads();
let mut index_entries: Vec<_> = heads
let mut index_entries = heads
.iter()
.map(|id| index.entry_by_id(id).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
Ok(Box::new(EagerRevset { index_entries }))
}
@@ -930,21 +933,21 @@ pub fn evaluate_expression<'repo>(
RevsetExpression::PublicHeads => {
let index = repo.index();
let heads = repo.view().public_heads();
let mut index_entries: Vec<_> = heads
let mut index_entries = heads
.iter()
.map(|id| index.entry_by_id(id).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
Ok(Box::new(EagerRevset { index_entries }))
}
RevsetExpression::GitRefs => {
let index = repo.index();
let mut index_entries: Vec<_> = repo
let mut index_entries = repo
.view()
.git_refs()
.values()
.map(|id| index.entry_by_id(id).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
index_entries.dedup();
Ok(Box::new(EagerRevset { index_entries }))
@@ -987,7 +990,7 @@ fn non_obsolete_heads<'revset, 'repo: 'revset>(
heads: Box<dyn Revset<'repo> + 'repo>,
) -> Box<dyn Revset<'repo> + 'revset> {
let mut commit_ids = HashSet::new();
let mut work: Vec<_> = heads.iter().collect();
let mut work = heads.iter().collect_vec();
let evolution = repo.evolution();
while !work.is_empty() {
let index_entry = work.pop().unwrap();
@@ -1005,10 +1008,10 @@ fn non_obsolete_heads<'revset, 'repo: 'revset>(
}
let index = repo.index();
let commit_ids = index.heads(&commit_ids);
let mut index_entries: Vec<_> = commit_ids
let mut index_entries = commit_ids
.iter()
.map(|id| index.entry_by_id(id).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
Box::new(EagerRevset { index_entries })
}
@@ -1018,10 +1021,10 @@ pub fn revset_for_commits<'revset, 'repo: 'revset>(
commits: &[&Commit],
) -> Box<dyn Revset<'repo> + 'revset> {
let index = repo.index();
let mut index_entries: Vec<_> = commits
let mut index_entries = commits
.iter()
.map(|commit| index.entry_by_id(commit.id()).unwrap())
.collect();
.collect_vec();
index_entries.sort_by_key(|b| Reverse(b.position()));
Box::new(EagerRevset { index_entries })
}


@@ -12,6 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use itertools::Itertools;
use crate::commit::Commit;
use crate::commit_builder::CommitBuilder;
use crate::repo::{MutableRepo, RepoRef};
@@ -28,13 +30,16 @@ pub fn merge_commit_trees(repo: RepoRef, commits: &[Commit]) -> Tree {
} else {
let index = repo.index();
let mut new_tree = commits[0].tree();
let commit_ids: Vec<_> = commits.iter().map(|commit| commit.id().clone()).collect();
let commit_ids = commits
.iter()
.map(|commit| commit.id().clone())
.collect_vec();
for (i, other_commit) in commits.iter().enumerate().skip(1) {
let ancestor_ids = index.common_ancestors(&commit_ids[0..i], &[commit_ids[i].clone()]);
let ancestors: Vec<_> = ancestor_ids
let ancestors = ancestor_ids
.iter()
.map(|id| store.get_commit(id).unwrap())
.collect();
.collect_vec();
let ancestor_tree = merge_commit_trees(repo, &ancestors);
let new_tree_id = merge_trees(&new_tree, &ancestor_tree, &other_commit.tree()).unwrap();
new_tree = store.get_tree(&RepoPath::root(), &new_tree_id).unwrap();


@@ -17,6 +17,7 @@ use std::fs::OpenOptions;
use std::io::Write;
use std::sync::Arc;
use itertools::Itertools;
use tempfile::TempDir;
use crate::commit::Commit;
@@ -150,7 +151,10 @@ impl<'settings, 'repo> CommitGraphBuilder<'settings, 'repo> {
}
pub fn commit_with_parents(&mut self, parents: &[&Commit]) -> Commit {
let parent_ids: Vec<_> = parents.iter().map(|commit| commit.id().clone()).collect();
let parent_ids = parents
.iter()
.map(|commit| commit.id().clone())
.collect_vec();
create_random_commit(self.settings, self.mut_repo.base_repo().as_ref())
.set_parents(parent_ids)
.write_to_repo(self.mut_repo)


@@ -14,6 +14,7 @@
#![feature(assert_matches)]
use itertools::Itertools;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::evolution::{
@@ -722,7 +723,7 @@ fn test_evolve_divergent(use_git: bool) {
assert_eq!(resolved.predecessors(), &[commit6, commit4]);
let tree = resolved.tree();
let entries: Vec<_> = tree.entries().collect();
let entries = tree.entries().collect_vec();
assert_eq!(entries.len(), 4);
assert_eq!(
tree.value(&RepoPathComponent::from("A")).unwrap(),


@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use itertools::Itertools;
use jujutsu_lib::repo_path::{RepoPath, RepoPathComponent};
use jujutsu_lib::store::{ConflictPart, TreeValue};
use jujutsu_lib::tree::Tree;
@@ -72,10 +73,10 @@ fn test_same_type(use_git: bool) {
let merged_tree = store.get_tree(&RepoPath::root(), &merged_tree_id).unwrap();
// Check that we have exactly the paths we expect in the merged tree
let names: Vec<&str> = merged_tree
let names = merged_tree
.entries_non_recursive()
.map(|entry| entry.name().as_str())
.collect();
.collect_vec();
assert_eq!(
names,
vec!["__a", "_a_", "_aa", "_ab", "a_b", "aaa", "aab", "ab_", "aba", "abb", "abc",]
@@ -259,7 +260,7 @@ fn test_subtrees(use_git: bool) {
let merged_tree_id = tree::merge_trees(&side1_tree, &base_tree, &side2_tree).unwrap();
let merged_tree = store.get_tree(&RepoPath::root(), &merged_tree_id).unwrap();
let entries: Vec<_> = merged_tree.entries().collect();
let entries = merged_tree.entries().collect_vec();
let expected_tree = write_tree(vec![
"f1",
@@ -270,7 +271,7 @@ fn test_subtrees(use_git: bool) {
"d1/d1/d1/f1",
"d1/d1/d1/f2",
]);
let expected_entries: Vec<_> = expected_tree.entries().collect();
let expected_entries = expected_tree.entries().collect_vec();
assert_eq!(entries, expected_entries);
}


@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use itertools::Itertools;
use jujutsu_lib::revset::revset_for_commits;
use jujutsu_lib::revset_graph_iterator::RevsetGraphEdge;
use jujutsu_lib::testutils;
@@ -47,11 +48,11 @@ fn test_graph_iterator_linearized(skip_transitive_edges: bool) {
let pos_a = mut_repo.index().commit_id_to_pos(commit_a.id()).unwrap();
let revset = revset_for_commits(mut_repo.as_repo_ref(), &[&commit_a, &commit_d]);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 2);
assert_eq!(commits[0].0.commit_id(), *commit_d.id());
@@ -97,11 +98,11 @@ fn test_graph_iterator_virtual_octopus(skip_transitive_edges: bool) {
mut_repo.as_repo_ref(),
&[&commit_a, &commit_b, &commit_c, &commit_f],
);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 4);
assert_eq!(commits[0].0.commit_id(), *commit_f.id());
@@ -155,11 +156,11 @@ fn test_graph_iterator_simple_fork(skip_transitive_edges: bool) {
let pos_a = mut_repo.index().commit_id_to_pos(commit_a.id()).unwrap();
let revset = revset_for_commits(mut_repo.as_repo_ref(), &[&commit_a, &commit_c, &commit_e]);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 3);
assert_eq!(commits[0].0.commit_id(), *commit_e.id());
@@ -205,11 +206,11 @@ fn test_graph_iterator_multiple_missing(skip_transitive_edges: bool) {
let pos_c = mut_repo.index().commit_id_to_pos(commit_c.id()).unwrap();
let revset = revset_for_commits(mut_repo.as_repo_ref(), &[&commit_b, &commit_f]);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 2);
assert_eq!(commits[0].0.commit_id(), *commit_f.id());
@@ -260,11 +261,11 @@ fn test_graph_iterator_edge_to_ancestor(skip_transitive_edges: bool) {
let pos_d = mut_repo.index().commit_id_to_pos(commit_d.id()).unwrap();
let revset = revset_for_commits(mut_repo.as_repo_ref(), &[&commit_c, &commit_d, &commit_f]);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 3);
assert_eq!(commits[0].0.commit_id(), *commit_f.id());
@@ -341,11 +342,11 @@ fn test_graph_iterator_edge_escapes_from_(skip_transitive_edges: bool) {
mut_repo.as_repo_ref(),
&[&commit_a, &commit_d, &commit_g, &commit_h, &commit_j],
);
let commits: Vec<_> = revset
let commits = revset
.iter()
.graph()
.set_skip_transitive_edges(skip_transitive_edges)
.collect();
.collect_vec();
drop(revset);
assert_eq!(commits.len(), 5);
assert_eq!(commits[0].0.commit_id(), *commit_j.id());


@@ -18,6 +18,7 @@ use std::io::{Read, Write};
use std::os::unix::fs::PermissionsExt;
use std::sync::Arc;
use itertools::Itertools;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::repo::ReadonlyRepo;
use jujutsu_lib::repo_path::{RepoPath, RepoPathComponent};
@@ -298,11 +299,11 @@ fn test_gitignores(use_git: bool) {
let wc = repo.working_copy().clone();
let (repo, commit1) = wc.lock().unwrap().commit(&settings, repo);
let files1: Vec<_> = commit1
let files1 = commit1
.tree()
.entries()
.map(|(name, _value)| name)
.collect();
.collect_vec();
assert_eq!(
files1,
vec![
@@ -323,11 +324,11 @@ fn test_gitignores(use_git: bool) {
let wc = repo.working_copy().clone();
let (_repo, commit2) = wc.lock().unwrap().commit(&settings, repo);
let files2: Vec<_> = commit2
let files2 = commit2
.tree()
.entries()
.map(|(name, _value)| name)
.collect();
.collect_vec();
assert_eq!(
files2,
vec![


@@ -29,6 +29,7 @@ use std::{fs, io};
use clap::{crate_version, App, Arg, ArgMatches, SubCommand};
use criterion::Criterion;
use itertools::Itertools;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::dag_walk::topo_order_reverse;
@@ -346,7 +347,7 @@ impl RepoCommandHelper {
format!("'{}'", arg.replace("'", "\\'"))
}
};
let quoted_strings: Vec<_> = self.string_args.iter().map(shell_escape).collect();
let quoted_strings = self.string_args.iter().map(shell_escape).collect_vec();
tx.set_tag("args".to_string(), quoted_strings.join(" "));
tx
}
@@ -1442,7 +1443,7 @@ fn edit_description(repo: &ReadonlyRepo, description: &str) -> String {
let editor = std::env::var("EDITOR").unwrap_or_else(|_| "pico".to_string());
// Handle things like `EDITOR=emacs -nw`
let args: Vec<_> = editor.split(' ').collect();
let args = editor.split(' ').collect_vec();
let editor_args = if args.len() > 1 { &args[1..] } else { &[] };
let exit_status = Command::new(args[0])
.args(editor_args)
@@ -1463,10 +1464,10 @@ fn edit_description(repo: &ReadonlyRepo, description: &str) -> String {
// Delete the file only if everything went well.
// TODO: Tell the user the name of the file we left behind.
std::fs::remove_file(description_file_path).ok();
let mut lines: Vec<_> = description
let mut lines = description
.split_inclusive('\n')
.filter(|line| !line.starts_with("JJ: "))
.collect();
.collect_vec();
// Remove trailing blank lines
while matches!(lines.last(), Some(&"\n") | Some(&"\r\n")) {
lines.pop().unwrap();


@@ -16,6 +16,7 @@ use std::borrow::BorrowMut;
use std::io;
use std::ops::Add;
use itertools::Itertools;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::repo::RepoRef;
use jujutsu_lib::store::{CommitId, Signature};
@@ -61,10 +62,10 @@ pub struct LabelTemplate<'a, C> {
impl<'a, C> LabelTemplate<'a, C> {
pub fn new(content: Box<dyn Template<C> + 'a>, labels: String) -> Self {
let labels: Vec<String> = labels
let labels = labels
.split_whitespace()
.map(|label| label.to_string())
.collect();
.collect_vec();
LabelTemplate { content, labels }
}
}
@@ -103,10 +104,10 @@ impl<'a, C> DynamicLabelTemplate<'a, C> {
impl<'a, C> Template<C> for DynamicLabelTemplate<'a, C> {
fn format(&self, context: &C, formatter: &mut dyn Formatter) -> io::Result<()> {
let labels = self.label_property.as_ref()(context);
let labels: Vec<String> = labels
let labels = labels
.split_whitespace()
.map(|label| label.to_string())
.collect();
.collect_vec();
for label in &labels {
formatter.add_label(label.clone())?;
}
@@ -225,14 +226,14 @@ impl TemplateProperty<Commit, String> for GitRefsProperty<'_> {
impl TemplateProperty<Commit, String> for GitRefsProperty<'_> {
fn extract(&self, context: &Commit) -> String {
let refs: Vec<_> = self
let refs = self
.repo
.view()
.git_refs()
.iter()
.filter(|(_name, id)| *id == context.id())
.map(|(name, _id)| name.clone())
.collect();
.collect_vec();
refs.join(" ")
}
}