forked from mirrors/jj

duplicate: add --destination, --insert-before, and --insert-after options

parent b1e5ca5348
commit 3906779ea7

3 changed files with 335 additions and 16 deletions
@@ -12,12 +12,17 @@
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::rc::Rc;

use clap::ArgGroup;
use indexmap::IndexMap;
use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::commit::{Commit, CommitIteratorExt};
use jj_lib::repo::{ReadonlyRepo, Repo};
use jj_lib::revset::{RevsetExpression, RevsetIteratorExt};
use tracing::instrument;

use crate::cli_util::{short_commit_hash, CommandHelper, RevisionArg};
@@ -26,6 +31,7 @@ use crate::ui::Ui;

/// Create a new change with the same content as an existing one
#[derive(clap::Args, Clone, Debug)]
#[command(group(ArgGroup::new("target").args(&["destination", "insert_after", "insert_before"]).multiple(true)))]
pub(crate) struct DuplicateArgs {
    /// The revision(s) to duplicate
    #[arg(default_value = "@")]
@@ -33,6 +39,28 @@ pub(crate) struct DuplicateArgs {
    /// Ignored (but lets you pass `-r` for consistency with other commands)
    #[arg(short = 'r', hide = true, action = clap::ArgAction::Count)]
    unused_revision: u8,
    /// The revision(s) to rebase onto (can be repeated to create a merge
    /// commit)
    #[arg(long, short)]
    destination: Vec<RevisionArg>,
    /// The revision(s) to insert after (can be repeated to create a merge
    /// commit)
    #[arg(
        long,
        short = 'A',
        visible_alias = "after",
        conflicts_with = "destination"
    )]
    insert_after: Vec<RevisionArg>,
    /// The revision(s) to insert before (can be repeated to create a merge
    /// commit)
    #[arg(
        long,
        short = 'B',
        visible_alias = "before",
        conflicts_with = "destination"
    )]
    insert_before: Vec<RevisionArg>,
}

#[instrument(skip_all)]
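A minimal standalone sketch of how this argument group behaves, assuming clap 4 with the derive feature and using a simplified hypothetical `Cli` struct with `String` in place of `RevisionArg`: `--insert-after` and `--insert-before` may be combined (the `target` group allows multiple members), but either of them together with `--destination` is rejected because of `conflicts_with`.

    use clap::{ArgGroup, Parser};

    // Hypothetical, simplified mirror of `DuplicateArgs`, only to show the
    // group/conflict behaviour; not part of the commit itself.
    #[derive(Parser, Debug)]
    #[command(group(ArgGroup::new("target").args(&["destination", "insert_after", "insert_before"]).multiple(true)))]
    struct Cli {
        #[arg(long, short)]
        destination: Vec<String>,
        #[arg(long, short = 'A', visible_alias = "after", conflicts_with = "destination")]
        insert_after: Vec<String>,
        #[arg(long, short = 'B', visible_alias = "before", conflicts_with = "destination")]
        insert_before: Vec<String>,
    }

    fn main() {
        // `-A`/`-B` may be combined with each other...
        assert!(Cli::try_parse_from(["jj-duplicate", "-A", "x", "-B", "y"]).is_ok());
        // ...but either of them together with `--destination` is a parse error.
        assert!(Cli::try_parse_from(["jj-duplicate", "-A", "x", "-d", "y"]).is_err());
    }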
@@ -53,28 +81,234 @@ pub(crate) fn cmd_duplicate(
    if to_duplicate.last() == Some(workspace_command.repo().store().root_commit_id()) {
        return Err(user_error("Cannot duplicate the root commit"));
    }

    let parent_commit_ids: Vec<CommitId>;
    let children_commit_ids: Vec<CommitId>;

    if !args.insert_before.is_empty() && !args.insert_after.is_empty() {
        let parent_commits = workspace_command
            .resolve_some_revsets_default_single(&args.insert_after)?
            .into_iter()
            .collect_vec();
        parent_commit_ids = parent_commits.iter().ids().cloned().collect();
        let children_commits = workspace_command
            .resolve_some_revsets_default_single(&args.insert_before)?
            .into_iter()
            .collect_vec();
        children_commit_ids = children_commits.iter().ids().cloned().collect();
        workspace_command.check_rewritable(&children_commit_ids)?;
        let children_expression = RevsetExpression::commits(children_commit_ids.clone());
        let parents_expression = RevsetExpression::commits(parent_commit_ids.clone());
        ensure_no_commit_loop(
            workspace_command.repo(),
            &children_expression,
            &parents_expression,
        )?;
    } else if !args.insert_before.is_empty() {
        let children_commits = workspace_command
            .resolve_some_revsets_default_single(&args.insert_before)?
            .into_iter()
            .collect_vec();
        children_commit_ids = children_commits.iter().ids().cloned().collect();
        workspace_command.check_rewritable(&children_commit_ids)?;
        let children_expression = RevsetExpression::commits(children_commit_ids.clone());
        let parents_expression = children_expression.parents();
        ensure_no_commit_loop(
            workspace_command.repo(),
            &children_expression,
            &parents_expression,
        )?;
        // Manually collect the parent commit IDs to preserve the order of parents.
        parent_commit_ids = children_commits
            .iter()
            .flat_map(|commit| commit.parent_ids())
            .unique()
            .cloned()
            .collect_vec();
    } else if !args.insert_after.is_empty() {
        let parent_commits = workspace_command
            .resolve_some_revsets_default_single(&args.insert_after)?
            .into_iter()
            .collect_vec();
        parent_commit_ids = parent_commits.iter().ids().cloned().collect();
        let parents_expression = RevsetExpression::commits(parent_commit_ids.clone());
        let children_expression = parents_expression.children();
        children_commit_ids = children_expression
            .evaluate_programmatic(workspace_command.repo().as_ref())?
            .iter()
            .collect();
        workspace_command.check_rewritable(&children_commit_ids)?;
    } else if !args.destination.is_empty() {
        let parent_commits = workspace_command
            .resolve_some_revsets_default_single(&args.destination)?
            .into_iter()
            .collect_vec();
        parent_commit_ids = parent_commits.iter().ids().cloned().collect();
        children_commit_ids = vec![];
    } else {
        parent_commit_ids = vec![];
        children_commit_ids = vec![];
    };

    let mut duplicated_old_to_new: IndexMap<&CommitId, Commit> = IndexMap::new();
    let mut num_rebased = 0;

    let mut tx = workspace_command.start_transaction();
    let base_repo = tx.base_repo().clone();
    let store = base_repo.store();
    let mut_repo = tx.mut_repo();

    // If there are no parent commits specified, duplicate each commit on top of
    // their parents or other duplicated commits only.
    if parent_commit_ids.is_empty() {
        for original_commit_id in to_duplicate.iter().rev() {
            // Topological order ensures that any parents of `original_commit` are
            // either not in `target_commits` or were already duplicated.
            let original_commit = store.get_commit(original_commit_id)?;
            let new_parents = original_commit
                .parent_ids()
                .iter()
                .map(|id| duplicated_old_to_new.get(id).map_or(id, |c| c.id()).clone())
                .collect();
            let new_commit = mut_repo
                .rewrite_commit(command.settings(), &original_commit)
                .generate_new_change_id()
                .set_parents(new_parents)
                .write()?;
            duplicated_old_to_new.insert(original_commit_id, new_commit);
        }
    } else {
        let target_commits = to_duplicate.clone();

        let connected_target_commits: Vec<_> =
            RevsetExpression::commits(target_commits.iter().cloned().collect_vec())
                .connected()
                .evaluate_programmatic(mut_repo)?
                .iter()
                .commits(store)
                .try_collect()?;

        // If a commit in the target set has a parent which is not in the set, but has
        // an ancestor which is in the set, then the commit will have that ancestor
        // as a parent instead.
        let mut target_commits_internal_parents: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
        for commit in connected_target_commits.iter().rev() {
            // The roots of the set will not have any parents found, and will be stored as
            // an empty vector.
            let mut new_parents = vec![];
            for old_parent in commit.parent_ids() {
                if target_commits.contains(old_parent) {
                    new_parents.push(old_parent.clone());
                } else if let Some(parents) = target_commits_internal_parents.get(old_parent) {
                    new_parents.extend(parents.iter().cloned());
                }
            }
            target_commits_internal_parents.insert(commit.id().clone(), new_parents);
        }
        target_commits_internal_parents.retain(|id, _| target_commits.contains(id));

        // Compute the roots of `target_commits`.
        let target_roots: HashSet<_> = target_commits_internal_parents
            .iter()
            .filter(|(_, parents)| parents.is_empty())
            .map(|(commit_id, _)| commit_id.clone())
            .collect();

        // Compute the heads of the target set, which will be used as the parents of
        // `children_commits`.
        let target_heads: Vec<CommitId> = if !children_commit_ids.is_empty() {
            let mut target_heads: HashSet<CommitId> = HashSet::new();
            for commit in connected_target_commits.iter().rev() {
                target_heads.insert(commit.id().clone());
                for old_parent in commit.parent_ids() {
                    target_heads.remove(old_parent);
                }
            }
            connected_target_commits
                .iter()
                .rev()
                .filter(|commit| {
                    target_heads.contains(commit.id()) && target_commits.contains(commit.id())
                })
                .map(|commit| commit.id().clone())
                .collect_vec()
        } else {
            vec![]
        };

        for original_commit_id in to_duplicate.iter().rev() {
            // Topological order ensures that any parents of `original_commit` are
            // either not in `target_commits` or were already duplicated.
            let original_commit = store.get_commit(original_commit_id)?;
            let new_parents = if target_roots.contains(original_commit_id) {
                parent_commit_ids.clone()
            } else {
                original_commit
                    .parent_ids()
                    .iter()
                    .filter(|id| {
                        // Filter out parents which are descendants of the children commits.
                        !children_commit_ids.iter().any(|child_commit_id| {
                            mut_repo.index().is_ancestor(child_commit_id, id)
                        })
                    })
                    .flat_map(|id| {
                        // Get the new IDs of the parents of `original_commit`.
                        target_commits_internal_parents
                            .get(id)
                            .map_or_else(|| vec![id.clone()], |parents| parents.clone())
                            .into_iter()
                            // Replace parent IDs with their new IDs if they were duplicated.
                            .map(|id| {
                                duplicated_old_to_new
                                    .get(&id)
                                    .map_or(id, |c| c.id().clone())
                            })
                    })
                    .collect()
            };
            let new_commit = mut_repo
                .rewrite_commit(command.settings(), &original_commit)
                .generate_new_change_id()
                .set_parents(new_parents)
                .write()?;
            duplicated_old_to_new.insert(original_commit_id, new_commit);
        }

        // Replace the original commit IDs in `target_heads` with the duplicated commit
        // IDs.
        let target_heads = target_heads
            .into_iter()
            .map(|commit_id| {
                duplicated_old_to_new
                    .get(&commit_id)
                    .map_or_else(|| commit_id, |c| c.id().clone())
            })
            .collect_vec();

        // Rebase new children onto `target_heads`.
        let children_commit_ids_set: HashSet<CommitId> =
            children_commit_ids.iter().cloned().collect();
        tx.mut_repo().transform_descendants(
            command.settings(),
            children_commit_ids,
            |mut rewriter| {
                if children_commit_ids_set.contains(rewriter.old_commit().id()) {
                    let new_parents: Vec<CommitId> = rewriter
                        .old_commit()
                        .parent_ids()
                        .iter()
                        .filter(|id| !parent_commit_ids.contains(id))
                        .chain(target_heads.iter())
                        .cloned()
                        .collect();
                    rewriter.set_new_parents(new_parents);
                }
                num_rebased += 1;
                rewriter.rebase(command.settings())?.write()?;
                Ok(())
            },
        )?;
    }

    if let Some(mut formatter) = ui.status_formatter() {
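As an aside, the `target_commits_internal_parents` map built above can be illustrated outside jj's types. This is a toy sketch with plain string IDs (the `internal_parents` helper and its data are hypothetical): parents outside the duplicated set are replaced by their nearest ancestors inside the set, and roots of the set end up with an empty parent list.

    use std::collections::HashMap;

    /// Toy version of the `target_commits_internal_parents` computation.
    /// `commits_children_first` lists each commit with its direct parents,
    /// children before parents, mirroring `connected_target_commits`.
    fn internal_parents<'a>(
        commits_children_first: &[(&'a str, Vec<&'a str>)],
        target: &[&'a str],
    ) -> HashMap<&'a str, Vec<&'a str>> {
        let mut map: HashMap<&'a str, Vec<&'a str>> = HashMap::new();
        // Walk parents first, mirroring `connected_target_commits.iter().rev()`.
        for (id, parents) in commits_children_first.iter().rev() {
            let mut new_parents = vec![];
            for p in parents {
                if target.contains(p) {
                    new_parents.push(*p);
                } else if let Some(ps) = map.get(p) {
                    new_parents.extend(ps.iter().copied());
                }
            }
            map.insert(*id, new_parents);
        }
        // Keep only commits that are actually in the target set.
        map.retain(|id, _| target.contains(id));
        map
    }

    fn main() {
        // Linear chain a <- b <- c; duplicating {a, c}: c's parent b is outside
        // the set, so c's internal parent becomes a; a is a root of the set.
        let commits = [("c", vec!["b"]), ("b", vec!["a"]), ("a", vec![])];
        let map = internal_parents(&commits, &["a", "c"]);
        assert_eq!(map["c"], vec!["a"]);
        assert!(map["a"].is_empty());
    }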
@@ -83,7 +317,35 @@ pub(crate) fn cmd_duplicate(
            tx.write_commit_summary(formatter.as_mut(), new_commit)?;
            writeln!(formatter)?;
        }
        if num_rebased > 0 {
            writeln!(
                ui.status(),
                "Rebased {num_rebased} commits onto duplicated commits"
            )?;
        }
    }
    tx.finish(ui, format!("duplicate {} commit(s)", to_duplicate.len()))?;
    Ok(())
}

/// Ensure that there is no possible cycle between the potential children and
/// parents of the duplicated commits.
fn ensure_no_commit_loop(
    repo: &ReadonlyRepo,
    children_expression: &Rc<RevsetExpression>,
    parents_expression: &Rc<RevsetExpression>,
) -> Result<(), CommandError> {
    if let Some(commit_id) = children_expression
        .dag_range_to(parents_expression)
        .evaluate_programmatic(repo)?
        .iter()
        .next()
    {
        return Err(user_error(format!(
            "Refusing to create a loop: commit {} would be both an ancestor and a descendant of \
             the duplicated commits",
            short_commit_hash(&commit_id),
        )));
    }
    Ok(())
}
@@ -703,7 +703,7 @@ See `jj restore` if you want to move entire files from one revision to another.

Create a new change with the same content as an existing one

**Usage:** `jj duplicate [OPTIONS] [REVISIONS]...`

###### **Arguments:**

@@ -714,6 +714,9 @@ Create a new change with the same content as an existing one

###### **Options:**

* `-r` — Ignored (but lets you pass `-r` for consistency with other commands)
* `-d`, `--destination <DESTINATION>` — The revision(s) to rebase onto (can be repeated to create a merge commit)
* `-A`, `--insert-after <INSERT_AFTER>` — The revision(s) to insert after (can be repeated to create a merge commit)
* `-B`, `--insert-before <INSERT_BEFORE>` — The revision(s) to insert before (can be repeated to create a merge commit)
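For illustration, the new options can be invoked along these lines (revision names here are hypothetical; `--insert-after` and `--insert-before` may be combined with each other, but not with `--destination`):

    jj duplicate xyz -d main                 # duplicate xyz on top of main
    jj duplicate xyz -A p1 -A p2             # duplicate xyz as a merge child of p1 and p2
    jj duplicate xyz --after p1 --before c1  # insert the copy between p1 and c1; c1 is rebased onto it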
@@ -250,6 +250,60 @@ fn test_duplicate_many() {
    "###);
}

#[test]
fn test_duplicate_destination() {
    let test_env = TestEnvironment::default();
    test_env.jj_cmd_ok(test_env.env_root(), &["git", "init", "repo"]);
    let repo_path = test_env.env_root().join("repo");

    create_commit(&test_env, &repo_path, "a", &[]);
    create_commit(&test_env, &repo_path, "b", &[]);
    create_commit(&test_env, &repo_path, "c", &["a", "b"]);
    // Test the setup
    insta::assert_snapshot!(get_log_output(&test_env, &repo_path), @r###"
    @ 17a00fc21654 c
    ├─╮
    │ ◉ d370aee184ba b
    ◉ │ 2443ea76b0b1 a
    ├─╯
    ◉ 000000000000
    "###);

    let (stdout, stderr) = test_env.jj_cmd_ok(&repo_path, &["duplicate", "a", "-d", "c"]);
    insta::assert_snapshot!(stdout, @"");
    insta::assert_snapshot!(stderr, @r###"
    Duplicated 2443ea76b0b1 as yostqsxw a777fdfc a
    "###);
    insta::assert_snapshot!(get_log_output(&test_env, &repo_path), @r###"
    ◉ a777fdfc29f3 a
    @ 17a00fc21654 c
    ├─╮
    │ ◉ d370aee184ba b
    ◉ │ 2443ea76b0b1 a
    ├─╯
    ◉ 000000000000
    "###);

    let (stdout, stderr) = test_env.jj_cmd_ok(&repo_path, &["undo"]);
    insta::assert_snapshot!(stdout, @"");
    insta::assert_snapshot!(stderr, @"");
    let (stdout, stderr) = test_env.jj_cmd_ok(&repo_path, &["duplicate" /* duplicates `c` */]);
    insta::assert_snapshot!(stdout, @"");
    insta::assert_snapshot!(stderr, @r###"
    Duplicated 17a00fc21654 as kmkuslsw 2426bb15 c
    "###);
    insta::assert_snapshot!(get_log_output(&test_env, &repo_path), @r###"
    ◉ 2426bb15bfd6 c
    ├─╮
    │ │ @ 17a00fc21654 c
    ╭─┬─╯
    │ ◉ d370aee184ba b
    ◉ │ 2443ea76b0b1 a
    ├─╯
    ◉ 000000000000
    "###);
}

// https://github.com/martinvonz/jj/issues/1050
#[test]
fn test_undo_after_duplicate() {