Mirror of https://github.com/martinvonz/jj.git

repo: when merging in removed head, rebase descendants (#111)

Commit 04ad9a3628, parent ec84de7779
10 changed files with 305 additions and 31 deletions
@@ -30,6 +30,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   abandoned if it's empty and has descendants, it only gets abandoned if it's
   empty and does not have descendants.

+* (#111) When undoing an earlier operation, any new commits on top of commits
+  from the undone operation will be rebased away. For example, let's say you
+  rebase commit A so it becomes a new commit A', and then you create commit B
+  on top of A'. If you now undo the rebase operation, commit B will be rebased
+  to be on top of A instead. The same logic is used if the repo was modified
+  by concurrent operations (so if one operation added B on top of A, and one
+  operation rebased A as A', then B would be automatically rebased on top of
+  A'). See #111 for more examples.
+
 ## [0.3.3] - 2022-03-16

 No changes, only trying to get the automated build to work.
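The concurrent-operation case in that entry is what the library changes below implement. The following is a rough, hypothetical sketch of the scenario, written only against helpers that appear elsewhere in this diff (`testutils::init_repo`, `create_random_commit`, `CommitBuilder::for_rewrite_from`, `rebase_descendants`, `reload_at_head`); the function name is made up, and the new `test_merge_views_child_on_rewritten` test later in this commit covers the same case more thoroughly.

```rust
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::testutils;

#[test]
fn concurrent_rewrite_is_resolved_on_reload() {
    let settings = testutils::user_settings();
    let test_repo = testutils::init_repo(&settings, false);

    // Commit A exists in the common base operation.
    let mut tx = test_repo.repo.start_transaction("test");
    let commit_a =
        testutils::create_random_commit(&settings, &test_repo.repo).write_to_repo(tx.mut_repo());
    let repo = tx.commit();

    // Operation 1: add commit B on top of A.
    let mut tx1 = repo.start_transaction("add B");
    let _commit_b = testutils::create_random_commit(&settings, &repo)
        .set_parents(vec![commit_a.id().clone()])
        .write_to_repo(tx1.mut_repo());
    tx1.commit();

    // Operation 2 (concurrent with operation 1): rewrite A as A'.
    let mut tx2 = repo.start_transaction("rewrite A");
    let _commit_a2 = CommitBuilder::for_rewrite_from(&settings, repo.store(), &commit_a)
        .write_to_repo(tx2.mut_repo());
    tx2.mut_repo().rebase_descendants(&settings);
    tx2.commit();

    // Reloading merges the two operation heads; with this commit, B should be
    // rebased onto A' automatically, leaving a single head in the merged view.
    let merged = repo.reload_at_head(&settings);
    assert_eq!(merged.view().heads().len(), 1);
}
```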
@@ -20,9 +20,10 @@ use std::ops::Deref;
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};

+use itertools::Itertools;
 use thiserror::Error;

-use crate::backend::{BackendError, CommitId};
+use crate::backend::{BackendError, ChangeId, CommitId};
 use crate::commit::Commit;
 use crate::commit_builder::CommitBuilder;
 use crate::dag_walk::{closest_common_node, topo_order_reverse};

@@ -194,7 +195,7 @@ impl ReadonlyRepo {
     pub fn load_at_head(user_settings: &UserSettings, repo_path: PathBuf) -> Arc<ReadonlyRepo> {
         RepoLoader::init(user_settings, repo_path)
             .load_at_head()
-            .resolve()
+            .resolve(user_settings)
     }

     pub fn loader(&self) -> RepoLoader {

@@ -278,8 +279,8 @@ impl ReadonlyRepo {
         Transaction::new(mut_repo, description)
     }

-    pub fn reload_at_head(&self) -> Arc<ReadonlyRepo> {
-        self.loader().load_at_head().resolve()
+    pub fn reload_at_head(&self, user_settings: &UserSettings) -> Arc<ReadonlyRepo> {
+        self.loader().load_at_head().resolve(user_settings)
     }

     pub fn reload_at(&self, operation: &Operation) -> Arc<ReadonlyRepo> {

@@ -293,10 +294,10 @@ pub enum RepoAtHead {
 }

 impl RepoAtHead {
-    pub fn resolve(self) -> Arc<ReadonlyRepo> {
+    pub fn resolve(self, user_settings: &UserSettings) -> Arc<ReadonlyRepo> {
         match self {
             RepoAtHead::Single(repo) => repo,
-            RepoAtHead::Unresolved(unresolved) => unresolved.resolve(),
+            RepoAtHead::Unresolved(unresolved) => unresolved.resolve(user_settings),
         }
     }
 }

@@ -308,10 +309,10 @@ pub struct UnresolvedHeadRepo {
 }

 impl UnresolvedHeadRepo {
-    pub fn resolve(self) -> Arc<ReadonlyRepo> {
+    pub fn resolve(self, user_settings: &UserSettings) -> Arc<ReadonlyRepo> {
         let merged_repo = self
             .repo_loader
-            .merge_op_heads(self.op_heads)
+            .merge_op_heads(user_settings, self.op_heads)
             .leave_unpublished();
         self.locked_op_heads.finish(merged_repo.operation());
         merged_repo

@@ -383,7 +384,11 @@ impl RepoLoader {
         }
     }

-    fn merge_op_heads(&self, mut op_heads: Vec<Operation>) -> UnpublishedOperation {
+    fn merge_op_heads(
+        &self,
+        user_settings: &UserSettings,
+        mut op_heads: Vec<Operation>,
+    ) -> UnpublishedOperation {
         op_heads.sort_by_key(|op| op.store_operation().metadata.end_time.timestamp.clone());
         let base_repo = self.load_at(&op_heads[0]);
         let mut tx = base_repo.start_transaction("resolve concurrent operations");

@@ -400,6 +405,7 @@ impl RepoLoader {
             let base_repo = self.load_at(&ancestor_op);
             let other_repo = self.load_at(other_op_head);
             merged_repo.merge(&base_repo, &other_repo);
+            merged_repo.rebase_descendants(user_settings);
         }
         let op_parent_ids = op_heads.iter().map(|op| op.id().clone()).collect();
         tx.set_parents(op_parent_ids);

@@ -810,15 +816,16 @@ impl MutableRepo {
             self.view_mut().add_public_head(added_head);
         }

-        for removed_head in base.heads().difference(other.heads()) {
-            self.view_mut().remove_head(removed_head);
-        }
+        let base_heads = base.heads().iter().cloned().collect_vec();
+        let own_heads = self.view().heads().iter().cloned().collect_vec();
+        let other_heads = other.heads().iter().cloned().collect_vec();
+        self.record_rewrites(&base_heads, &own_heads);
+        self.record_rewrites(&base_heads, &other_heads);
+        // No need to remove heads removed by `other` because we already marked them
+        // abandoned or rewritten.
         for added_head in other.heads().difference(base.heads()) {
             self.view_mut().add_head(added_head);
         }
         // TODO: Should it be considered a conflict if a commit-head is removed on one
         // side while a child or successor is created on another side? Maybe a
         // warning?

         let mut maybe_changed_ref_names = HashSet::new();

@@ -877,6 +884,49 @@ impl MutableRepo {
         }
     }

+    /// Finds and records commits that were rewritten or abandoned between
+    /// `old_heads` and `new_heads`.
+    fn record_rewrites(&mut self, old_heads: &[CommitId], new_heads: &[CommitId]) {
+        let mut removed_changes: HashMap<ChangeId, Vec<CommitId>> = HashMap::new();
+        for removed in self.index.walk_revs(old_heads, new_heads) {
+            removed_changes
+                .entry(removed.change_id())
+                .or_default()
+                .push(removed.commit_id());
+        }
+        if removed_changes.is_empty() {
+            return;
+        }
+
+        let mut rewritten_changes = HashSet::new();
+        let mut rewritten_commits: HashMap<CommitId, Vec<CommitId>> = HashMap::new();
+        for added in self.index.walk_revs(new_heads, old_heads) {
+            let change_id = added.change_id();
+            if let Some(old_commits) = removed_changes.get(&change_id) {
+                for old_commit in old_commits {
+                    rewritten_commits
+                        .entry(old_commit.clone())
+                        .or_default()
+                        .push(added.commit_id());
+                }
+            }
+            rewritten_changes.insert(change_id);
+        }
+        for (old_commit, new_commits) in rewritten_commits {
+            for new_commit in new_commits {
+                self.record_rewritten_commit(old_commit.clone(), new_commit);
+            }
+        }
+
+        for (change_id, removed_commit_ids) in &removed_changes {
+            if !rewritten_changes.contains(change_id) {
+                for removed_commit_id in removed_commit_ids {
+                    self.record_abandoned_commit(removed_commit_id.clone());
+                }
+            }
+        }
+    }
+
     pub fn merge_single_ref(
         &mut self,
         ref_name: &RefName,
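For orientation, the following is a minimal, self-contained sketch (not code from this commit) of the classification that the new `record_rewrites` method performs. It replaces the index walks and the `CommitId`/`ChangeId` types with plain strings: commits reachable only from the old heads are grouped by change id; if the same change id also appears among commits reachable only from the new heads, the old commit is treated as rewritten to the new one(s), otherwise it is treated as abandoned.

```rust
use std::collections::{HashMap, HashSet};

/// Toy version of the `record_rewrites` classification, with plain strings
/// standing in for commit ids and change ids.
fn classify<'a>(
    removed: &[(&'a str, &'a str)], // (commit id, change id) reachable only from old heads
    added: &[(&'a str, &'a str)],   // (commit id, change id) reachable only from new heads
) -> (Vec<(&'a str, &'a str)>, Vec<&'a str>) {
    // Group the removed commits by change id.
    let mut removed_changes: HashMap<&str, Vec<&str>> = HashMap::new();
    for &(commit, change) in removed {
        removed_changes.entry(change).or_default().push(commit);
    }

    // A removed commit whose change id reappears among the added commits was
    // rewritten; record the (old commit, new commit) pairs.
    let mut rewritten = vec![];
    let mut rewritten_changes = HashSet::new();
    for &(new_commit, change) in added {
        if let Some(old_commits) = removed_changes.get(change) {
            for &old_commit in old_commits {
                rewritten.push((old_commit, new_commit));
            }
        }
        rewritten_changes.insert(change);
    }

    // Removed commits whose change id never reappears were abandoned.
    let mut abandoned = vec![];
    for (change, old_commits) in &removed_changes {
        if !rewritten_changes.contains(change) {
            abandoned.extend(old_commits.iter().copied());
        }
    }
    (rewritten, abandoned)
}

fn main() {
    // A was rewritten as A2 (same change id); B disappeared with no replacement.
    let removed = [("A", "change-a"), ("B", "change-b")];
    let added = [("A2", "change-a")];
    let (rewritten, abandoned) = classify(&removed, &added);
    assert_eq!(rewritten, vec![("A", "A2")]);
    assert_eq!(abandoned, vec!["B"]);
}
```

In the real method, the rewritten pairs feed `record_rewritten_commit` and the abandoned ids feed `record_abandoned_commit`, which is what later lets `rebase_descendants` move children onto the successors.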
@@ -109,7 +109,10 @@ fn test_bad_locking_children(use_git: bool) {
     let machine1_root = TempDir::new().unwrap().into_path();
     copy_directory(workspace_root, &machine1_root);
     let machine1_workspace = Workspace::load(&settings, machine1_root.clone()).unwrap();
-    let machine1_repo = machine1_workspace.repo_loader().load_at_head().resolve();
+    let machine1_repo = machine1_workspace
+        .repo_loader()
+        .load_at_head()
+        .resolve(&settings);
     let mut machine1_tx = machine1_repo.start_transaction("test");
     let child1 = testutils::create_random_commit(&settings, &machine1_repo)
         .set_parents(vec![initial.id().clone()])

@@ -120,7 +123,10 @@ fn test_bad_locking_children(use_git: bool) {
     let machine2_root = TempDir::new().unwrap().into_path();
     copy_directory(workspace_root, &machine2_root);
     let machine2_workspace = Workspace::load(&settings, machine2_root.clone()).unwrap();
-    let machine2_repo = machine2_workspace.repo_loader().load_at_head().resolve();
+    let machine2_repo = machine2_workspace
+        .repo_loader()
+        .load_at_head()
+        .resolve(&settings);
     let mut machine2_tx = machine2_repo.start_transaction("test");
     let child2 = testutils::create_random_commit(&settings, &machine2_repo)
         .set_parents(vec![initial.id().clone()])

@@ -132,7 +138,10 @@ fn test_bad_locking_children(use_git: bool) {
     let merged_path = TempDir::new().unwrap().into_path();
     merge_directories(&machine1_root, workspace_root, &machine2_root, &merged_path);
     let merged_workspace = Workspace::load(&settings, merged_path).unwrap();
-    let merged_repo = merged_workspace.repo_loader().load_at_head().resolve();
+    let merged_repo = merged_workspace
+        .repo_loader()
+        .load_at_head()
+        .resolve(&settings);
     assert!(merged_repo.view().heads().contains(child1.id()));
     assert!(merged_repo.view().heads().contains(child2.id()));
     let op_id = merged_repo.op_id().clone();
@@ -61,7 +61,7 @@ fn test_commit_parallel(use_git: bool) {
     for thread in threads {
         thread.join().ok().unwrap();
     }
-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     // One commit per thread plus the commit from the initial checkout on top of the
     // root commit
     assert_eq!(repo.view().heads().len(), num_threads + 1);
@@ -34,7 +34,7 @@ fn test_load_at_operation(use_git: bool) {
     // If we load the repo at head, we should not see the commit since it was
     // removed
     let loader = RepoLoader::init(&settings, repo.repo_path().clone());
-    let head_repo = loader.load_at_head().resolve();
+    let head_repo = loader.load_at_head().resolve(&settings);
     assert!(!head_repo.view().heads().contains(commit.id()));

     // If we load the repo at the previous operation, we should see the commit since
@@ -68,7 +68,7 @@ fn test_consecutive_operations(use_git: bool) {
     assert_ne!(op_id1, op_id0);
     assert_eq!(list_dir(&op_heads_dir), vec![op_id1.hex()]);

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     let mut tx2 = repo.start_transaction("transaction 2");
     testutils::create_random_commit(&settings, &repo).write_to_repo(tx2.mut_repo());
     let op_id2 = tx2.commit().operation().id().clone();

@@ -78,7 +78,7 @@ fn test_consecutive_operations(use_git: bool) {

     // Reloading the repo makes no difference (there are no conflicting operations
     // to resolve).
-    let _repo = repo.reload_at_head();
+    let _repo = repo.reload_at_head(&settings);
     assert_eq!(list_dir(&op_heads_dir), vec![op_id2.hex()]);
 }

@@ -115,7 +115,7 @@ fn test_concurrent_operations(use_git: bool) {
     assert_eq!(actual_heads_on_disk, expected_heads_on_disk);

     // Reloading the repo causes the operations to be merged
-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     let merged_op_id = repo.op_id().clone();
     assert_ne!(merged_op_id, op_id0);
     assert_ne!(merged_op_id, op_id1);

@@ -175,6 +175,6 @@ fn test_isolation(use_git: bool) {
     tx2.commit();
     assert_heads(repo.as_repo_ref(), vec![initial.id()]);
     // After reload, the base repo sees both rewrites.
-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     assert_heads(repo.as_repo_ref(), vec![rewrite1.id(), rewrite2.id()]);
 }
@@ -706,7 +706,7 @@ fn test_rebase_descendants_divergent_rewrite(use_git: bool) {

     // Commit B was replaced by commit B2. Commit D was replaced by commits D2 and
     // D3. Commit F was replaced by commit F2. Commit C should be rebased onto
-    // B2. Commit E should not be rebased. Commit F should be rebased onto
+    // B2. Commit E should not be rebased. Commit G should be rebased onto
     // commit F2.
     //
     // G
@@ -12,9 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+use std::sync::Arc;
+
+use jujutsu_lib::commit_builder::CommitBuilder;
 use jujutsu_lib::op_store::{BranchTarget, RefTarget, WorkspaceId};
+use jujutsu_lib::repo::ReadonlyRepo;
+use jujutsu_lib::settings::UserSettings;
 use jujutsu_lib::testutils;
 use jujutsu_lib::testutils::CommitGraphBuilder;
+use jujutsu_lib::transaction::Transaction;
 use maplit::{btreemap, hashset};
 use test_case::test_case;

@@ -121,7 +127,7 @@ fn test_merge_views_heads() {
     tx2.mut_repo().add_public_head(&public_head_add_tx2);
     tx2.commit();

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);

     let expected_heads = hashset! {
         head_unchanged.id().clone(),

@@ -215,7 +221,7 @@ fn test_merge_views_checkout() {
     std::thread::sleep(std::time::Duration::from_millis(1));
     tx2.commit();

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);

     // We currently arbitrarily pick the first transaction's checkout (first by
     // transaction end time).

@@ -302,7 +308,7 @@ fn test_merge_views_branches() {
     );
     tx2.commit();

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     let expected_main_branch = BranchTarget {
         local_target: Some(RefTarget::Conflict {
             removes: vec![main_branch_local_tx0.id().clone()],

@@ -360,7 +366,7 @@ fn test_merge_views_tags() {
         .set_tag("v1.0".to_string(), RefTarget::Normal(v1_tx2.id().clone()));
     tx2.commit();

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     let expected_v1 = RefTarget::Conflict {
         removes: vec![v1_tx0.id().clone()],
         adds: vec![v1_tx1.id().clone(), v1_tx2.id().clone()],

@@ -422,7 +428,7 @@ fn test_merge_views_git_refs() {
     );
     tx2.commit();

-    let repo = repo.reload_at_head();
+    let repo = repo.reload_at_head(&settings);
     let expected_main_branch = RefTarget::Conflict {
         removes: vec![main_branch_tx0.id().clone()],
         adds: vec![main_branch_tx1.id().clone(), main_branch_tx2.id().clone()],

@@ -436,3 +442,156 @@ fn test_merge_views_git_refs() {
         }
     );
 }
+
+fn commit_transactions(settings: &UserSettings, txs: Vec<Transaction>) -> Arc<ReadonlyRepo> {
+    let repo_loader = txs[0].base_repo().loader();
+    let mut op_ids = vec![];
+    for tx in txs {
+        op_ids.push(tx.commit().op_id().clone());
+        std::thread::sleep(std::time::Duration::from_millis(1));
+    }
+    let repo = repo_loader.load_at_head().resolve(settings);
+    // Test the setup. The assumption here is that the parent order matches the
+    // order in which they were merged (which currently matches the transaction
+    // commit order), so we want to make sure they appear in a certain order,
+    // so the caller can decide the order by passing them to this function in a
+    // certain order.
+    assert_eq!(*repo.operation().parent_ids(), op_ids);
+    repo
+}
+
+#[test_case(false ; "rewrite first")]
+#[test_case(true ; "add child first")]
+fn test_merge_views_child_on_rewritten(child_first: bool) {
+    // We start with just commit A. Operation 1 adds commit B on top. Operation 2
+    // rewrites A as A2.
+    let settings = testutils::user_settings();
+    let test_repo = testutils::init_repo(&settings, false);
+
+    let mut tx = test_repo.repo.start_transaction("test");
+    let commit_a =
+        testutils::create_random_commit(&settings, &test_repo.repo).write_to_repo(tx.mut_repo());
+    let repo = tx.commit();
+
+    let mut tx1 = repo.start_transaction("test");
+    let commit_b = testutils::create_random_commit(&settings, &repo)
+        .set_parents(vec![commit_a.id().clone()])
+        .write_to_repo(tx1.mut_repo());
+
+    let mut tx2 = repo.start_transaction("test");
+    let commit_a2 = CommitBuilder::for_rewrite_from(&settings, repo.store(), &commit_a)
+        .set_description("A2".to_string())
+        .write_to_repo(tx2.mut_repo());
+    tx2.mut_repo().rebase_descendants(&settings);
+
+    let repo = if child_first {
+        commit_transactions(&settings, vec![tx1, tx2])
+    } else {
+        commit_transactions(&settings, vec![tx2, tx1])
+    };
+
+    // A new B2 commit (B rebased onto A2) should be the only head.
+    let heads = repo.view().heads();
+    assert_eq!(heads.len(), 1);
+    let b2_id = heads.iter().next().unwrap();
+    let commit_b2 = repo.store().get_commit(b2_id).unwrap();
+    assert_eq!(commit_b2.change_id(), commit_b.change_id());
+    assert_eq!(commit_b2.parent_ids(), vec![commit_a2.id().clone()]);
+}
+
+#[test_case(false, false ; "add child on unchanged, rewrite first")]
+#[test_case(false, true ; "add child on unchanged, add child first")]
+#[test_case(true, false ; "add child on rewritten, rewrite first")]
+#[test_case(true, true ; "add child on rewritten, add child first")]
+fn test_merge_views_child_on_rewritten_divergent(on_rewritten: bool, child_first: bool) {
+    // We start with divergent commits A2 and A3. Operation 1 adds commit B on top
+    // of A2 or A3. Operation 2 rewrites A2 as A4. The result should be that B
+    // gets rebased onto A4 if it was based on A2 before, but if it was based on
+    // A3, it should remain there.
+    let settings = testutils::user_settings();
+    let test_repo = testutils::init_repo(&settings, false);
+
+    let mut tx = test_repo.repo.start_transaction("test");
+    let commit_a2 =
+        testutils::create_random_commit(&settings, &test_repo.repo).write_to_repo(tx.mut_repo());
+    let commit_a3 = testutils::create_random_commit(&settings, &test_repo.repo)
+        .set_change_id(commit_a2.change_id().clone())
+        .write_to_repo(tx.mut_repo());
+    let repo = tx.commit();
+
+    let mut tx1 = repo.start_transaction("test");
+    let parent = if on_rewritten { &commit_a2 } else { &commit_a3 };
+    let commit_b = testutils::create_random_commit(&settings, &repo)
+        .set_parents(vec![parent.id().clone()])
+        .write_to_repo(tx1.mut_repo());
+
+    let mut tx2 = repo.start_transaction("test");
+    let commit_a4 = CommitBuilder::for_rewrite_from(&settings, repo.store(), &commit_a2)
+        .set_description("A4".to_string())
+        .write_to_repo(tx2.mut_repo());
+    tx2.mut_repo().rebase_descendants(&settings);
+
+    let repo = if child_first {
+        commit_transactions(&settings, vec![tx1, tx2])
+    } else {
+        commit_transactions(&settings, vec![tx2, tx1])
+    };
+
+    if on_rewritten {
+        // A3 should remain as a head. The other head should be B2 (B rebased onto A4).
+        let mut heads = repo.view().heads().clone();
+        assert_eq!(heads.len(), 2);
+        assert!(heads.remove(commit_a3.id()));
+        let b2_id = heads.iter().next().unwrap();
+        let commit_b2 = repo.store().get_commit(b2_id).unwrap();
+        assert_eq!(commit_b2.change_id(), commit_b.change_id());
+        assert_eq!(commit_b2.parent_ids(), vec![commit_a4.id().clone()]);
+    } else {
+        // No rebases should happen, so B and A4 should be the heads.
+        let mut heads = repo.view().heads().clone();
+        assert_eq!(heads.len(), 2);
+        assert!(heads.remove(commit_b.id()));
+        assert!(heads.remove(commit_a4.id()));
+    }
+}
+
+#[test_case(false ; "abandon first")]
+#[test_case(true ; "add child first")]
+fn test_merge_views_child_on_abandoned(child_first: bool) {
+    // We start with commit B on top of commit A. Operation 1 adds commit C on top.
+    // Operation 2 abandons B.
+    let settings = testutils::user_settings();
+    let test_repo = testutils::init_repo(&settings, false);
+
+    let mut tx = test_repo.repo.start_transaction("test");
+    let commit_a =
+        testutils::create_random_commit(&settings, &test_repo.repo).write_to_repo(tx.mut_repo());
+    let commit_b = testutils::create_random_commit(&settings, &test_repo.repo)
+        .set_parents(vec![commit_a.id().clone()])
+        .write_to_repo(tx.mut_repo());
+    let repo = tx.commit();
+
+    let mut tx1 = repo.start_transaction("test");
+    let commit_c = testutils::create_random_commit(&settings, &repo)
+        .set_parents(vec![commit_b.id().clone()])
+        .write_to_repo(tx1.mut_repo());
+
+    let mut tx2 = repo.start_transaction("test");
+    tx2.mut_repo()
+        .record_abandoned_commit(commit_b.id().clone());
+    tx2.mut_repo().rebase_descendants(&settings);
+
+    let repo = if child_first {
+        commit_transactions(&settings, vec![tx1, tx2])
+    } else {
+        commit_transactions(&settings, vec![tx2, tx1])
+    };
+
+    // A new C2 commit (C rebased onto A) should be the only head.
+    let heads = repo.view().heads();
+    assert_eq!(heads.len(), 1);
+    let id_c2 = heads.iter().next().unwrap();
+    let commit_c2 = repo.store().get_commit(id_c2).unwrap();
+    assert_eq!(commit_c2.change_id(), commit_c.change_id());
+    assert_eq!(commit_c2.parent_ids(), vec![commit_a.id().clone()]);
+}
@ -218,7 +218,8 @@ jj init --git-repo=.";
|
|||
ui,
|
||||
"Concurrent modification detected, resolving automatically.",
|
||||
)?;
|
||||
unresolved.resolve()
|
||||
// TODO: Tell the user how many commits were rebased.
|
||||
unresolved.resolve(ui.settings())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
tests/test_undo.rs (new file, 46 lines)

@@ -0,0 +1,46 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use jujutsu::testutils::TestEnvironment;
+
+#[test]
+fn test_undo_rewrite_with_child() {
+    // Test that if we undo an operation that rewrote some commit, any descendants
+    // after that will be rebased on top of the un-rewritten commit.
+    let test_env = TestEnvironment::default();
+    test_env.jj_cmd_success(test_env.env_root(), &["init", "repo", "--git"]);
+    let repo_path = test_env.env_root().join("repo");
+
+    test_env.jj_cmd_success(&repo_path, &["describe", "-m", "initial"]);
+    test_env.jj_cmd_success(&repo_path, &["describe", "-m", "modified"]);
+    let stdout = test_env.jj_cmd_success(&repo_path, &["op", "log"]);
+    let op_id_hex = stdout[2..14].to_string();
+    test_env.jj_cmd_success(&repo_path, &["new", "-m", "child"]);
+    let stdout = test_env.jj_cmd_success(&repo_path, &["log", "-T", "description"]);
+    insta::assert_snapshot!(stdout, @r###"
+    @ child
+    o modified
+    o
+    "###);
+    test_env.jj_cmd_success(&repo_path, &["undo", "-o", &op_id_hex]);
+
+    // Since we undid the description-change, the child commit should now be on top
+    // of the initial commit
+    let stdout = test_env.jj_cmd_success(&repo_path, &["log", "-T", "description"]);
+    insta::assert_snapshot!(stdout, @r###"
+    @ child
+    o initial
+    o
+    "###);
+}