From 7f6cf1e6d954abe9a457680cdc19c73752aed749 Mon Sep 17 00:00:00 2001
From: Martin von Zweigbergk <martinvonz@google.com>
Date: Wed, 21 Sep 2022 21:44:46 -0700
Subject: [PATCH] cli: extract new `cli_util` module

This commit moves much of the `commands` module that isn't specific to
a particular command into a new `cli_util`. Much of this is actually
not even CLI-specific, so we should move that further down into the
library, but that can come later. That includes the code in
`WorkspaceCommandHelper` for snapshotting the working copy and
automatically importing/exporting to a colocated Git working copy.
---
 src/cli_util.rs | 1217 +++++++++++++++++++++++++++++++++++++++++++++++
 src/commands.rs | 1202 +----------------------------------------------
 src/lib.rs      |    1 +
 src/main.rs     |    3 +-
 4 files changed, 1242 insertions(+), 1181 deletions(-)
 create mode 100644 src/cli_util.rs

diff --git a/src/cli_util.rs b/src/cli_util.rs
new file mode 100644
index 000000000..6ea320422
--- /dev/null
+++ b/src/cli_util.rs
@@ -0,0 +1,1217 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::collections::{HashSet, VecDeque};
+use std::fmt::Debug;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use git2::{Oid, Repository};
+use itertools::Itertools;
+use jujutsu_lib::backend::{BackendError, CommitId, TreeId};
+use jujutsu_lib::commit::Commit;
+use jujutsu_lib::commit_builder::CommitBuilder;
+use jujutsu_lib::git::{GitExportError, GitImportError};
+use jujutsu_lib::gitignore::GitIgnoreFile;
+use jujutsu_lib::matchers::{EverythingMatcher, Matcher, PrefixMatcher, Visit};
+use jujutsu_lib::op_heads_store::{OpHeadResolutionError, OpHeads, OpHeadsStore};
+use jujutsu_lib::op_store::{OpStore, OpStoreError, OperationId, WorkspaceId};
+use jujutsu_lib::operation::Operation;
+use jujutsu_lib::repo::{MutableRepo, ReadonlyRepo};
+use jujutsu_lib::repo_path::RepoPath;
+use jujutsu_lib::revset::{RevsetError, RevsetParseError};
+use jujutsu_lib::settings::UserSettings;
+use jujutsu_lib::transaction::Transaction;
+use jujutsu_lib::tree::{Tree, TreeMergeError};
+use jujutsu_lib::working_copy::{
+    CheckoutStats, LockedWorkingCopy, ResetError, SnapshotError, WorkingCopy,
+};
+use jujutsu_lib::workspace::{Workspace, WorkspaceInitError, WorkspaceLoadError};
+use jujutsu_lib::{dag_walk, git, revset};
+
+use crate::diff_edit::DiffEditError;
+use crate::ui;
+use crate::ui::{ColorChoice, FilePathParseError, Ui};
+
+pub enum CommandError {
+    UserError(String),
+    /// Invalid command line
+    CliError(String),
+    BrokenPipe,
+    InternalError(String),
+}
+
+impl From<std::io::Error> for CommandError {
+    fn from(err: std::io::Error) -> Self {
+        if err.kind() == std::io::ErrorKind::BrokenPipe {
+            CommandError::BrokenPipe
+        } else {
+            // TODO: Record the error as a chained cause
+            CommandError::InternalError(format!("I/O error: {err}"))
+        }
+    }
+}
+
+impl From<config::ConfigError> for CommandError {
+    fn from(err: config::ConfigError) -> Self {
+        CommandError::UserError(format!("Config error: {err}"))
+    }
+}
+
+impl From<BackendError> for CommandError {
+    fn from(err: BackendError) -> Self {
+        CommandError::UserError(format!("Unexpected error from store: {err}"))
+    }
+}
+
+impl From<WorkspaceInitError> for CommandError {
+    fn from(_: WorkspaceInitError) -> Self {
+        CommandError::UserError("The target repo already exists".to_string())
+    }
+}
+
+impl From<OpHeadResolutionError> for CommandError {
+    fn from(err: OpHeadResolutionError) -> Self {
+        match err {
+            OpHeadResolutionError::NoHeads => {
+                CommandError::InternalError("Corrupt repository: the are no operations".to_string())
+            }
+        }
+    }
+}
+
+impl From<SnapshotError> for CommandError {
+    fn from(err: SnapshotError) -> Self {
+        CommandError::InternalError(format!("Failed to snapshot the working copy: {err}"))
+    }
+}
+
+impl From<TreeMergeError> for CommandError {
+    fn from(err: TreeMergeError) -> Self {
+        CommandError::InternalError(format!("Merge failed: {err}"))
+    }
+}
+
+impl From<ResetError> for CommandError {
+    fn from(_: ResetError) -> Self {
+        CommandError::InternalError("Failed to reset the working copy".to_string())
+    }
+}
+
+impl From<DiffEditError> for CommandError {
+    fn from(err: DiffEditError) -> Self {
+        CommandError::UserError(format!("Failed to edit diff: {err}"))
+    }
+}
+
+impl From<git2::Error> for CommandError {
+    fn from(err: git2::Error) -> Self {
+        CommandError::UserError(format!("Git operation failed: {err}"))
+    }
+}
+
+impl From<GitImportError> for CommandError {
+    fn from(err: GitImportError) -> Self {
+        CommandError::InternalError(format!(
+            "Failed to import refs from underlying Git repo: {err}"
+        ))
+    }
+}
+
+impl From<GitExportError> for CommandError {
+    fn from(err: GitExportError) -> Self {
+        match err {
+            GitExportError::ConflictedBranch(branch_name) => {
+                CommandError::UserError(format!("Cannot export conflicted branch '{branch_name}'"))
+            }
+            GitExportError::InternalGitError(err) => CommandError::InternalError(format!(
+                "Failed to export refs to underlying Git repo: {err}"
+            )),
+        }
+    }
+}
+
+impl From<RevsetParseError> for CommandError {
+    fn from(err: RevsetParseError) -> Self {
+        CommandError::UserError(format!("Failed to parse revset: {err}"))
+    }
+}
+
+impl From<RevsetError> for CommandError {
+    fn from(err: RevsetError) -> Self {
+        CommandError::UserError(format!("{err}"))
+    }
+}
+
+impl From<FilePathParseError> for CommandError {
+    fn from(err: FilePathParseError) -> Self {
+        match err {
+            FilePathParseError::InputNotInRepo(input) => {
+                CommandError::UserError(format!("Path \"{input}\" is not in the repo"))
+            }
+        }
+    }
+}
+
+pub struct CommandHelper<'help> {
+    app: clap::Command<'help>,
+    string_args: Vec<String>,
+    global_args: GlobalArgs,
+}
+
+impl<'help> CommandHelper<'help> {
+    pub fn new(
+        app: clap::Command<'help>,
+        string_args: Vec<String>,
+        global_args: GlobalArgs,
+    ) -> Self {
+        Self {
+            app,
+            string_args,
+            global_args,
+        }
+    }
+
+    pub fn app(&self) -> &clap::Command<'help> {
+        &self.app
+    }
+
+    pub fn string_args(&self) -> &Vec<String> {
+        &self.string_args
+    }
+
+    pub fn global_args(&self) -> &GlobalArgs {
+        &self.global_args
+    }
+
+    pub fn workspace_helper(&self, ui: &mut Ui) -> Result<WorkspaceCommandHelper, CommandError> {
+        let wc_path_str = self.global_args.repository.as_deref().unwrap_or(".");
+        let wc_path = ui.cwd().join(wc_path_str);
+        let workspace = match Workspace::load(ui.settings(), wc_path) {
+            Ok(workspace) => workspace,
+            Err(WorkspaceLoadError::NoWorkspaceHere(wc_path)) => {
+                let mut message = format!("There is no jj repo in \"{}\"", wc_path_str);
+                let git_dir = wc_path.join(".git");
+                if git_dir.is_dir() {
+                    // TODO: Make this hint separate from the error, so the caller can format
+                    // it differently.
+                    message += "
+It looks like this is a git repo.
You can create a jj repo backed by it by running this: +jj init --git-repo=."; + } + return Err(CommandError::UserError(message)); + } + Err(WorkspaceLoadError::RepoDoesNotExist(repo_dir)) => { + return Err(CommandError::UserError(format!( + "The repository directory at {} is missing. Was it moved?", + repo_dir.to_str().unwrap() + ))); + } + }; + let repo_loader = workspace.repo_loader(); + let op_heads = resolve_op_for_load( + repo_loader.op_store(), + repo_loader.op_heads_store(), + &self.global_args.at_operation, + )?; + let repo = match op_heads { + OpHeads::Single(op) => repo_loader.load_at(&op), + OpHeads::Unresolved { + locked_op_heads, + op_heads, + } => { + writeln!( + ui, + "Concurrent modification detected, resolving automatically.", + )?; + let base_repo = repo_loader.load_at(&op_heads[0]); + // TODO: It may be helpful to print each operation we're merging here + let mut workspace_command = self.for_loaded_repo(ui, workspace, base_repo)?; + let mut tx = workspace_command.start_transaction("resolve concurrent operations"); + for other_op_head in op_heads.into_iter().skip(1) { + tx.merge_operation(other_op_head); + let num_rebased = tx.mut_repo().rebase_descendants(ui.settings())?; + if num_rebased > 0 { + writeln!( + ui, + "Rebased {} descendant commits onto commits rewritten by other \ + operation", + num_rebased + )?; + } + } + let merged_repo = tx.write().leave_unpublished(); + locked_op_heads.finish(merged_repo.operation()); + workspace_command.repo = merged_repo; + return Ok(workspace_command); + } + }; + self.for_loaded_repo(ui, workspace, repo) + } + + pub fn for_loaded_repo( + &self, + ui: &mut Ui, + workspace: Workspace, + repo: Arc, + ) -> Result { + WorkspaceCommandHelper::for_loaded_repo( + ui, + workspace, + self.string_args.clone(), + &self.global_args, + repo, + ) + } +} + +// Provides utilities for writing a command that works on a workspace (like most +// commands do). +pub struct WorkspaceCommandHelper { + cwd: PathBuf, + string_args: Vec, + global_args: GlobalArgs, + settings: UserSettings, + workspace: Workspace, + repo: Arc, + may_update_working_copy: bool, + working_copy_shared_with_git: bool, +} + +impl WorkspaceCommandHelper { + pub fn for_loaded_repo( + ui: &mut Ui, + workspace: Workspace, + string_args: Vec, + global_args: &GlobalArgs, + repo: Arc, + ) -> Result { + let loaded_at_head = &global_args.at_operation == "@"; + let may_update_working_copy = loaded_at_head && !global_args.no_commit_working_copy; + let mut working_copy_shared_with_git = false; + let maybe_git_repo = repo.store().git_repo(); + if let Some(git_workdir) = maybe_git_repo + .as_ref() + .and_then(|git_repo| git_repo.workdir()) + .and_then(|workdir| workdir.canonicalize().ok()) + { + working_copy_shared_with_git = git_workdir == workspace.workspace_root().as_path(); + } + let mut helper = Self { + cwd: ui.cwd().to_owned(), + string_args, + global_args: global_args.clone(), + settings: ui.settings().clone(), + workspace, + repo, + may_update_working_copy, + working_copy_shared_with_git, + }; + if may_update_working_copy { + if working_copy_shared_with_git { + helper.import_git_refs_and_head(ui, maybe_git_repo.as_ref().unwrap())?; + } + helper.commit_working_copy(ui)?; + } + Ok(helper) + } + + fn check_working_copy_writable(&self) -> Result<(), CommandError> { + if self.may_update_working_copy { + Ok(()) + } else if self.global_args.no_commit_working_copy { + Err(CommandError::UserError( + "This command must be able to update the working copy (don't use \ + --no-commit-working-copy)." 
+ .to_string(), + )) + } else { + Err(CommandError::UserError( + "This command must be able to update the working copy (don't use --at-op)." + .to_string(), + )) + } + } + + fn import_git_refs_and_head( + &mut self, + ui: &mut Ui, + git_repo: &Repository, + ) -> Result<(), CommandError> { + let mut tx = self.start_transaction("import git refs"); + git::import_refs(tx.mut_repo(), git_repo)?; + if tx.mut_repo().has_changes() { + let old_git_head = self.repo.view().git_head(); + let new_git_head = tx.mut_repo().view().git_head(); + // If the Git HEAD has changed, abandon our old checkout and check out the new + // Git HEAD. + if new_git_head != old_git_head && new_git_head.is_some() { + let workspace_id = self.workspace.workspace_id(); + let mut locked_working_copy = self.workspace.working_copy_mut().start_mutation(); + if let Some(old_wc_commit_id) = self.repo.view().get_wc_commit_id(&workspace_id) { + tx.mut_repo() + .record_abandoned_commit(old_wc_commit_id.clone()); + } + let new_checkout = self + .repo + .store() + .get_commit(new_git_head.as_ref().unwrap())?; + tx.mut_repo() + .check_out(workspace_id, &self.settings, &new_checkout); + // The working copy was presumably updated by the git command that updated HEAD, + // so we just need to reset our working copy state to it without updating + // working copy files. + locked_working_copy.reset(&new_checkout.tree())?; + tx.mut_repo().rebase_descendants(&self.settings)?; + self.repo = tx.commit(); + locked_working_copy.finish(self.repo.op_id().clone()); + } else { + let num_rebased = tx.mut_repo().rebase_descendants(ui.settings())?; + if num_rebased > 0 { + writeln!( + ui, + "Rebased {} descendant commits off of commits rewritten from git", + num_rebased + )?; + } + self.finish_transaction(ui, tx)?; + } + } + Ok(()) + } + + fn export_head_to_git(&self, mut_repo: &mut MutableRepo) -> Result<(), CommandError> { + let git_repo = mut_repo.store().git_repo().unwrap(); + let current_git_head_ref = git_repo.find_reference("HEAD").unwrap(); + let current_git_commit_id = current_git_head_ref + .peel_to_commit() + .ok() + .map(|commit| commit.id()); + if let Some(checkout_id) = mut_repo.view().get_wc_commit_id(&self.workspace_id()) { + let first_parent_id = + mut_repo.index().entry_by_id(checkout_id).unwrap().parents()[0].commit_id(); + if first_parent_id != *mut_repo.store().root_commit_id() { + if let Some(current_git_commit_id) = current_git_commit_id { + git_repo.set_head_detached(current_git_commit_id)?; + } + let new_git_commit_id = Oid::from_bytes(first_parent_id.as_bytes()).unwrap(); + let new_git_commit = git_repo.find_commit(new_git_commit_id)?; + git_repo.reset(new_git_commit.as_object(), git2::ResetType::Mixed, None)?; + mut_repo.set_git_head(first_parent_id); + } + } else { + // The workspace was removed (maybe the user undid the + // initialization of the workspace?), which is weird, + // but we should probably just not do anything else here. + // Except maybe print a note about it? + } + Ok(()) + } + + pub fn repo(&self) -> &Arc { + &self.repo + } + + pub fn repo_mut(&mut self) -> &mut Arc { + &mut self.repo + } + + pub fn working_copy(&self) -> &WorkingCopy { + self.workspace.working_copy() + } + + pub fn start_working_copy_mutation( + &mut self, + ) -> Result<(LockedWorkingCopy, Commit), CommandError> { + self.check_working_copy_writable()?; + let wc_commit_id = self.repo.view().get_wc_commit_id(&self.workspace_id()); + let wc_commit = if let Some(wc_commit_id) = wc_commit_id { + self.repo.store().get_commit(wc_commit_id)? 
+ } else { + return Err(CommandError::UserError( + "Nothing checked out in this workspace".to_string(), + )); + }; + + let locked_working_copy = self.workspace.working_copy_mut().start_mutation(); + if wc_commit.tree_id() != locked_working_copy.old_tree_id() { + return Err(CommandError::UserError( + "Concurrent working copy operation. Try again.".to_string(), + )); + } + + Ok((locked_working_copy, wc_commit)) + } + + pub fn workspace_root(&self) -> &PathBuf { + self.workspace.workspace_root() + } + + pub fn workspace_id(&self) -> WorkspaceId { + self.workspace.workspace_id() + } + + pub fn working_copy_shared_with_git(&self) -> bool { + self.working_copy_shared_with_git + } + + pub fn format_file_path(&self, file: &RepoPath) -> String { + ui::relative_path(&self.cwd, &file.to_fs_path(self.workspace_root())) + .to_str() + .unwrap() + .to_owned() + } + + pub fn git_config(&self) -> Result { + if let Some(git_repo) = self.repo.store().git_repo() { + git_repo.config() + } else { + git2::Config::open_default() + } + } + + pub fn base_ignores(&self) -> Arc { + let mut git_ignores = GitIgnoreFile::empty(); + if let Ok(excludes_file_str) = self + .git_config() + .and_then(|git_config| git_config.get_string("core.excludesFile")) + { + let excludes_file_path = expand_git_path(excludes_file_str); + git_ignores = git_ignores.chain_with_file("", excludes_file_path); + } + if let Some(git_repo) = self.repo.store().git_repo() { + git_ignores = + git_ignores.chain_with_file("", git_repo.path().join("info").join("exclude")); + } + git_ignores + } + + pub fn resolve_single_op(&self, op_str: &str) -> Result { + // When resolving the "@" operation in a `ReadonlyRepo`, we resolve it to the + // operation the repo was loaded at. + resolve_single_op( + self.repo.op_store(), + self.repo.op_heads_store(), + self.repo.operation(), + op_str, + ) + } + + pub fn resolve_single_rev(&self, revision_str: &str) -> Result { + let revset_expression = revset::parse(revision_str)?; + let revset = + revset_expression.evaluate(self.repo.as_repo_ref(), Some(&self.workspace_id()))?; + let mut iter = revset.iter().commits(self.repo.store()); + match iter.next() { + None => Err(CommandError::UserError(format!( + "Revset \"{}\" didn't resolve to any revisions", + revision_str + ))), + Some(commit) => { + if iter.next().is_some() { + Err(CommandError::UserError(format!( + "Revset \"{}\" resolved to more than one revision", + revision_str + ))) + } else { + Ok(commit?) 
+ } + } + } + } + + pub fn resolve_revset(&self, revision_str: &str) -> Result, CommandError> { + let revset_expression = revset::parse(revision_str)?; + let revset = + revset_expression.evaluate(self.repo.as_repo_ref(), Some(&self.workspace_id()))?; + Ok(revset + .iter() + .commits(self.repo.store()) + .map(Result::unwrap) + .collect()) + } + + pub fn check_rewriteable(&self, commit: &Commit) -> Result<(), CommandError> { + if commit.id() == self.repo.store().root_commit_id() { + return Err(CommandError::UserError( + "Cannot rewrite the root commit".to_string(), + )); + } + Ok(()) + } + + pub fn check_non_empty(&self, commits: &[Commit]) -> Result<(), CommandError> { + if commits.is_empty() { + return Err(CommandError::UserError("Empty revision set".to_string())); + } + Ok(()) + } + + pub fn commit_working_copy(&mut self, ui: &mut Ui) -> Result<(), CommandError> { + let repo = self.repo.clone(); + let workspace_id = self.workspace_id(); + let checkout_id = match repo.view().get_wc_commit_id(&self.workspace_id()) { + Some(checkout_id) => checkout_id.clone(), + None => { + // If the workspace has been deleted, it's unclear what to do, so we just skip + // committing the working copy. + return Ok(()); + } + }; + let base_ignores = self.base_ignores(); + let mut locked_wc = self.workspace.working_copy_mut().start_mutation(); + // Check if the working copy commit matches the repo's view. It's fine if it + // doesn't, but we'll need to reload the repo so the new commit is + // in the index and view, and so we don't cause unnecessary + // divergence. + let checkout_commit = repo.store().get_commit(&checkout_id)?; + let wc_tree_id = locked_wc.old_tree_id().clone(); + let mut wc_was_stale = false; + if *checkout_commit.tree_id() != wc_tree_id { + let wc_operation_data = self + .repo + .op_store() + .read_operation(locked_wc.old_operation_id()) + .unwrap(); + let wc_operation = Operation::new( + repo.op_store().clone(), + locked_wc.old_operation_id().clone(), + wc_operation_data, + ); + let repo_operation = repo.operation(); + let maybe_ancestor_op = dag_walk::closest_common_node( + [wc_operation.clone()], + [repo_operation.clone()], + &|op: &Operation| op.parents(), + &|op: &Operation| op.id().clone(), + ); + if let Some(ancestor_op) = maybe_ancestor_op { + if ancestor_op.id() == repo_operation.id() { + // The working copy was updated since we loaded the repo. We reload the repo + // at the working copy's operation. + self.repo = repo.reload_at(&wc_operation); + } else if ancestor_op.id() == wc_operation.id() { + // The working copy was not updated when some repo operation committed, + // meaning that it's stale compared to the repo view. We update the working + // copy to what the view says. 
+ writeln!( + ui, + "The working copy is stale (not updated since operation {}), now updating \ + to operation {}", + short_operation_hash(wc_operation.id()), + short_operation_hash(repo_operation.id()), + )?; + locked_wc + .check_out(&checkout_commit.tree()) + .map_err(|err| { + CommandError::InternalError(format!( + "Failed to check out commit {}: {}", + checkout_commit.id().hex(), + err + )) + })?; + wc_was_stale = true; + } else { + return Err(CommandError::InternalError(format!( + "The repo was loaded at operation {}, which seems to be a sibling of the \ + working copy's operation {}", + short_operation_hash(repo_operation.id()), + short_operation_hash(wc_operation.id()) + ))); + } + } else { + return Err(CommandError::InternalError(format!( + "The repo was loaded at operation {}, which seems unrelated to the working \ + copy's operation {}", + short_operation_hash(repo_operation.id()), + short_operation_hash(wc_operation.id()) + ))); + } + } + let new_tree_id = locked_wc.snapshot(base_ignores)?; + if new_tree_id != *checkout_commit.tree_id() { + let mut tx = self.repo.start_transaction("commit working copy"); + let mut_repo = tx.mut_repo(); + let commit = CommitBuilder::for_rewrite_from(&self.settings, &checkout_commit) + .set_tree(new_tree_id) + .write_to_repo(mut_repo); + mut_repo.set_wc_commit(workspace_id, commit.id().clone()); + + // Rebase descendants + let num_rebased = mut_repo.rebase_descendants(&self.settings)?; + if num_rebased > 0 { + writeln!( + ui, + "Rebased {} descendant commits onto updated working copy", + num_rebased + )?; + } + + self.repo = tx.commit(); + locked_wc.finish(self.repo.op_id().clone()); + } else if wc_was_stale { + locked_wc.finish(self.repo.op_id().clone()); + } else { + locked_wc.discard(); + } + Ok(()) + } + + pub fn edit_diff( + &self, + ui: &mut Ui, + left_tree: &Tree, + right_tree: &Tree, + instructions: &str, + ) -> Result { + crate::diff_edit::edit_diff( + ui, + &self.settings, + left_tree, + right_tree, + instructions, + self.base_ignores(), + ) + } + + pub fn select_diff( + &self, + ui: &mut Ui, + left_tree: &Tree, + right_tree: &Tree, + instructions: &str, + interactive: bool, + matcher: &dyn Matcher, + ) -> Result { + if interactive { + Ok(crate::diff_edit::edit_diff( + ui, + &self.settings, + left_tree, + right_tree, + instructions, + self.base_ignores(), + )?) + } else if matcher.visit(&RepoPath::root()) == Visit::AllRecursively { + // Optimization for a common case + Ok(right_tree.id().clone()) + } else { + let mut tree_builder = self.repo().store().tree_builder(left_tree.id().clone()); + for (repo_path, diff) in left_tree.diff(right_tree, matcher) { + match diff.into_options().1 { + Some(value) => { + tree_builder.set(repo_path, value); + } + None => { + tree_builder.remove(repo_path); + } + } + } + Ok(tree_builder.write_tree()) + } + } + + pub fn start_transaction(&self, description: &str) -> Transaction { + let mut tx = self.repo.start_transaction(description); + // TODO: Either do better shell-escaping here or store the values in some list + // type (which we currently don't have). + let shell_escape = |arg: &String| { + if arg.as_bytes().iter().all(|b| { + matches!(b, + b'A'..=b'Z' + | b'a'..=b'z' + | b'0'..=b'9' + | b',' + | b'-' + | b'.' 
+ | b'/' + | b':' + | b'@' + | b'_' + ) + }) { + arg.clone() + } else { + format!("'{}'", arg.replace('\'', "\\'")) + } + }; + let quoted_strings = self.string_args.iter().map(shell_escape).collect_vec(); + tx.set_tag("args".to_string(), quoted_strings.join(" ")); + tx + } + + pub fn finish_transaction( + &mut self, + ui: &mut Ui, + mut tx: Transaction, + ) -> Result<(), CommandError> { + let mut_repo = tx.mut_repo(); + let store = mut_repo.store().clone(); + if !mut_repo.has_changes() { + writeln!(ui, "Nothing changed.")?; + return Ok(()); + } + let num_rebased = mut_repo.rebase_descendants(ui.settings())?; + if num_rebased > 0 { + writeln!(ui, "Rebased {} descendant commits", num_rebased)?; + } + if self.working_copy_shared_with_git { + self.export_head_to_git(mut_repo)?; + } + let maybe_old_commit = tx + .base_repo() + .view() + .get_wc_commit_id(&self.workspace_id()) + .map(|commit_id| store.get_commit(commit_id)) + .transpose()?; + self.repo = tx.commit(); + if self.may_update_working_copy { + let stats = update_working_copy( + ui, + &self.repo, + &self.workspace_id(), + self.workspace.working_copy_mut(), + maybe_old_commit.as_ref(), + )?; + if let Some(stats) = stats { + print_checkout_stats(ui, stats)?; + } + } + if self.working_copy_shared_with_git { + let git_repo = self.repo.store().git_repo().unwrap(); + git::export_refs(&self.repo, &git_repo)?; + } + Ok(()) + } +} + +pub fn print_checkout_stats(ui: &mut Ui, stats: CheckoutStats) -> Result<(), std::io::Error> { + if stats.added_files > 0 || stats.updated_files > 0 || stats.removed_files > 0 { + writeln!( + ui, + "Added {} files, modified {} files, removed {} files", + stats.added_files, stats.updated_files, stats.removed_files + )?; + } + Ok(()) +} + +/// Expands "~/" to "$HOME/" as Git seems to do for e.g. core.excludesFile. +fn expand_git_path(path_str: String) -> PathBuf { + if let Some(remainder) = path_str.strip_prefix("~/") { + if let Ok(home_dir_str) = std::env::var("HOME") { + return PathBuf::from(home_dir_str).join(remainder); + } + } + PathBuf::from(path_str) +} + +fn resolve_op_for_load( + op_store: &Arc, + op_heads_store: &Arc, + op_str: &str, +) -> Result { + if op_str == "@" { + Ok(op_heads_store.get_heads(op_store)?) + } else if op_str == "@-" { + match op_heads_store.get_heads(op_store)? { + OpHeads::Single(current_op) => { + let resolved_op = resolve_single_op(op_store, op_heads_store, ¤t_op, op_str)?; + Ok(OpHeads::Single(resolved_op)) + } + OpHeads::Unresolved { .. 
} => Err(CommandError::UserError(format!( + r#"The "{op_str}" expression resolved to more than one operation"# + ))), + } + } else { + let operation = resolve_single_op_from_store(op_store, op_heads_store, op_str)?; + Ok(OpHeads::Single(operation)) + } +} + +fn resolve_single_op( + op_store: &Arc, + op_heads_store: &Arc, + current_op: &Operation, + op_str: &str, +) -> Result { + if op_str == "@" { + Ok(current_op.clone()) + } else if op_str == "@-" { + let parent_ops = current_op.parents(); + if parent_ops.len() != 1 { + return Err(CommandError::UserError(format!( + r#"The "{op_str}" expression resolved to more than one operation"# + ))); + } + Ok(parent_ops[0].clone()) + } else { + resolve_single_op_from_store(op_store, op_heads_store, op_str) + } +} + +fn find_all_operations( + op_store: &Arc, + op_heads_store: &Arc, +) -> Vec { + let mut visited = HashSet::new(); + let mut work: VecDeque<_> = op_heads_store.get_op_heads().into_iter().collect(); + let mut operations = vec![]; + while let Some(op_id) = work.pop_front() { + if visited.insert(op_id.clone()) { + let store_operation = op_store.read_operation(&op_id).unwrap(); + work.extend(store_operation.parents.iter().cloned()); + let operation = Operation::new(op_store.clone(), op_id, store_operation); + operations.push(operation); + } + } + operations +} + +fn resolve_single_op_from_store( + op_store: &Arc, + op_heads_store: &Arc, + op_str: &str, +) -> Result { + if op_str.is_empty() || !op_str.as_bytes().iter().all(|b| b.is_ascii_hexdigit()) { + return Err(CommandError::UserError(format!( + "Operation ID \"{}\" is not a valid hexadecimal prefix", + op_str + ))); + } + if let Ok(binary_op_id) = hex::decode(op_str) { + let op_id = OperationId::new(binary_op_id); + match op_store.read_operation(&op_id) { + Ok(operation) => { + return Ok(Operation::new(op_store.clone(), op_id, operation)); + } + Err(OpStoreError::NotFound) => { + // Fall through + } + Err(err) => { + return Err(CommandError::InternalError(format!( + "Failed to read operation: {err}" + ))); + } + } + } + let mut matches = vec![]; + for op in find_all_operations(op_store, op_heads_store) { + if op.id().hex().starts_with(op_str) { + matches.push(op); + } + } + if matches.is_empty() { + Err(CommandError::UserError(format!( + "No operation ID matching \"{}\"", + op_str + ))) + } else if matches.len() == 1 { + Ok(matches.pop().unwrap()) + } else { + Err(CommandError::UserError(format!( + "Operation ID prefix \"{}\" is ambiguous", + op_str + ))) + } +} + +pub fn resolve_base_revs( + workspace_command: &WorkspaceCommandHelper, + revisions: &[String], +) -> Result, CommandError> { + let mut commits = vec![]; + for revision_str in revisions { + let commit = workspace_command.resolve_single_rev(revision_str)?; + if let Some(i) = commits.iter().position(|c| c == &commit) { + return Err(CommandError::UserError(format!( + r#"Revset "{}" and "{}" resolved to the same revision {}"#, + revisions[i], + revision_str, + short_commit_hash(commit.id()), + ))); + } + commits.push(commit); + } + + let root_commit_id = workspace_command.repo().store().root_commit_id(); + if commits.len() >= 2 && commits.iter().any(|c| c.id() == root_commit_id) { + Err(CommandError::UserError( + "Cannot merge with root revision".to_owned(), + )) + } else { + Ok(commits) + } +} + +pub fn repo_paths_from_values( + ui: &Ui, + wc_path: &Path, + values: &[String], +) -> Result, CommandError> { + if !values.is_empty() { + // TODO: Add support for globs and other formats + let mut paths = vec![]; + for value in values { + 
let repo_path = ui.parse_file_path(wc_path, value)?; + paths.push(repo_path); + } + Ok(paths) + } else { + Ok(vec![]) + } +} + +pub fn matcher_from_values( + ui: &Ui, + wc_path: &Path, + values: &[String], +) -> Result, CommandError> { + let paths = repo_paths_from_values(ui, wc_path, values)?; + if paths.is_empty() { + Ok(Box::new(EverythingMatcher)) + } else { + Ok(Box::new(PrefixMatcher::new(&paths))) + } +} + +fn update_working_copy( + ui: &mut Ui, + repo: &Arc, + workspace_id: &WorkspaceId, + wc: &mut WorkingCopy, + old_commit: Option<&Commit>, +) -> Result, CommandError> { + let new_commit_id = match repo.view().get_wc_commit_id(workspace_id) { + Some(new_commit_id) => new_commit_id, + None => { + // It seems the workspace was deleted, so we shouldn't try to update it. + return Ok(None); + } + }; + let new_commit = repo.store().get_commit(new_commit_id)?; + let old_tree_id = old_commit.map(|commit| commit.tree_id().clone()); + let stats = if Some(new_commit.tree_id()) != old_tree_id.as_ref() { + // TODO: CheckoutError::ConcurrentCheckout should probably just result in a + // warning for most commands (but be an error for the checkout command) + let stats = wc + .check_out( + repo.op_id().clone(), + old_tree_id.as_ref(), + &new_commit.tree(), + ) + .map_err(|err| { + CommandError::InternalError(format!( + "Failed to check out commit {}: {}", + new_commit.id().hex(), + err + )) + })?; + Some(stats) + } else { + // Record new operation id which represents the latest working-copy state + // TODO: no need to rewrite the tree_state file + let locked_wc = wc.start_mutation(); + locked_wc.finish(repo.op_id().clone()); + None + }; + if Some(&new_commit) != old_commit { + ui.write("Working copy now at: ")?; + ui.write_commit_summary(repo.as_repo_ref(), workspace_id, &new_commit)?; + ui.write("\n")?; + } + Ok(stats) +} + +pub fn short_commit_description(commit: &Commit) -> String { + let first_line = commit.description().split('\n').next().unwrap(); + format!("{} ({})", short_commit_hash(commit.id()), first_line) +} + +pub fn short_commit_hash(commit_id: &CommitId) -> String { + commit_id.hex()[0..12].to_string() +} + +pub fn short_operation_hash(operation_id: &OperationId) -> String { + operation_id.hex()[0..12].to_string() +} + +/// Jujutsu (An experimental VCS) +/// +/// To get started, see the tutorial at https://github.com/martinvonz/jj/blob/main/docs/tutorial.md. +#[derive(clap::Parser, Clone, Debug)] +#[clap( + name = "jj", + author = "Martin von Zweigbergk ", + version +)] +#[clap(mut_arg("help", |arg| { +arg +.help("Print help information, more help with --help than with -h") +.help_heading("GLOBAL OPTIONS") +}))] +pub struct Args { + #[clap(flatten)] + pub global_args: GlobalArgs, +} + +#[derive(clap::Args, Clone, Debug)] +pub struct GlobalArgs { + /// Path to repository to operate on + /// + /// By default, Jujutsu searches for the closest .jj/ directory in an + /// ancestor of the current working directory. + #[clap( + long, + short = 'R', + global = true, + help_heading = "GLOBAL OPTIONS", + value_hint = clap::ValueHint::DirPath, + )] + pub repository: Option, + /// Don't commit the working copy + /// + /// By default, Jujutsu commits the working copy on every command, unless + /// you load the repo at a specific operation with `--at-operation`. If + /// you want to avoid committing the working and instead see a possibly + /// stale working copy commit, you can use `--no-commit-working-copy`. + /// This may be useful e.g. 
in a command prompt, especially if you have + /// another process that commits the working copy. + #[clap(long, global = true, help_heading = "GLOBAL OPTIONS")] + pub no_commit_working_copy: bool, + /// Operation to load the repo at + /// + /// Operation to load the repo at. By default, Jujutsu loads the repo at the + /// most recent operation. You can use `--at-op=` to see what + /// the repo looked like at an earlier operation. For example `jj + /// --at-op= st` will show you what `jj st` would have + /// shown you when the given operation had just finished. + /// + /// Use `jj op log` to find the operation ID you want. Any unambiguous + /// prefix of the operation ID is enough. + /// + /// When loading the repo at an earlier operation, the working copy will not + /// be automatically committed. + /// + /// It is possible to run mutating commands when loading the repo at an + /// earlier operation. Doing that is equivalent to having run concurrent + /// commands starting at the earlier operation. There's rarely a reason to + /// do that, but it is possible. + #[clap( + long, + visible_alias = "at-op", + global = true, + help_heading = "GLOBAL OPTIONS", + default_value = "@" + )] + pub at_operation: String, + /// When to colorize output (always, never, auto) + #[clap( + long, + value_name = "WHEN", + global = true, + help_heading = "GLOBAL OPTIONS" + )] + pub color: Option, +} + +fn string_list_from_config(value: config::Value) -> Option> { + match value { + config::Value { + kind: config::ValueKind::Array(elements), + .. + } => { + let mut strings = vec![]; + for arg in elements { + match arg { + config::Value { + kind: config::ValueKind::String(string_value), + .. + } => { + strings.push(string_value); + } + _ => { + return None; + } + } + } + Some(strings) + } + _ => None, + } +} + +pub fn resolve_aliases( + app: &clap::Command, + settings: &UserSettings, + string_args: &[String], +) -> Result, CommandError> { + let mut resolved_aliases = HashSet::new(); + let mut string_args = string_args.to_vec(); + let mut real_commands = HashSet::new(); + for command in app.get_subcommands() { + real_commands.insert(command.get_name().to_string()); + for alias in command.get_all_aliases() { + real_commands.insert(alias.to_string()); + } + } + loop { + let app_clone = app.clone().allow_external_subcommands(true); + let matches = app_clone.get_matches_from(&string_args); + if let Some((command_name, submatches)) = matches.subcommand() { + if !real_commands.contains(command_name) { + let alias_name = command_name.to_string(); + let alias_args = submatches + .values_of("") + .unwrap_or_default() + .map(|arg| arg.to_string()) + .collect_vec(); + if resolved_aliases.contains(&alias_name) { + return Err(CommandError::UserError(format!( + r#"Recursive alias definition involving "{alias_name}""# + ))); + } + match settings + .config() + .get::(&format!("alias.{}", alias_name)) + { + Ok(value) => { + if let Some(alias_definition) = string_list_from_config(value) { + assert!(string_args.ends_with(&alias_args)); + string_args.truncate(string_args.len() - 1 - alias_args.len()); + string_args.extend(alias_definition); + string_args.extend_from_slice(&alias_args); + resolved_aliases.insert(alias_name.clone()); + continue; + } else { + return Err(CommandError::UserError(format!( + r#"Alias definition for "{alias_name}" must be a string list"# + ))); + } + } + Err(config::ConfigError::NotFound(_)) => { + let mut app = app.clone(); + app.error(clap::ErrorKind::ArgumentNotFound, format!( + r#"Found argument 
'{alias_name}' which wasn't expected, or isn't valid in this context"# + )).exit(); + } + Err(err) => { + return Err(CommandError::from(err)); + } + } + } + } + return Ok(string_args); + } +} diff --git a/src/commands.rs b/src/commands.rs index 14d96c267..e26a50309 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -25,1101 +25,46 @@ use std::{fs, io}; use chrono::{FixedOffset, TimeZone, Utc}; use clap::{ArgGroup, CommandFactory, FromArgMatches, Subcommand}; -use config::Value; use criterion::Criterion; -use git2::{Oid, Repository}; use itertools::Itertools; -use jujutsu_lib::backend::{BackendError, CommitId, Timestamp, TreeId, TreeValue}; +use jujutsu_lib::backend::{BackendError, CommitId, Timestamp, TreeValue}; use jujutsu_lib::commit::Commit; use jujutsu_lib::commit_builder::CommitBuilder; use jujutsu_lib::dag_walk::topo_order_reverse; use jujutsu_lib::diff::{Diff, DiffHunk}; use jujutsu_lib::files::DiffLine; -use jujutsu_lib::git::{GitExportError, GitFetchError, GitImportError, GitRefUpdate}; -use jujutsu_lib::gitignore::GitIgnoreFile; +use jujutsu_lib::git::{GitFetchError, GitRefUpdate}; use jujutsu_lib::index::{HexPrefix, IndexEntry}; -use jujutsu_lib::matchers::{EverythingMatcher, Matcher, PrefixMatcher, Visit}; -use jujutsu_lib::op_heads_store::{OpHeadResolutionError, OpHeads, OpHeadsStore}; -use jujutsu_lib::op_store::{OpStore, OpStoreError, OperationId, RefTarget, WorkspaceId}; +use jujutsu_lib::matchers::{EverythingMatcher, Matcher}; +use jujutsu_lib::op_store::{RefTarget, WorkspaceId}; use jujutsu_lib::operation::Operation; use jujutsu_lib::refs::{classify_branch_push_action, BranchPushAction, BranchPushUpdate}; -use jujutsu_lib::repo::{MutableRepo, ReadonlyRepo, RepoRef}; +use jujutsu_lib::repo::{ReadonlyRepo, RepoRef}; use jujutsu_lib::repo_path::RepoPath; -use jujutsu_lib::revset::{RevsetError, RevsetExpression, RevsetParseError}; +use jujutsu_lib::revset::RevsetExpression; use jujutsu_lib::revset_graph_iterator::{RevsetGraphEdge, RevsetGraphEdgeType}; use jujutsu_lib::rewrite::{back_out_commit, merge_commit_trees, rebase_commit, DescendantRebaser}; use jujutsu_lib::settings::UserSettings; use jujutsu_lib::store::Store; -use jujutsu_lib::transaction::Transaction; -use jujutsu_lib::tree::{merge_trees, Tree, TreeDiffIterator, TreeMergeError}; +use jujutsu_lib::tree::{merge_trees, Tree, TreeDiffIterator}; use jujutsu_lib::view::View; -use jujutsu_lib::working_copy::{ - CheckoutStats, LockedWorkingCopy, ResetError, SnapshotError, WorkingCopy, -}; -use jujutsu_lib::workspace::{Workspace, WorkspaceInitError, WorkspaceLoadError}; -use jujutsu_lib::{conflicts, dag_walk, diff, files, git, revset, tree}; +use jujutsu_lib::workspace::Workspace; +use jujutsu_lib::{conflicts, diff, files, git, revset, tree}; use maplit::{hashmap, hashset}; use pest::Parser; +use crate::cli_util::{ + matcher_from_values, print_checkout_stats, repo_paths_from_values, resolve_aliases, + resolve_base_revs, short_commit_description, short_commit_hash, Args, CommandError, + CommandHelper, WorkspaceCommandHelper, +}; use crate::commands::CommandError::UserError; -use crate::diff_edit::DiffEditError; use crate::formatter::Formatter; use crate::graphlog::{AsciiGraphDrawer, Edge}; use crate::template_parser::TemplateParser; use crate::templater::Template; use crate::ui; -use crate::ui::{ColorChoice, FilePathParseError, Ui}; - -pub enum CommandError { - UserError(String), - /// Invalid command line - CliError(String), - BrokenPipe, - InternalError(String), -} - -impl From for CommandError { - fn from(err: 
std::io::Error) -> Self { - if err.kind() == std::io::ErrorKind::BrokenPipe { - CommandError::BrokenPipe - } else { - // TODO: Record the error as a chained cause - CommandError::InternalError(format!("I/O error: {err}")) - } - } -} - -impl From for CommandError { - fn from(err: config::ConfigError) -> Self { - CommandError::UserError(format!("Config error: {err}")) - } -} - -impl From for CommandError { - fn from(err: BackendError) -> Self { - CommandError::UserError(format!("Unexpected error from store: {err}")) - } -} - -impl From for CommandError { - fn from(_: WorkspaceInitError) -> Self { - CommandError::UserError("The target repo already exists".to_string()) - } -} - -impl From for CommandError { - fn from(err: OpHeadResolutionError) -> Self { - match err { - OpHeadResolutionError::NoHeads => { - CommandError::InternalError("Corrupt repository: the are no operations".to_string()) - } - } - } -} - -impl From for CommandError { - fn from(err: SnapshotError) -> Self { - CommandError::InternalError(format!("Failed to snapshot the working copy: {err}")) - } -} - -impl From for CommandError { - fn from(err: TreeMergeError) -> Self { - CommandError::InternalError(format!("Merge failed: {err}")) - } -} - -impl From for CommandError { - fn from(_: ResetError) -> Self { - CommandError::InternalError("Failed to reset the working copy".to_string()) - } -} - -impl From for CommandError { - fn from(err: DiffEditError) -> Self { - CommandError::UserError(format!("Failed to edit diff: {err}")) - } -} - -impl From for CommandError { - fn from(err: git2::Error) -> Self { - CommandError::UserError(format!("Git operation failed: {err}")) - } -} - -impl From for CommandError { - fn from(err: GitImportError) -> Self { - CommandError::InternalError(format!( - "Failed to import refs from underlying Git repo: {err}" - )) - } -} - -impl From for CommandError { - fn from(err: GitExportError) -> Self { - match err { - GitExportError::ConflictedBranch(branch_name) => { - CommandError::UserError(format!("Cannot export conflicted branch '{branch_name}'")) - } - GitExportError::InternalGitError(err) => CommandError::InternalError(format!( - "Failed to export refs to underlying Git repo: {err}" - )), - } - } -} - -impl From for CommandError { - fn from(err: RevsetParseError) -> Self { - CommandError::UserError(format!("Failed to parse revset: {err}")) - } -} - -impl From for CommandError { - fn from(err: RevsetError) -> Self { - CommandError::UserError(format!("{err}")) - } -} - -impl From for CommandError { - fn from(err: FilePathParseError) -> Self { - match err { - FilePathParseError::InputNotInRepo(input) => { - CommandError::UserError(format!("Path \"{input}\" is not in the repo")) - } - } - } -} - -struct CommandHelper<'help> { - app: clap::Command<'help>, - string_args: Vec, - global_args: GlobalArgs, -} - -impl<'help> CommandHelper<'help> { - fn new(app: clap::Command<'help>, string_args: Vec, global_args: GlobalArgs) -> Self { - Self { - app, - string_args, - global_args, - } - } - - fn global_args(&self) -> &GlobalArgs { - &self.global_args - } - - fn workspace_helper(&self, ui: &mut Ui) -> Result { - let wc_path_str = self.global_args.repository.as_deref().unwrap_or("."); - let wc_path = ui.cwd().join(wc_path_str); - let workspace = match Workspace::load(ui.settings(), wc_path) { - Ok(workspace) => workspace, - Err(WorkspaceLoadError::NoWorkspaceHere(wc_path)) => { - let mut message = format!("There is no jj repo in \"{}\"", wc_path_str); - let git_dir = wc_path.join(".git"); - if git_dir.is_dir() { - // 
TODO: Make this hint separate from the error, so the caller can format - // it differently. - message += " -It looks like this is a git repo. You can create a jj repo backed by it by running this: -jj init --git-repo=."; - } - return Err(CommandError::UserError(message)); - } - Err(WorkspaceLoadError::RepoDoesNotExist(repo_dir)) => { - return Err(CommandError::UserError(format!( - "The repository directory at {} is missing. Was it moved?", - repo_dir.to_str().unwrap() - ))); - } - }; - let repo_loader = workspace.repo_loader(); - let op_heads = resolve_op_for_load( - repo_loader.op_store(), - repo_loader.op_heads_store(), - &self.global_args.at_operation, - )?; - let repo = match op_heads { - OpHeads::Single(op) => repo_loader.load_at(&op), - OpHeads::Unresolved { - locked_op_heads, - op_heads, - } => { - writeln!( - ui, - "Concurrent modification detected, resolving automatically.", - )?; - let base_repo = repo_loader.load_at(&op_heads[0]); - // TODO: It may be helpful to print each operation we're merging here - let mut workspace_command = self.for_loaded_repo(ui, workspace, base_repo)?; - let mut tx = workspace_command.start_transaction("resolve concurrent operations"); - for other_op_head in op_heads.into_iter().skip(1) { - tx.merge_operation(other_op_head); - let num_rebased = tx.mut_repo().rebase_descendants(ui.settings())?; - if num_rebased > 0 { - writeln!( - ui, - "Rebased {} descendant commits onto commits rewritten by other \ - operation", - num_rebased - )?; - } - } - let merged_repo = tx.write().leave_unpublished(); - locked_op_heads.finish(merged_repo.operation()); - workspace_command.repo = merged_repo; - return Ok(workspace_command); - } - }; - self.for_loaded_repo(ui, workspace, repo) - } - - fn for_loaded_repo( - &self, - ui: &mut Ui, - workspace: Workspace, - repo: Arc, - ) -> Result { - WorkspaceCommandHelper::for_loaded_repo( - ui, - workspace, - self.string_args.clone(), - &self.global_args, - repo, - ) - } -} - -// Provides utilities for writing a command that works on a workspace (like most -// commands do). 
-struct WorkspaceCommandHelper { - cwd: PathBuf, - string_args: Vec, - global_args: GlobalArgs, - settings: UserSettings, - workspace: Workspace, - repo: Arc, - may_update_working_copy: bool, - working_copy_shared_with_git: bool, -} - -impl WorkspaceCommandHelper { - fn for_loaded_repo( - ui: &mut Ui, - workspace: Workspace, - string_args: Vec, - global_args: &GlobalArgs, - repo: Arc, - ) -> Result { - let loaded_at_head = &global_args.at_operation == "@"; - let may_update_working_copy = loaded_at_head && !global_args.no_commit_working_copy; - let mut working_copy_shared_with_git = false; - let maybe_git_repo = repo.store().git_repo(); - if let Some(git_workdir) = maybe_git_repo - .as_ref() - .and_then(|git_repo| git_repo.workdir()) - .and_then(|workdir| workdir.canonicalize().ok()) - { - working_copy_shared_with_git = git_workdir == workspace.workspace_root().as_path(); - } - let mut helper = Self { - cwd: ui.cwd().to_owned(), - string_args, - global_args: global_args.clone(), - settings: ui.settings().clone(), - workspace, - repo, - may_update_working_copy, - working_copy_shared_with_git, - }; - if may_update_working_copy { - if working_copy_shared_with_git { - helper.import_git_refs_and_head(ui, maybe_git_repo.as_ref().unwrap())?; - } - helper.commit_working_copy(ui)?; - } - Ok(helper) - } - - fn check_working_copy_writable(&self) -> Result<(), CommandError> { - if self.may_update_working_copy { - Ok(()) - } else if self.global_args.no_commit_working_copy { - Err(CommandError::UserError( - "This command must be able to update the working copy (don't use \ - --no-commit-working-copy)." - .to_string(), - )) - } else { - Err(CommandError::UserError( - "This command must be able to update the working copy (don't use --at-op)." - .to_string(), - )) - } - } - - fn import_git_refs_and_head( - &mut self, - ui: &mut Ui, - git_repo: &Repository, - ) -> Result<(), CommandError> { - let mut tx = self.start_transaction("import git refs"); - git::import_refs(tx.mut_repo(), git_repo)?; - if tx.mut_repo().has_changes() { - let old_git_head = self.repo.view().git_head(); - let new_git_head = tx.mut_repo().view().git_head(); - // If the Git HEAD has changed, abandon our old checkout and check out the new - // Git HEAD. - if new_git_head != old_git_head && new_git_head.is_some() { - let workspace_id = self.workspace.workspace_id(); - let mut locked_working_copy = self.workspace.working_copy_mut().start_mutation(); - if let Some(old_wc_commit_id) = self.repo.view().get_wc_commit_id(&workspace_id) { - tx.mut_repo() - .record_abandoned_commit(old_wc_commit_id.clone()); - } - let new_checkout = self - .repo - .store() - .get_commit(new_git_head.as_ref().unwrap())?; - tx.mut_repo() - .check_out(workspace_id, &self.settings, &new_checkout); - // The working copy was presumably updated by the git command that updated HEAD, - // so we just need to reset our working copy state to it without updating - // working copy files. 
- locked_working_copy.reset(&new_checkout.tree())?; - tx.mut_repo().rebase_descendants(&self.settings)?; - self.repo = tx.commit(); - locked_working_copy.finish(self.repo.op_id().clone()); - } else { - let num_rebased = tx.mut_repo().rebase_descendants(ui.settings())?; - if num_rebased > 0 { - writeln!( - ui, - "Rebased {} descendant commits off of commits rewritten from git", - num_rebased - )?; - } - self.finish_transaction(ui, tx)?; - } - } - Ok(()) - } - - fn export_head_to_git(&self, mut_repo: &mut MutableRepo) -> Result<(), CommandError> { - let git_repo = mut_repo.store().git_repo().unwrap(); - let current_git_head_ref = git_repo.find_reference("HEAD").unwrap(); - let current_git_commit_id = current_git_head_ref - .peel_to_commit() - .ok() - .map(|commit| commit.id()); - if let Some(checkout_id) = mut_repo.view().get_wc_commit_id(&self.workspace_id()) { - let first_parent_id = - mut_repo.index().entry_by_id(checkout_id).unwrap().parents()[0].commit_id(); - if first_parent_id != *mut_repo.store().root_commit_id() { - if let Some(current_git_commit_id) = current_git_commit_id { - git_repo.set_head_detached(current_git_commit_id)?; - } - let new_git_commit_id = Oid::from_bytes(first_parent_id.as_bytes()).unwrap(); - let new_git_commit = git_repo.find_commit(new_git_commit_id)?; - git_repo.reset(new_git_commit.as_object(), git2::ResetType::Mixed, None)?; - mut_repo.set_git_head(first_parent_id); - } - } else { - // The workspace was removed (maybe the user undid the - // initialization of the workspace?), which is weird, - // but we should probably just not do anything else here. - // Except maybe print a note about it? - } - Ok(()) - } - - fn repo(&self) -> &Arc { - &self.repo - } - - fn repo_mut(&mut self) -> &mut Arc { - &mut self.repo - } - - fn working_copy(&self) -> &WorkingCopy { - self.workspace.working_copy() - } - - fn start_working_copy_mutation(&mut self) -> Result<(LockedWorkingCopy, Commit), CommandError> { - self.check_working_copy_writable()?; - let wc_commit_id = self.repo.view().get_wc_commit_id(&self.workspace_id()); - let wc_commit = if let Some(wc_commit_id) = wc_commit_id { - self.repo.store().get_commit(wc_commit_id)? - } else { - return Err(CommandError::UserError( - "Nothing checked out in this workspace".to_string(), - )); - }; - - let locked_working_copy = self.workspace.working_copy_mut().start_mutation(); - if wc_commit.tree_id() != locked_working_copy.old_tree_id() { - return Err(CommandError::UserError( - "Concurrent working copy operation. 
Try again.".to_string(), - )); - } - - Ok((locked_working_copy, wc_commit)) - } - - fn workspace_root(&self) -> &PathBuf { - self.workspace.workspace_root() - } - - fn workspace_id(&self) -> WorkspaceId { - self.workspace.workspace_id() - } - - fn working_copy_shared_with_git(&self) -> bool { - self.working_copy_shared_with_git - } - - fn format_file_path(&self, file: &RepoPath) -> String { - ui::relative_path(&self.cwd, &file.to_fs_path(self.workspace_root())) - .to_str() - .unwrap() - .to_owned() - } - - fn git_config(&self) -> Result { - if let Some(git_repo) = self.repo.store().git_repo() { - git_repo.config() - } else { - git2::Config::open_default() - } - } - - fn base_ignores(&self) -> Arc { - let mut git_ignores = GitIgnoreFile::empty(); - if let Ok(excludes_file_str) = self - .git_config() - .and_then(|git_config| git_config.get_string("core.excludesFile")) - { - let excludes_file_path = expand_git_path(excludes_file_str); - git_ignores = git_ignores.chain_with_file("", excludes_file_path); - } - if let Some(git_repo) = self.repo.store().git_repo() { - git_ignores = - git_ignores.chain_with_file("", git_repo.path().join("info").join("exclude")); - } - git_ignores - } - - fn resolve_single_op(&self, op_str: &str) -> Result { - // When resolving the "@" operation in a `ReadonlyRepo`, we resolve it to the - // operation the repo was loaded at. - resolve_single_op( - self.repo.op_store(), - self.repo.op_heads_store(), - self.repo.operation(), - op_str, - ) - } - - fn resolve_single_rev(&self, revision_str: &str) -> Result { - let revset_expression = revset::parse(revision_str)?; - let revset = - revset_expression.evaluate(self.repo.as_repo_ref(), Some(&self.workspace_id()))?; - let mut iter = revset.iter().commits(self.repo.store()); - match iter.next() { - None => Err(CommandError::UserError(format!( - "Revset \"{}\" didn't resolve to any revisions", - revision_str - ))), - Some(commit) => { - if iter.next().is_some() { - Err(CommandError::UserError(format!( - "Revset \"{}\" resolved to more than one revision", - revision_str - ))) - } else { - Ok(commit?) - } - } - } - } - - fn resolve_revset(&self, revision_str: &str) -> Result, CommandError> { - let revset_expression = revset::parse(revision_str)?; - let revset = - revset_expression.evaluate(self.repo.as_repo_ref(), Some(&self.workspace_id()))?; - Ok(revset - .iter() - .commits(self.repo.store()) - .map(Result::unwrap) - .collect()) - } - - fn check_rewriteable(&self, commit: &Commit) -> Result<(), CommandError> { - if commit.id() == self.repo.store().root_commit_id() { - return Err(CommandError::UserError( - "Cannot rewrite the root commit".to_string(), - )); - } - Ok(()) - } - - fn check_non_empty(&self, commits: &[Commit]) -> Result<(), CommandError> { - if commits.is_empty() { - return Err(CommandError::UserError("Empty revision set".to_string())); - } - Ok(()) - } - - fn commit_working_copy(&mut self, ui: &mut Ui) -> Result<(), CommandError> { - let repo = self.repo.clone(); - let workspace_id = self.workspace_id(); - let checkout_id = match repo.view().get_wc_commit_id(&self.workspace_id()) { - Some(checkout_id) => checkout_id.clone(), - None => { - // If the workspace has been deleted, it's unclear what to do, so we just skip - // committing the working copy. - return Ok(()); - } - }; - let base_ignores = self.base_ignores(); - let mut locked_wc = self.workspace.working_copy_mut().start_mutation(); - // Check if the working copy commit matches the repo's view. 
It's fine if it - // doesn't, but we'll need to reload the repo so the new commit is - // in the index and view, and so we don't cause unnecessary - // divergence. - let checkout_commit = repo.store().get_commit(&checkout_id)?; - let wc_tree_id = locked_wc.old_tree_id().clone(); - let mut wc_was_stale = false; - if *checkout_commit.tree_id() != wc_tree_id { - let wc_operation_data = self - .repo - .op_store() - .read_operation(locked_wc.old_operation_id()) - .unwrap(); - let wc_operation = Operation::new( - repo.op_store().clone(), - locked_wc.old_operation_id().clone(), - wc_operation_data, - ); - let repo_operation = repo.operation(); - let maybe_ancestor_op = dag_walk::closest_common_node( - [wc_operation.clone()], - [repo_operation.clone()], - &|op: &Operation| op.parents(), - &|op: &Operation| op.id().clone(), - ); - if let Some(ancestor_op) = maybe_ancestor_op { - if ancestor_op.id() == repo_operation.id() { - // The working copy was updated since we loaded the repo. We reload the repo - // at the working copy's operation. - self.repo = repo.reload_at(&wc_operation); - } else if ancestor_op.id() == wc_operation.id() { - // The working copy was not updated when some repo operation committed, - // meaning that it's stale compared to the repo view. We update the working - // copy to what the view says. - writeln!( - ui, - "The working copy is stale (not updated since operation {}), now updating \ - to operation {}", - short_operation_hash(wc_operation.id()), - short_operation_hash(repo_operation.id()), - )?; - locked_wc - .check_out(&checkout_commit.tree()) - .map_err(|err| { - CommandError::InternalError(format!( - "Failed to check out commit {}: {}", - checkout_commit.id().hex(), - err - )) - })?; - wc_was_stale = true; - } else { - return Err(CommandError::InternalError(format!( - "The repo was loaded at operation {}, which seems to be a sibling of the \ - working copy's operation {}", - short_operation_hash(repo_operation.id()), - short_operation_hash(wc_operation.id()), - ))); - } - } else { - return Err(CommandError::InternalError(format!( - "The repo was loaded at operation {}, which seems unrelated to the working \ - copy's operation {}", - short_operation_hash(repo_operation.id()), - short_operation_hash(wc_operation.id()), - ))); - } - } - let new_tree_id = locked_wc.snapshot(base_ignores)?; - if new_tree_id != *checkout_commit.tree_id() { - let mut tx = self.repo.start_transaction("commit working copy"); - let mut_repo = tx.mut_repo(); - let commit = CommitBuilder::for_rewrite_from(&self.settings, &checkout_commit) - .set_tree(new_tree_id) - .write_to_repo(mut_repo); - mut_repo.set_wc_commit(workspace_id, commit.id().clone()); - - // Rebase descendants - let num_rebased = mut_repo.rebase_descendants(&self.settings)?; - if num_rebased > 0 { - writeln!( - ui, - "Rebased {} descendant commits onto updated working copy", - num_rebased - )?; - } - - self.repo = tx.commit(); - locked_wc.finish(self.repo.op_id().clone()); - } else if wc_was_stale { - locked_wc.finish(self.repo.op_id().clone()); - } else { - locked_wc.discard(); - } - Ok(()) - } - - fn edit_diff( - &self, - ui: &mut Ui, - left_tree: &Tree, - right_tree: &Tree, - instructions: &str, - ) -> Result { - crate::diff_edit::edit_diff( - ui, - &self.settings, - left_tree, - right_tree, - instructions, - self.base_ignores(), - ) - } - - fn select_diff( - &self, - ui: &mut Ui, - left_tree: &Tree, - right_tree: &Tree, - instructions: &str, - interactive: bool, - matcher: &dyn Matcher, - ) -> Result { - if interactive { - 
            Ok(crate::diff_edit::edit_diff(
-                ui,
-                &self.settings,
-                left_tree,
-                right_tree,
-                instructions,
-                self.base_ignores(),
-            )?)
-        } else if matcher.visit(&RepoPath::root()) == Visit::AllRecursively {
-            // Optimization for a common case
-            Ok(right_tree.id().clone())
-        } else {
-            let mut tree_builder = self.repo().store().tree_builder(left_tree.id().clone());
-            for (repo_path, diff) in left_tree.diff(right_tree, matcher) {
-                match diff.into_options().1 {
-                    Some(value) => {
-                        tree_builder.set(repo_path, value);
-                    }
-                    None => {
-                        tree_builder.remove(repo_path);
-                    }
-                }
-            }
-            Ok(tree_builder.write_tree())
-        }
-    }
-
-    fn start_transaction(&self, description: &str) -> Transaction {
-        let mut tx = self.repo.start_transaction(description);
-        // TODO: Either do better shell-escaping here or store the values in some list
-        // type (which we currently don't have).
-        let shell_escape = |arg: &String| {
-            if arg.as_bytes().iter().all(|b| {
-                matches!(b,
-                    b'A'..=b'Z'
-                    | b'a'..=b'z'
-                    | b'0'..=b'9'
-                    | b','
-                    | b'-'
-                    | b'.'
-                    | b'/'
-                    | b':'
-                    | b'@'
-                    | b'_'
-                )
-            }) {
-                arg.clone()
-            } else {
-                format!("'{}'", arg.replace('\'', "\\'"))
-            }
-        };
-        let quoted_strings = self.string_args.iter().map(shell_escape).collect_vec();
-        tx.set_tag("args".to_string(), quoted_strings.join(" "));
-        tx
-    }
-
-    fn finish_transaction(&mut self, ui: &mut Ui, mut tx: Transaction) -> Result<(), CommandError> {
-        let mut_repo = tx.mut_repo();
-        let store = mut_repo.store().clone();
-        if !mut_repo.has_changes() {
-            writeln!(ui, "Nothing changed.")?;
-            return Ok(());
-        }
-        let num_rebased = mut_repo.rebase_descendants(ui.settings())?;
-        if num_rebased > 0 {
-            writeln!(ui, "Rebased {} descendant commits", num_rebased)?;
-        }
-        if self.working_copy_shared_with_git {
-            self.export_head_to_git(mut_repo)?;
-        }
-        let maybe_old_commit = tx
-            .base_repo()
-            .view()
-            .get_wc_commit_id(&self.workspace_id())
-            .map(|commit_id| store.get_commit(commit_id))
-            .transpose()?;
-        self.repo = tx.commit();
-        if self.may_update_working_copy {
-            let stats = update_working_copy(
-                ui,
-                &self.repo,
-                &self.workspace_id(),
-                self.workspace.working_copy_mut(),
-                maybe_old_commit.as_ref(),
-            )?;
-            if let Some(stats) = stats {
-                print_checkout_stats(ui, stats)?;
-            }
-        }
-        if self.working_copy_shared_with_git {
-            let git_repo = self.repo.store().git_repo().unwrap();
-            git::export_refs(&self.repo, &git_repo)?;
-        }
-        Ok(())
-    }
-}
-
-fn print_checkout_stats(ui: &mut Ui, stats: CheckoutStats) -> Result<(), std::io::Error> {
-    if stats.added_files > 0 || stats.updated_files > 0 || stats.removed_files > 0 {
-        writeln!(
-            ui,
-            "Added {} files, modified {} files, removed {} files",
-            stats.added_files, stats.updated_files, stats.removed_files
-        )?;
-    }
-    Ok(())
-}
-
-/// Expands "~/" to "$HOME/" as Git seems to do for e.g. core.excludesFile.
-fn expand_git_path(path_str: String) -> PathBuf {
-    if let Some(remainder) = path_str.strip_prefix("~/") {
-        if let Ok(home_dir_str) = std::env::var("HOME") {
-            return PathBuf::from(home_dir_str).join(remainder);
-        }
-    }
-    PathBuf::from(path_str)
-}
-
-fn resolve_op_for_load(
-    op_store: &Arc<dyn OpStore>,
-    op_heads_store: &Arc<OpHeadsStore>,
-    op_str: &str,
-) -> Result<OpHeads, CommandError> {
-    if op_str == "@" {
-        Ok(op_heads_store.get_heads(op_store)?)
-    } else if op_str == "@-" {
-        match op_heads_store.get_heads(op_store)? {
-            OpHeads::Single(current_op) => {
-                let resolved_op = resolve_single_op(op_store, op_heads_store, &current_op, op_str)?;
-                Ok(OpHeads::Single(resolved_op))
-            }
-            OpHeads::Unresolved { .. } => Err(UserError(format!(
-                r#"The "{op_str}" expression resolved to more than one operation"#
-            ))),
-        }
-    } else {
-        let operation = resolve_single_op_from_store(op_store, op_heads_store, op_str)?;
-        Ok(OpHeads::Single(operation))
-    }
-}
-
-fn resolve_single_op(
-    op_store: &Arc<dyn OpStore>,
-    op_heads_store: &Arc<OpHeadsStore>,
-    current_op: &Operation,
-    op_str: &str,
-) -> Result<Operation, CommandError> {
-    if op_str == "@" {
-        Ok(current_op.clone())
-    } else if op_str == "@-" {
-        let parent_ops = current_op.parents();
-        if parent_ops.len() != 1 {
-            return Err(UserError(format!(
-                r#"The "{op_str}" expression resolved to more than one operation"#
-            )));
-        }
-        Ok(parent_ops[0].clone())
-    } else {
-        resolve_single_op_from_store(op_store, op_heads_store, op_str)
-    }
-}
-
-fn find_all_operations(
-    op_store: &Arc<dyn OpStore>,
-    op_heads_store: &Arc<OpHeadsStore>,
-) -> Vec<Operation> {
-    let mut visited = HashSet::new();
-    let mut work: VecDeque<_> = op_heads_store.get_op_heads().into_iter().collect();
-    let mut operations = vec![];
-    while let Some(op_id) = work.pop_front() {
-        if visited.insert(op_id.clone()) {
-            let store_operation = op_store.read_operation(&op_id).unwrap();
-            work.extend(store_operation.parents.iter().cloned());
-            let operation = Operation::new(op_store.clone(), op_id, store_operation);
-            operations.push(operation);
-        }
-    }
-    operations
-}
-
-fn resolve_single_op_from_store(
-    op_store: &Arc<dyn OpStore>,
-    op_heads_store: &Arc<OpHeadsStore>,
-    op_str: &str,
-) -> Result<Operation, CommandError> {
-    if op_str.is_empty() || !op_str.as_bytes().iter().all(|b| b.is_ascii_hexdigit()) {
-        return Err(CommandError::UserError(format!(
-            "Operation ID \"{}\" is not a valid hexadecimal prefix",
-            op_str
-        )));
-    }
-    if let Ok(binary_op_id) = hex::decode(op_str) {
-        let op_id = OperationId::new(binary_op_id);
-        match op_store.read_operation(&op_id) {
-            Ok(operation) => {
-                return Ok(Operation::new(op_store.clone(), op_id, operation));
-            }
-            Err(OpStoreError::NotFound) => {
-                // Fall through
-            }
-            Err(err) => {
-                return Err(CommandError::InternalError(format!(
-                    "Failed to read operation: {err}"
-                )));
-            }
-        }
-    }
-    let mut matches = vec![];
-    for op in find_all_operations(op_store, op_heads_store) {
-        if op.id().hex().starts_with(op_str) {
-            matches.push(op);
-        }
-    }
-    if matches.is_empty() {
-        Err(CommandError::UserError(format!(
-            "No operation ID matching \"{}\"",
-            op_str
-        )))
-    } else if matches.len() == 1 {
-        Ok(matches.pop().unwrap())
-    } else {
-        Err(CommandError::UserError(format!(
-            "Operation ID prefix \"{}\" is ambiguous",
-            op_str
-        )))
-    }
-}
-
-fn resolve_base_revs(
-    workspace_command: &WorkspaceCommandHelper,
-    revisions: &[String],
-) -> Result<Vec<Commit>, CommandError> {
-    let mut commits = vec![];
-    for revision_str in revisions {
-        let commit = workspace_command.resolve_single_rev(revision_str)?;
-        if let Some(i) = commits.iter().position(|c| c == &commit) {
-            return Err(CommandError::UserError(format!(
-                r#"Revset "{}" and "{}" resolved to the same revision {}"#,
-                revisions[i],
-                revision_str,
-                short_commit_hash(commit.id()),
-            )));
-        }
-        commits.push(commit);
-    }
-
-    let root_commit_id = workspace_command.repo().store().root_commit_id();
-    if commits.len() >= 2 && commits.iter().any(|c| c.id() == root_commit_id) {
-        Err(CommandError::UserError(
-            "Cannot merge with root revision".to_owned(),
-        ))
-    } else {
-        Ok(commits)
-    }
-}
-
-fn repo_paths_from_values(
-    ui: &Ui,
-    wc_path: &Path,
-    values: &[String],
-) -> Result<Vec<RepoPath>, CommandError> {
-    if !values.is_empty() {
-        // TODO: Add support for globs and other formats
-        let mut paths = vec![];
-        for value in values {
-            let repo_path = ui.parse_file_path(wc_path, value)?;
-            paths.push(repo_path);
-        }
-        Ok(paths)
-    } else {
-        Ok(vec![])
-    }
-}
-
-fn matcher_from_values(
-    ui: &Ui,
-    wc_path: &Path,
-    values: &[String],
-) -> Result<Box<dyn Matcher>, CommandError> {
-    let paths = repo_paths_from_values(ui, wc_path, values)?;
-    if paths.is_empty() {
-        Ok(Box::new(EverythingMatcher))
-    } else {
-        Ok(Box::new(PrefixMatcher::new(&paths)))
-    }
-}
-
-fn update_working_copy(
-    ui: &mut Ui,
-    repo: &Arc<ReadonlyRepo>,
-    workspace_id: &WorkspaceId,
-    wc: &mut WorkingCopy,
-    old_commit: Option<&Commit>,
-) -> Result<Option<CheckoutStats>, CommandError> {
-    let new_commit_id = match repo.view().get_wc_commit_id(workspace_id) {
-        Some(new_commit_id) => new_commit_id,
-        None => {
-            // It seems the workspace was deleted, so we shouldn't try to update it.
-            return Ok(None);
-        }
-    };
-    let new_commit = repo.store().get_commit(new_commit_id)?;
-    let old_tree_id = old_commit.map(|commit| commit.tree_id().clone());
-    let stats = if Some(new_commit.tree_id()) != old_tree_id.as_ref() {
-        // TODO: CheckoutError::ConcurrentCheckout should probably just result in a
-        // warning for most commands (but be an error for the checkout command)
-        let stats = wc
-            .check_out(
-                repo.op_id().clone(),
-                old_tree_id.as_ref(),
-                &new_commit.tree(),
-            )
-            .map_err(|err| {
-                CommandError::InternalError(format!(
-                    "Failed to check out commit {}: {}",
-                    new_commit.id().hex(),
-                    err
-                ))
-            })?;
-        Some(stats)
-    } else {
-        // Record new operation id which represents the latest working-copy state
-        // TODO: no need to rewrite the tree_state file
-        let locked_wc = wc.start_mutation();
-        locked_wc.finish(repo.op_id().clone());
-        None
-    };
-    if Some(&new_commit) != old_commit {
-        ui.write("Working copy now at: ")?;
-        ui.write_commit_summary(repo.as_repo_ref(), workspace_id, &new_commit)?;
-        ui.write("\n")?;
-    }
-    Ok(stats)
-}
-
-/// Jujutsu (An experimental VCS)
-///
-/// To get started, see the tutorial at https://github.com/martinvonz/jj/blob/main/docs/tutorial.md.
-#[derive(clap::Parser, Clone, Debug)]
-#[clap(
-    name = "jj",
-    author = "Martin von Zweigbergk <martinvonz@google.com>",
-    version
-)]
-#[clap(mut_arg("help", |arg| {
-    arg
-        .help("Print help information, more help with --help than with -h")
-        .help_heading("GLOBAL OPTIONS")
-    }))]
-struct Args {
-    #[clap(flatten)]
-    global_args: GlobalArgs,
-}
-
-#[derive(clap::Args, Clone, Debug)]
-struct GlobalArgs {
-    /// Path to repository to operate on
-    ///
-    /// By default, Jujutsu searches for the closest .jj/ directory in an
-    /// ancestor of the current working directory.
-    #[clap(
-        long,
-        short = 'R',
-        global = true,
-        help_heading = "GLOBAL OPTIONS",
-        value_hint = clap::ValueHint::DirPath,
-    )]
-    repository: Option<String>,
-    /// Don't commit the working copy
-    ///
-    /// By default, Jujutsu commits the working copy on every command, unless
-    /// you load the repo at a specific operation with `--at-operation`. If
-    /// you want to avoid committing the working and instead see a possibly
-    /// stale working copy commit, you can use `--no-commit-working-copy`.
-    /// This may be useful e.g. in a command prompt, especially if you have
-    /// another process that commits the working copy.
-    #[clap(long, global = true, help_heading = "GLOBAL OPTIONS")]
-    no_commit_working_copy: bool,
-    /// Operation to load the repo at
-    ///
-    /// Operation to load the repo at. By default, Jujutsu loads the repo at the
-    /// most recent operation. You can use `--at-op=<operation ID>` to see what
-    /// the repo looked like at an earlier operation.
For example `jj - /// --at-op= st` will show you what `jj st` would have - /// shown you when the given operation had just finished. - /// - /// Use `jj op log` to find the operation ID you want. Any unambiguous - /// prefix of the operation ID is enough. - /// - /// When loading the repo at an earlier operation, the working copy will not - /// be automatically committed. - /// - /// It is possible to run mutating commands when loading the repo at an - /// earlier operation. Doing that is equivalent to having run concurrent - /// commands starting at the earlier operation. There's rarely a reason to - /// do that, but it is possible. - #[clap( - long, - visible_alias = "at-op", - global = true, - help_heading = "GLOBAL OPTIONS", - default_value = "@" - )] - at_operation: String, - /// When to colorize output (always, never, auto) - #[clap( - long, - value_name = "WHEN", - global = true, - help_heading = "GLOBAL OPTIONS" - )] - color: Option, -} +use crate::ui::Ui; #[derive(clap::Parser, Clone, Debug)] enum Commands { @@ -2103,19 +1048,6 @@ struct DebugOperationArgs { operation: String, } -fn short_commit_description(commit: &Commit) -> String { - let first_line = commit.description().split('\n').next().unwrap(); - format!("{} ({})", short_commit_hash(commit.id()), first_line) -} - -fn short_commit_hash(commit_id: &CommitId) -> String { - commit_id.hex()[0..12].to_string() -} - -fn short_operation_hash(operation_id: &OperationId) -> String { - operation_id.hex()[0..12].to_string() -} - fn add_to_git_exclude(ui: &mut Ui, git_repo: &git2::Repository) -> Result<(), CommandError> { let exclude_file_path = git_repo.path().join("info").join("exclude"); if exclude_file_path.exists() { @@ -2158,7 +1090,7 @@ fn cmd_version( command: &CommandHelper, _args: &VersionArgs, ) -> Result<(), CommandError> { - ui.write(&command.app.render_version())?; + ui.write(&command.app().render_version())?; Ok(()) } @@ -4176,7 +3108,7 @@ fn rebase_branch( .range(&RevsetExpression::commit(branch_commit.id().clone())) .roots(); let mut num_rebased = 0; - let store = workspace_command.repo.store(); + let store = workspace_command.repo().store(); for root_result in roots_expression .evaluate( workspace_command.repo().as_repo_ref(), @@ -4234,7 +3166,7 @@ fn rebase_revision( // branches and the working copy get updated to the rewritten commit.) 
let children_expression = RevsetExpression::commit(old_commit.id().clone()).children(); let mut num_rebased_descendants = 0; - let store = workspace_command.repo.store(); + let store = workspace_command.repo().store(); for child_commit in children_expression .evaluate( @@ -4306,7 +3238,7 @@ fn check_rebase_destinations( ) -> Result<(), CommandError> { for parent in new_parents { if workspace_command - .repo + .repo() .index() .is_ancestor(commit.id(), parent.id()) { @@ -4587,7 +3519,7 @@ fn cmd_debug( ) -> Result<(), CommandError> { match subcommand { DebugCommands::Completion(completion_matches) => { - let mut app = command.app.clone(); + let mut app = command.app().clone(); let mut buf = vec![]; let shell = if completion_matches.zsh { clap_complete::Shell::Zsh @@ -4601,7 +3533,7 @@ fn cmd_debug( } DebugCommands::Mangen(_mangen_matches) => { let mut buf = vec![]; - let man = clap_mangen::Man::new(command.app.clone()); + let man = clap_mangen::Man::new(command.app().clone()); man.render(&mut buf)?; ui.stdout_formatter().write_all(&buf)?; } @@ -4965,7 +3897,7 @@ fn cmd_workspace_add( let mut new_workspace_command = WorkspaceCommandHelper::for_loaded_repo( ui, new_workspace, - command.string_args.clone(), + command.string_args().clone(), command.global_args(), repo, )?; @@ -5583,96 +4515,6 @@ fn cmd_git( } } -fn string_list_from_config(value: config::Value) -> Option> { - match value { - Value { - kind: config::ValueKind::Array(elements), - .. - } => { - let mut strings = vec![]; - for arg in elements { - match arg { - config::Value { - kind: config::ValueKind::String(string_value), - .. - } => { - strings.push(string_value); - } - _ => { - return None; - } - } - } - Some(strings) - } - _ => None, - } -} - -fn resolve_aliases( - app: &clap::Command, - settings: &UserSettings, - string_args: &[String], -) -> Result, CommandError> { - let mut resolved_aliases = HashSet::new(); - let mut string_args = string_args.to_vec(); - let mut real_commands = HashSet::new(); - for command in app.get_subcommands() { - real_commands.insert(command.get_name().to_string()); - for alias in command.get_all_aliases() { - real_commands.insert(alias.to_string()); - } - } - loop { - let app_clone = app.clone().allow_external_subcommands(true); - let matches = app_clone.get_matches_from(&string_args); - if let Some((command_name, submatches)) = matches.subcommand() { - if !real_commands.contains(command_name) { - let alias_name = command_name.to_string(); - let alias_args = submatches - .values_of("") - .unwrap_or_default() - .map(|arg| arg.to_string()) - .collect_vec(); - if resolved_aliases.contains(&alias_name) { - return Err(CommandError::UserError(format!( - r#"Recursive alias definition involving "{alias_name}""# - ))); - } - match settings - .config() - .get::(&format!("alias.{}", alias_name)) - { - Ok(value) => { - if let Some(alias_definition) = string_list_from_config(value) { - assert!(string_args.ends_with(&alias_args)); - string_args.truncate(string_args.len() - 1 - alias_args.len()); - string_args.extend(alias_definition); - string_args.extend_from_slice(&alias_args); - resolved_aliases.insert(alias_name.clone()); - continue; - } else { - return Err(CommandError::UserError(format!( - r#"Alias definition for "{alias_name}" must be a string list"# - ))); - } - } - Err(config::ConfigError::NotFound(_)) => { - let mut app = app.clone(); - app.error(clap::ErrorKind::ArgumentNotFound, format!( - r#"Found argument '{alias_name}' which wasn't expected, or isn't valid in this context"# - )).exit(); - } - 
                    Err(err) => {
-                        return Err(CommandError::from(err));
-                    }
-                }
-            }
-        }
-        return Ok(string_args);
-    }
-}
-
 pub fn dispatch(ui: &mut Ui, args_os: ArgsOs) -> Result<(), CommandError> {
     let mut string_args: Vec<String> = vec![];
     for arg_os in args_os {
diff --git a/src/lib.rs b/src/lib.rs
index 99fcbb9f1..0a1d46845 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,6 +14,7 @@
 
 #![deny(unused_must_use)]
 
+pub mod cli_util;
 pub mod commands;
 pub mod config;
 pub mod diff_edit;
diff --git a/src/main.rs b/src/main.rs
index 69c18fac6..7b3254e8b 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -12,7 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-use jujutsu::commands::{dispatch, CommandError};
+use jujutsu::cli_util::CommandError;
+use jujutsu::commands::dispatch;
 use jujutsu::config::read_config;
 use jujutsu::ui::Ui;
 use jujutsu_lib::settings::UserSettings;
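
Note (not part of the patch): a minimal sketch of how a caller might consume the relocated error type after this split, importing `CommandError` from `jujutsu::cli_util` as the `src/main.rs` hunk above now does. The helper name `exit_code` and the specific exit codes are assumptions for illustration only; the crate's real error handling is not shown in this patch.

use jujutsu::cli_util::CommandError;

// Hypothetical helper: map the result of running a command (e.g. the value
// returned by `jujutsu::commands::dispatch`) to a process exit code.
fn exit_code(result: Result<(), CommandError>) -> i32 {
    match result {
        Ok(()) => 0,
        // User-facing problems (bad revsets, ambiguous operation IDs, ...).
        Err(CommandError::UserError(message)) => {
            eprintln!("Error: {message}");
            1
        }
        // Unexpected failures; 255 is an assumed code, not the crate's choice.
        Err(CommandError::InternalError(message)) => {
            eprintln!("Internal error: {message}");
            255
        }
        // Remaining variants (e.g. broken pipes) are collapsed here for brevity.
        Err(_) => 2,
    }
}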