diff --git a/Cargo.lock b/Cargo.lock
index bb185e084..70857d82c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1001,6 +1001,7 @@ dependencies = [
  "crossterm 0.26.1",
  "dirs",
  "esl01-renderdag",
+ "futures 0.3.28",
  "git2",
  "glob",
  "hex",
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index abdfd4768..9d4c4e87d 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -39,6 +39,7 @@ criterion = { workspace = true, optional = true }
 crossterm = { workspace = true }
 dirs = { workspace = true }
 esl01-renderdag = { workspace = true }
+futures = { workspace = true }
 git2 = { workspace = true }
 glob = { workspace = true }
 hex = { workspace = true }
@@ -89,4 +90,4 @@ watchman = ["jj-lib/watchman"]
 [package.metadata.binstall]
 # The archive name is jj, not jj-cli. Also, `cargo binstall` gets
 # confused by the `v` before versions in archive name.
-pkg-url="{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
+pkg-url = "{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
diff --git a/cli/src/commands/mod.rs b/cli/src/commands/mod.rs
index 6ad313b29..461ea05e4 100644
--- a/cli/src/commands/mod.rs
+++ b/cli/src/commands/mod.rs
@@ -29,6 +29,7 @@ use std::{fmt, fs, io};
 use clap::builder::NonEmptyStringValueParser;
 use clap::parser::ValueSource;
 use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand};
+use futures::executor::block_on;
 use indexmap::{IndexMap, IndexSet};
 use itertools::Itertools;
 use jj_lib::backend::{CommitId, ObjectId, TreeValue};
@@ -1521,7 +1522,13 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C
         }
         Err(conflict) => {
             let mut contents = vec![];
-            conflicts::materialize(&conflict, repo.store(), &path, &mut contents).unwrap();
+            block_on(conflicts::materialize(
+                &conflict,
+                repo.store(),
+                &path,
+                &mut contents,
+            ))
+            .unwrap();
             ui.request_pager();
             ui.stdout_formatter().write_all(&contents)?;
         }
diff --git a/cli/src/diff_util.rs b/cli/src/diff_util.rs
index f3b8a05fd..c61f13417 100644
--- a/cli/src/diff_util.rs
+++ b/cli/src/diff_util.rs
@@ -18,6 +18,7 @@ use std::io;
 use std::ops::Range;
 use std::sync::Arc;
 
+use futures::executor::block_on;
 use itertools::Itertools;
 use jj_lib::backend::{ObjectId, TreeValue};
 use jj_lib::commit::Commit;
@@ -363,7 +364,13 @@ fn diff_content(
         }
         None => {
             let mut content = vec![];
-            conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
+            block_on(conflicts::materialize(
+                value,
+                repo.store(),
+                path,
+                &mut content,
+            ))
+            .unwrap();
             Ok(content)
         }
         Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => {
@@ -516,7 +523,13 @@
         None => {
             mode = "100644".to_string();
             hash = "0000000000".to_string();
-            conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
+            block_on(conflicts::materialize(
+                value,
+                repo.store(),
+                path,
+                &mut content,
+            ))
+            .unwrap();
         }
         Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {
             panic!("Unexpected {value:?} in diff at path {path:?}");
diff --git a/cli/src/merge_tools/builtin.rs b/cli/src/merge_tools/builtin.rs
index 705ed9bb2..180836764 100644
--- a/cli/src/merge_tools/builtin.rs
+++ b/cli/src/merge_tools/builtin.rs
@@ -542,6 +542,7 @@ pub fn edit_merge_builtin(
 
 #[cfg(test)]
 mod tests {
+    use futures::executor::block_on;
     use jj_lib::conflicts::extract_as_single_hunk;
     use jj_lib::repo::Repo;
     use testutils::TestRepo;
@@ -724,7 +725,7 @@ mod tests {
                 to_file_id(right_tree.path_value(&path)),
             ],
         );
-        let content = extract_as_single_hunk(&merge, store, &path);
+        let content = block_on(extract_as_single_hunk(&merge, store, &path));
         let slices = content.map(|ContentHunk(buf)| buf.as_slice());
         let merge_result = files::merge(slices);
         let sections = make_merge_sections(merge_result).unwrap();
diff --git a/cli/src/merge_tools/external.rs b/cli/src/merge_tools/external.rs
index a4ee7e88a..5cd574af2 100644
--- a/cli/src/merge_tools/external.rs
+++ b/cli/src/merge_tools/external.rs
@@ -6,6 +6,7 @@ use std::process::{Command, ExitStatus, Stdio};
 use std::sync::Arc;
 
 use config::ConfigError;
+use futures::executor::block_on;
 use itertools::Itertools;
 use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
 use jj_lib::conflicts::{self, materialize_merge_result};
@@ -357,12 +358,12 @@
     }
 
     let new_file_ids = if editor.merge_tool_edits_conflict_markers {
-        conflicts::update_from_content(
+        block_on(conflicts::update_from_content(
             &file_merge,
             tree.store(),
             repo_path,
             output_file_contents.as_slice(),
-        )?
+        ))?
     } else {
         let new_file_id = tree
             .store()
diff --git a/cli/src/merge_tools/mod.rs b/cli/src/merge_tools/mod.rs
index 7b9a6e40d..965a7d56b 100644
--- a/cli/src/merge_tools/mod.rs
+++ b/cli/src/merge_tools/mod.rs
@@ -18,6 +18,7 @@ mod external;
 use std::sync::Arc;
 
 use config::ConfigError;
+use futures::executor::block_on;
 use jj_lib::backend::MergedTreeId;
 use jj_lib::conflicts::extract_as_single_hunk;
 use jj_lib::gitignore::GitIgnoreFile;
@@ -112,7 +113,7 @@ pub fn run_mergetool(
             sides: file_merge.num_sides(),
         });
     };
-    let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path);
+    let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path));
 
     let editor = get_merge_tool_from_settings(ui, settings)?;
     match editor {
diff --git a/lib/src/conflicts.rs b/lib/src/conflicts.rs
index c7c9be761..b09a1badd 100644
--- a/lib/src/conflicts.rs
+++ b/lib/src/conflicts.rs
@@ -17,6 +17,7 @@
 use std::io::Write;
 use std::iter::zip;
 
+use futures::StreamExt;
 use itertools::Itertools;
 
 use crate::backend::{BackendResult, FileId, TreeValue};
@@ -57,12 +58,13 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
     Ok(())
 }
 
-fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
+async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
     match term {
         Some(id) => {
             let mut content = vec![];
             store
-                .read_file(path, id)
+                .read_file_async(path, id)
+                .await
                 .unwrap()
                 .read_to_end(&mut content)
                 .unwrap();
@@ -74,22 +76,26 @@ ...
     }
 }
 
-pub fn extract_as_single_hunk(
+pub async fn extract_as_single_hunk(
     merge: &Merge<Option<FileId>>,
     store: &Store,
     path: &RepoPath,
 ) -> Merge<ContentHunk> {
-    merge.map(|term| get_file_contents(store, path, term))
+    let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter())
+        .then(|term| get_file_contents(store, path, term))
+        .collect()
+        .await;
+    builder.build()
 }
 
-pub fn materialize(
+pub async fn materialize(
     conflict: &Merge<Option<FileId>>,
     store: &Store,
     path: &RepoPath,
     output: &mut dyn Write,
 ) -> std::io::Result<()> {
     if let Some(file_merge) = conflict.to_file_merge() {
-        let content = extract_as_single_hunk(&file_merge, store, path);
+        let content = extract_as_single_hunk(&file_merge, store, path).await;
         materialize_merge_result(&content, output)
     } else {
         // Unless all terms are regular files, we can't do much better than to try to
@@ -285,7 +291,7 @@ fn parse_conflict_hunk(input: &[u8]) -> Merge<Option<FileId>> {
 /// Parses conflict markers in `content` and returns an updated version of
 /// `file_ids` with the new contents. If no (valid) conflict markers remain, a
 /// single resolves `FileId` will be returned.
-pub fn update_from_content(
+pub async fn update_from_content(
     file_ids: &Merge<Option<FileId>>,
     store: &Store,
     path: &RepoPath,
@@ -297,7 +303,7 @@
     // conflicts (for example) are not converted to regular files in the working
     // copy.
     let mut old_content = Vec::with_capacity(content.len());
-    let merge_hunk = extract_as_single_hunk(file_ids, store, path);
+    let merge_hunk = extract_as_single_hunk(file_ids, store, path).await;
     materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
     if content == old_content {
         return Ok(file_ids.clone());
diff --git a/lib/src/local_working_copy.rs b/lib/src/local_working_copy.rs
index da52f0649..1d7c54753 100644
--- a/lib/src/local_working_copy.rs
+++ b/lib/src/local_working_copy.rs
@@ -30,6 +30,7 @@ use std::sync::mpsc::{channel, Sender};
 use std::sync::Arc;
 use std::time::UNIX_EPOCH;
 
+use futures::executor::block_on;
 use itertools::Itertools;
 use once_cell::unsync::OnceCell;
 use prost::Message;
@@ -955,12 +956,12 @@
                 message: format!("Failed to open file {}", disk_path.display()),
                 err: err.into(),
             })?;
-            let new_file_ids = conflicts::update_from_content(
+            let new_file_ids = block_on(conflicts::update_from_content(
                 &old_file_ids,
                 self.store.as_ref(),
                 repo_path,
                 &content,
-            )?;
+            ))?;
             match new_file_ids.into_resolved() {
                 Ok(file_id) => {
                     #[cfg(windows)]
@@ -1062,8 +1063,13 @@
                 err: err.into(),
             })?;
         let mut conflict_data = vec![];
-        conflicts::materialize(conflict, self.store.as_ref(), path, &mut conflict_data)
-            .expect("Failed to materialize conflict to in-memory buffer");
+        block_on(conflicts::materialize(
+            conflict,
+            self.store.as_ref(),
+            path,
+            &mut conflict_data,
+        ))
+        .expect("Failed to materialize conflict to in-memory buffer");
         file.write_all(&conflict_data)
             .map_err(|err| CheckoutError::Other {
                 message: format!("Failed to write conflict to file {}", disk_path.display()),
diff --git a/lib/tests/test_conflicts.rs b/lib/tests/test_conflicts.rs
index b2a6bfa85..352ff3d6b 100644
--- a/lib/tests/test_conflicts.rs
+++ b/lib/tests/test_conflicts.rs
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+use futures::executor::block_on;
 use jj_lib::backend::FileId;
 use jj_lib::conflicts::{
     extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content,
@@ -629,7 +630,7 @@ fn test_update_conflict_from_content() {
     // If the content is unchanged compared to the materialized value, we get the
     // old conflict id back.
     let materialized = materialize_conflict_string(store, &path, &conflict);
-    let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
+    let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
     assert_eq!(parse(materialized.as_bytes()), conflict);
 
     // If the conflict is resolved, we get None back to indicate that.
@@ -673,7 +674,7 @@ fn test_update_conflict_from_content_modify_delete() {
     // If the content is unchanged compared to the materialized value, we get the
     // old conflict id back.
     let materialized = materialize_conflict_string(store, &path, &conflict);
-    let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
+    let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
     assert_eq!(parse(materialized.as_bytes()), conflict);
 
     // If the conflict is resolved, we get None back to indicate that.
@@ -704,7 +705,7 @@ fn materialize_conflict_string(
     conflict: &Merge<Option<FileId>>,
 ) -> String {
     let mut result: Vec<u8> = vec![];
-    let contents = extract_as_single_hunk(conflict, store, path);
+    let contents = block_on(extract_as_single_hunk(conflict, store, path));
     materialize_merge_result(&contents, &mut result).unwrap();
     String::from_utf8(result).unwrap()
 }
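Not part of the patch, for context: every synchronous call site above drives the new async conflict functions with futures::executor::block_on. A minimal sketch of that pattern as a standalone helper; the helper name materialize_to_string is made up for illustration, and the jj_lib module paths are assumed from the crate layout used elsewhere in this diff.

    // Sketch only: run the now-async conflicts::materialize() from synchronous
    // code, mirroring the block_on() calls introduced by this patch.
    use futures::executor::block_on;
    use jj_lib::backend::FileId;
    use jj_lib::conflicts;
    use jj_lib::merge::Merge;
    use jj_lib::repo_path::RepoPath;
    use jj_lib::store::Store;

    fn materialize_to_string(
        conflict: &Merge<Option<FileId>>,
        store: &Store,
        path: &RepoPath,
    ) -> std::io::Result<String> {
        let mut buf = Vec::new();
        // block_on() drives the future to completion on the current thread.
        block_on(conflicts::materialize(conflict, store, path, &mut buf))?;
        Ok(String::from_utf8_lossy(&buf).into_owned())
    }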