ok/jj
1
0
Fork 0
forked from mirrors/jj

conflicts: make materialization async

We need to let async-ness propagate up from the backend because
`block_on()` doesn't like to be called recursively. The conflict
materialization code is a good place to make async because it doesn't
depend on anything that isn't already async-ready.
This commit is contained in:
Martin von Zweigbergk 2023-10-18 16:57:19 -07:00 committed by Martin von Zweigbergk
parent e900c97618
commit 8764ad9826
10 changed files with 61 additions and 23 deletions

1
Cargo.lock generated
View file

@ -1001,6 +1001,7 @@ dependencies = [
"crossterm 0.26.1",
"dirs",
"esl01-renderdag",
"futures 0.3.28",
"git2",
"glob",
"hex",

View file

@ -39,6 +39,7 @@ criterion = { workspace = true, optional = true }
crossterm = { workspace = true }
dirs = { workspace = true }
esl01-renderdag = { workspace = true }
futures = { workspace = true }
git2 = { workspace = true }
glob = { workspace = true }
hex = { workspace = true }
@ -89,4 +90,4 @@ watchman = ["jj-lib/watchman"]
[package.metadata.binstall]
# The archive name is jj, not jj-cli. Also, `cargo binstall` gets
# confused by the `v` before versions in archive name.
pkg-url="{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
pkg-url = "{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"

View file

@ -29,6 +29,7 @@ use std::{fmt, fs, io};
use clap::builder::NonEmptyStringValueParser;
use clap::parser::ValueSource;
use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand};
use futures::executor::block_on;
use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;
use jj_lib::backend::{CommitId, ObjectId, TreeValue};
@ -1521,7 +1522,13 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C
}
Err(conflict) => {
let mut contents = vec![];
conflicts::materialize(&conflict, repo.store(), &path, &mut contents).unwrap();
block_on(conflicts::materialize(
&conflict,
repo.store(),
&path,
&mut contents,
))
.unwrap();
ui.request_pager();
ui.stdout_formatter().write_all(&contents)?;
}

View file

@ -18,6 +18,7 @@ use std::io;
use std::ops::Range;
use std::sync::Arc;
use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{ObjectId, TreeValue};
use jj_lib::commit::Commit;
@ -363,7 +364,13 @@ fn diff_content(
}
None => {
let mut content = vec![];
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
.unwrap();
Ok(content)
}
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => {
@ -516,7 +523,13 @@ fn git_diff_part(
None => {
mode = "100644".to_string();
hash = "0000000000".to_string();
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
.unwrap();
}
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {
panic!("Unexpected {value:?} in diff at path {path:?}");

View file

@ -542,6 +542,7 @@ pub fn edit_merge_builtin(
#[cfg(test)]
mod tests {
use futures::executor::block_on;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::repo::Repo;
use testutils::TestRepo;
@ -724,7 +725,7 @@ mod tests {
to_file_id(right_tree.path_value(&path)),
],
);
let content = extract_as_single_hunk(&merge, store, &path);
let content = block_on(extract_as_single_hunk(&merge, store, &path));
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
let merge_result = files::merge(slices);
let sections = make_merge_sections(merge_result).unwrap();

View file

@ -6,6 +6,7 @@ use std::process::{Command, ExitStatus, Stdio};
use std::sync::Arc;
use config::ConfigError;
use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
use jj_lib::conflicts::{self, materialize_merge_result};
@ -357,12 +358,12 @@ pub fn run_mergetool_external(
}
let new_file_ids = if editor.merge_tool_edits_conflict_markers {
conflicts::update_from_content(
block_on(conflicts::update_from_content(
&file_merge,
tree.store(),
repo_path,
output_file_contents.as_slice(),
)?
))?
} else {
let new_file_id = tree
.store()

View file

@ -18,6 +18,7 @@ mod external;
use std::sync::Arc;
use config::ConfigError;
use futures::executor::block_on;
use jj_lib::backend::MergedTreeId;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::gitignore::GitIgnoreFile;
@ -112,7 +113,7 @@ pub fn run_mergetool(
sides: file_merge.num_sides(),
});
};
let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path);
let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path));
let editor = get_merge_tool_from_settings(ui, settings)?;
match editor {

View file

@ -17,6 +17,7 @@
use std::io::Write;
use std::iter::zip;
use futures::StreamExt;
use itertools::Itertools;
use crate::backend::{BackendResult, FileId, TreeValue};
@ -57,12 +58,13 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
Ok(())
}
fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
match term {
Some(id) => {
let mut content = vec![];
store
.read_file(path, id)
.read_file_async(path, id)
.await
.unwrap()
.read_to_end(&mut content)
.unwrap();
@ -74,22 +76,26 @@ fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> C
}
}
pub fn extract_as_single_hunk(
pub async fn extract_as_single_hunk(
merge: &Merge<Option<FileId>>,
store: &Store,
path: &RepoPath,
) -> Merge<ContentHunk> {
merge.map(|term| get_file_contents(store, path, term))
let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter())
.then(|term| get_file_contents(store, path, term))
.collect()
.await;
builder.build()
}
pub fn materialize(
pub async fn materialize(
conflict: &Merge<Option<TreeValue>>,
store: &Store,
path: &RepoPath,
output: &mut dyn Write,
) -> std::io::Result<()> {
if let Some(file_merge) = conflict.to_file_merge() {
let content = extract_as_single_hunk(&file_merge, store, path);
let content = extract_as_single_hunk(&file_merge, store, path).await;
materialize_merge_result(&content, output)
} else {
// Unless all terms are regular files, we can't do much better than to try to
@ -285,7 +291,7 @@ fn parse_conflict_hunk(input: &[u8]) -> Merge<ContentHunk> {
/// Parses conflict markers in `content` and returns an updated version of
/// `file_ids` with the new contents. If no (valid) conflict markers remain, a
/// single resolved `FileId` will be returned.
pub fn update_from_content(
pub async fn update_from_content(
file_ids: &Merge<Option<FileId>>,
store: &Store,
path: &RepoPath,
@ -297,7 +303,7 @@ pub fn update_from_content(
// conflicts (for example) are not converted to regular files in the working
// copy.
let mut old_content = Vec::with_capacity(content.len());
let merge_hunk = extract_as_single_hunk(file_ids, store, path);
let merge_hunk = extract_as_single_hunk(file_ids, store, path).await;
materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
if content == old_content {
return Ok(file_ids.clone());

View file

@ -30,6 +30,7 @@ use std::sync::mpsc::{channel, Sender};
use std::sync::Arc;
use std::time::UNIX_EPOCH;
use futures::executor::block_on;
use itertools::Itertools;
use once_cell::unsync::OnceCell;
use prost::Message;
@ -955,12 +956,12 @@ impl TreeState {
message: format!("Failed to open file {}", disk_path.display()),
err: err.into(),
})?;
let new_file_ids = conflicts::update_from_content(
let new_file_ids = block_on(conflicts::update_from_content(
&old_file_ids,
self.store.as_ref(),
repo_path,
&content,
)?;
))?;
match new_file_ids.into_resolved() {
Ok(file_id) => {
#[cfg(windows)]
@ -1062,8 +1063,13 @@ impl TreeState {
err: err.into(),
})?;
let mut conflict_data = vec![];
conflicts::materialize(conflict, self.store.as_ref(), path, &mut conflict_data)
.expect("Failed to materialize conflict to in-memory buffer");
block_on(conflicts::materialize(
conflict,
self.store.as_ref(),
path,
&mut conflict_data,
))
.expect("Failed to materialize conflict to in-memory buffer");
file.write_all(&conflict_data)
.map_err(|err| CheckoutError::Other {
message: format!("Failed to write conflict to file {}", disk_path.display()),

View file

@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use futures::executor::block_on;
use jj_lib::backend::FileId;
use jj_lib::conflicts::{
extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content,
@ -629,7 +630,7 @@ fn test_update_conflict_from_content() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
@ -673,7 +674,7 @@ fn test_update_conflict_from_content_modify_delete() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
@ -704,7 +705,7 @@ fn materialize_conflict_string(
conflict: &Merge<Option<FileId>>,
) -> String {
let mut result: Vec<u8> = vec![];
let contents = extract_as_single_hunk(conflict, store, path);
let contents = block_on(extract_as_single_hunk(conflict, store, path));
materialize_merge_result(&contents, &mut result).unwrap();
String::from_utf8(result).unwrap()
}