forked from mirrors/jj
conflicts: make materialization async
We need to let async-ness propagate up from the backend because `block_on()` doesn't like to be called recursively. The conflict materialization code is a good place to make async because it doesn't depend on anything that isn't already async-ready.
This commit is contained in:
parent
e900c97618
commit
8764ad9826
10 changed files with 61 additions and 23 deletions
1
Cargo.lock
generated
1
Cargo.lock
generated
|
@ -1001,6 +1001,7 @@ dependencies = [
|
||||||
"crossterm 0.26.1",
|
"crossterm 0.26.1",
|
||||||
"dirs",
|
"dirs",
|
||||||
"esl01-renderdag",
|
"esl01-renderdag",
|
||||||
|
"futures 0.3.28",
|
||||||
"git2",
|
"git2",
|
||||||
"glob",
|
"glob",
|
||||||
"hex",
|
"hex",
|
||||||
|
|
|
@ -39,6 +39,7 @@ criterion = { workspace = true, optional = true }
|
||||||
crossterm = { workspace = true }
|
crossterm = { workspace = true }
|
||||||
dirs = { workspace = true }
|
dirs = { workspace = true }
|
||||||
esl01-renderdag = { workspace = true }
|
esl01-renderdag = { workspace = true }
|
||||||
|
futures = { workspace = true }
|
||||||
git2 = { workspace = true }
|
git2 = { workspace = true }
|
||||||
glob = { workspace = true }
|
glob = { workspace = true }
|
||||||
hex = { workspace = true }
|
hex = { workspace = true }
|
||||||
|
@ -89,4 +90,4 @@ watchman = ["jj-lib/watchman"]
|
||||||
[package.metadata.binstall]
|
[package.metadata.binstall]
|
||||||
# The archive name is jj, not jj-cli. Also, `cargo binstall` gets
|
# The archive name is jj, not jj-cli. Also, `cargo binstall` gets
|
||||||
# confused by the `v` before versions in archive name.
|
# confused by the `v` before versions in archive name.
|
||||||
pkg-url="{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
|
pkg-url = "{ repo }/releases/download/v{ version }/jj-v{ version }-{ target }.{ archive-format }"
|
||||||
|
|
|
@ -29,6 +29,7 @@ use std::{fmt, fs, io};
|
||||||
use clap::builder::NonEmptyStringValueParser;
|
use clap::builder::NonEmptyStringValueParser;
|
||||||
use clap::parser::ValueSource;
|
use clap::parser::ValueSource;
|
||||||
use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand};
|
use clap::{ArgGroup, Command, CommandFactory, FromArgMatches, Subcommand};
|
||||||
|
use futures::executor::block_on;
|
||||||
use indexmap::{IndexMap, IndexSet};
|
use indexmap::{IndexMap, IndexSet};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use jj_lib::backend::{CommitId, ObjectId, TreeValue};
|
use jj_lib::backend::{CommitId, ObjectId, TreeValue};
|
||||||
|
@ -1521,7 +1522,13 @@ fn cmd_cat(ui: &mut Ui, command: &CommandHelper, args: &CatArgs) -> Result<(), C
|
||||||
}
|
}
|
||||||
Err(conflict) => {
|
Err(conflict) => {
|
||||||
let mut contents = vec![];
|
let mut contents = vec![];
|
||||||
conflicts::materialize(&conflict, repo.store(), &path, &mut contents).unwrap();
|
block_on(conflicts::materialize(
|
||||||
|
&conflict,
|
||||||
|
repo.store(),
|
||||||
|
&path,
|
||||||
|
&mut contents,
|
||||||
|
))
|
||||||
|
.unwrap();
|
||||||
ui.request_pager();
|
ui.request_pager();
|
||||||
ui.stdout_formatter().write_all(&contents)?;
|
ui.stdout_formatter().write_all(&contents)?;
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,7 @@ use std::io;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use futures::executor::block_on;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use jj_lib::backend::{ObjectId, TreeValue};
|
use jj_lib::backend::{ObjectId, TreeValue};
|
||||||
use jj_lib::commit::Commit;
|
use jj_lib::commit::Commit;
|
||||||
|
@ -363,7 +364,13 @@ fn diff_content(
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
let mut content = vec![];
|
let mut content = vec![];
|
||||||
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
|
block_on(conflicts::materialize(
|
||||||
|
value,
|
||||||
|
repo.store(),
|
||||||
|
path,
|
||||||
|
&mut content,
|
||||||
|
))
|
||||||
|
.unwrap();
|
||||||
Ok(content)
|
Ok(content)
|
||||||
}
|
}
|
||||||
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => {
|
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) => {
|
||||||
|
@ -516,7 +523,13 @@ fn git_diff_part(
|
||||||
None => {
|
None => {
|
||||||
mode = "100644".to_string();
|
mode = "100644".to_string();
|
||||||
hash = "0000000000".to_string();
|
hash = "0000000000".to_string();
|
||||||
conflicts::materialize(value, repo.store(), path, &mut content).unwrap();
|
block_on(conflicts::materialize(
|
||||||
|
value,
|
||||||
|
repo.store(),
|
||||||
|
path,
|
||||||
|
&mut content,
|
||||||
|
))
|
||||||
|
.unwrap();
|
||||||
}
|
}
|
||||||
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {
|
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {
|
||||||
panic!("Unexpected {value:?} in diff at path {path:?}");
|
panic!("Unexpected {value:?} in diff at path {path:?}");
|
||||||
|
|
|
@ -542,6 +542,7 @@ pub fn edit_merge_builtin(
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
|
use futures::executor::block_on;
|
||||||
use jj_lib::conflicts::extract_as_single_hunk;
|
use jj_lib::conflicts::extract_as_single_hunk;
|
||||||
use jj_lib::repo::Repo;
|
use jj_lib::repo::Repo;
|
||||||
use testutils::TestRepo;
|
use testutils::TestRepo;
|
||||||
|
@ -724,7 +725,7 @@ mod tests {
|
||||||
to_file_id(right_tree.path_value(&path)),
|
to_file_id(right_tree.path_value(&path)),
|
||||||
],
|
],
|
||||||
);
|
);
|
||||||
let content = extract_as_single_hunk(&merge, store, &path);
|
let content = block_on(extract_as_single_hunk(&merge, store, &path));
|
||||||
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
|
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
|
||||||
let merge_result = files::merge(slices);
|
let merge_result = files::merge(slices);
|
||||||
let sections = make_merge_sections(merge_result).unwrap();
|
let sections = make_merge_sections(merge_result).unwrap();
|
||||||
|
|
|
@ -6,6 +6,7 @@ use std::process::{Command, ExitStatus, Stdio};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use config::ConfigError;
|
use config::ConfigError;
|
||||||
|
use futures::executor::block_on;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
|
use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
|
||||||
use jj_lib::conflicts::{self, materialize_merge_result};
|
use jj_lib::conflicts::{self, materialize_merge_result};
|
||||||
|
@ -357,12 +358,12 @@ pub fn run_mergetool_external(
|
||||||
}
|
}
|
||||||
|
|
||||||
let new_file_ids = if editor.merge_tool_edits_conflict_markers {
|
let new_file_ids = if editor.merge_tool_edits_conflict_markers {
|
||||||
conflicts::update_from_content(
|
block_on(conflicts::update_from_content(
|
||||||
&file_merge,
|
&file_merge,
|
||||||
tree.store(),
|
tree.store(),
|
||||||
repo_path,
|
repo_path,
|
||||||
output_file_contents.as_slice(),
|
output_file_contents.as_slice(),
|
||||||
)?
|
))?
|
||||||
} else {
|
} else {
|
||||||
let new_file_id = tree
|
let new_file_id = tree
|
||||||
.store()
|
.store()
|
||||||
|
|
|
@ -18,6 +18,7 @@ mod external;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use config::ConfigError;
|
use config::ConfigError;
|
||||||
|
use futures::executor::block_on;
|
||||||
use jj_lib::backend::MergedTreeId;
|
use jj_lib::backend::MergedTreeId;
|
||||||
use jj_lib::conflicts::extract_as_single_hunk;
|
use jj_lib::conflicts::extract_as_single_hunk;
|
||||||
use jj_lib::gitignore::GitIgnoreFile;
|
use jj_lib::gitignore::GitIgnoreFile;
|
||||||
|
@ -112,7 +113,7 @@ pub fn run_mergetool(
|
||||||
sides: file_merge.num_sides(),
|
sides: file_merge.num_sides(),
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path);
|
let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path));
|
||||||
|
|
||||||
let editor = get_merge_tool_from_settings(ui, settings)?;
|
let editor = get_merge_tool_from_settings(ui, settings)?;
|
||||||
match editor {
|
match editor {
|
||||||
|
|
|
@ -17,6 +17,7 @@
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use std::iter::zip;
|
use std::iter::zip;
|
||||||
|
|
||||||
|
use futures::StreamExt;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::backend::{BackendResult, FileId, TreeValue};
|
use crate::backend::{BackendResult, FileId, TreeValue};
|
||||||
|
@ -57,12 +58,13 @@ fn write_diff_hunks(hunks: &[DiffHunk], file: &mut dyn Write) -> std::io::Result
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
|
async fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> ContentHunk {
|
||||||
match term {
|
match term {
|
||||||
Some(id) => {
|
Some(id) => {
|
||||||
let mut content = vec![];
|
let mut content = vec![];
|
||||||
store
|
store
|
||||||
.read_file(path, id)
|
.read_file_async(path, id)
|
||||||
|
.await
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.read_to_end(&mut content)
|
.read_to_end(&mut content)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
@ -74,22 +76,26 @@ fn get_file_contents(store: &Store, path: &RepoPath, term: &Option<FileId>) -> C
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn extract_as_single_hunk(
|
pub async fn extract_as_single_hunk(
|
||||||
merge: &Merge<Option<FileId>>,
|
merge: &Merge<Option<FileId>>,
|
||||||
store: &Store,
|
store: &Store,
|
||||||
path: &RepoPath,
|
path: &RepoPath,
|
||||||
) -> Merge<ContentHunk> {
|
) -> Merge<ContentHunk> {
|
||||||
merge.map(|term| get_file_contents(store, path, term))
|
let builder: MergeBuilder<ContentHunk> = futures::stream::iter(merge.iter())
|
||||||
|
.then(|term| get_file_contents(store, path, term))
|
||||||
|
.collect()
|
||||||
|
.await;
|
||||||
|
builder.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn materialize(
|
pub async fn materialize(
|
||||||
conflict: &Merge<Option<TreeValue>>,
|
conflict: &Merge<Option<TreeValue>>,
|
||||||
store: &Store,
|
store: &Store,
|
||||||
path: &RepoPath,
|
path: &RepoPath,
|
||||||
output: &mut dyn Write,
|
output: &mut dyn Write,
|
||||||
) -> std::io::Result<()> {
|
) -> std::io::Result<()> {
|
||||||
if let Some(file_merge) = conflict.to_file_merge() {
|
if let Some(file_merge) = conflict.to_file_merge() {
|
||||||
let content = extract_as_single_hunk(&file_merge, store, path);
|
let content = extract_as_single_hunk(&file_merge, store, path).await;
|
||||||
materialize_merge_result(&content, output)
|
materialize_merge_result(&content, output)
|
||||||
} else {
|
} else {
|
||||||
// Unless all terms are regular files, we can't do much better than to try to
|
// Unless all terms are regular files, we can't do much better than to try to
|
||||||
|
@ -285,7 +291,7 @@ fn parse_conflict_hunk(input: &[u8]) -> Merge<ContentHunk> {
|
||||||
/// Parses conflict markers in `content` and returns an updated version of
|
/// Parses conflict markers in `content` and returns an updated version of
|
||||||
/// `file_ids` with the new contents. If no (valid) conflict markers remain, a
|
/// `file_ids` with the new contents. If no (valid) conflict markers remain, a
|
||||||
/// single resolves `FileId` will be returned.
|
/// single resolves `FileId` will be returned.
|
||||||
pub fn update_from_content(
|
pub async fn update_from_content(
|
||||||
file_ids: &Merge<Option<FileId>>,
|
file_ids: &Merge<Option<FileId>>,
|
||||||
store: &Store,
|
store: &Store,
|
||||||
path: &RepoPath,
|
path: &RepoPath,
|
||||||
|
@ -297,7 +303,7 @@ pub fn update_from_content(
|
||||||
// conflicts (for example) are not converted to regular files in the working
|
// conflicts (for example) are not converted to regular files in the working
|
||||||
// copy.
|
// copy.
|
||||||
let mut old_content = Vec::with_capacity(content.len());
|
let mut old_content = Vec::with_capacity(content.len());
|
||||||
let merge_hunk = extract_as_single_hunk(file_ids, store, path);
|
let merge_hunk = extract_as_single_hunk(file_ids, store, path).await;
|
||||||
materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
|
materialize_merge_result(&merge_hunk, &mut old_content).unwrap();
|
||||||
if content == old_content {
|
if content == old_content {
|
||||||
return Ok(file_ids.clone());
|
return Ok(file_ids.clone());
|
||||||
|
|
|
@ -30,6 +30,7 @@ use std::sync::mpsc::{channel, Sender};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::UNIX_EPOCH;
|
use std::time::UNIX_EPOCH;
|
||||||
|
|
||||||
|
use futures::executor::block_on;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use once_cell::unsync::OnceCell;
|
use once_cell::unsync::OnceCell;
|
||||||
use prost::Message;
|
use prost::Message;
|
||||||
|
@ -955,12 +956,12 @@ impl TreeState {
|
||||||
message: format!("Failed to open file {}", disk_path.display()),
|
message: format!("Failed to open file {}", disk_path.display()),
|
||||||
err: err.into(),
|
err: err.into(),
|
||||||
})?;
|
})?;
|
||||||
let new_file_ids = conflicts::update_from_content(
|
let new_file_ids = block_on(conflicts::update_from_content(
|
||||||
&old_file_ids,
|
&old_file_ids,
|
||||||
self.store.as_ref(),
|
self.store.as_ref(),
|
||||||
repo_path,
|
repo_path,
|
||||||
&content,
|
&content,
|
||||||
)?;
|
))?;
|
||||||
match new_file_ids.into_resolved() {
|
match new_file_ids.into_resolved() {
|
||||||
Ok(file_id) => {
|
Ok(file_id) => {
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
|
@ -1062,8 +1063,13 @@ impl TreeState {
|
||||||
err: err.into(),
|
err: err.into(),
|
||||||
})?;
|
})?;
|
||||||
let mut conflict_data = vec![];
|
let mut conflict_data = vec![];
|
||||||
conflicts::materialize(conflict, self.store.as_ref(), path, &mut conflict_data)
|
block_on(conflicts::materialize(
|
||||||
.expect("Failed to materialize conflict to in-memory buffer");
|
conflict,
|
||||||
|
self.store.as_ref(),
|
||||||
|
path,
|
||||||
|
&mut conflict_data,
|
||||||
|
))
|
||||||
|
.expect("Failed to materialize conflict to in-memory buffer");
|
||||||
file.write_all(&conflict_data)
|
file.write_all(&conflict_data)
|
||||||
.map_err(|err| CheckoutError::Other {
|
.map_err(|err| CheckoutError::Other {
|
||||||
message: format!("Failed to write conflict to file {}", disk_path.display()),
|
message: format!("Failed to write conflict to file {}", disk_path.display()),
|
||||||
|
|
|
@ -12,6 +12,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
|
use futures::executor::block_on;
|
||||||
use jj_lib::backend::FileId;
|
use jj_lib::backend::FileId;
|
||||||
use jj_lib::conflicts::{
|
use jj_lib::conflicts::{
|
||||||
extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content,
|
extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content,
|
||||||
|
@ -629,7 +630,7 @@ fn test_update_conflict_from_content() {
|
||||||
// If the content is unchanged compared to the materialized value, we get the
|
// If the content is unchanged compared to the materialized value, we get the
|
||||||
// old conflict id back.
|
// old conflict id back.
|
||||||
let materialized = materialize_conflict_string(store, &path, &conflict);
|
let materialized = materialize_conflict_string(store, &path, &conflict);
|
||||||
let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
|
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
|
||||||
assert_eq!(parse(materialized.as_bytes()), conflict);
|
assert_eq!(parse(materialized.as_bytes()), conflict);
|
||||||
|
|
||||||
// If the conflict is resolved, we get None back to indicate that.
|
// If the conflict is resolved, we get None back to indicate that.
|
||||||
|
@ -673,7 +674,7 @@ fn test_update_conflict_from_content_modify_delete() {
|
||||||
// If the content is unchanged compared to the materialized value, we get the
|
// If the content is unchanged compared to the materialized value, we get the
|
||||||
// old conflict id back.
|
// old conflict id back.
|
||||||
let materialized = materialize_conflict_string(store, &path, &conflict);
|
let materialized = materialize_conflict_string(store, &path, &conflict);
|
||||||
let parse = |content| update_from_content(&conflict, store, &path, content).unwrap();
|
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
|
||||||
assert_eq!(parse(materialized.as_bytes()), conflict);
|
assert_eq!(parse(materialized.as_bytes()), conflict);
|
||||||
|
|
||||||
// If the conflict is resolved, we get None back to indicate that.
|
// If the conflict is resolved, we get None back to indicate that.
|
||||||
|
@ -704,7 +705,7 @@ fn materialize_conflict_string(
|
||||||
conflict: &Merge<Option<FileId>>,
|
conflict: &Merge<Option<FileId>>,
|
||||||
) -> String {
|
) -> String {
|
||||||
let mut result: Vec<u8> = vec![];
|
let mut result: Vec<u8> = vec![];
|
||||||
let contents = extract_as_single_hunk(conflict, store, path);
|
let contents = block_on(extract_as_single_hunk(conflict, store, path));
|
||||||
materialize_merge_result(&contents, &mut result).unwrap();
|
materialize_merge_result(&contents, &mut result).unwrap();
|
||||||
String::from_utf8(result).unwrap()
|
String::from_utf8(result).unwrap()
|
||||||
}
|
}
|
||||||
|
|
Loading…
Reference in a new issue