
async: switch to pollster's block_on()

During the transition to using more async code, I keep running into
https://github.com/rust-lang/futures-rs/issues/2090. Right now, I want
to convert `MergedTree::diff()` into a `Stream`. I don't want to
update all call sites at once, so instead I'm adding a
`MergedTree::diff_stream()` method, which just wraps
`MergedTree::diff()` in a `Stream`. However, since the iterator is
synchronous, it needs to block on the async `Backend::read_tree()`
calls. If we then also block on the `Stream` in the CLI, we run into
the panic.
Martin von Zweigbergk authored on 2023-10-28 16:15:01 -07:00; committed by Martin von Zweigbergk
parent b7c480a575
commit 24b706641f
15 changed files with 91 additions and 80 deletions
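The sketch below is not part of this commit (the `read_value*` names are made up for illustration); it just shows why the switch matters. `futures::executor::block_on` refuses to run while another executor is already active on the same thread, so the nested-blocking pattern described in the message panics (rust-lang/futures-rs#2090), whereas pollster's `FutureExt::block_on` simply polls the future and parks the thread, so nesting works.

use pollster::FutureExt as _;

// Stand-in for an async backend call such as `Backend::read_tree()`.
async fn read_value() -> u32 {
    42
}

// Synchronous wrapper, analogous to the iterator behind `MergedTree::diff()`,
// that has to block on the async call.
fn read_value_sync() -> u32 {
    read_value().block_on()
}

fn main() {
    // Block on an outer future whose poll path calls the blocking wrapper,
    // similar to driving `MergedTree::diff_stream()` from the CLI while the
    // underlying iterator blocks on `Backend::read_tree()`. With
    // `futures::executor::block_on`, the inner call would panic with
    // "cannot execute `LocalPool` executor from within another executor";
    // pollster performs no such check.
    let value = async { read_value_sync() }.block_on();
    assert_eq!(value, 42);
}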

Cargo.lock (generated)
View file

@@ -1628,6 +1628,7 @@ dependencies = [
"once_cell",
"pest",
"pest_derive",
"pollster",
"regex",
"rpassword",
"scm-record",
@@ -1674,6 +1675,7 @@ dependencies = [
"once_cell",
"pest",
"pest_derive",
"pollster",
"pretty_assertions",
"prost",
"rand",
@@ -2122,6 +2124,12 @@ dependencies = [
"plotters-backend",
]
[[package]]
name = "pollster"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22686f4785f02a4fcc856d3b3bb19bf6c8160d103f7a99cc258bddd0251dc7f2"
[[package]]
name = "powerfmt"
version = "0.2.0"

View file

@@ -58,6 +58,7 @@ once_cell = "1.18.0"
pest = "2.7.5"
pest_derive = "2.7.5"
pretty_assertions = "1.4.0"
pollster = "0.3.0"
prost = "0.11.9"
prost-build = "0.11.9"
rand = "0.8.5"

View file

@@ -50,6 +50,7 @@ maplit = { workspace = true }
once_cell = { workspace = true }
pest = { workspace = true }
pest_derive = { workspace = true }
pollster = { workspace = true }
regex = { workspace = true }
rpassword = { workspace = true }
scm-record = { workspace = true }

View file

@@ -14,10 +14,10 @@
use std::io::Write;
use futures::executor::block_on;
use jj_lib::backend::TreeValue;
use jj_lib::conflicts;
use jj_lib::repo::Repo;
use pollster::FutureExt;
use tracing::instrument;
use crate::cli_util::{user_error, CommandError, CommandHelper, RevisionArg};
@@ -56,12 +56,8 @@ pub(crate) fn cmd_cat(
}
Err(conflict) => {
let mut contents = vec![];
block_on(conflicts::materialize(
&conflict,
repo.store(),
&path,
&mut contents,
))
conflicts::materialize(&conflict, repo.store(), &path, &mut contents)
.block_on()
.unwrap();
ui.request_pager();
ui.stdout_formatter().write_all(&contents)?;

View file

@@ -18,7 +18,6 @@ use std::io;
use std::ops::Range;
use std::sync::Arc;
use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{ObjectId, TreeValue};
use jj_lib::commit::Commit;
@@ -31,6 +30,7 @@ use jj_lib::repo::{ReadonlyRepo, Repo};
use jj_lib::repo_path::RepoPath;
use jj_lib::settings::{ConfigResultExt as _, UserSettings};
use jj_lib::{conflicts, diff, files, rewrite};
use pollster::FutureExt;
use tracing::instrument;
use unicode_width::UnicodeWidthStr as _;
@@ -364,12 +364,8 @@ fn diff_content(
}
None => {
let mut content = vec![];
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
conflicts::materialize(value, repo.store(), path, &mut content)
.block_on()
.unwrap();
Ok(content)
}
@@ -524,12 +520,8 @@ fn git_diff_part(
None => {
mode = "100644".to_string();
hash = "0000000000".to_string();
block_on(conflicts::materialize(
value,
repo.store(),
path,
&mut content,
))
conflicts::materialize(value, repo.store(), path, &mut content)
.block_on()
.unwrap();
}
Some(Some(TreeValue::Tree(_))) | Some(Some(TreeValue::Conflict(_))) | Some(None) => {

View file

@@ -542,10 +542,10 @@ pub fn edit_merge_builtin(
#[cfg(test)]
mod tests {
use futures::executor::block_on;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::merge::MergedTreeValue;
use jj_lib::repo::Repo;
use pollster::FutureExt;
use testutils::TestRepo;
use super::*;
@@ -726,7 +726,7 @@ mod tests {
to_file_id(right_tree.path_value(&path)),
],
);
let content = block_on(extract_as_single_hunk(&merge, store, &path));
let content = extract_as_single_hunk(&merge, store, &path).block_on();
let slices = content.map(|ContentHunk(buf)| buf.as_slice());
let merge_result = files::merge(slices);
let sections = make_merge_sections(merge_result).unwrap();

View file

@@ -6,7 +6,6 @@ use std::process::{Command, ExitStatus, Stdio};
use std::sync::Arc;
use config::ConfigError;
use futures::executor::block_on;
use itertools::Itertools;
use jj_lib::backend::{FileId, MergedTreeId, TreeValue};
use jj_lib::conflicts::{self, materialize_merge_result};
@@ -19,6 +18,7 @@ use jj_lib::repo_path::RepoPath;
use jj_lib::settings::UserSettings;
use jj_lib::store::Store;
use jj_lib::working_copy::{CheckoutError, SnapshotOptions};
use pollster::FutureExt;
use regex::{Captures, Regex};
use tempfile::TempDir;
use thiserror::Error;
@@ -358,12 +358,13 @@ pub fn run_mergetool_external(
}
let new_file_ids = if editor.merge_tool_edits_conflict_markers {
block_on(conflicts::update_from_content(
conflicts::update_from_content(
&file_merge,
tree.store(),
repo_path,
output_file_contents.as_slice(),
))?
)
.block_on()?
} else {
let new_file_id = tree
.store()

View file

@@ -18,7 +18,6 @@ mod external;
use std::sync::Arc;
use config::ConfigError;
use futures::executor::block_on;
use jj_lib::backend::MergedTreeId;
use jj_lib::conflicts::extract_as_single_hunk;
use jj_lib::gitignore::GitIgnoreFile;
@@ -27,6 +26,7 @@ use jj_lib::merged_tree::MergedTree;
use jj_lib::repo_path::RepoPath;
use jj_lib::settings::{ConfigResultExt as _, UserSettings};
use jj_lib::working_copy::SnapshotError;
use pollster::FutureExt;
use thiserror::Error;
use self::builtin::{edit_diff_builtin, edit_merge_builtin, BuiltinToolError};
@@ -113,7 +113,7 @@ pub fn run_mergetool(
sides: file_merge.num_sides(),
});
};
let content = block_on(extract_as_single_hunk(&file_merge, tree.store(), repo_path));
let content = extract_as_single_hunk(&file_merge, tree.store(), repo_path).block_on();
let editor = get_merge_tool_from_settings(ui, settings)?;
match editor {

View file

@@ -19,7 +19,7 @@ harness = false
version_check = { workspace = true }
[dependencies]
async-trait = { workspace = true}
async-trait = { workspace = true }
backoff = { workspace = true }
blake2 = { workspace = true }
byteorder = { workspace = true }
@@ -38,6 +38,7 @@ maplit = { workspace = true }
once_cell = { workspace = true }
pest = { workspace = true }
pest_derive = { workspace = true }
pollster = { workspace = true }
prost = { workspace = true }
rand = { workspace = true }
rand_chacha = { workspace = true }

View file

@@ -1016,8 +1016,8 @@ fn bytes_vec_from_json(value: &serde_json::Value) -> Vec<u8> {
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
use futures::executor::block_on;
use git2::Oid;
use pollster::FutureExt;
use test_case::test_case;
use super::*;
@@ -1101,7 +1101,7 @@ mod tests {
.collect_vec();
assert_eq!(git_refs, vec![git_commit_id2]);
let commit = block_on(backend.read_commit(&commit_id)).unwrap();
let commit = backend.read_commit(&commit_id).block_on().unwrap();
assert_eq!(&commit.change_id, &change_id);
assert_eq!(commit.parents, vec![CommitId::from_bytes(&[0; 20])]);
assert_eq!(commit.predecessors, vec![]);
@@ -1130,10 +1130,12 @@ mod tests {
);
assert_eq!(commit.committer.timestamp.tz_offset, -480);
let root_tree = block_on(backend.read_tree(
let root_tree = backend
.read_tree(
&RepoPath::root(),
&TreeId::from_bytes(root_tree_id.as_bytes()),
))
)
.block_on()
.unwrap();
let mut root_entries = root_tree.entries();
let dir = root_entries.next().unwrap();
@@ -1144,10 +1146,12 @@
&TreeValue::Tree(TreeId::from_bytes(dir_tree_id.as_bytes()))
);
let dir_tree = block_on(backend.read_tree(
let dir_tree = backend
.read_tree(
&RepoPath::from_internal_string("dir"),
&TreeId::from_bytes(dir_tree_id.as_bytes()),
))
)
.block_on()
.unwrap();
let mut entries = dir_tree.entries();
let file = entries.next().unwrap();
@@ -1167,7 +1171,7 @@
&TreeValue::Symlink(SymlinkId::from_bytes(blob2.as_bytes()))
);
let commit2 = block_on(backend.read_commit(&commit_id2)).unwrap();
let commit2 = backend.read_commit(&commit_id2).block_on().unwrap();
assert_eq!(commit2.parents, vec![commit_id.clone()]);
assert_eq!(commit.predecessors, vec![]);
assert_eq!(
@@ -1206,9 +1210,10 @@
// read_commit() without import_head_commits() works as of now. This might be
// changed later.
assert!(
block_on(backend.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes()))).is_ok()
);
assert!(backend
.read_commit(&CommitId::from_bytes(git_commit_id.as_bytes()))
.block_on()
.is_ok());
assert!(
backend
.cached_extra_metadata_table()
@@ -1294,7 +1299,7 @@
// Only root commit as parent
commit.parents = vec![backend.root_commit_id().clone()];
let first_id = backend.write_commit(commit.clone()).unwrap().0;
let first_commit = block_on(backend.read_commit(&first_id)).unwrap();
let first_commit = backend.read_commit(&first_id).block_on().unwrap();
assert_eq!(first_commit, commit);
let first_git_commit = git_repo.find_commit(git_id(&first_id)).unwrap();
assert_eq!(first_git_commit.parent_ids().collect_vec(), vec![]);
@@ -1302,7 +1307,7 @@
// Only non-root commit as parent
commit.parents = vec![first_id.clone()];
let second_id = backend.write_commit(commit.clone()).unwrap().0;
let second_commit = block_on(backend.read_commit(&second_id)).unwrap();
let second_commit = backend.read_commit(&second_id).block_on().unwrap();
assert_eq!(second_commit, commit);
let second_git_commit = git_repo.find_commit(git_id(&second_id)).unwrap();
assert_eq!(
@@ -1313,7 +1318,7 @@
// Merge commit
commit.parents = vec![first_id.clone(), second_id.clone()];
let merge_id = backend.write_commit(commit.clone()).unwrap().0;
let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap();
let merge_commit = backend.read_commit(&merge_id).block_on().unwrap();
assert_eq!(merge_commit, commit);
let merge_git_commit = git_repo.find_commit(git_id(&merge_id)).unwrap();
assert_eq!(
@@ -1363,7 +1368,7 @@
// When writing a tree-level conflict, the root tree on the git side has the
// individual trees as subtrees.
let read_commit_id = backend.write_commit(commit.clone()).unwrap().0;
let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap();
let read_commit = backend.read_commit(&read_commit_id).block_on().unwrap();
assert_eq!(read_commit, commit);
let git_commit = git_repo
.find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap())
@@ -1392,7 +1397,7 @@
// regular git tree.
commit.root_tree = MergedTreeId::resolved(create_tree(5));
let read_commit_id = backend.write_commit(commit.clone()).unwrap().0;
let read_commit = block_on(backend.read_commit(&read_commit_id)).unwrap();
let read_commit = backend.read_commit(&read_commit_id).block_on().unwrap();
assert_eq!(read_commit, commit);
let git_commit = git_repo
.find_commit(Oid::from_bytes(read_commit_id.as_bytes()).unwrap())
@@ -1459,7 +1464,7 @@
let (commit_id2, mut actual_commit2) = backend.write_commit(commit2.clone()).unwrap();
// The returned matches the ID
assert_eq!(
block_on(backend.read_commit(&commit_id2)).unwrap(),
backend.read_commit(&commit_id2).block_on().unwrap(),
actual_commit2
);
assert_ne!(commit_id2, commit_id1);

View file

@@ -461,7 +461,7 @@ fn conflict_term_to_proto(part: &ConflictTerm) -> crate::protos::local_store::co
#[cfg(test)]
mod tests {
use assert_matches::assert_matches;
use futures::executor::block_on;
use pollster::FutureExt;
use super::*;
use crate::backend::MillisSinceEpoch;
@@ -493,25 +493,25 @@ mod tests {
// Only root commit as parent
commit.parents = vec![backend.root_commit_id().clone()];
let first_id = backend.write_commit(commit.clone()).unwrap().0;
let first_commit = block_on(backend.read_commit(&first_id)).unwrap();
let first_commit = backend.read_commit(&first_id).block_on().unwrap();
assert_eq!(first_commit, commit);
// Only non-root commit as parent
commit.parents = vec![first_id.clone()];
let second_id = backend.write_commit(commit.clone()).unwrap().0;
let second_commit = block_on(backend.read_commit(&second_id)).unwrap();
let second_commit = backend.read_commit(&second_id).block_on().unwrap();
assert_eq!(second_commit, commit);
// Merge commit
commit.parents = vec![first_id.clone(), second_id.clone()];
let merge_id = backend.write_commit(commit.clone()).unwrap().0;
let merge_commit = block_on(backend.read_commit(&merge_id)).unwrap();
let merge_commit = backend.read_commit(&merge_id).block_on().unwrap();
assert_eq!(merge_commit, commit);
// Merge commit with root as one parent
commit.parents = vec![first_id, backend.root_commit_id().clone()];
let root_merge_id = backend.write_commit(commit.clone()).unwrap().0;
let root_merge_commit = block_on(backend.read_commit(&root_merge_id)).unwrap();
let root_merge_commit = backend.read_commit(&root_merge_id).block_on().unwrap();
assert_eq!(root_merge_commit, commit);
}

View file

@@ -30,9 +30,9 @@ use std::sync::mpsc::{channel, Sender};
use std::sync::Arc;
use std::time::UNIX_EPOCH;
use futures::executor::block_on;
use itertools::Itertools;
use once_cell::unsync::OnceCell;
use pollster::FutureExt;
use prost::Message;
use rayon::iter::IntoParallelIterator;
use rayon::prelude::ParallelIterator;
@@ -956,12 +956,13 @@ impl TreeState {
message: format!("Failed to open file {}", disk_path.display()),
err: err.into(),
})?;
let new_file_ids = block_on(conflicts::update_from_content(
let new_file_ids = conflicts::update_from_content(
&old_file_ids,
self.store.as_ref(),
repo_path,
&content,
))?;
)
.block_on()?;
match new_file_ids.into_resolved() {
Ok(file_id) => {
#[cfg(windows)]
@@ -1063,12 +1064,8 @@ impl TreeState {
err: err.into(),
})?;
let mut conflict_data = vec![];
block_on(conflicts::materialize(
conflict,
self.store.as_ref(),
path,
&mut conflict_data,
))
conflicts::materialize(conflict, self.store.as_ref(), path, &mut conflict_data)
.block_on()
.expect("Failed to materialize conflict to in-memory buffer");
file.write_all(&conflict_data)
.map_err(|err| CheckoutError::Other {

View file

@@ -20,10 +20,10 @@ use std::iter::zip;
use std::sync::Arc;
use std::{iter, vec};
use futures::executor::block_on;
use futures::stream::StreamExt;
use futures::TryStreamExt;
use itertools::Itertools;
use pollster::FutureExt;
use crate::backend::{BackendError, BackendResult, ConflictId, MergedTreeId, TreeId, TreeValue};
use crate::matchers::{EverythingMatcher, Matcher};
@@ -898,11 +898,12 @@ impl Iterator for TreeDiffIterator<'_> {
let tree_after = after.is_tree();
let post_subdir =
if (tree_before || tree_after) && !self.matcher.visit(&path).is_nothing() {
let (before_tree, after_tree) = block_on(async {
let (before_tree, after_tree) = async {
let before_tree = Self::tree(dir.tree1.as_ref(), &path, &before);
let after_tree = Self::tree(dir.tree2.as_ref(), &path, &after);
futures::join!(before_tree, after_tree)
});
}
.block_on();
let before_tree = match before_tree {
Ok(tree) => tree,
Err(err) => {

View file

@@ -20,7 +20,7 @@ use std::fmt::{Debug, Formatter};
use std::io::Read;
use std::sync::{Arc, RwLock};
use futures::executor::block_on;
use pollster::FutureExt;
use crate::backend;
use crate::backend::{
@@ -98,7 +98,7 @@ impl Store {
}
pub fn get_commit(self: &Arc<Self>, id: &CommitId) -> BackendResult<Commit> {
block_on(self.get_commit_async(id))
self.get_commit_async(id).block_on()
}
pub async fn get_commit_async(self: &Arc<Self>, id: &CommitId) -> BackendResult<Commit> {
@@ -133,7 +133,7 @@ impl Store {
}
pub fn get_tree(self: &Arc<Self>, dir: &RepoPath, id: &TreeId) -> BackendResult<Tree> {
block_on(self.get_tree_async(dir, id))
self.get_tree_async(dir, id).block_on()
}
pub async fn get_tree_async(
@@ -193,7 +193,7 @@ impl Store {
}
pub fn read_file(&self, path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>> {
block_on(self.read_file_async(path, id))
self.read_file_async(path, id).block_on()
}
pub async fn read_file_async(
@@ -209,7 +209,7 @@ impl Store {
}
pub fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> BackendResult<String> {
block_on(self.read_symlink_async(path, id))
self.read_symlink_async(path, id).block_on()
}
pub async fn read_symlink_async(

View file

@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use futures::executor::block_on;
use jj_lib::backend::FileId;
use jj_lib::conflicts::{
extract_as_single_hunk, materialize_merge_result, parse_conflict, update_from_content,
@@ -21,6 +20,7 @@ use jj_lib::merge::Merge;
use jj_lib::repo::Repo;
use jj_lib::repo_path::RepoPath;
use jj_lib::store::Store;
use pollster::FutureExt;
use testutils::TestRepo;
#[test]
@@ -615,7 +615,11 @@ fn test_update_conflict_from_content() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
let parse = |content| {
update_from_content(&conflict, store, &path, content)
.block_on()
.unwrap()
};
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
@@ -659,7 +663,11 @@ fn test_update_conflict_from_content_modify_delete() {
// If the content is unchanged compared to the materialized value, we get the
// old conflict id back.
let materialized = materialize_conflict_string(store, &path, &conflict);
let parse = |content| block_on(update_from_content(&conflict, store, &path, content)).unwrap();
let parse = |content| {
update_from_content(&conflict, store, &path, content)
.block_on()
.unwrap()
};
assert_eq!(parse(materialized.as_bytes()), conflict);
// If the conflict is resolved, we get None back to indicate that.
@@ -690,7 +698,7 @@ fn materialize_conflict_string(
conflict: &Merge<Option<FileId>>,
) -> String {
let mut result: Vec<u8> = vec![];
let contents = block_on(extract_as_single_hunk(conflict, store, path));
let contents = extract_as_single_hunk(conflict, store, path).block_on();
materialize_merge_result(&contents, &mut result).unwrap();
String::from_utf8(result).unwrap()
}