git_backend: remove fast path testing imported commits, filter them by caller
The idea is that GC, if implemented, will clean up objects based on the Index knowledge. It's probably okay to leave some extra metadata of unreachable objects, but GC-ed refs should be recreated if the corresponding heads get reimported. See also the next patch.
parent 48c4985e34
commit 2e1aa6c49c
2 changed files with 17 additions and 11 deletions
@@ -260,20 +260,22 @@ pub fn import_some_refs(
     // Bulk-import all reachable Git commits to the backend to reduce overhead of
     // table merging.
-    let head_ids = itertools::chain(
+    let index = mut_repo.index();
+    let missing_head_ids = itertools::chain(
         &changed_git_head,
         changed_git_refs.iter().map(|(_, new_target)| new_target),
         // changed_remote_refs might contain new_targets that are not in changed_git_refs,
         // but such targets should have already been imported to the backend.
     )
-    .flat_map(|target| target.added_ids());
-    let heads_imported = git_backend.import_head_commits(head_ids).is_ok();
+    .flat_map(|target| target.added_ids())
+    .filter(|&id| !index.has_id(id));
+    let heads_imported = git_backend.import_head_commits(missing_head_ids).is_ok();
 
     // Import new remote heads
     let mut head_commits = Vec::new();
     let get_commit = |id| {
         // If bulk-import failed, try again to find bad head or ref.
-        if !heads_imported {
+        if !heads_imported && !index.has_id(id) {
             git_backend.import_head_commits([id])?;
         }
         store.get_commit(id)
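To make the control flow of this hunk concrete, here is a minimal, self-contained Rust sketch of the caller-side pattern it introduces: filter out heads the index already knows, bulk-import the rest, and fall back to per-id import only when the bulk import failed. `Index`, `Backend`, and the string-valued ids are hypothetical stand-ins, not jj's actual types or signatures.

use std::collections::HashSet;

// Hypothetical stand-ins for jj's index and Git backend; only the
// control flow mirrors the patch, not the real types.
struct Index {
    known: HashSet<String>,
}

impl Index {
    fn has_id(&self, id: &str) -> bool {
        self.known.contains(id)
    }
}

struct Backend;

impl Backend {
    // Bulk import fails as a whole if any head is unreadable.
    fn import_head_commits<'a>(
        &self,
        head_ids: impl IntoIterator<Item = &'a String>,
    ) -> Result<(), String> {
        for id in head_ids {
            if id.starts_with("bad") {
                return Err(format!("cannot import {id}"));
            }
        }
        Ok(())
    }
}

fn import_refs(index: &Index, backend: &Backend, head_ids: &[String]) {
    // Caller-side filter, as in the patched import_some_refs():
    // skip heads the index already has.
    let missing: Vec<&String> = head_ids
        .iter()
        .filter(|id| !index.has_id(id))
        .collect();
    let heads_imported = backend.import_head_commits(missing).is_ok();
    for id in head_ids {
        // If bulk import failed, retry one id at a time to locate the
        // bad head, again skipping ids already present in the index.
        if !heads_imported && !index.has_id(id) {
            if let Err(err) = backend.import_head_commits([id]) {
                eprintln!("skipping {id}: {err}");
                continue;
            }
        }
        // ... the real code would now call store.get_commit(id) ...
    }
}

fn main() {
    let index = Index {
        known: HashSet::from(["aaa".to_string()]),
    };
    import_refs(&index, &Backend, &["aaa".into(), "bbb".into(), "bad1".into()]);
}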
@@ -333,31 +333,35 @@ impl GitBackend {
     }
 
     /// Imports the given commits and ancestors from the backing Git repo.
+    ///
+    /// The `head_ids` may contain commits that have already been imported, but
+    /// the caller should filter them out to eliminate redundant I/O processing.
     #[tracing::instrument(skip(self, head_ids))]
     pub fn import_head_commits<'a>(
         &self,
         head_ids: impl IntoIterator<Item = &'a CommitId>,
     ) -> BackendResult<()> {
-        let table = self.cached_extra_metadata_table()?;
-        let mut missing_head_ids = head_ids
+        let head_ids = head_ids
             .into_iter()
-            .filter(|&id| *id != self.root_commit_id && table.get_value(id.as_bytes()).is_none())
+            .filter(|&id| *id != self.root_commit_id)
             .collect_vec();
-        if missing_head_ids.is_empty() {
+        if head_ids.is_empty() {
             return Ok(());
         }
         // These commits are imported from Git. Make our change ids persist (otherwise
         // future write_commit() could reassign new change id.)
         tracing::debug!(
-            heads_count = missing_head_ids.len(),
+            heads_count = head_ids.len(),
             "import extra metadata entries"
         );
         let locked_repo = self.lock_git_repo();
         let (table, table_lock) = self.read_extra_metadata_table_locked()?;
         let mut mut_table = table.start_mutation();
         // Concurrent write_commit() might have updated the table before taking a lock.
-        missing_head_ids.retain(|&id| mut_table.get_value(id.as_bytes()).is_none());
+        let missing_head_ids = head_ids
+            .into_iter()
+            .filter(|&id| mut_table.get_value(id.as_bytes()).is_none())
+            .collect_vec();
         import_extra_metadata_entries_from_heads(
             &locked_repo,
             &mut mut_table,
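The backend-side hunk can be sketched the same way. Its point is that import_head_commits() no longer pre-filters against the metadata table up front; it only drops the root commit, then re-checks membership under the table lock so entries written by a concurrent writer are not imported twice. Below is a toy version of that lock-then-recheck pattern, with a Mutex-guarded HashMap standing in for jj's extra-metadata table (all names here are illustrative, not jj's real API).

use std::collections::HashMap;
use std::sync::Mutex;

// Toy stand-in for the extra-metadata table: commit id -> metadata.
struct MetadataStore {
    table: Mutex<HashMap<String, String>>,
}

impl MetadataStore {
    fn import_heads(&self, head_ids: &[String], root_id: &str) {
        // Cheap, lock-free filter: never import the virtual root commit.
        // Any "already imported" filtering is now the caller's job.
        let head_ids: Vec<&String> = head_ids
            .iter()
            .filter(|id| id.as_str() != root_id)
            .collect();
        if head_ids.is_empty() {
            return;
        }
        // Take the lock, then re-check: a concurrent writer may have
        // inserted entries after the caller's filter ran.
        let mut table = self.table.lock().unwrap();
        let missing: Vec<&String> = head_ids
            .into_iter()
            .filter(|id| !table.contains_key(id.as_str()))
            .collect();
        for id in missing {
            table.insert(id.clone(), format!("metadata for {id}"));
        }
    }
}

fn main() {
    let store = MetadataStore {
        table: Mutex::new(HashMap::new()),
    };
    store.import_heads(&["root".into(), "abc".into()], "root");
    assert!(store.table.lock().unwrap().contains_key("abc"));
}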