Mirror of https://github.com/martinvonz/jj.git, synced 2025-01-05 20:55:05 +00:00
index: make heads_pos() deduplicate entries without building separate set
This is much faster (maybe because of better cache locality?). Another option is to use BTreeSet, but the BinaryHeap version is slightly faster.

"bench revset" result in my linux repo:

revsets/heads(tags())
---------------------
baseline   3.28   560.6±4.01ms
1          2.92   500.0±2.99ms
2          1.98   339.6±1.64ms
3 (this)   1.00   171.2±0.30ms
parent 9832ee205d
commit 6399c392fd
1 changed file with 12 additions and 5 deletions
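To make the before/after difference concrete, here is a minimal, standalone sketch (not the jj code itself; the helper names drain_with_visited_set and drain_with_dedup_pop and the plain u32 items are made up for illustration). The old approach filtered duplicates through a separate HashSet while popping; the new one drops duplicates directly at the top of the BinaryHeap:

use std::collections::{BinaryHeap, HashSet};

/// Old style: pop everything, filter duplicates through a separate set.
fn drain_with_visited_set(mut work: BinaryHeap<u32>) -> Vec<u32> {
    let mut visited = HashSet::new();
    let mut out = Vec::new();
    while let Some(item) = work.pop() {
        if !visited.insert(item) {
            continue; // already handled this value
        }
        out.push(item);
    }
    out
}

/// New style: duplicates are skipped while popping, with no auxiliary set.
fn drain_with_dedup_pop(mut work: BinaryHeap<u32>) -> Vec<u32> {
    let mut out = Vec::new();
    while let Some(item) = dedup_pop(&mut work) {
        out.push(item);
    }
    out
}

/// Same helper the commit adds: after popping the maximum, any equal items
/// surface at the top next, so they can be discarded one by one.
fn dedup_pop<T: Ord>(heap: &mut BinaryHeap<T>) -> Option<T> {
    let item = heap.pop()?;
    while heap.peek() == Some(&item) {
        heap.pop().unwrap();
    }
    Some(item)
}

fn main() {
    let items = [5, 3, 5, 1, 3, 3];
    let a = drain_with_visited_set(items.iter().copied().collect());
    let b = drain_with_dedup_pop(items.iter().copied().collect());
    assert_eq!(a, vec![5, 3, 1]);
    assert_eq!(a, b);
}

Both variants drain the heap in descending order and yield each distinct value once; the second avoids the extra hash set and its lookups, which is consistent with the speedup reported in the message above.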
@@ -974,17 +974,14 @@ impl<'a> CompositeIndex<'a> {
         // Walk ancestors of the parents of the candidates. Remove visited commits from
         // set of candidates. Stop walking when we have gone past the minimum
         // candidate generation.
-        let mut visited = HashSet::new();
-        while let Some(item) = work.pop() {
-            if !visited.insert(item.pos) {
-                continue;
-            }
+        while let Some(item) = dedup_pop(&mut work) {
             if item.generation < min_generation {
                 break;
             }
             candidate_positions.remove(&item.pos);
             let entry = self.entry_by_pos(item.pos);
             for parent_entry in entry.parents() {
                 assert!(parent_entry.pos < entry.pos);
                 work.push(IndexPositionByGeneration::from(&parent_entry));
             }
         }
@@ -1596,6 +1593,16 @@ impl RevWalkItemGenerationRange {
     }
 }
 
+/// Removes the greatest items (including duplicates) from the heap, returns
+/// one.
+fn dedup_pop<T: Ord>(heap: &mut BinaryHeap<T>) -> Option<T> {
+    let item = heap.pop()?;
+    while heap.peek() == Some(&item) {
+        heap.pop().unwrap();
+    }
+    Some(item)
+}
+
 impl IndexSegment for ReadonlyIndexImpl {
     fn segment_num_parent_commits(&self) -> u32 {
         self.num_parent_commits
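As a usage note (not part of the commit; the main function and the sample values below are illustrative), dedup_pop returns the current maximum and also discards any equal items that surface at the top of the heap, so each distinct value comes out exactly once:

use std::collections::BinaryHeap;

/// Copied from the hunk above: pop the maximum and drop its duplicates.
fn dedup_pop<T: Ord>(heap: &mut BinaryHeap<T>) -> Option<T> {
    let item = heap.pop()?;
    while heap.peek() == Some(&item) {
        heap.pop().unwrap();
    }
    Some(item)
}

fn main() {
    let mut heap: BinaryHeap<u32> = [2, 7, 7, 7, 4].into_iter().collect();
    assert_eq!(dedup_pop(&mut heap), Some(7)); // the duplicate 7s are discarded as well
    assert_eq!(dedup_pop(&mut heap), Some(4));
    assert_eq!(dedup_pop(&mut heap), Some(2));
    assert_eq!(dedup_pop(&mut heap), None);
}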