wip: Performance improvements for project search

Do not merge. Need to check with Conrad.

Co-authored-by: Antonio <antonio@zed.dev>
This commit is contained in:
Thorsten Ball 2024-08-27 14:48:17 +02:00
parent 8ec680cecb
commit f312763c33
4 changed files with 43 additions and 32 deletions

View file

@@ -6,7 +6,7 @@ use crate::{
use anyhow::{anyhow, Context as _, Result};
use collections::{hash_map, HashMap, HashSet};
use fs::Fs;
use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt as _};
use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt};
use git::blame::Blame;
use gpui::{
AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel,
@@ -788,7 +788,9 @@ impl BufferStore {
fs: Arc<dyn Fs>,
cx: &mut ModelContext<Self>,
) -> Receiver<Model<Buffer>> {
let (tx, rx) = smol::channel::unbounded();
const MAGIC_NUMBER: usize = 64;
let (tx, rx) = smol::channel::bounded(MAGIC_NUMBER);
let open_buffers = self.find_open_search_candidates(query, cx);
let skip_entries: HashSet<_> = open_buffers
.iter()
@@ -796,33 +798,38 @@ impl BufferStore {
.collect();
let limit = limit.saturating_sub(open_buffers.len());
for open_buffer in open_buffers {
tx.send_blocking(open_buffer).ok();
}
let match_rx = self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.find_search_candidates(query.clone(), limit, skip_entries, fs, cx)
});
let mut project_paths_rx = self
.worktree_store
.update(cx, |worktree_store, cx| {
worktree_store.find_search_candidates(query.clone(), limit, skip_entries, fs, cx)
})
.chunks(MAGIC_NUMBER);
const MAX_CONCURRENT_BUFFER_OPENS: usize = 8;
cx.spawn(|this, mut cx| async move {
for open_buffer in open_buffers {
tx.send(open_buffer).await.ok();
}
for _ in 0..MAX_CONCURRENT_BUFFER_OPENS {
let mut match_rx = match_rx.clone();
let tx = tx.clone();
cx.spawn(|this, mut cx| async move {
while let Some(project_path) = match_rx.next().await {
let buffer = this
.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
.await
.log_err();
if let Some(buffer) = buffer {
tx.send_blocking(buffer).ok();
while let Some(project_paths) = project_paths_rx.next().await {
let buffers = this.update(&mut cx, |this, cx| {
project_paths
.into_iter()
.map(|project_path| this.open_buffer(project_path, cx))
.collect::<Vec<_>>()
})?;
for buffer_task in buffers {
if let Some(buffer) = buffer_task.await.log_err() {
if tx.send(buffer).await.is_err() {
println!("other end dropped, returning");
return anyhow::Ok(());
}
}
}
anyhow::Ok(())
})
.detach();
}
}
anyhow::Ok(())
})
.detach();
rx
}
@@ -833,13 +840,9 @@ impl BufferStore {
query: &SearchQuery,
cx: &ModelContext<Self>,
) -> Vec<Model<Buffer>> {
let include_root = self
.worktree_store
.read(cx)
.visible_worktrees(cx)
.collect::<Vec<_>>()
.len()
> 1;
let worktree_count = self.worktree_store.read(cx).visible_worktrees(cx).count();
let include_root = worktree_count > 1;
self.buffers()
.filter_map(|buffer| {
let handle = buffer.clone();

View file

@@ -7275,6 +7275,7 @@ impl Project {
query: SearchQuery,
cx: &mut ModelContext<Self>,
) -> Receiver<SearchResult> {
let start = std::time::Instant::now();
let (result_tx, result_rx) = smol::channel::bounded(1024);
let matching_buffers_rx =
@@ -7343,6 +7344,7 @@ impl Project {
result_tx.send(SearchResult::LimitReached).await?;
}
println!("search took: {:?}", start.elapsed());
anyhow::Ok(())
})
.detach();

View file

@@ -276,7 +276,7 @@ impl WorktreeStore {
fs: Arc<dyn Fs>,
cx: &ModelContext<Self>,
) -> Receiver<ProjectPath> {
let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();
let snapshots = self
.visible_worktrees(cx)
.filter_map(|tree| {

View file

@@ -4761,6 +4761,12 @@ impl BackgroundScanner {
request.relative_paths.extend(next_request.relative_paths);
request.done.extend(next_request.done);
}
if request.relative_paths.len() > 1 {
println!(
"------------------- Batched {} files",
request.relative_paths.len()
);
}
Ok(request)
}
}