Handle project-wide search on guests

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
This commit is contained in:
Nathan Sobo 2022-02-25 15:09:47 -07:00
parent 1278f5484f
commit e822c6a64e
6 changed files with 434 additions and 215 deletions

1
Cargo.lock generated
View file

@@ -1785,6 +1785,7 @@ dependencies = [
"project", "project",
"theme", "theme",
"unindent", "unindent",
"util",
"workspace", "workspace",
] ]

View file

@@ -13,6 +13,7 @@ gpui = { path = "../gpui" }
language = { path = "../language" } language = { path = "../language" }
project = { path = "../project" } project = { path = "../project" }
theme = { path = "../theme" } theme = { path = "../theme" }
util = { path = "../util" }
workspace = { path = "../workspace" } workspace = { path = "../workspace" }
anyhow = "1.0" anyhow = "1.0"
postage = { version = "0.4.1", features = ["futures-traits"] } postage = { version = "0.4.1", features = ["futures-traits"] }

View file

@@ -12,6 +12,7 @@ use std::{
ops::Range, ops::Range,
path::PathBuf, path::PathBuf,
}; };
use util::ResultExt as _;
use workspace::{Item, ItemHandle, ItemNavHistory, ItemView, Settings, Workspace}; use workspace::{Item, ItemHandle, ItemNavHistory, ItemView, Settings, Workspace};
action!(Deploy); action!(Deploy);
@@ -81,7 +82,7 @@ impl ProjectFind {
.update(cx, |project, cx| project.search(query.clone(), cx)); .update(cx, |project, cx| project.search(query.clone(), cx));
self.highlighted_ranges.clear(); self.highlighted_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move { self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
let matches = search.await; let matches = search.await.log_err()?;
if let Some(this) = this.upgrade(&cx) { if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.highlighted_ranges.clear(); this.highlighted_ranges.clear();

View file

@@ -15,6 +15,7 @@ use gpui::{
UpgradeModelHandle, WeakModelHandle, UpgradeModelHandle, WeakModelHandle,
}; };
use language::{ use language::{
proto::{deserialize_anchor, serialize_anchor},
range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion, range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16, Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
ToLspPosition, ToOffset, ToPointUtf16, Transaction, ToLspPosition, ToOffset, ToPointUtf16, Transaction,
@@ -226,6 +227,7 @@ impl Project {
client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>); client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>); client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>); client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
client.add_entity_request_handler(Self::handle_search_project);
client.add_entity_request_handler(Self::handle_get_project_symbols); client.add_entity_request_handler(Self::handle_get_project_symbols);
client.add_entity_request_handler(Self::handle_open_buffer_for_symbol); client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
client.add_entity_request_handler(Self::handle_open_buffer); client.add_entity_request_handler(Self::handle_open_buffer);
@@ -2049,7 +2051,7 @@ impl Project {
&self, &self,
query: SearchQuery, query: SearchQuery,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>> { ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
if self.is_local() { if self.is_local() {
let snapshots = self let snapshots = self
.strong_worktrees(cx) .strong_worktrees(cx)
@@ -2215,10 +2217,38 @@ impl Project {
} }
}) })
.await; .await;
matched_buffers.into_iter().flatten().collect() Ok(matched_buffers.into_iter().flatten().collect())
})
} else if let Some(project_id) = self.remote_id() {
let request = self.client.request(query.to_proto(project_id));
let request_handle = self.start_buffer_request(cx);
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let mut result = HashMap::default();
for location in response.locations {
let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
let target_buffer = this
.update(&mut cx, |this, cx| {
this.deserialize_buffer(buffer, request_handle.clone(), cx)
})
.await?;
let start = location
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
let end = location
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
result
.entry(target_buffer)
.or_insert(Vec::new())
.push(start..end)
}
Ok(result)
}) })
} else { } else {
todo!() Task::ready(Ok(Default::default()))
} }
} }
@@ -3012,6 +3042,36 @@ impl Project {
}) })
} }
// Handles a `SearchProject` RPC from a guest: runs the query against the
// host's local project and replies with every match, serialized as
// (buffer, anchor range) locations.
async fn handle_search_project(
    this: ModelHandle<Self>,
    envelope: TypedEnvelope<proto::SearchProject>,
    _: Arc<Client>,
    mut cx: AsyncAppContext,
) -> Result<proto::SearchProjectResponse> {
    // The guest that originated the request; buffers below are
    // serialized specifically for that peer.
    let peer_id = envelope.original_sender_id()?;
    let query = SearchQuery::from_proto(envelope.payload)?;
    // On the host, `search` takes the local path and yields a map of
    // buffer -> matching anchor ranges.
    let result = this
        .update(&mut cx, |this, cx| this.search(query, cx))
        .await?;
    this.update(&mut cx, |this, cx| {
        let mut locations = Vec::new();
        for (buffer, ranges) in result {
            for range in ranges {
                let start = serialize_anchor(&range.start);
                let end = serialize_anchor(&range.end);
                // NOTE(review): the buffer is serialized once per matching
                // range rather than once per buffer — presumably cheap or
                // idempotent after the first call; confirm before hoisting
                // it out of the inner loop.
                let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                locations.push(proto::Location {
                    buffer: Some(buffer),
                    start: Some(start),
                    end: Some(end),
                });
            }
        }
        Ok(proto::SearchProjectResponse { locations })
    })
}
async fn handle_open_buffer_for_symbol( async fn handle_open_buffer_for_symbol(
this: ModelHandle<Self>, this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::OpenBufferForSymbol>, envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
@@ -4915,7 +4975,9 @@ mod tests {
.await; .await;
assert_eq!( assert_eq!(
search(&project, SearchQuery::text("TWO", false, true), &mut cx).await, search(&project, SearchQuery::text("TWO", false, true), &mut cx)
.await
.unwrap(),
HashMap::from_iter([ HashMap::from_iter([
("two.rs".to_string(), vec![6..9]), ("two.rs".to_string(), vec![6..9]),
("three.rs".to_string(), vec![37..40]) ("three.rs".to_string(), vec![37..40])
@@ -4933,7 +4995,9 @@ mod tests {
}); });
assert_eq!( assert_eq!(
search(&project, SearchQuery::text("TWO", false, true), &mut cx).await, search(&project, SearchQuery::text("TWO", false, true), &mut cx)
.await
.unwrap(),
HashMap::from_iter([ HashMap::from_iter([
("two.rs".to_string(), vec![6..9]), ("two.rs".to_string(), vec![6..9]),
("three.rs".to_string(), vec![37..40]), ("three.rs".to_string(), vec![37..40]),
@@ -4945,10 +5009,12 @@ mod tests {
project: &ModelHandle<Project>, project: &ModelHandle<Project>,
query: SearchQuery, query: SearchQuery,
cx: &mut gpui::TestAppContext, cx: &mut gpui::TestAppContext,
) -> HashMap<String, Vec<Range<usize>>> { ) -> Result<HashMap<String, Vec<Range<usize>>>> {
project let results = project
.update(cx, |project, cx| project.search(query, cx)) .update(cx, |project, cx| project.search(query, cx))
.await .await?;
Ok(results
.into_iter() .into_iter()
.map(|(buffer, ranges)| { .map(|(buffer, ranges)| {
buffer.read_with(cx, |buffer, _| { buffer.read_with(cx, |buffer, _| {
@@ -4960,7 +5026,7 @@ mod tests {
(path, ranges) (path, ranges)
}) })
}) })
.collect() .collect())
} }
} }
} }

View file

@@ -1,204 +1,227 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::Result; use anyhow::Result;
use language::{char_kind, Rope}; use client::proto;
use regex::{Regex, RegexBuilder}; use language::{char_kind, Rope};
use smol::future::yield_now; use regex::{Regex, RegexBuilder};
use std::{ use smol::future::yield_now;
io::{BufRead, BufReader, Read}, use std::{
ops::Range, io::{BufRead, BufReader, Read},
sync::Arc, ops::Range,
}; sync::Arc,
};
#[derive(Clone)]
pub enum SearchQuery { #[derive(Clone)]
Text { pub enum SearchQuery {
search: Arc<AhoCorasick<usize>>, Text {
query: Arc<str>, search: Arc<AhoCorasick<usize>>,
whole_word: bool, query: Arc<str>,
case_sensitive: bool, whole_word: bool,
}, case_sensitive: bool,
Regex { },
regex: Regex, Regex {
query: Arc<str>, regex: Regex,
multiline: bool, query: Arc<str>,
whole_word: bool, multiline: bool,
case_sensitive: bool, whole_word: bool,
}, case_sensitive: bool,
} },
}
impl SearchQuery {
pub fn text(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Self { impl SearchQuery {
let query = query.to_string(); pub fn text(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Self {
let search = AhoCorasickBuilder::new() let query = query.to_string();
.auto_configure(&[&query]) let search = AhoCorasickBuilder::new()
.ascii_case_insensitive(!case_sensitive) .auto_configure(&[&query])
.build(&[&query]); .ascii_case_insensitive(!case_sensitive)
Self::Text { .build(&[&query]);
search: Arc::new(search), Self::Text {
query: Arc::from(query), search: Arc::new(search),
whole_word, query: Arc::from(query),
case_sensitive, whole_word,
} case_sensitive,
} }
}
pub fn regex(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Result<Self> {
let mut query = query.to_string(); pub fn regex(query: impl ToString, whole_word: bool, case_sensitive: bool) -> Result<Self> {
let initial_query = Arc::from(query.as_str()); let mut query = query.to_string();
if whole_word { let initial_query = Arc::from(query.as_str());
let mut word_query = String::new(); if whole_word {
word_query.push_str("\\b"); let mut word_query = String::new();
word_query.push_str(&query); word_query.push_str("\\b");
word_query.push_str("\\b"); word_query.push_str(&query);
query = word_query word_query.push_str("\\b");
} query = word_query
}
let multiline = query.contains("\n") || query.contains("\\n");
let regex = RegexBuilder::new(&query) let multiline = query.contains("\n") || query.contains("\\n");
.case_insensitive(!case_sensitive) let regex = RegexBuilder::new(&query)
.multi_line(multiline) .case_insensitive(!case_sensitive)
.build()?; .multi_line(multiline)
Ok(Self::Regex { .build()?;
regex, Ok(Self::Regex {
query: initial_query, regex,
multiline, query: initial_query,
whole_word, multiline,
case_sensitive, whole_word,
}) case_sensitive,
} })
}
pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
if self.as_str().is_empty() { pub fn from_proto(message: proto::SearchProject) -> Result<Self> {
return Ok(false); if message.regex {
} Self::regex(message.query, message.whole_word, message.case_sensitive)
} else {
match self { Ok(Self::text(
Self::Text { search, .. } => { message.query,
let mat = search.stream_find_iter(stream).next(); message.whole_word,
match mat { message.case_sensitive,
Some(Ok(_)) => Ok(true), ))
Some(Err(err)) => Err(err.into()), }
None => Ok(false), }
}
} pub fn to_proto(&self, project_id: u64) -> proto::SearchProject {
Self::Regex { proto::SearchProject {
regex, multiline, .. project_id,
} => { query: self.as_str().to_string(),
let mut reader = BufReader::new(stream); regex: self.is_regex(),
if *multiline { whole_word: self.whole_word(),
let mut text = String::new(); case_sensitive: self.case_sensitive(),
if let Err(err) = reader.read_to_string(&mut text) { }
Err(err.into()) }
} else {
Ok(regex.find(&text).is_some()) pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
} if self.as_str().is_empty() {
} else { return Ok(false);
for line in reader.lines() { }
let line = line?;
if regex.find(&line).is_some() { match self {
return Ok(true); Self::Text { search, .. } => {
} let mat = search.stream_find_iter(stream).next();
} match mat {
Ok(false) Some(Ok(_)) => Ok(true),
} Some(Err(err)) => Err(err.into()),
} None => Ok(false),
} }
} }
Self::Regex {
pub async fn search(&self, rope: &Rope) -> Vec<Range<usize>> { regex, multiline, ..
const YIELD_INTERVAL: usize = 20000; } => {
let mut reader = BufReader::new(stream);
if self.as_str().is_empty() { if *multiline {
return Default::default(); let mut text = String::new();
} if let Err(err) = reader.read_to_string(&mut text) {
Err(err.into())
let mut matches = Vec::new(); } else {
match self { Ok(regex.find(&text).is_some())
Self::Text { }
search, whole_word, .. } else {
} => { for line in reader.lines() {
for (ix, mat) in search let line = line?;
.stream_find_iter(rope.bytes_in_range(0..rope.len())) if regex.find(&line).is_some() {
.enumerate() return Ok(true);
{ }
if (ix + 1) % YIELD_INTERVAL == 0 { }
yield_now().await; Ok(false)
} }
}
let mat = mat.unwrap(); }
if *whole_word { }
let prev_kind = rope.reversed_chars_at(mat.start()).next().map(char_kind);
let start_kind = char_kind(rope.chars_at(mat.start()).next().unwrap()); pub async fn search(&self, rope: &Rope) -> Vec<Range<usize>> {
let end_kind = char_kind(rope.reversed_chars_at(mat.end()).next().unwrap()); const YIELD_INTERVAL: usize = 20000;
let next_kind = rope.chars_at(mat.end()).next().map(char_kind);
if Some(start_kind) == prev_kind || Some(end_kind) == next_kind { if self.as_str().is_empty() {
continue; return Default::default();
} }
}
matches.push(mat.start()..mat.end()) let mut matches = Vec::new();
} match self {
} Self::Text {
Self::Regex { search, whole_word, ..
regex, multiline, .. } => {
} => { for (ix, mat) in search
if *multiline { .stream_find_iter(rope.bytes_in_range(0..rope.len()))
let text = rope.to_string(); .enumerate()
for (ix, mat) in regex.find_iter(&text).enumerate() { {
if (ix + 1) % YIELD_INTERVAL == 0 { if (ix + 1) % YIELD_INTERVAL == 0 {
yield_now().await; yield_now().await;
} }
matches.push(mat.start()..mat.end()); let mat = mat.unwrap();
} if *whole_word {
} else { let prev_kind = rope.reversed_chars_at(mat.start()).next().map(char_kind);
let mut line = String::new(); let start_kind = char_kind(rope.chars_at(mat.start()).next().unwrap());
let mut line_offset = 0; let end_kind = char_kind(rope.reversed_chars_at(mat.end()).next().unwrap());
for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() { let next_kind = rope.chars_at(mat.end()).next().map(char_kind);
if (chunk_ix + 1) % YIELD_INTERVAL == 0 { if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
yield_now().await; continue;
} }
}
for (newline_ix, text) in chunk.split('\n').enumerate() { matches.push(mat.start()..mat.end())
if newline_ix > 0 { }
for mat in regex.find_iter(&line) { }
let start = line_offset + mat.start(); Self::Regex {
let end = line_offset + mat.end(); regex, multiline, ..
matches.push(start..end); } => {
} if *multiline {
let text = rope.to_string();
line_offset += line.len() + 1; for (ix, mat) in regex.find_iter(&text).enumerate() {
line.clear(); if (ix + 1) % YIELD_INTERVAL == 0 {
} yield_now().await;
line.push_str(text); }
}
} matches.push(mat.start()..mat.end());
} }
} } else {
} let mut line = String::new();
matches let mut line_offset = 0;
} for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
pub fn as_str(&self) -> &str { yield_now().await;
match self { }
Self::Text { query, .. } => query.as_ref(),
Self::Regex { query, .. } => query.as_ref(), for (newline_ix, text) in chunk.split('\n').enumerate() {
} if newline_ix > 0 {
} for mat in regex.find_iter(&line) {
let start = line_offset + mat.start();
pub fn whole_word(&self) -> bool { let end = line_offset + mat.end();
match self { matches.push(start..end);
Self::Text { whole_word, .. } => *whole_word, }
Self::Regex { whole_word, .. } => *whole_word,
} line_offset += line.len() + 1;
} line.clear();
}
pub fn case_sensitive(&self) -> bool { line.push_str(text);
match self { }
Self::Text { case_sensitive, .. } => *case_sensitive, }
Self::Regex { case_sensitive, .. } => *case_sensitive, }
} }
} }
matches
pub fn is_regex(&self) -> bool { }
matches!(self, Self::Regex { .. })
} pub fn as_str(&self) -> &str {
} match self {
Self::Text { query, .. } => query.as_ref(),
Self::Regex { query, .. } => query.as_ref(),
}
}
pub fn whole_word(&self) -> bool {
match self {
Self::Text { whole_word, .. } => *whole_word,
Self::Regex { whole_word, .. } => *whole_word,
}
}
pub fn case_sensitive(&self) -> bool {
match self {
Self::Text { case_sensitive, .. } => *case_sensitive,
Self::Regex { case_sensitive, .. } => *case_sensitive,
}
}
pub fn is_regex(&self) -> bool {
matches!(self, Self::Regex { .. })
}
}

View file

@@ -79,6 +79,7 @@ impl Server {
.add_message_handler(Server::disk_based_diagnostics_updated) .add_message_handler(Server::disk_based_diagnostics_updated)
.add_request_handler(Server::get_definition) .add_request_handler(Server::get_definition)
.add_request_handler(Server::get_references) .add_request_handler(Server::get_references)
.add_request_handler(Server::search_project)
.add_request_handler(Server::get_document_highlights) .add_request_handler(Server::get_document_highlights)
.add_request_handler(Server::get_project_symbols) .add_request_handler(Server::get_project_symbols)
.add_request_handler(Server::open_buffer_for_symbol) .add_request_handler(Server::open_buffer_for_symbol)
@@ -570,6 +571,20 @@ impl Server {
.await?) .await?)
} }
/// Forwards a project-wide search request from a guest to the project's
/// host and returns the host's response verbatim.
async fn search_project(
    self: Arc<Server>,
    request: TypedEnvelope<proto::SearchProject>,
) -> tide::Result<proto::SearchProjectResponse> {
    // Resolve which connection hosts this project; this fails if the
    // sender is not a participant in the project.
    let host = self
        .state()
        .read_project(request.payload.project_id, request.sender_id)?
        .host_connection_id;
    // Relay the untouched payload to the host and hand its reply back.
    let response = self
        .peer
        .forward_request(request.sender_id, host, request.payload)
        .await;
    Ok(response?)
}
async fn get_document_highlights( async fn get_document_highlights(
self: Arc<Server>, self: Arc<Server>,
request: TypedEnvelope<proto::GetDocumentHighlights>, request: TypedEnvelope<proto::GetDocumentHighlights>,
@@ -1186,7 +1201,7 @@ mod tests {
LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, ToLspPosition, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
}, },
lsp, lsp,
project::{DiagnosticSummary, Project, ProjectPath}, project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath},
workspace::{Settings, Workspace, WorkspaceParams}, workspace::{Settings, Workspace, WorkspaceParams},
}; };
@@ -2843,6 +2858,118 @@ mod tests {
}); });
} }
// End-to-end test: a guest (client B) runs a project-wide text search on a
// project shared by the host (client A) and receives correct match ranges
// across both worktrees over RPC.
//
// Fix: removed a stray `eprintln!("sharing")` debug print left in the test.
#[gpui::test(iterations = 10)]
async fn test_project_search(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
    cx_a.foreground().forbid_parking();
    let lang_registry = Arc::new(LanguageRegistry::new());
    let fs = FakeFs::new(cx_a.background());
    fs.insert_tree(
        "/root-1",
        json!({
            ".zed.toml": r#"collaborators = ["user_b"]"#,
            "a": "hello world",
            "b": "goodnight moon",
            "c": "a world of goo",
            "d": "world champion of clown world",
        }),
    )
    .await;
    fs.insert_tree(
        "/root-2",
        json!({
            "e": "disney world is fun",
        }),
    )
    .await;

    // Connect to a server as 2 clients.
    let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
    let client_a = server.create_client(&mut cx_a, "user_a").await;
    let client_b = server.create_client(&mut cx_b, "user_b").await;

    // Share a project as client A
    let project_a = cx_a.update(|cx| {
        Project::local(
            client_a.clone(),
            client_a.user_store.clone(),
            lang_registry.clone(),
            fs.clone(),
            cx,
        )
    });
    let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
    let (worktree_1, _) = project_a
        .update(&mut cx_a, |p, cx| {
            p.find_or_create_local_worktree("/root-1", false, cx)
        })
        .await
        .unwrap();
    // Wait for the initial scan so all files are searchable.
    worktree_1
        .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    let (worktree_2, _) = project_a
        .update(&mut cx_a, |p, cx| {
            p.find_or_create_local_worktree("/root-2", false, cx)
        })
        .await
        .unwrap();
    worktree_2
        .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    project_a
        .update(&mut cx_a, |p, cx| p.share(cx))
        .await
        .unwrap();

    // Join the worktree as client B.
    let project_b = Project::remote(
        project_id,
        client_b.clone(),
        client_b.user_store.clone(),
        lang_registry.clone(),
        fs.clone(),
        &mut cx_b.to_async(),
    )
    .await
    .unwrap();

    // Search from the guest; the query is forwarded to the host over RPC.
    let results = project_b
        .update(&mut cx_b, |project, cx| {
            project.search(SearchQuery::text("world", false, false), cx)
        })
        .await
        .unwrap();

    // Flatten the results into (path, offset ranges) pairs and sort by
    // path so the assertion below is deterministic.
    let mut ranges_by_path = results
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.read_with(&cx_b, |buffer, cx| {
                let path = buffer.file().unwrap().full_path(cx);
                let offset_ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, offset_ranges)
            })
        })
        .collect::<Vec<_>>();
    ranges_by_path.sort_by_key(|(path, _)| path.clone());
    assert_eq!(
        ranges_by_path,
        &[
            (PathBuf::from("root-1/a"), vec![6..11]),
            (PathBuf::from("root-1/c"), vec![2..7]),
            (PathBuf::from("root-1/d"), vec![0..5, 24..29]),
            (PathBuf::from("root-2/e"), vec![7..12]),
        ]
    );
}
#[gpui::test(iterations = 10)] #[gpui::test(iterations = 10)]
async fn test_document_highlights(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { async fn test_document_highlights(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
cx_a.foreground().forbid_parking(); cx_a.foreground().forbid_parking();