Mirror of https://github.com/zed-industries/zed.git (synced 2025-01-24 19:10:24 +00:00)

Implement Project::definition when the buffer is remote

commit 245490f934 (parent 528a4dd9b4)

6 changed files with 392 additions and 53 deletions

@@ -155,7 +155,7 @@ pub fn serialize_diagnostics<'a>(
         .collect()
 }
 
-fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
+pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
     proto::Anchor {
         replica_id: anchor.timestamp.replica_id as u32,
         local_timestamp: anchor.timestamp.value,

@@ -352,7 +352,7 @@ pub fn deserialize_diagnostics(
         .collect()
 }
 
-fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
+pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
     Some(Anchor {
         timestamp: clock::Local {
            replica_id: anchor.replica_id as ReplicaId,
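
Note: both helpers are promoted to `pub` so the project crate can put buffer anchors on the wire (see the imports added below). A minimal, self-contained sketch of the round-trip they are meant to preserve, using stand-in types that model only the fields visible in these hunks (the real Anchor carries more state than a timestamp):

// Hypothetical, simplified stand-ins for the types in this diff.
type ReplicaId = u16;

#[derive(Clone, Copy, PartialEq, Debug)]
struct Local {
    replica_id: ReplicaId,
    value: u32,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Anchor {
    timestamp: Local,
}

struct ProtoAnchor {
    replica_id: u32,
    local_timestamp: u32,
}

fn serialize_anchor(anchor: &Anchor) -> ProtoAnchor {
    ProtoAnchor {
        // Replica ids are widened because protobuf has no 16-bit integer type.
        replica_id: anchor.timestamp.replica_id as u32,
        local_timestamp: anchor.timestamp.value,
    }
}

fn deserialize_anchor(anchor: ProtoAnchor) -> Option<Anchor> {
    Some(Anchor {
        timestamp: Local {
            replica_id: anchor.replica_id as ReplicaId,
            value: anchor.local_timestamp,
        },
    })
}

fn main() {
    let anchor = Anchor { timestamp: Local { replica_id: 1, value: 42 } };
    // The round-trip should be lossless for any anchor a peer can produce.
    assert_eq!(deserialize_anchor(serialize_anchor(&anchor)), Some(anchor));
}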

@@ -13,6 +13,7 @@ use gpui::{
     WeakModelHandle,
 };
 use language::{
+    proto::{deserialize_anchor, serialize_anchor},
     range_from_lsp, Bias, Buffer, Diagnostic, DiagnosticEntry, File as _, Language,
     LanguageRegistry, Operation, PointUtf16, ToOffset, ToPointUtf16,
 };

@@ -336,6 +337,7 @@ impl Project {
                 client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
                 client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
                 client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
             ]);
         }
     }
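
Note: `subscribe_to_entity` routes incoming envelopes for this project to per-message handlers, so remote `GetDefinition` requests reach `handle_get_definition` (added later in this commit). A toy model of that kind of type-keyed dispatch — not zed's actual client API, just the shape of the pattern:

use std::any::{Any, TypeId};
use std::collections::HashMap;

// Each incoming message is routed to the handler registered for its payload
// type, the way subscribe_to_entity routes proto::GetDefinition above.
struct Router {
    handlers: HashMap<TypeId, Box<dyn Fn(&dyn Any)>>,
}

impl Router {
    fn new() -> Self {
        Router { handlers: HashMap::new() }
    }

    fn subscribe<M: Any>(&mut self, handler: impl Fn(&M) + 'static) {
        self.handlers.insert(
            TypeId::of::<M>(),
            Box::new(move |msg| handler(msg.downcast_ref::<M>().unwrap())),
        );
    }

    fn dispatch<M: Any>(&self, msg: &M) {
        if let Some(handler) = self.handlers.get(&TypeId::of::<M>()) {
            handler(msg);
        }
    }
}

struct GetDefinition { buffer_id: u64 }

fn main() {
    let mut router = Router::new();
    router.subscribe(|msg: &GetDefinition| {
        println!("looking up definition in buffer {}", msg.buffer_id);
    });
    router.dispatch(&GetDefinition { buffer_id: 1 });
}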

@@ -951,10 +953,10 @@ impl Project {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<Vec<Definition>>> {
         let source_buffer_handle = source_buffer_handle.clone();
-        let buffer = source_buffer_handle.read(cx);
+        let source_buffer = source_buffer_handle.read(cx);
         let worktree;
         let buffer_abs_path;
-        if let Some(file) = File::from_dyn(buffer.file()) {
+        if let Some(file) = File::from_dyn(source_buffer.file()) {
             worktree = file.worktree.clone();
             buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
         } else {

@@ -962,11 +964,11 @@ impl Project {
         };
 
         if worktree.read(cx).as_local().is_some() {
-            let point = buffer.offset_to_point_utf16(position.to_offset(buffer));
+            let point = source_buffer.offset_to_point_utf16(position.to_offset(source_buffer));
             let buffer_abs_path = buffer_abs_path.unwrap();
             let lang_name;
             let lang_server;
-            if let Some(lang) = buffer.language() {
+            if let Some(lang) = source_buffer.language() {
                 lang_name = lang.name().to_string();
                 if let Some(server) = self
                     .language_servers

@@ -1061,9 +1063,67 @@ impl Project {
 
                 Ok(definitions)
             })
+        } else if let Some(project_id) = self.remote_id() {
+            let client = self.client.clone();
+            let replica_id = self.replica_id();
+            let request = proto::GetDefinition {
+                project_id,
+                buffer_id: source_buffer.remote_id(),
+                position: Some(serialize_anchor(&source_buffer.anchor_before(position))),
+            };
+            cx.spawn(|this, mut cx| async move {
+                let response = client.request(request).await?;
+                this.update(&mut cx, |this, cx| {
+                    let mut definitions = Vec::new();
+                    for definition in response.definitions {
+                        let target_buffer = match definition
+                            .buffer
+                            .ok_or_else(|| anyhow!("missing buffer"))?
+                        {
+                            proto::definition::Buffer::Id(id) => this
+                                .open_buffers
+                                .get(&(id as usize))
+                                .and_then(|buffer| buffer.upgrade(cx))
+                                .ok_or_else(|| anyhow!("no buffer exists for id {}", id))?,
+                            proto::definition::Buffer::State(mut buffer) => {
+                                let file = if let Some(file) = buffer.file.take() {
+                                    let worktree_id = WorktreeId::from_proto(file.worktree_id);
+                                    let worktree =
+                                        this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
+                                            anyhow!("no worktree found for id {}", file.worktree_id)
+                                        })?;
+                                    let file = File::from_proto(file, worktree.clone(), cx)?;
+                                    Some(Box::new(file) as Box<dyn language::File>)
+                                } else {
+                                    None
+                                };
+
+                                let buffer = cx.add_model(|cx| {
+                                    Buffer::from_proto(replica_id, buffer, file, cx).unwrap()
+                                });
+                                this.register_buffer(&buffer, &worktree, cx)?;
+                                buffer
+                            }
+                        };
+                        let target_start = definition
+                            .target_start
+                            .and_then(deserialize_anchor)
+                            .ok_or_else(|| anyhow!("missing target start"))?;
+                        let target_end = definition
+                            .target_end
+                            .and_then(deserialize_anchor)
+                            .ok_or_else(|| anyhow!("missing target end"))?;
+                        definitions.push(Definition {
+                            target_buffer,
+                            target_range: target_start..target_end,
+                        })
+                    }
+
+                    Ok(definitions)
+                })
+            })
         } else {
-            log::info!("go to definition is not yet implemented for guests");
-            Task::ready(Ok(Default::default()))
+            Task::ready(Err(anyhow!("project does not have a remote id")))
         }
     }
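
Note: this is the new guest-side path. The host identifies each definition's target buffer either by a bare id (when this guest has already received that buffer) or by its full serialized state, which the guest instantiates and registers under its id for later reuse. A stripped-down, runnable sketch of that resolution step, with a plain `HashMap` and `String` standing in for the project's map of weak buffer handles:

use std::collections::HashMap;

// Simplified stand-ins: the real map holds weak buffer handles keyed by
// remote id; here a String plays the role of a deserialized Buffer model.
enum ProtoBuffer {
    // The guest has already received this buffer; refer to it by id.
    Id(u64),
    // First mention: ship the full state (id plus contents, in this toy).
    State(u64, String),
}

fn resolve_buffer(
    open_buffers: &mut HashMap<u64, String>,
    buffer: ProtoBuffer,
) -> Result<String, String> {
    match buffer {
        ProtoBuffer::Id(id) => open_buffers
            .get(&id)
            .cloned()
            .ok_or_else(|| format!("no buffer exists for id {}", id)),
        ProtoBuffer::State(id, contents) => {
            // Register the new buffer so later responses can use Id alone.
            open_buffers.insert(id, contents.clone());
            Ok(contents)
        }
    }
}

fn main() {
    let mut open_buffers = HashMap::new();
    let first =
        resolve_buffer(&mut open_buffers, ProtoBuffer::State(7, "const TWO: usize = 2;".into()));
    let second = resolve_buffer(&mut open_buffers, ProtoBuffer::Id(7));
    assert_eq!(first, second);
}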

@@ -1627,6 +1687,62 @@ impl Project {
         Ok(())
     }
 
+    pub fn handle_get_definition(
+        &mut self,
+        envelope: TypedEnvelope<proto::GetDefinition>,
+        rpc: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        let receipt = envelope.receipt();
+        let sender_id = envelope.original_sender_id()?;
+        let source_buffer = self
+            .shared_buffers
+            .get(&sender_id)
+            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+        let position = envelope
+            .payload
+            .position
+            .and_then(deserialize_anchor)
+            .ok_or_else(|| anyhow!("invalid position"))?;
+        if !source_buffer.read(cx).can_resolve(&position) {
+            return Err(anyhow!("cannot resolve position"));
+        }
+
+        let definitions = self.definition(&source_buffer, position, cx);
+        cx.spawn(|this, mut cx| async move {
+            let definitions = definitions.await?;
+            let mut response = proto::GetDefinitionResponse {
+                definitions: Default::default(),
+            };
+            this.update(&mut cx, |this, cx| {
+                for definition in definitions {
+                    let buffer_id = definition.target_buffer.read(cx).remote_id();
+                    let shared_buffers = this.shared_buffers.entry(sender_id).or_default();
+                    let buffer = match shared_buffers.entry(buffer_id) {
+                        hash_map::Entry::Occupied(_) => proto::definition::Buffer::Id(buffer_id),
+                        hash_map::Entry::Vacant(entry) => {
+                            entry.insert(definition.target_buffer.clone());
+                            proto::definition::Buffer::State(
+                                definition.target_buffer.read(cx).to_proto(),
+                            )
+                        }
+                    };
+                    response.definitions.push(proto::Definition {
+                        target_start: Some(serialize_anchor(&definition.target_range.start)),
+                        target_end: Some(serialize_anchor(&definition.target_range.end)),
+                        buffer: Some(buffer),
+                    });
+                }
+            });
+            rpc.respond(receipt, response).await?;
+            Ok::<_, anyhow::Error>(())
+        })
+        .detach_and_log_err(cx);
+
+        Ok(())
+    }
+
     pub fn handle_open_buffer(
         &mut self,
         envelope: TypedEnvelope<proto::OpenBuffer>,
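
Note: the host-side counterpart. Because the server forwards the guest's envelope, the handler keys its bookkeeping on `original_sender_id` — the guest that actually asked — rather than the server connection. The `shared_buffers` map then ensures a buffer's full state is sent to a given guest at most once; afterwards only its id crosses the wire. The `Entry`-based decision, reduced to a runnable sketch:

use std::collections::{hash_map::Entry, HashMap};

// What the host sends to identify a definition's target buffer.
#[derive(Debug)]
enum ProtoBuffer {
    Id(u64),
    State(String), // full serialized state, toy version
}

// Per-guest record of buffers already shipped; in the real code the value is
// a strong buffer handle, which also keeps the buffer alive for the guest.
fn buffer_for_guest(
    shared: &mut HashMap<u64, String>,
    buffer_id: u64,
    state: &str,
) -> ProtoBuffer {
    match shared.entry(buffer_id) {
        // Already sent: the guest can look it up by id.
        Entry::Occupied(_) => ProtoBuffer::Id(buffer_id),
        // First time: remember that it was sent and ship the whole state.
        Entry::Vacant(entry) => {
            entry.insert(state.to_string());
            ProtoBuffer::State(state.to_string())
        }
    }
}

fn main() {
    let mut shared = HashMap::new();
    // Prints State(...) the first time, Id(7) the second.
    println!("{:?}", buffer_for_guest(&mut shared, 7, "const TWO: usize = 2;"));
    println!("{:?}", buffer_for_guest(&mut shared, 7, "const TWO: usize = 2;"));
}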

@@ -20,40 +20,42 @@ message Envelope {
         LeaveProject leave_project = 14;
         AddProjectCollaborator add_project_collaborator = 15;
         RemoveProjectCollaborator remove_project_collaborator = 16;
+        GetDefinition get_definition = 17;
+        GetDefinitionResponse get_definition_response = 18;
 
-        RegisterWorktree register_worktree = 17;
-        UnregisterWorktree unregister_worktree = 18;
-        ShareWorktree share_worktree = 19;
-        UpdateWorktree update_worktree = 20;
-        UpdateDiagnosticSummary update_diagnostic_summary = 21;
-        DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 22;
-        DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 23;
+        RegisterWorktree register_worktree = 19;
+        UnregisterWorktree unregister_worktree = 20;
+        ShareWorktree share_worktree = 21;
+        UpdateWorktree update_worktree = 22;
+        UpdateDiagnosticSummary update_diagnostic_summary = 23;
+        DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 24;
+        DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 25;
 
-        OpenBuffer open_buffer = 24;
-        OpenBufferResponse open_buffer_response = 25;
-        CloseBuffer close_buffer = 26;
-        UpdateBuffer update_buffer = 27;
-        UpdateBufferFile update_buffer_file = 28;
-        SaveBuffer save_buffer = 29;
-        BufferSaved buffer_saved = 30;
-        BufferReloaded buffer_reloaded = 31;
-        FormatBuffer format_buffer = 32;
+        OpenBuffer open_buffer = 26;
+        OpenBufferResponse open_buffer_response = 27;
+        CloseBuffer close_buffer = 28;
+        UpdateBuffer update_buffer = 29;
+        UpdateBufferFile update_buffer_file = 30;
+        SaveBuffer save_buffer = 31;
+        BufferSaved buffer_saved = 32;
+        BufferReloaded buffer_reloaded = 33;
+        FormatBuffer format_buffer = 34;
 
-        GetChannels get_channels = 33;
-        GetChannelsResponse get_channels_response = 34;
-        JoinChannel join_channel = 35;
-        JoinChannelResponse join_channel_response = 36;
-        LeaveChannel leave_channel = 37;
-        SendChannelMessage send_channel_message = 38;
-        SendChannelMessageResponse send_channel_message_response = 39;
-        ChannelMessageSent channel_message_sent = 40;
-        GetChannelMessages get_channel_messages = 41;
-        GetChannelMessagesResponse get_channel_messages_response = 42;
+        GetChannels get_channels = 35;
+        GetChannelsResponse get_channels_response = 36;
+        JoinChannel join_channel = 37;
+        JoinChannelResponse join_channel_response = 38;
+        LeaveChannel leave_channel = 39;
+        SendChannelMessage send_channel_message = 40;
+        SendChannelMessageResponse send_channel_message_response = 41;
+        ChannelMessageSent channel_message_sent = 42;
+        GetChannelMessages get_channel_messages = 43;
+        GetChannelMessagesResponse get_channel_messages_response = 44;
 
-        UpdateContacts update_contacts = 43;
+        UpdateContacts update_contacts = 45;
 
-        GetUsers get_users = 44;
-        GetUsersResponse get_users_response = 45;
+        GetUsers get_users = 46;
+        GetUsersResponse get_users_response = 47;
     }
 }
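
Note: inserting `get_definition = 17` renumbers every later variant, which changes the wire encoding: a protobuf field is identified only by its number, encoded into the key as `(field_number << 3) | wire_type`. That breaks compatibility with any peer built against the old numbering, presumably acceptable here because zed's client and server ship in lockstep. A quick illustration:

// Why renumbering matters: RegisterWorktree is a length-delimited field
// (wire type 2), so moving its tag from 17 to 19 changes its wire key.
fn field_key(field_number: u32, wire_type: u32) -> u32 {
    (field_number << 3) | wire_type
}

fn main() {
    let old = field_key(17, 2);
    let new = field_key(19, 2);
    assert_ne!(old, new); // 138 vs 154: an old peer would misparse new frames
    println!("old key: {old}, new key: {new}");
}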

@@ -134,6 +136,25 @@ message RemoveProjectCollaborator {
     uint32 peer_id = 2;
 }
 
+message GetDefinition {
+    uint64 project_id = 1;
+    uint64 buffer_id = 2;
+    Anchor position = 3;
+}
+
+message GetDefinitionResponse {
+    repeated Definition definitions = 1;
+}
+
+message Definition {
+    oneof buffer {
+        uint64 id = 1;
+        Buffer state = 2;
+    }
+    Anchor target_start = 3;
+    Anchor target_end = 4;
+}
+
 message OpenBuffer {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
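
Note: the `oneof buffer` here is what the Rust code above matches as `proto::definition::Buffer::Id` / `::State`. With prost-style code generation (which these module paths suggest), a oneof becomes an enum in a module named after its message, and message-typed fields become `Option`s — hence all the `.ok_or_else(...)` calls. A hand-written approximation, not the actual generated code:

// Approximation of what code generation yields for `message Definition`;
// the real output also derives the codec traits.
pub struct Buffer {
    pub content: String, // stand-in for the serialized buffer state fields
}

pub struct Anchor {
    pub replica_id: u32,
    pub local_timestamp: u32,
}

pub mod definition {
    // The oneof becomes an enum in a module named after the message.
    pub enum Buffer {
        Id(u64),
        State(super::Buffer),
    }
}

pub struct Definition {
    pub buffer: Option<definition::Buffer>, // oneofs are always optional
    pub target_start: Option<Anchor>,       // message-typed fields too
    pub target_end: Option<Anchor>,
}

fn main() {
    let def = Definition {
        buffer: Some(definition::Buffer::Id(7)),
        target_start: Some(Anchor { replica_id: 0, local_timestamp: 1 }),
        target_end: Some(Anchor { replica_id: 0, local_timestamp: 2 }),
    };
    match def.buffer {
        Some(definition::Buffer::Id(id)) => println!("reuse buffer {}", id),
        Some(definition::Buffer::State(_)) => println!("instantiate new buffer"),
        None => println!("missing buffer"),
    }
}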

@@ -134,6 +134,8 @@ messages!(
     GetChannelMessagesResponse,
     GetChannels,
     GetChannelsResponse,
+    GetDefinition,
+    GetDefinitionResponse,
     GetUsers,
     GetUsersResponse,
     JoinChannel,

@@ -168,6 +170,7 @@ request_messages!(
     (FormatBuffer, Ack),
     (GetChannelMessages, GetChannelMessagesResponse),
     (GetChannels, GetChannelsResponse),
+    (GetDefinition, GetDefinitionResponse),
     (GetUsers, GetUsersResponse),
     (JoinChannel, JoinChannelResponse),
     (JoinProject, JoinProjectResponse),

@@ -191,6 +194,7 @@ entity_messages!(
     DiskBasedDiagnosticsUpdated,
     DiskBasedDiagnosticsUpdating,
     FormatBuffer,
+    GetDefinition,
     JoinProject,
     LeaveProject,
     OpenBuffer,
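
Note: a new message has to be registered in all three macros: `messages!` declares it as an envelope payload, `request_messages!` pairs each request with its response type, and `entity_messages!` marks messages routed to a specific project entity. The request/response pairing plausibly boils down to a trait like the following — a hypothetical sketch, not zed's actual definitions:

// A guess at the kind of trait request_messages! wires up: each request
// names its response type, so client.request(GetDefinition { .. }) can
// return a typed GetDefinitionResponse.
trait RequestMessage {
    type Response;
}

#[derive(Default, Debug)]
struct GetDefinitionResponse {
    definitions: Vec<String>, // stand-in payload
}

struct GetDefinition {
    buffer_id: u64,
}

impl RequestMessage for GetDefinition {
    type Response = GetDefinitionResponse;
}

// Toy stand-in for client.request(...): the response type comes from the trait.
fn request<R: RequestMessage>(_req: R) -> R::Response
where
    R::Response: Default,
{
    R::Response::default()
}

fn main() {
    let response: GetDefinitionResponse = request(GetDefinition { buffer_id: 1 });
    println!("{:?}", response.definitions);
}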

@@ -17,7 +17,7 @@ use rpc::{
     Connection, ConnectionId, Peer, TypedEnvelope,
 };
 use sha1::{Digest as _, Sha1};
-use std::{any::TypeId, future::Future, mem, path::PathBuf, sync::Arc, time::Instant};
+use std::{any::TypeId, future::Future, path::PathBuf, sync::Arc, time::Instant};
 use store::{Store, Worktree};
 use surf::StatusCode;
 use tide::log;

@@ -74,6 +74,7 @@ impl Server {
             .add_handler(Server::update_diagnostic_summary)
             .add_handler(Server::disk_based_diagnostics_updating)
             .add_handler(Server::disk_based_diagnostics_updated)
+            .add_handler(Server::get_definition)
             .add_handler(Server::open_buffer)
             .add_handler(Server::close_buffer)
             .add_handler(Server::update_buffer)

@@ -479,26 +480,40 @@ impl Server {
             .worktree
             .as_mut()
             .ok_or_else(|| anyhow!("missing worktree"))?;
-        let entries = mem::take(&mut worktree.entries)
-            .into_iter()
-            .map(|entry| (entry.id, entry))
+        let entries = worktree
+            .entries
+            .iter()
+            .map(|entry| (entry.id, entry.clone()))
             .collect();
+        let diagnostic_summaries = worktree
+            .diagnostic_summaries
+            .iter()
+            .map(|summary| (PathBuf::from(summary.path.clone()), summary.clone()))
+            .collect();
 
-        let diagnostic_summaries = mem::take(&mut worktree.diagnostic_summaries)
-            .into_iter()
-            .map(|summary| (PathBuf::from(summary.path.clone()), summary))
-            .collect();
-
-        let contact_user_ids = self.state_mut().share_worktree(
+        let shared_worktree = self.state_mut().share_worktree(
             request.payload.project_id,
             worktree.id,
             request.sender_id,
             entries,
             diagnostic_summaries,
         );
-        if let Some(contact_user_ids) = contact_user_ids {
+        if let Some(shared_worktree) = shared_worktree {
             broadcast(
                 request.sender_id,
+                shared_worktree.connection_ids,
                 |connection_id| {
                     self.peer.forward_send(
                         request.sender_id,
                         connection_id,
                         request.payload.clone(),
                     )
                 },
             )
             .await?;
             self.peer.respond(request.receipt(), proto::Ack {}).await?;
-            self.update_contacts_for_users(&contact_user_ids).await?;
+            self.update_contacts_for_users(&shared_worktree.authorized_user_ids)
+                .await?;
         } else {
             self.peer
                 .respond_with_error(
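
Note: `share_worktree` used to consume the request payload with `mem::take`; it now clones, presumably because the same payload is subsequently forwarded to existing guests via `request.payload.clone()` in the broadcast above. A small demonstration of the difference:

use std::mem;

fn main() {
    let mut entries = vec!["a.rs".to_string(), "b.rs".to_string()];

    // The old code consumed the payload: after mem::take, `entries` is empty,
    // so the same payload could not also be forwarded to guests afterwards.
    let taken: Vec<String> = mem::take(&mut entries);
    assert!(entries.is_empty());

    // The new code clones instead, leaving the original intact so the same
    // request payload can still be broadcast to other connections.
    let entries = taken;
    let copied: Vec<(usize, String)> = entries
        .iter()
        .enumerate()
        .map(|(id, entry)| (id, entry.clone()))
        .collect();
    assert_eq!(entries.len(), copied.len());
}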

@@ -594,6 +609,24 @@ impl Server {
         Ok(())
     }
 
+    async fn get_definition(
+        self: Arc<Server>,
+        request: TypedEnvelope<proto::GetDefinition>,
+    ) -> tide::Result<()> {
+        let receipt = request.receipt();
+        let host_connection_id = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .host_connection_id;
+        let response = self
+            .peer
+            .forward_request(request.sender_id, host_connection_id, request.payload)
+            .await?;
+        self.peer.respond(receipt, response).await?;
+        Ok(())
+    }
+
     async fn open_buffer(
         self: Arc<Server>,
         request: TypedEnvelope<proto::OpenBuffer>,

@@ -1156,8 +1189,8 @@ mod tests {
         editor::{Editor, EditorSettings, Input, MultiBuffer},
         fs::{FakeFs, Fs as _},
         language::{
-            tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
-            LanguageRegistry, LanguageServerConfig, Point,
+            tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language,
+            LanguageConfig, LanguageRegistry, LanguageServerConfig, Point,
         },
         lsp,
         project::{DiagnosticSummary, Project, ProjectPath},

@@ -2318,6 +2351,163 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_definition(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+        cx_a.foreground().forbid_parking();
+        let mut lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = Arc::new(FakeFs::new());
+        fs.insert_tree(
+            "/root-1",
+            json!({
+                ".zed.toml": r#"collaborators = ["user_b"]"#,
+                "a.rs": "const ONE: usize = b::TWO + b::THREE;",
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root-2",
+            json!({
+                "b.rs": "const TWO: usize = 2;\nconst THREE: usize = 3;",
+            }),
+        )
+        .await;
+
+        // Set up a fake language server.
+        let (language_server_config, mut fake_language_server) =
+            LanguageServerConfig::fake(cx_a.background()).await;
+        Arc::get_mut(&mut lang_registry)
+            .unwrap()
+            .add(Arc::new(Language::new(
+                LanguageConfig {
+                    name: "Rust".to_string(),
+                    path_suffixes: vec!["rs".to_string()],
+                    language_server: Some(language_server_config),
+                    ..Default::default()
+                },
+                Some(tree_sitter_rust::language()),
+            )));
+
+        // Connect to a server as 2 clients.
+        let mut server = TestServer::start(cx_a.foreground()).await;
+        let client_a = server.create_client(&mut cx_a, "user_a").await;
+        let client_b = server.create_client(&mut cx_b, "user_b").await;
+
+        // Share a project as client A.
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let (worktree_a, _) = project_a
+            .update(&mut cx_a, |p, cx| {
+                p.find_or_create_local_worktree("/root-1", false, cx)
+            })
+            .await
+            .unwrap();
+        worktree_a
+            .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+            .await;
+        let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
+        let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
+        project_a
+            .update(&mut cx_a, |p, cx| p.share(cx))
+            .await
+            .unwrap();
+
+        // Join the worktree as client B.
+        let project_b = Project::remote(
+            project_id,
+            client_b.clone(),
+            client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
+            &mut cx_b.to_async(),
+        )
+        .await
+        .unwrap();
+
+        // Open the file on client B.
+        let buffer_b = cx_b
+            .background()
+            .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)))
+            .await
+            .unwrap();
+
+        // Request a definition as the guest and answer via the fake server.
+        let definitions_1 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 23, cx));
+        let (request_id, _) = fake_language_server
+            .receive_request::<lsp::request::GotoDefinition>()
+            .await;
+        fake_language_server
+            .respond(
+                request_id,
+                Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
+                    lsp::Url::from_file_path("/root-2/b.rs").unwrap(),
+                    lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
+                ))),
+            )
+            .await;
+        let definitions_1 = definitions_1.await.unwrap();
+        cx_b.read(|cx| {
+            assert_eq!(definitions_1.len(), 1);
+            assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
+            let target_buffer = definitions_1[0].target_buffer.read(cx);
+            assert_eq!(
+                target_buffer.text(),
+                "const TWO: usize = 2;\nconst THREE: usize = 3;"
+            );
+            assert_eq!(
+                definitions_1[0].target_range.to_point(target_buffer),
+                Point::new(0, 6)..Point::new(0, 9)
+            );
+        });
+
+        // Try getting more definitions for the same buffer, ensuring the buffer gets reused from
+        // the previous call to `definition`.
+        let definitions_2 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 33, cx));
+        let (request_id, _) = fake_language_server
+            .receive_request::<lsp::request::GotoDefinition>()
+            .await;
+        fake_language_server
+            .respond(
+                request_id,
+                Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
+                    lsp::Url::from_file_path("/root-2/b.rs").unwrap(),
+                    lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
+                ))),
+            )
+            .await;
+        let definitions_2 = definitions_2.await.unwrap();
+        cx_b.read(|cx| {
+            assert_eq!(definitions_2.len(), 1);
+            assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
+            let target_buffer = definitions_2[0].target_buffer.read(cx);
+            assert_eq!(
+                target_buffer.text(),
+                "const TWO: usize = 2;\nconst THREE: usize = 3;"
+            );
+            assert_eq!(
+                definitions_2[0].target_range.to_point(target_buffer),
+                Point::new(1, 6)..Point::new(1, 11)
+            );
+        });
+        assert_eq!(
+            definitions_1[0].target_buffer,
+            definitions_2[0].target_buffer
+        );
+
+        // Dropping the definitions should release the remote buffer's worktree.
+        cx_b.update(|_| {
+            drop(definitions_1);
+            drop(definitions_2);
+        });
+        project_b
+            .condition(&cx_b, |proj, cx| proj.worktrees(cx).count() == 1)
+            .await;
+    }
+
     #[gpui::test]
     async fn test_basic_chat(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
         cx_a.foreground().forbid_parking();
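
Note: the final assertions exercise buffer lifetime. Each `Definition` holds a strong handle to its target buffer, while the guest project tracks open buffers weakly; dropping both definitions releases the buffer, and with it the extra worktree that was created for `/root-2`, which is what the `condition` wait observes. The ownership pattern, modeled with `Rc`/`Weak`:

use std::rc::{Rc, Weak};

// A toy version of the lifetime the test exercises: the project keeps only a
// weak reference to a remotely opened buffer, while each Definition holds a
// strong one.
struct RemoteBuffer {
    text: &'static str,
}

fn main() {
    let definition = Rc::new(RemoteBuffer { text: "const TWO: usize = 2;" });
    let open_buffer: Weak<RemoteBuffer> = Rc::downgrade(&definition);

    // While a Definition is alive, the project can still upgrade its handle.
    assert_eq!(open_buffer.upgrade().unwrap().text, "const TWO: usize = 2;");

    // Once the last strong handle drops, the buffer (and, in zed, the extra
    // worktree) can be released; the test's wait on worktrees(cx).count() == 1
    // observes exactly this.
    drop(definition);
    assert!(open_buffer.upgrade().is_none());
}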

@@ -74,6 +74,11 @@ pub struct LeftProject {
     pub authorized_user_ids: Vec<UserId>,
 }
 
+pub struct SharedWorktree {
+    pub authorized_user_ids: Vec<UserId>,
+    pub connection_ids: Vec<ConnectionId>,
+}
+
 impl Store {
     pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId) {
         self.connections.insert(

@@ -393,7 +398,7 @@ impl Store {
         connection_id: ConnectionId,
         entries: HashMap<u64, proto::Entry>,
         diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
-    ) -> Option<Vec<UserId>> {
+    ) -> Option<SharedWorktree> {
         let project = self.projects.get_mut(&project_id)?;
         let worktree = project.worktrees.get_mut(&worktree_id)?;
         if project.host_connection_id == connection_id && project.share.is_some() {

@@ -401,7 +406,10 @@ impl Store {
                 entries,
                 diagnostic_summaries,
             });
-            Some(project.authorized_user_ids())
+            Some(SharedWorktree {
+                authorized_user_ids: project.authorized_user_ids(),
+                connection_ids: project.guest_connection_ids(),
+            })
         } else {
             None
         }