Merge remote-tracking branch 'origin/main' into ai-refactoring
Commit 44f554f489
96 changed files with 4850 additions and 1757 deletions
28 Cargo.lock (generated)

@@ -1206,6 +1206,7 @@ dependencies = [
"client",
"collections",
"db",
"feature_flags",
"futures 0.3.28",
"gpui",
"image",
@@ -1221,7 +1222,6 @@ dependencies = [
"serde_derive",
"settings",
"smol",
"staff_mode",
"sum_tree",
"tempfile",
"text",
@@ -1380,6 +1380,7 @@ dependencies = [
"async-tungstenite",
"collections",
"db",
"feature_flags",
"futures 0.3.28",
"gpui",
"image",
@@ -1394,7 +1395,6 @@ dependencies = [
"serde_derive",
"settings",
"smol",
"staff_mode",
"sum_tree",
"tempfile",
"text",
@@ -1534,6 +1534,7 @@ dependencies = [
"context_menu",
"db",
"editor",
"feature_flags",
"feedback",
"futures 0.3.28",
"fuzzy",
@@ -1549,7 +1550,6 @@ dependencies = [
"serde",
"serde_derive",
"settings",
"staff_mode",
"theme",
"theme_selector",
"util",
@@ -2535,6 +2535,14 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"

[[package]]
name = "feature_flags"
version = "0.1.0"
dependencies = [
"anyhow",
"gpui",
]

[[package]]
name = "feedback"
version = "0.1.0"
@@ -6841,6 +6849,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"feature_flags",
"fs",
"futures 0.3.28",
"gpui",
@@ -6856,7 +6865,6 @@ dependencies = [
"serde_json_lenient",
"smallvec",
"sqlez",
"staff_mode",
"toml 0.5.11",
"tree-sitter",
"tree-sitter-json 0.19.0",
@@ -7291,14 +7299,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"

[[package]]
name = "staff_mode"
version = "0.1.0"
dependencies = [
"anyhow",
"gpui",
]

[[package]]
name = "static_assertions"
version = "1.1.0"
@@ -7679,6 +7679,7 @@ name = "theme_selector"
version = "0.1.0"
dependencies = [
"editor",
"feature_flags",
"fs",
"fuzzy",
"gpui",
@@ -7688,7 +7689,6 @@ dependencies = [
"postage",
"settings",
"smol",
"staff_mode",
"theme",
"util",
"workspace",
@@ -9733,6 +9733,7 @@ dependencies = [
"diagnostics",
"editor",
"env_logger 0.9.3",
"feature_flags",
"feedback",
"file_finder",
"fs",
@@ -9779,7 +9780,6 @@ dependencies = [
"simplelog",
"smallvec",
"smol",
"staff_mode",
"sum_tree",
"tempdir",
"terminal_view",
@@ -62,7 +62,7 @@ members = [
"crates/snippet",
"crates/sqlez",
"crates/sqlez_macros",
"crates/staff_mode",
"crates/feature_flags",
"crates/sum_tree",
"crates/terminal",
"crates/text",
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.71-bullseye as builder
FROM rust:1.72-bullseye as builder
WORKDIR app
COPY . .
@@ -98,6 +98,7 @@
// Whether to show selections in the scrollbar.
"selections": true
},
"relative_line_numbers": false,
// Inlay hint related settings
"inlay_hints": {
// Global switch to toggle hints on and off, switched off by default.
@@ -284,8 +285,6 @@
// "directory": "~/zed/projects/"
// }
// }
//
//
"working_directory": "current_project_directory",
// Set the cursor blinking behavior in the terminal.
// May take 4 values:
@@ -334,13 +333,32 @@
// "line_height": {
// "custom": 2
// },
"line_height": "comfortable"
"line_height": "comfortable",
// Activate the python virtual environment, if one is found, in the
// terminal's working directory (as resolved by the working_directory
// setting). Set this to "off" to disable this behavior.
"detect_venv": {
"on": {
// Default directories to search for virtual environments, relative
// to the current working directory. We recommend overriding this
// in your project's settings, rather than globally.
"directories": [
".env",
"env",
".venv",
"venv"
],
// Can also be 'csh' and 'fish'
"activate_script": "default"
}
}
// Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size.
// "font_size": "15"
// "font_size": "15",
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
// "font_family": "Zed Mono"
// "font_family": "Zed Mono",
// ---
},
// Difference settings for semantic_index
"semantic_index": {
@@ -1478,14 +1478,14 @@ impl Conversation {
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx);
let mut buffer = Buffer::new(0, cx.model_id() as u64, "");
buffer.set_language_registry(language_registry);
cx.spawn_weak(|buffer, mut cx| async move {
let markdown = markdown.await?;
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
buffer.update(&mut cx, |buffer, cx| {
buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -1567,7 +1567,7 @@ impl Conversation {
let mut message_anchors = Vec::new();
let mut next_message_id = MessageId(0);
let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, saved_conversation.text, cx);
let mut buffer = Buffer::new(0, cx.model_id() as u64, saved_conversation.text);
for message in saved_conversation.messages {
message_anchors.push(MessageAnchor {
id: message.id,
@@ -1581,7 +1581,7 @@ impl Conversation {
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
buffer.update(&mut cx, |buffer, cx| {
buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -1751,7 +1751,9 @@ impl Conversation {
stream: true,
};

let Some(api_key) = self.api_key.borrow().clone() else { continue };
let Some(api_key) = self.api_key.borrow().clone() else {
continue;
};
let stream = stream_completion(api_key, cx.background().clone(), request);
let assistant_message = self
.insert_message_after(
@@ -2107,7 +2109,9 @@ impl Conversation {
}) {
current_message = messages.next();
}
let Some(message) = current_message.as_ref() else { break };
let Some(message) = current_message.as_ref() else {
break;
};

// Skip offsets that are in the same message.
while offsets.peek().map_or(false, |offset| {
@@ -2544,7 +2548,10 @@ impl ConversationEditor {
let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
return;
};
let Some(editor) = workspace.active_item(cx).and_then(|item| item.act_as::<Editor>(cx)) else {
let Some(editor) = workspace
.active_item(cx)
.and_then(|item| item.act_as::<Editor>(cx))
else {
return;
};
@@ -644,7 +644,9 @@ impl Room {

if let Some(participants) = remote_participants.log_err() {
for (participant, user) in room.participants.into_iter().zip(participants) {
let Some(peer_id) = participant.peer_id else { continue };
let Some(peer_id) = participant.peer_id else {
continue;
};
this.participant_user_ids.insert(participant.user_id);

let old_projects = this
@@ -21,7 +21,7 @@ rpc = { path = "../rpc" }
text = { path = "../text" }
language = { path = "../language" }
settings = { path = "../settings" }
staff_mode = { path = "../staff_mode" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }

anyhow.workspace = true
@@ -19,7 +19,7 @@ util = { path = "../util" }
rpc = { path = "../rpc" }
text = { path = "../text" }
settings = { path = "../settings" }
staff_mode = { path = "../staff_mode" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }

anyhow.workspace = true
@@ -135,8 +135,6 @@ impl Telemetry {
}
}

/// This method takes the entire TelemetrySettings struct in order to force client code
/// to pull the struct out of the settings global. Do not remove!
pub fn set_authenticated_user_info(
self: &Arc<Self>,
metrics_id: Option<String>,
@@ -168,6 +168,7 @@ impl FakeServer {
GetPrivateUserInfoResponse {
metrics_id: "the-metrics-id".into(),
staff: false,
flags: Default::default(),
},
)
.await;
@@ -1,11 +1,11 @@
use super::{proto, Client, Status, TypedEnvelope};
use anyhow::{anyhow, Context, Result};
use collections::{hash_map::Entry, HashMap, HashSet};
use feature_flags::FeatureFlagAppExt;
use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{sink::Sink, watch};
use rpc::proto::{RequestMessage, UsersResponse};
use staff_mode::StaffMode;
use std::sync::{Arc, Weak};
use util::http::HttpClient;
use util::TryFutureExt as _;
@@ -145,26 +145,23 @@ impl UserStore {
let fetch_metrics_id =
client.request(proto::GetPrivateUserInfo {}).log_err();
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
cx.read(|cx| {
client.telemetry.set_authenticated_user_info(
info.as_ref().map(|info| info.metrics_id.clone()),
info.as_ref().map(|info| info.staff).unwrap_or(false),
cx,
)
});

cx.update(|cx| {
cx.update_default_global(|staff_mode: &mut StaffMode, _| {
if !staff_mode.0 {
*staff_mode = StaffMode(
info.as_ref()
.map(|info| info.staff)
.unwrap_or_default(),
)
}
()
if let Some(info) = info {
cx.update(|cx| {
cx.update_flags(info.staff, info.flags);
client.telemetry.set_authenticated_user_info(
Some(info.metrics_id.clone()),
info.staff,
cx,
)
});
});
} else {
cx.read(|cx| {
client
.telemetry
.set_authenticated_user_info(None, false, cx)
});
}

current_user_tx.send(user).await.ok();
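The hunk above retires the old StaffMode global: once GetPrivateUserInfo arrives, the client calls cx.update_flags(info.staff, info.flags) to publish the server-granted flags into the app context. As a rough sketch of the consuming side (not part of the diff; CollabPanel, its subscriptions field, and update_entries are assumed from the collab panel hunks further down, and only the has_flag/observe_flag calls are taken verbatim from this commit):

    use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
    use gpui::ViewContext;

    // Sketch: how a view reacts to flags that UserStore just pushed via
    // `cx.update_flags(...)`. `CollabPanel`, `subscriptions`, and
    // `update_entries` are assumed to exist as in the collab panel hunks below.
    fn wire_up_channels_flag(this: &mut CollabPanel, cx: &mut ViewContext<CollabPanel>) {
        // Re-run entry construction whenever the ChannelsAlpha flag changes.
        this.subscriptions.push(
            cx.observe_flag::<ChannelsAlpha, _>(move |_, this, cx| this.update_entries(true, cx)),
        );

        // Only build the channels section when the server granted the flag.
        if cx.has_flag::<ChannelsAlpha>() {
            // ...populate channel entries...
        }
    }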
@@ -249,3 +249,22 @@ CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replic
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");


CREATE TABLE "feature_flags" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"flag" TEXT NOT NULL UNIQUE
);

CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");


CREATE TABLE "user_features" (
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE,
PRIMARY KEY (user_id, feature_id)
);

CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
@@ -0,0 +1,16 @@
CREATE TABLE "feature_flags" (
"id" SERIAL PRIMARY KEY,
"flag" VARCHAR(255) NOT NULL UNIQUE
);

CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id");

CREATE TABLE "user_features" (
"user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
"feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE,
PRIMARY KEY (user_id, feature_id)
);

CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
@@ -249,7 +249,9 @@ impl Database {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
return Err(anyhow!("couldn't complete transaction because it's still in use"))?;
return Err(anyhow!(
"couldn't complete transaction because it's still in use"
))?;
};

Ok((tx, result))
@@ -125,3 +125,4 @@ id_type!(ServerId);
id_type!(SignupId);
id_type!(UserId);
id_type!(ChannelBufferCollaboratorId);
id_type!(FlagId);
@@ -465,9 +465,9 @@ impl Database {
let mut rejoined_projects = Vec::new();
for rejoined_project in &rejoin_room.rejoined_projects {
let project_id = ProjectId::from_proto(rejoined_project.id);
let Some(project) = project::Entity::find_by_id(project_id)
.one(&*tx)
.await? else { continue };
let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
continue;
};

let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
@@ -240,4 +240,58 @@ impl Database {
result.push('%');
result
}

#[cfg(debug_assertions)]
pub async fn create_user_flag(&self, flag: &str) -> Result<FlagId> {
self.transaction(|tx| async move {
let flag = feature_flag::Entity::insert(feature_flag::ActiveModel {
flag: ActiveValue::set(flag.to_string()),
..Default::default()
})
.exec(&*tx)
.await?
.last_insert_id;

Ok(flag)
})
.await
}

#[cfg(debug_assertions)]
pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> {
self.transaction(|tx| async move {
user_feature::Entity::insert(user_feature::ActiveModel {
user_id: ActiveValue::set(user),
feature_id: ActiveValue::set(flag),
})
.exec(&*tx)
.await?;

Ok(())
})
.await
}

pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
self.transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryAs {
Flag,
}

let flags = user::Model {
id: user,
..Default::default()
}
.find_linked(user::UserFlags)
.select_only()
.column(feature_flag::Column::Flag)
.into_values::<_, QueryAs>()
.all(&*tx)
.await?;

Ok(flags)
})
.await
}
}
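For orientation, the three helpers above compose as follows; this condensed sketch mirrors the new feature_flag_tests.rs later in this commit (an existing db: &Arc<Database> and a valid user: UserId are assumed, and the first two calls are #[cfg(debug_assertions)]-only, so this pattern belongs in tests or local seeding rather than production paths):

    use std::sync::Arc;

    async fn grant_and_check(db: &Arc<Database>, user: UserId) {
        // Inserts a row into feature_flags ("channels-alpha" is the flag the tests use).
        let flag = db.create_user_flag("channels-alpha").await.unwrap();
        // Inserts a row into user_features linking the user to the flag.
        db.add_user_flag(user, flag).await.unwrap();
        // Reads back through the user::UserFlags linked relation.
        let flags = db.get_user_flags(user).await.unwrap();
        assert_eq!(flags, &["channels-alpha"]);
    }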
@@ -7,6 +7,7 @@ pub mod channel_buffer_collaborator;
pub mod channel_member;
pub mod channel_path;
pub mod contact;
pub mod feature_flag;
pub mod follower;
pub mod language_server;
pub mod project;
@@ -16,6 +17,7 @@ pub mod room_participant;
pub mod server;
pub mod signup;
pub mod user;
pub mod user_feature;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;
40 crates/collab/src/db/tables/feature_flag.rs (new file)

@@ -0,0 +1,40 @@
use sea_orm::entity::prelude::*;

use crate::db::FlagId;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "feature_flags")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: FlagId,
pub flag: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::user_feature::Entity")]
UserFeature,
}

impl Related<super::user_feature::Entity> for Entity {
fn to() -> RelationDef {
Relation::UserFeature.def()
}
}

impl ActiveModelBehavior for ActiveModel {}

pub struct FlaggedUsers;

impl Linked for FlaggedUsers {
type FromEntity = Entity;

type ToEntity = super::user::Entity;

fn link(&self) -> Vec<RelationDef> {
vec![
super::user_feature::Relation::Flag.def().rev(),
super::user_feature::Relation::User.def(),
]
}
}
@@ -28,6 +28,8 @@ pub enum Relation {
HostedProjects,
#[sea_orm(has_many = "super::channel_member::Entity")]
ChannelMemberships,
#[sea_orm(has_many = "super::user_feature::Entity")]
UserFeatures,
}

impl Related<super::access_token::Entity> for Entity {
@@ -54,4 +56,25 @@ impl Related<super::channel_member::Entity> for Entity {
}
}

impl Related<super::user_feature::Entity> for Entity {
fn to() -> RelationDef {
Relation::UserFeatures.def()
}
}

impl ActiveModelBehavior for ActiveModel {}

pub struct UserFlags;

impl Linked for UserFlags {
type FromEntity = Entity;

type ToEntity = super::feature_flag::Entity;

fn link(&self) -> Vec<RelationDef> {
vec![
super::user_feature::Relation::User.def().rev(),
super::user_feature::Relation::Flag.def(),
]
}
}
42 crates/collab/src/db/tables/user_feature.rs (new file)

@@ -0,0 +1,42 @@
use sea_orm::entity::prelude::*;

use crate::db::{FlagId, UserId};

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "user_features")]
pub struct Model {
#[sea_orm(primary_key)]
pub user_id: UserId,
#[sea_orm(primary_key)]
pub feature_id: FlagId,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::feature_flag::Entity",
from = "Column::FeatureId",
to = "super::feature_flag::Column::Id"
)]
Flag,
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::UserId",
to = "super::user::Column::Id"
)]
User,
}

impl Related<super::feature_flag::Entity> for Entity {
fn to() -> RelationDef {
Relation::Flag.def()
}
}

impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::User.def()
}
}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,5 +1,6 @@
mod buffer_tests;
mod db_tests;
mod feature_flag_tests;

use super::*;
use gpui::executor::Background;
60 crates/collab/src/db/tests/feature_flag_tests.rs (new file)

@@ -0,0 +1,60 @@
use crate::{
db::{Database, NewUserParams},
test_both_dbs,
};
use std::sync::Arc;

test_both_dbs!(
test_get_user_flags,
test_get_user_flags_postgres,
test_get_user_flags_sqlite
);

async fn test_get_user_flags(db: &Arc<Database>) {
let user_1 = db
.create_user(
&format!("user1@example.com"),
false,
NewUserParams {
github_login: format!("user1"),
github_user_id: 1,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;

let user_2 = db
.create_user(
&format!("user2@example.com"),
false,
NewUserParams {
github_login: format!("user2"),
github_user_id: 2,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;

const CHANNELS_ALPHA: &'static str = "channels-alpha";
const NEW_SEARCH: &'static str = "new-search";

let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap();
let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap();

db.add_user_flag(user_1, channels_flag).await.unwrap();
db.add_user_flag(user_1, search_flag).await.unwrap();

db.add_user_flag(user_2, channels_flag).await.unwrap();

let mut user_1_flags = db.get_user_flags(user_1).await.unwrap();
user_1_flags.sort();
assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]);

let mut user_2_flags = db.get_user_flags(user_2).await.unwrap();
user_2_flags.sort();
assert_eq!(user_2_flags, &[CHANNELS_ALPHA]);
}
@@ -2609,20 +2609,19 @@ async fn get_private_user_info(
response: Response<proto::GetPrivateUserInfo>,
session: Session,
) -> Result<()> {
let metrics_id = session
.db()
.await
.get_user_metrics_id(session.user_id)
.await?;
let user = session
.db()
.await
let db = session.db().await;

let metrics_id = db.get_user_metrics_id(session.user_id).await?;
let user = db
.get_user_by_id(session.user_id)
.await?
.ok_or_else(|| anyhow!("user not found"))?;
let flags = db.get_user_flags(session.user_id).await?;

response.send(proto::GetPrivateUserInfoResponse {
metrics_id,
staff: user.admin,
flags,
})?;
Ok(())
}
@@ -4,7 +4,7 @@ use crate::{
};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::HashSet;
use collections::{HashMap, HashSet};
use editor::{
test::editor_test_context::EditorTestContext, ConfirmCodeAction, ConfirmCompletion,
ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToggleCodeActions, Undo,
@@ -4821,15 +4821,16 @@ async fn test_project_search(
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Perform a search as the guest.
let results = project_b
.update(cx_b, |project, cx| {
project.search(
SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
cx,
)
})
.await
.unwrap();
let mut results = HashMap::default();
let mut search_rx = project_b.update(cx_b, |project, cx| {
project.search(
SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
cx,
)
});
while let Some((buffer, ranges)) = search_rx.next().await {
results.entry(buffer).or_insert(ranges);
}

let mut ranges_by_path = results
.into_iter()
@@ -6,7 +6,7 @@ use crate::{
use anyhow::{anyhow, Result};
use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
use collections::BTreeMap;
use collections::{BTreeMap, HashMap};
use editor::Bias;
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt as _;
@@ -121,7 +121,9 @@ async fn test_random_collaboration(
let mut operation_channels = Vec::new();

loop {
let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else { break };
let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
break;
};
applied.store(true, SeqCst);
let did_apply = apply_server_operation(
deterministic.clone(),
@@ -224,7 +226,9 @@ async fn apply_server_operation(
let client_ix = clients
.iter()
.position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
let Some(client_ix) = client_ix else { return false };
let Some(client_ix) = client_ix else {
return false;
};
let user_connection_ids = server
.connection_pool
.lock()
@@ -718,7 +722,7 @@ async fn apply_client_operation(
if detach { "detaching" } else { "awaiting" }
);

let search = project.update(cx, |project, cx| {
let mut search = project.update(cx, |project, cx| {
project.search(
SearchQuery::text(query, false, false, Vec::new(), Vec::new()),
cx,
@@ -726,15 +730,13 @@ async fn apply_client_operation(
});
drop(project);
let search = cx.background().spawn(async move {
search
.await
.map_err(|err| anyhow!("search request failed: {:?}", err))
let mut results = HashMap::default();
while let Some((buffer, ranges)) = search.next().await {
results.entry(buffer).or_insert(ranges);
}
results
});
if detach {
cx.update(|cx| search.detach_and_log_err(cx));
} else {
search.await?;
}
search.await;
}

ClientOperation::WriteFsEntry {
@@ -1591,10 +1593,11 @@ impl TestPlan {
81.. => match self.rng.gen_range(0..100_u32) {
// Add a worktree to a local project
0..=50 => {
let Some(project) = client
.local_projects()
.choose(&mut self.rng)
.cloned() else { continue };
let Some(project) =
client.local_projects().choose(&mut self.rng).cloned()
else {
continue;
};
let project_root_name = root_name_for_project(&project, cx);
let mut paths = client.fs().paths(false);
paths.remove(0);
@@ -1611,7 +1614,9 @@ impl TestPlan {

// Add an entry to a worktree
_ => {
let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
let Some(project) = choose_random_project(client, &mut self.rng) else {
continue;
};
let project_root_name = root_name_for_project(&project, cx);
let is_local = project.read_with(cx, |project, _| project.is_local());
let worktree = project.read_with(cx, |project, cx| {
@@ -1645,7 +1650,9 @@ impl TestPlan {

// Query and mutate buffers
60..=90 => {
let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
let Some(project) = choose_random_project(client, &mut self.rng) else {
continue;
};
let project_root_name = root_name_for_project(&project, cx);
let is_local = project.read_with(cx, |project, _| project.is_local());

@@ -1656,7 +1663,10 @@ impl TestPlan {
.buffers_for_project(&project)
.iter()
.choose(&mut self.rng)
.cloned() else { continue };
.cloned()
else {
continue;
};

let full_path = buffer
.read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
@@ -2026,7 +2036,10 @@ async fn simulate_client(
client.app_state.languages.add(Arc::new(language));

while let Some(batch_id) = operation_rx.next().await {
let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx) else { break };
let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx)
else {
break;
};
applied.store(true, SeqCst);
match apply_client_operation(&client, operation, &mut cx).await {
Ok(()) => {}
@@ -40,7 +40,7 @@ picker = { path = "../picker" }
project = { path = "../project" }
recent_projects = {path = "../recent_projects"}
settings = { path = "../settings" }
staff_mode = {path = "../staff_mode"}
feature_flags = {path = "../feature_flags"}
theme = { path = "../theme" }
theme_selector = { path = "../theme_selector" }
vcs_menu = { path = "../vcs_menu" }
@@ -272,8 +272,12 @@ impl FollowableItem for ChannelView {
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
) -> Option<gpui::Task<anyhow::Result<ViewHandle<Self>>>> {
let Some(proto::view::Variant::ChannelView(_)) = state else { return None };
let Some(proto::view::Variant::ChannelView(state)) = state.take() else { unreachable!() };
let Some(proto::view::Variant::ChannelView(_)) = state else {
return None;
};
let Some(proto::view::Variant::ChannelView(state)) = state.take() else {
unreachable!()
};

let open = ChannelView::open(state.channel_id, pane, workspace, cx);
@@ -9,6 +9,8 @@ use client::{proto::PeerId, Client, Contact, User, UserStore};
use context_menu::{ContextMenu, ContextMenuItem};
use db::kvp::KEY_VALUE_STORE;
use editor::{Cancel, Editor};

use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
use futures::StreamExt;
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
@@ -33,7 +35,6 @@ use panel_settings::{CollaborationPanelDockPosition, CollaborationPanelSettings}
use project::{Fs, Project};
use serde_derive::{Deserialize, Serialize};
use settings::SettingsStore;
use staff_mode::StaffMode;
use std::{borrow::Cow, mem, sync::Arc};
use theme::{components::ComponentExt, IconButton};
use util::{iife, ResultExt, TryFutureExt};
@@ -182,9 +183,9 @@ pub struct CollabPanel {
}

#[derive(Serialize, Deserialize)]
struct SerializedChannelsPanel {
struct SerializedCollabPanel {
width: Option<f32>,
collapsed_channels: Vec<ChannelId>,
collapsed_channels: Option<Vec<ChannelId>>,
}

#[derive(Debug)]
@@ -472,9 +473,10 @@ impl CollabPanel {
}));
this.subscriptions
.push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx)));
this.subscriptions.push(
cx.observe_global::<StaffMode, _>(move |this, cx| this.update_entries(true, cx)),
);
this.subscriptions
.push(cx.observe_flag::<ChannelsAlpha, _>(move |_, this, cx| {
this.update_entries(true, cx)
}));
this.subscriptions.push(cx.subscribe(
&this.channel_store,
|this, _channel_store, e, cx| match e {
@@ -510,7 +512,7 @@ impl CollabPanel {
.log_err()
.flatten()
{
Some(serde_json::from_str::<SerializedChannelsPanel>(&panel)?)
Some(serde_json::from_str::<SerializedCollabPanel>(&panel)?)
} else {
None
};
@@ -520,7 +522,9 @@ impl CollabPanel {
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
panel.width = serialized_panel.width;
panel.collapsed_channels = serialized_panel.collapsed_channels;
panel.collapsed_channels = serialized_panel
.collapsed_channels
.unwrap_or_else(|| Vec::new());
cx.notify();
});
}
@@ -537,9 +541,9 @@ impl CollabPanel {
KEY_VALUE_STORE
.write_kvp(
COLLABORATION_PANEL_KEY.into(),
serde_json::to_string(&SerializedChannelsPanel {
serde_json::to_string(&SerializedCollabPanel {
width,
collapsed_channels,
collapsed_channels: Some(collapsed_channels),
})?,
)
.await?;
@@ -672,7 +676,8 @@ impl CollabPanel {
}

let mut request_entries = Vec::new();
if self.include_channels_section(cx) {

if cx.has_flag::<ChannelsAlpha>() {
self.entries.push(ListEntry::Header(Section::Channels, 0));

if channel_store.channel_count() > 0 || self.channel_editing_state.is_some() {
@@ -1909,14 +1914,6 @@ impl CollabPanel {
.into_any()
}

fn include_channels_section(&self, cx: &AppContext) -> bool {
if cx.has_global::<StaffMode>() {
cx.global::<StaffMode>().0
} else {
false
}
}

fn deploy_channel_context_menu(
&mut self,
position: Option<Vector2F>,
@@ -152,12 +152,9 @@ impl View for ChannelModal {
let theme = &theme::current(cx).collab_panel.tabbed_modal;

let mode = self.picker.read(cx).delegate().mode;
let Some(channel) = self
.channel_store
.read(cx)
.channel_for_id(self.channel_id) else {
return Empty::new().into_any()
};
let Some(channel) = self.channel_store.read(cx).channel_for_id(self.channel_id) else {
return Empty::new().into_any();
};

enum InviteMembers {}
enum ManageMembers {}
@@ -980,7 +980,7 @@ mod tests {
deterministic.forbid_parking();
let (copilot, mut lsp) = Copilot::fake(cx);

let buffer_1 = cx.add_model(|cx| Buffer::new(0, "Hello", cx));
let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello"));
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
assert_eq!(
@@ -996,7 +996,7 @@ mod tests {
}
);

let buffer_2 = cx.add_model(|cx| Buffer::new(0, "Goodbye", cx));
let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye"));
let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
assert_eq!(
@@ -4,7 +4,10 @@ mod inlay_map;
mod tab_map;
mod wrap_map;

use crate::{Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use crate::{
link_go_to_definition::{DocumentRange, InlayRange},
Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
};
pub use block_map::{BlockMap, BlockPoint};
use collections::{HashMap, HashSet};
use fold_map::FoldMap;
@@ -27,7 +30,7 @@ pub use block_map::{
BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock, TransformBlock,
};

pub use self::inlay_map::Inlay;
pub use self::inlay_map::{Inlay, InlayOffset, InlayPoint};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FoldStatus {
@@ -39,7 +42,7 @@ pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
}

type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<DocumentRange>)>>;

pub struct DisplayMap {
buffer: ModelHandle<MultiBuffer>,
@@ -211,11 +214,28 @@ impl DisplayMap {
ranges: Vec<Range<Anchor>>,
style: HighlightStyle,
) {
self.text_highlights
.insert(Some(type_id), Arc::new((style, ranges)));
self.text_highlights.insert(
Some(type_id),
Arc::new((style, ranges.into_iter().map(DocumentRange::Text).collect())),
);
}

pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> {
pub fn highlight_inlays(
&mut self,
type_id: TypeId,
ranges: Vec<InlayRange>,
style: HighlightStyle,
) {
self.text_highlights.insert(
Some(type_id),
Arc::new((
style,
ranges.into_iter().map(DocumentRange::Inlay).collect(),
)),
);
}

pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[DocumentRange])> {
let highlights = self.text_highlights.get(&Some(type_id))?;
Some((highlights.0, &highlights.1))
}
@@ -223,7 +243,7 @@ impl DisplayMap {
pub fn clear_text_highlights(
&mut self,
type_id: TypeId,
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
self.text_highlights.remove(&Some(type_id))
}
@@ -387,12 +407,35 @@ impl DisplaySnapshot {
}

fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
self.inlay_snapshot
.to_buffer_point(self.display_point_to_inlay_point(point, bias))
}

pub fn display_point_to_inlay_offset(&self, point: DisplayPoint, bias: Bias) -> InlayOffset {
self.inlay_snapshot
.to_offset(self.display_point_to_inlay_point(point, bias))
}

pub fn anchor_to_inlay_offset(&self, anchor: Anchor) -> InlayOffset {
self.inlay_snapshot
.to_inlay_offset(anchor.to_offset(&self.buffer_snapshot))
}

pub fn inlay_offset_to_display_point(&self, offset: InlayOffset, bias: Bias) -> DisplayPoint {
let inlay_point = self.inlay_snapshot.to_point(offset);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}

fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot);
self.inlay_snapshot.to_buffer_point(inlay_point)
fold_point.to_inlay_point(&self.fold_snapshot)
}

pub fn max_point(&self) -> DisplayPoint {
@@ -428,15 +471,15 @@ impl DisplaySnapshot {
&self,
display_rows: Range<u32>,
language_aware: bool,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> DisplayChunks<'_> {
self.block_snapshot.chunks(
display_rows,
language_aware,
Some(&self.text_highlights),
hint_highlights,
suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
)
}

@@ -757,7 +800,7 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))]
pub fn highlight_ranges<Tag: ?Sized + 'static>(
&self,
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
let type_id = TypeId::of::<Tag>();
self.text_highlights.get(&Some(type_id)).cloned()
}
@@ -1319,7 +1362,8 @@ pub mod tests {

cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));

let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));

@@ -1408,7 +1452,8 @@ pub mod tests {

cx.update(|cx| init_test(cx, |_| {}));

let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));

@@ -1480,7 +1525,8 @@ pub mod tests {

let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);

let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;

let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -589,8 +589,8 @@ impl BlockSnapshot {
rows: Range<u32>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
@@ -623,8 +623,8 @@ impl BlockSnapshot {
input_start..input_end,
language_aware,
text_highlights,
hint_highlights,
suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
),
input_chunk: Default::default(),
transforms: cursor,
@@ -652,8 +652,8 @@ impl FoldSnapshot {
range: Range<FoldOffset>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>();

@@ -675,8 +675,8 @@ impl FoldSnapshot {
inlay_start..inlay_end,
language_aware,
text_highlights,
hint_highlights,
suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
),
inlay_chunk: None,
inlay_offset: inlay_start,
@@ -1,4 +1,5 @@
use crate::{
link_go_to_definition::DocumentRange,
multi_buffer::{MultiBufferChunks, MultiBufferRows},
Anchor, InlayId, MultiBufferSnapshot, ToOffset,
};
@@ -183,7 +184,7 @@ pub struct InlayBufferRows<'a> {
max_buffer_row: u32,
}

#[derive(Copy, Clone, Eq, PartialEq)]
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct HighlightEndpoint {
offset: InlayOffset,
is_start: bool,
@@ -210,6 +211,7 @@ pub struct InlayChunks<'a> {
buffer_chunks: MultiBufferChunks<'a>,
buffer_chunk: Option<Chunk<'a>>,
inlay_chunks: Option<text::Chunks<'a>>,
inlay_chunk: Option<&'a str>,
output_offset: InlayOffset,
max_output_offset: InlayOffset,
hint_highlight_style: Option<HighlightStyle>,
@@ -297,13 +299,31 @@ impl<'a> Iterator for InlayChunks<'a> {
- self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0)
});
let inlay_chunk = self
.inlay_chunk
.get_or_insert_with(|| inlay_chunks.next().unwrap());
let (chunk, remainder) = inlay_chunk.split_at(
inlay_chunk
.len()
.min(next_highlight_endpoint.0 - self.output_offset.0),
);
*inlay_chunk = remainder;
if inlay_chunk.is_empty() {
self.inlay_chunk = None;
}

let chunk = inlay_chunks.next().unwrap();
self.output_offset.0 += chunk.len();
let highlight_style = match inlay.id {
let mut highlight_style = match inlay.id {
InlayId::Suggestion(_) => self.suggestion_highlight_style,
InlayId::Hint(_) => self.hint_highlight_style,
};
if !self.active_highlights.is_empty() {
for active_highlight in self.active_highlights.values() {
highlight_style
.get_or_insert(Default::default())
.highlight(*active_highlight);
}
}
Chunk {
text: chunk,
highlight_style,
@@ -973,8 +993,8 @@ impl InlaySnapshot {
range: Range<InlayOffset>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>();
cursor.seek(&range.start, Bias::Right, &());
@@ -983,52 +1003,56 @@ impl InlaySnapshot {
if let Some(text_highlights) = text_highlights {
if !text_highlights.is_empty() {
while cursor.start().0 < range.end {
if true {
let transform_start = self.buffer.anchor_after(
self.to_buffer_offset(cmp::max(range.start, cursor.start().0)),
);
let transform_start = self.buffer.anchor_after(
self.to_buffer_offset(cmp::max(range.start, cursor.start().0)),
);
let transform_start =
self.to_inlay_offset(transform_start.to_offset(&self.buffer));

let transform_end = {
let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0);
self.buffer.anchor_before(self.to_buffer_offset(cmp::min(
cursor.end(&()).0,
cursor.start().0 + overshoot,
)))
};
let transform_end = {
let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0);
self.buffer.anchor_before(self.to_buffer_offset(cmp::min(
cursor.end(&()).0,
cursor.start().0 + overshoot,
)))
};
let transform_end = self.to_inlay_offset(transform_end.to_offset(&self.buffer));

for (tag, highlights) in text_highlights.iter() {
let style = highlights.0;
let ranges = &highlights.1;
for (tag, text_highlights) in text_highlights.iter() {
let style = text_highlights.0;
let ranges = &text_highlights.1;

let start_ix = match ranges.binary_search_by(|probe| {
let cmp = probe.end.cmp(&transform_start, &self.buffer);
if cmp.is_gt() {
cmp::Ordering::Greater
} else {
cmp::Ordering::Less
}
}) {
Ok(i) | Err(i) => i,
};
for range in &ranges[start_ix..] {
if range.start.cmp(&transform_end, &self.buffer).is_ge() {
break;
}

highlight_endpoints.push(HighlightEndpoint {
offset: self
.to_inlay_offset(range.start.to_offset(&self.buffer)),
is_start: true,
tag: *tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
offset: self.to_inlay_offset(range.end.to_offset(&self.buffer)),
is_start: false,
tag: *tag,
style,
});
let start_ix = match ranges.binary_search_by(|probe| {
let cmp = self
.document_to_inlay_range(probe)
.end
.cmp(&transform_start);
if cmp.is_gt() {
cmp::Ordering::Greater
} else {
cmp::Ordering::Less
}
}) {
Ok(i) | Err(i) => i,
};
for range in &ranges[start_ix..] {
let range = self.document_to_inlay_range(range);
if range.start.cmp(&transform_end).is_ge() {
break;
}

highlight_endpoints.push(HighlightEndpoint {
offset: range.start,
is_start: true,
tag: *tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
offset: range.end,
is_start: false,
tag: *tag,
style,
});
}
}
@@ -1046,17 +1070,30 @@ impl InlaySnapshot {
transforms: cursor,
buffer_chunks,
inlay_chunks: None,
inlay_chunk: None,
buffer_chunk: None,
output_offset: range.start,
max_output_offset: range.end,
hint_highlight_style: hint_highlights,
suggestion_highlight_style: suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
highlight_endpoints: highlight_endpoints.into_iter().peekable(),
active_highlights: Default::default(),
snapshot: self,
}
}

fn document_to_inlay_range(&self, range: &DocumentRange) -> Range<InlayOffset> {
match range {
DocumentRange::Text(text_range) => {
self.to_inlay_offset(text_range.start.to_offset(&self.buffer))
..self.to_inlay_offset(text_range.end.to_offset(&self.buffer))
}
DocumentRange::Inlay(inlay_range) => {
inlay_range.highlight_start..inlay_range.highlight_end
}
}
}

#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(Default::default()..self.len(), false, None, None, None)
@@ -1107,13 +1144,12 @@ fn push_isomorphic(sum_tree: &mut SumTree<Transform>, summary: TextSummary) {
#[cfg(test)]
mod tests {
use super::*;
use crate::{InlayId, MultiBuffer};
use crate::{link_go_to_definition::InlayRange, InlayId, MultiBuffer};
use gpui::AppContext;
use project::{InlayHint, InlayHintLabel};
use project::{InlayHint, InlayHintLabel, ResolveState};
use rand::prelude::*;
use settings::SettingsStore;
use std::{cmp::Reverse, env, sync::Arc};
use sum_tree::TreeMap;
use text::Patch;
use util::post_inc;

@@ -1125,12 +1161,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1145,12 +1181,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1165,12 +1201,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1185,12 +1221,12 @@ mod tests {
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
resolve_state: ResolveState::Resolved,
},
)
.text
@@ -1542,26 +1578,6 @@ mod tests {
let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut next_inlay_id = 0;
log::info!("buffer text: {:?}", buffer_snapshot.text());

let mut highlights = TreeMap::default();
let highlight_count = rng.gen_range(0_usize..10);
let mut highlight_ranges = (0..highlight_count)
.map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
.collect::<Vec<_>>();
highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
log::info!("highlighting ranges {:?}", highlight_ranges);
let highlight_ranges = highlight_ranges
.into_iter()
.map(|range| {
buffer_snapshot.anchor_before(range.start)..buffer_snapshot.anchor_after(range.end)
})
.collect::<Vec<_>>();

highlights.insert(
Some(TypeId::of::<()>()),
Arc::new((HighlightStyle::default(), highlight_ranges)),
);

let (mut inlay_map, mut inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
for _ in 0..operations {
let mut inlay_edits = Patch::default();
@@ -1624,6 +1640,38 @@ mod tests {
);
}

let mut highlights = TextHighlights::default();
let highlight_count = rng.gen_range(0_usize..10);
let mut highlight_ranges = (0..highlight_count)
.map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
.collect::<Vec<_>>();
highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
log::info!("highlighting ranges {:?}", highlight_ranges);
let highlight_ranges = if rng.gen_bool(0.5) {
highlight_ranges
.into_iter()
.map(|range| InlayRange {
inlay_position: buffer_snapshot.anchor_before(range.start),
highlight_start: inlay_snapshot.to_inlay_offset(range.start),
highlight_end: inlay_snapshot.to_inlay_offset(range.end),
})
.map(DocumentRange::Inlay)
.collect::<Vec<_>>()
} else {
highlight_ranges
.into_iter()
.map(|range| {
buffer_snapshot.anchor_before(range.start)
..buffer_snapshot.anchor_after(range.end)
})
.map(DocumentRange::Text)
.collect::<Vec<_>>()
};
highlights.insert(
Some(TypeId::of::<()>()),
Arc::new((HighlightStyle::default(), highlight_ranges)),
);

for _ in 0..5 {
let mut end = rng.gen_range(0..=inlay_snapshot.len().0);
end = expected_text.clip_offset(end, Bias::Right);
@@ -224,8 +224,8 @@ impl TabSnapshot {
range: Range<TabPoint>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left);
@@ -246,8 +246,8 @@ impl TabSnapshot {
input_start..input_end,
language_aware,
text_highlights,
hint_highlights,
suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
),
input_column,
column: expanded_char_column,
@@ -576,8 +576,8 @@ impl WrapSnapshot {
rows: Range<u32>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>,
hint_highlight_style: Option<HighlightStyle>,
suggestion_highlight_style: Option<HighlightStyle>,
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
@@ -595,8 +595,8 @@ impl WrapSnapshot {
input_start..input_end,
language_aware,
text_highlights,
hint_highlights,
suggestion_highlights,
hint_highlight_style,
suggestion_highlight_style,
),
input_chunk: Default::default(),
output_position: output_start,
@@ -23,7 +23,7 @@ pub mod test;

use ::git::diff::DiffHunk;
use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Result};
use anyhow::{anyhow, Context, Result};
use blink_manager::BlinkManager;
use client::{ClickhouseEvent, TelemetrySettings};
use clock::{Global, ReplicaId};
@@ -60,21 +60,24 @@ use itertools::Itertools;
pub use language::{char_kind, CharKind};
use language::{
language_settings::{self, all_language_settings, InlayHintSettings},
AutoindentMode, BracketPair, Buffer, CodeAction, CodeLabel, Completion, CursorShape,
Diagnostic, DiagnosticSeverity, File, IndentKind, IndentSize, Language, OffsetRangeExt,
OffsetUtf16, Point, Selection, SelectionGoal, TransactionId,
point_from_lsp, AutoindentMode, BracketPair, Buffer, CodeAction, CodeLabel, Completion,
CursorShape, Diagnostic, DiagnosticSeverity, File, IndentKind, IndentSize, Language,
LanguageServerName, OffsetRangeExt, OffsetUtf16, Point, Selection, SelectionGoal,
TransactionId,
};
use link_go_to_definition::{
hide_link_definition, show_link_definition, LinkDefinitionKind, LinkGoToDefinitionState,
hide_link_definition, show_link_definition, DocumentRange, GoToDefinitionLink, InlayRange,
LinkGoToDefinitionState,
};
use log::error;
use lsp::LanguageServerId;
use multi_buffer::ToOffsetUtf16;
pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset,
ToPoint,
};
use ordered_float::OrderedFloat;
use project::{FormatTrigger, Location, LocationLink, Project, ProjectPath, ProjectTransaction};
use project::{FormatTrigger, Location, Project, ProjectPath, ProjectTransaction};
use rand::{seq::SliceRandom, thread_rng};
use scroll::{
autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
@@ -535,6 +538,8 @@ type CompletionId = usize;
type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;

type BackgroundHighlight = (fn(&Theme) -> Color, Vec<DocumentRange>);

pub struct Editor {
handle: WeakViewHandle<Self>,
buffer: ModelHandle<MultiBuffer>,
@@ -564,8 +569,7 @@ pub struct Editor {
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)]
background_highlights: BTreeMap<TypeId, (fn(&Theme) -> Color, Vec<Range<Anchor>>)>,
background_highlights: BTreeMap<TypeId, BackgroundHighlight>,
nav_history: Option<ItemNavHistory>,
context_menu: Option<ContextMenu>,
mouse_context_menu: ViewHandle<context_menu::ContextMenu>,
@@ -1247,6 +1251,19 @@ enum InlayHintRefreshReason {
NewLinesShown,
BufferEdited(HashSet<Arc<Language>>),
RefreshRequested,
ExcerptsRemoved(Vec<ExcerptId>),
}
impl InlayHintRefreshReason {
fn description(&self) -> &'static str {
match self {
Self::Toggle(_) => "toggle",
Self::SettingsChange(_) => "settings change",
Self::NewLinesShown => "new lines shown",
Self::BufferEdited(_) => "buffer edited",
Self::RefreshRequested => "refresh requested",
Self::ExcerptsRemoved(_) => "excerpts removed",
}
}
}

impl Editor {
@@ -1254,7 +1271,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(EditorMode::SingleLine, buffer, None, field_editor_style, cx)
}
@@ -1263,7 +1280,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(EditorMode::Full, buffer, None, field_editor_style, cx)
}
@@ -1273,7 +1290,7 @@ impl Editor {
field_editor_style: Option<Arc<GetFieldEditorTheme>>,
cx: &mut ViewContext<Self>,
) -> Self {
let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, String::new()));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(
EditorMode::AutoHeight { max_lines },
@@ -2746,6 +2763,7 @@ impl Editor {
return;
}

let reason_description = reason.description();
let (invalidate_cache, required_languages) = match reason {
InlayHintRefreshReason::Toggle(enabled) => {
self.inlay_hint_cache.enabled = enabled;
@@ -2782,6 +2800,16 @@ impl Editor {
ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None),
}
}
InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => {
if let Some(InlaySplice {
to_remove,
to_insert,
}) = self.inlay_hint_cache.remove_excerpts(excerpts_removed)
{
self.splice_inlay_hints(to_remove, to_insert, cx);
}
return;
}
InlayHintRefreshReason::NewLinesShown => (InvalidationStrategy::None, None),
InlayHintRefreshReason::BufferEdited(buffer_languages) => {
(InvalidationStrategy::BufferEdited, Some(buffer_languages))
@@ -2795,6 +2823,7 @@ impl Editor {
to_remove,
to_insert,
}) = self.inlay_hint_cache.spawn_hint_refresh(
reason_description,
self.excerpt_visible_offsets(required_languages.as_ref(), cx),
invalidate_cache,
cx,
@@ -4890,7 +4919,6 @@ impl Editor {
if let Some(clipboard_selection) = clipboard_selections.get(ix) {
let end_offset = start_offset + clipboard_selection.len;
to_insert = &clipboard_text[start_offset..end_offset];
dbg!(start_offset, end_offset, &clipboard_text, &to_insert);
entire_line = clipboard_selection.is_entire_line;
start_offset = end_offset + 1;
original_indent_column =
@ -6252,7 +6280,9 @@ impl Editor {
|
|||
) {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.move_offsets_with(|snapshot, selection| {
|
||||
let Some(enclosing_bracket_ranges) = snapshot.enclosing_bracket_ranges(selection.start..selection.end) else {
|
||||
let Some(enclosing_bracket_ranges) =
|
||||
snapshot.enclosing_bracket_ranges(selection.start..selection.end)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
|
@ -6264,7 +6294,8 @@ impl Editor {
|
|||
let close = close.to_inclusive();
|
||||
let length = close.end() - open.start;
|
||||
let inside = selection.start >= open.end && selection.end <= *close.start();
|
||||
let in_bracket_range = open.to_inclusive().contains(&selection.head()) || close.contains(&selection.head());
|
||||
let in_bracket_range = open.to_inclusive().contains(&selection.head())
|
||||
|| close.contains(&selection.head());
|
||||
|
||||
// If best is next to a bracket and current isn't, skip
|
||||
if !in_bracket_range && best_in_bracket_range {
|
||||
|
@ -6279,19 +6310,21 @@ impl Editor {
|
|||
best_length = length;
|
||||
best_inside = inside;
|
||||
best_in_bracket_range = in_bracket_range;
|
||||
best_destination = Some(if close.contains(&selection.start) && close.contains(&selection.end) {
|
||||
if inside {
|
||||
open.end
|
||||
best_destination = Some(
|
||||
if close.contains(&selection.start) && close.contains(&selection.end) {
|
||||
if inside {
|
||||
open.end
|
||||
} else {
|
||||
open.start
|
||||
}
|
||||
} else {
|
||||
open.start
|
||||
}
|
||||
} else {
|
||||
if inside {
|
||||
*close.start()
|
||||
} else {
|
||||
*close.end()
|
||||
}
|
||||
});
|
||||
if inside {
|
||||
*close.start()
|
||||
} else {
|
||||
*close.end()
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(destination) = best_destination {
|
||||
|
@ -6535,7 +6568,9 @@ impl Editor {
|
|||
split: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let Some(workspace) = self.workspace(cx) else { return };
|
||||
let Some(workspace) = self.workspace(cx) else {
|
||||
return;
|
||||
};
|
||||
let buffer = self.buffer.read(cx);
|
||||
let head = self.selections.newest::<usize>(cx).head();
|
||||
let (buffer, head) = if let Some(text_anchor) = buffer.text_anchor_for_position(head, cx) {
|
||||
|
@ -6553,7 +6588,14 @@ impl Editor {
|
|||
cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move {
|
||||
let definitions = definitions.await?;
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
editor.navigate_to_definitions(definitions, split, cx);
|
||||
editor.navigate_to_definitions(
|
||||
definitions
|
||||
.into_iter()
|
||||
.map(GoToDefinitionLink::Text)
|
||||
.collect(),
|
||||
split,
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
|
@ -6562,76 +6604,178 @@ impl Editor {
|
|||
|
||||
pub fn navigate_to_definitions(
|
||||
&mut self,
|
||||
mut definitions: Vec<LocationLink>,
|
||||
mut definitions: Vec<GoToDefinitionLink>,
|
||||
split: bool,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) {
|
||||
let Some(workspace) = self.workspace(cx) else { return };
|
||||
let Some(workspace) = self.workspace(cx) else {
|
||||
return;
|
||||
};
|
||||
let pane = workspace.read(cx).active_pane().clone();
|
||||
// If there is one definition, just open it directly
|
||||
if definitions.len() == 1 {
|
||||
let definition = definitions.pop().unwrap();
|
||||
let range = definition
|
||||
.target
|
||||
.range
|
||||
.to_offset(definition.target.buffer.read(cx));
|
||||
|
||||
let range = self.range_for_match(&range);
|
||||
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
} else {
|
||||
cx.window_context().defer(move |cx| {
|
||||
let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| {
|
||||
if split {
|
||||
workspace.split_project_item(definition.target.buffer.clone(), cx)
|
||||
let target_task = match definition {
|
||||
GoToDefinitionLink::Text(link) => Task::Ready(Some(Ok(Some(link.target)))),
|
||||
GoToDefinitionLink::InlayHint(lsp_location, server_id) => {
|
||||
self.compute_target_location(lsp_location, server_id, cx)
|
||||
}
|
||||
};
|
||||
cx.spawn(|editor, mut cx| async move {
|
||||
let target = target_task.await.context("target resolution task")?;
|
||||
if let Some(target) = target {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
let range = target.range.to_offset(target.buffer.read(cx));
|
||||
let range = editor.range_for_match(&range);
|
||||
if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
|
||||
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
} else {
|
||||
workspace.open_project_item(definition.target.buffer.clone(), cx)
|
||||
cx.window_context().defer(move |cx| {
|
||||
let target_editor: ViewHandle<Self> =
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if split {
|
||||
workspace.split_project_item(target.buffer.clone(), cx)
|
||||
} else {
|
||||
workspace.open_project_item(target.buffer.clone(), cx)
|
||||
}
|
||||
});
|
||||
target_editor.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
target_editor.change_selections(
|
||||
Some(Autoscroll::fit()),
|
||||
cx,
|
||||
|s| {
|
||||
s.select_ranges([range]);
|
||||
},
|
||||
);
|
||||
pane.update(cx, |pane, _| pane.enable_history());
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
target_editor.update(cx, |target_editor, cx| {
|
||||
// When selecting a definition in a different buffer, disable the nav history
|
||||
// to avoid creating a history entry at the previous cursor location.
|
||||
pane.update(cx, |pane, _| pane.disable_history());
|
||||
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_ranges([range]);
|
||||
});
|
||||
pane.update(cx, |pane, _| pane.enable_history());
|
||||
});
|
||||
});
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
} else if !definitions.is_empty() {
|
||||
let replica_id = self.replica_id(cx);
|
||||
cx.window_context().defer(move |cx| {
|
||||
let title = definitions
|
||||
.iter()
|
||||
.find(|definition| definition.origin.is_some())
|
||||
.and_then(|definition| {
|
||||
definition.origin.as_ref().map(|origin| {
|
||||
let buffer = origin.buffer.read(cx);
|
||||
format!(
|
||||
"Definitions for {}",
|
||||
buffer
|
||||
.text_for_range(origin.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
cx.spawn(|editor, mut cx| async move {
|
||||
let (title, location_tasks) = editor
|
||||
.update(&mut cx, |editor, cx| {
|
||||
let title = definitions
|
||||
.iter()
|
||||
.find_map(|definition| match definition {
|
||||
GoToDefinitionLink::Text(link) => {
|
||||
link.origin.as_ref().map(|origin| {
|
||||
let buffer = origin.buffer.read(cx);
|
||||
format!(
|
||||
"Definitions for {}",
|
||||
buffer
|
||||
.text_for_range(origin.range.clone())
|
||||
.collect::<String>()
|
||||
)
|
||||
})
|
||||
}
|
||||
GoToDefinitionLink::InlayHint(_, _) => None,
|
||||
})
|
||||
.unwrap_or("Definitions".to_string());
|
||||
let location_tasks = definitions
|
||||
.into_iter()
|
||||
.map(|definition| match definition {
|
||||
GoToDefinitionLink::Text(link) => {
|
||||
Task::Ready(Some(Ok(Some(link.target))))
|
||||
}
|
||||
GoToDefinitionLink::InlayHint(lsp_location, server_id) => {
|
||||
editor.compute_target_location(lsp_location, server_id, cx)
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
(title, location_tasks)
|
||||
})
|
||||
.unwrap_or("Definitions".to_owned());
|
||||
let locations = definitions
|
||||
.context("location tasks preparation")?;
|
||||
|
||||
let locations = futures::future::join_all(location_tasks)
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|definition| definition.target)
|
||||
.collect();
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
.filter_map(|location| location.transpose())
|
||||
.collect::<Result<_>>()
|
||||
.context("location tasks")?;
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
Self::open_locations_in_multibuffer(
|
||||
workspace, locations, replica_id, title, split, cx,
|
||||
)
|
||||
});
|
||||
});
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_target_location(
|
||||
&self,
|
||||
lsp_location: lsp::Location,
|
||||
server_id: LanguageServerId,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> Task<anyhow::Result<Option<Location>>> {
|
||||
let Some(project) = self.project.clone() else {
|
||||
return Task::Ready(Some(Ok(None)));
|
||||
};
|
||||
|
||||
cx.spawn(move |editor, mut cx| async move {
|
||||
let location_task = editor.update(&mut cx, |editor, cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
let language_server_name =
|
||||
editor.buffer.read(cx).as_singleton().and_then(|buffer| {
|
||||
project
|
||||
.language_server_for_buffer(buffer.read(cx), server_id, cx)
|
||||
.map(|(_, lsp_adapter)| {
|
||||
LanguageServerName(Arc::from(lsp_adapter.name()))
|
||||
})
|
||||
});
|
||||
language_server_name.map(|language_server_name| {
|
||||
project.open_local_buffer_via_lsp(
|
||||
lsp_location.uri.clone(),
|
||||
server_id,
|
||||
language_server_name,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
})?;
|
||||
let location = match location_task {
|
||||
Some(task) => Some({
|
||||
let target_buffer_handle = task.await.context("open local buffer")?;
|
||||
let range = {
|
||||
target_buffer_handle.update(&mut cx, |target_buffer, _| {
|
||||
let target_start = target_buffer.clip_point_utf16(
|
||||
point_from_lsp(lsp_location.range.start),
|
||||
Bias::Left,
|
||||
);
|
||||
let target_end = target_buffer.clip_point_utf16(
|
||||
point_from_lsp(lsp_location.range.end),
|
||||
Bias::Left,
|
||||
);
|
||||
target_buffer.anchor_after(target_start)
|
||||
..target_buffer.anchor_before(target_end)
|
||||
})
|
||||
};
|
||||
Location {
|
||||
buffer: target_buffer_handle,
|
||||
range,
|
||||
}
|
||||
}),
|
||||
None => None,
|
||||
};
|
||||
Ok(location)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_all_references(
|
||||
workspace: &mut Workspace,
|
||||
_: &FindAllReferences,
|
||||
|
@ -6767,10 +6911,18 @@ impl Editor {
|
|||
let rename_range = if let Some(range) = prepare_rename.await? {
|
||||
Some(range)
|
||||
} else {
|
||||
this.read_with(&cx, |this, cx| {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let buffer = this.buffer.read(cx).snapshot(cx);
|
||||
let display_snapshot = this
|
||||
.display_map
|
||||
.update(cx, |display_map, cx| display_map.snapshot(cx));
|
||||
let mut buffer_highlights = this
|
||||
.document_highlights_for_position(selection.head(), &buffer)
|
||||
.document_highlights_for_position(
|
||||
selection.head(),
|
||||
&buffer,
|
||||
&display_snapshot,
|
||||
)
|
||||
.filter_map(|highlight| highlight.as_text_range())
|
||||
.filter(|highlight| {
|
||||
highlight.start.excerpt_id() == selection.head().excerpt_id()
|
||||
&& highlight.end.excerpt_id() == selection.head().excerpt_id()
|
||||
|
@ -6825,11 +6977,15 @@ impl Editor {
|
|||
let ranges = this
|
||||
.clear_background_highlights::<DocumentHighlightWrite>(cx)
|
||||
.into_iter()
|
||||
.flat_map(|(_, ranges)| ranges)
|
||||
.flat_map(|(_, ranges)| {
|
||||
ranges.into_iter().filter_map(|range| range.as_text_range())
|
||||
})
|
||||
.chain(
|
||||
this.clear_background_highlights::<DocumentHighlightRead>(cx)
|
||||
.into_iter()
|
||||
.flat_map(|(_, ranges)| ranges),
|
||||
.flat_map(|(_, ranges)| {
|
||||
ranges.into_iter().filter_map(|range| range.as_text_range())
|
||||
}),
|
||||
)
|
||||
.collect();
|
||||
|
||||
|
@ -7497,16 +7653,36 @@ impl Editor {
|
|||
color_fetcher: fn(&Theme) -> Color,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.background_highlights
|
||||
.insert(TypeId::of::<T>(), (color_fetcher, ranges));
|
||||
self.background_highlights.insert(
|
||||
TypeId::of::<T>(),
|
||||
(
|
||||
color_fetcher,
|
||||
ranges.into_iter().map(DocumentRange::Text).collect(),
|
||||
),
|
||||
);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn highlight_inlay_background<T: 'static>(
|
||||
&mut self,
|
||||
ranges: Vec<InlayRange>,
|
||||
color_fetcher: fn(&Theme) -> Color,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.background_highlights.insert(
|
||||
TypeId::of::<T>(),
|
||||
(
|
||||
color_fetcher,
|
||||
ranges.into_iter().map(DocumentRange::Inlay).collect(),
|
||||
),
|
||||
);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn clear_background_highlights<T: 'static>(
|
||||
&mut self,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<(fn(&Theme) -> Color, Vec<Range<Anchor>>)> {
|
||||
) -> Option<BackgroundHighlight> {
|
||||
let highlights = self.background_highlights.remove(&TypeId::of::<T>());
|
||||
if highlights.is_some() {
|
||||
cx.notify();
|
||||
|
@ -7531,7 +7707,8 @@ impl Editor {
|
|||
&'a self,
|
||||
position: Anchor,
|
||||
buffer: &'a MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = &Range<Anchor>> {
|
||||
display_snapshot: &'a DisplaySnapshot,
|
||||
) -> impl 'a + Iterator<Item = &DocumentRange> {
|
||||
let read_highlights = self
|
||||
.background_highlights
|
||||
.get(&TypeId::of::<DocumentHighlightRead>())
|
||||
|
@ -7540,14 +7717,16 @@ impl Editor {
|
|||
.background_highlights
|
||||
.get(&TypeId::of::<DocumentHighlightWrite>())
|
||||
.map(|h| &h.1);
|
||||
let left_position = position.bias_left(buffer);
|
||||
let right_position = position.bias_right(buffer);
|
||||
let left_position = display_snapshot.anchor_to_inlay_offset(position.bias_left(buffer));
|
||||
let right_position = display_snapshot.anchor_to_inlay_offset(position.bias_right(buffer));
|
||||
read_highlights
|
||||
.into_iter()
|
||||
.chain(write_highlights)
|
||||
.flat_map(move |ranges| {
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&left_position, buffer);
|
||||
let cmp = document_to_inlay_range(probe, display_snapshot)
|
||||
.end
|
||||
.cmp(&left_position);
|
||||
if cmp.is_ge() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -7558,9 +7737,12 @@ impl Editor {
|
|||
};
|
||||
|
||||
let right_position = right_position.clone();
|
||||
ranges[start_ix..]
|
||||
.iter()
|
||||
.take_while(move |range| range.start.cmp(&right_position, buffer).is_le())
|
||||
ranges[start_ix..].iter().take_while(move |range| {
|
||||
document_to_inlay_range(range, display_snapshot)
|
||||
.start
|
||||
.cmp(&right_position)
|
||||
.is_le()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -7570,12 +7752,15 @@ impl Editor {
|
|||
display_snapshot: &DisplaySnapshot,
|
||||
theme: &Theme,
|
||||
) -> Vec<(Range<DisplayPoint>, Color)> {
|
||||
let search_range = display_snapshot.anchor_to_inlay_offset(search_range.start)
|
||||
..display_snapshot.anchor_to_inlay_offset(search_range.end);
|
||||
let mut results = Vec::new();
|
||||
let buffer = &display_snapshot.buffer_snapshot;
|
||||
for (color_fetcher, ranges) in self.background_highlights.values() {
|
||||
let color = color_fetcher(theme);
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&search_range.start, buffer);
|
||||
let cmp = document_to_inlay_range(probe, display_snapshot)
|
||||
.end
|
||||
.cmp(&search_range.start);
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -7585,80 +7770,36 @@ impl Editor {
|
|||
Ok(i) | Err(i) => i,
|
||||
};
|
||||
for range in &ranges[start_ix..] {
|
||||
if range.start.cmp(&search_range.end, buffer).is_ge() {
|
||||
let range = document_to_inlay_range(range, display_snapshot);
|
||||
if range.start.cmp(&search_range.end).is_ge() {
|
||||
break;
|
||||
}
|
||||
let start = range
|
||||
.start
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
let end = range
|
||||
.end
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
|
||||
let start = display_snapshot.inlay_offset_to_display_point(range.start, Bias::Left);
|
||||
let end = display_snapshot.inlay_offset_to_display_point(range.end, Bias::Right);
|
||||
results.push((start..end, color))
|
||||
}
|
||||
}
|
||||
results
|
||||
}
|
||||
|
||||
pub fn background_highlights_in_range_for<T: 'static>(
|
||||
&self,
|
||||
search_range: Range<Anchor>,
|
||||
display_snapshot: &DisplaySnapshot,
|
||||
theme: &Theme,
|
||||
) -> Vec<(Range<DisplayPoint>, Color)> {
|
||||
let mut results = Vec::new();
|
||||
let buffer = &display_snapshot.buffer_snapshot;
|
||||
let Some((color_fetcher, ranges)) = self.background_highlights
|
||||
.get(&TypeId::of::<T>()) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
let color = color_fetcher(theme);
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&search_range.start, buffer);
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Less
|
||||
}
|
||||
}) {
|
||||
Ok(i) | Err(i) => i,
|
||||
};
|
||||
for range in &ranges[start_ix..] {
|
||||
if range.start.cmp(&search_range.end, buffer).is_ge() {
|
||||
break;
|
||||
}
|
||||
let start = range
|
||||
.start
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
let end = range
|
||||
.end
|
||||
.to_point(buffer)
|
||||
.to_display_point(display_snapshot);
|
||||
results.push((start..end, color))
|
||||
}
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
pub fn background_highlight_row_ranges<T: 'static>(
|
||||
&self,
|
||||
search_range: Range<Anchor>,
|
||||
display_snapshot: &DisplaySnapshot,
|
||||
count: usize,
|
||||
) -> Vec<RangeInclusive<DisplayPoint>> {
|
||||
let search_range = display_snapshot.anchor_to_inlay_offset(search_range.start)
|
||||
..display_snapshot.anchor_to_inlay_offset(search_range.end);
|
||||
let mut results = Vec::new();
|
||||
let buffer = &display_snapshot.buffer_snapshot;
|
||||
let Some((_, ranges)) = self.background_highlights
|
||||
.get(&TypeId::of::<T>()) else {
|
||||
return vec![];
|
||||
};
|
||||
let Some((_, ranges)) = self.background_highlights.get(&TypeId::of::<T>()) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&search_range.start, buffer);
|
||||
let cmp = document_to_inlay_range(probe, display_snapshot)
|
||||
.end
|
||||
.cmp(&search_range.start);
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -7678,19 +7819,24 @@ impl Editor {
|
|||
let mut start_row: Option<Point> = None;
|
||||
let mut end_row: Option<Point> = None;
|
||||
if ranges.len() > count {
|
||||
return vec![];
|
||||
return Vec::new();
|
||||
}
|
||||
for range in &ranges[start_ix..] {
|
||||
if range.start.cmp(&search_range.end, buffer).is_ge() {
|
||||
let range = document_to_inlay_range(range, display_snapshot);
|
||||
if range.start.cmp(&search_range.end).is_ge() {
|
||||
break;
|
||||
}
|
||||
let end = range.end.to_point(buffer);
|
||||
let end = display_snapshot
|
||||
.inlay_offset_to_display_point(range.end, Bias::Right)
|
||||
.to_point(display_snapshot);
|
||||
if let Some(current_row) = &end_row {
|
||||
if end.row == current_row.row {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let start = range.start.to_point(buffer);
|
||||
let start = display_snapshot
|
||||
.inlay_offset_to_display_point(range.start, Bias::Left)
|
||||
.to_point(display_snapshot);
|
||||
|
||||
if start_row.is_none() {
|
||||
assert_eq!(end_row, None);
|
||||
|
@ -7728,24 +7874,32 @@ impl Editor {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn highlight_inlays<T: 'static>(
|
||||
&mut self,
|
||||
ranges: Vec<InlayRange>,
|
||||
style: HighlightStyle,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.display_map.update(cx, |map, _| {
|
||||
map.highlight_inlays(TypeId::of::<T>(), ranges, style)
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn text_highlights<'a, T: 'static>(
|
||||
&'a self,
|
||||
cx: &'a AppContext,
|
||||
) -> Option<(HighlightStyle, &'a [Range<Anchor>])> {
|
||||
) -> Option<(HighlightStyle, &'a [DocumentRange])> {
|
||||
self.display_map.read(cx).text_highlights(TypeId::of::<T>())
|
||||
}
|
||||
|
||||
pub fn clear_text_highlights<T: 'static>(
|
||||
&mut self,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
|
||||
let highlights = self
|
||||
pub fn clear_text_highlights<T: 'static>(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let text_highlights = self
|
||||
.display_map
|
||||
.update(cx, |map, _| map.clear_text_highlights(TypeId::of::<T>()));
|
||||
if highlights.is_some() {
|
||||
if text_highlights.is_some() {
|
||||
cx.notify();
|
||||
}
|
||||
highlights
|
||||
}
|
||||
|
||||
pub fn show_local_cursors(&self, cx: &AppContext) -> bool {
|
||||
|
@ -7763,7 +7917,9 @@ impl Editor {
|
|||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
multi_buffer::Event::Edited => {
|
||||
multi_buffer::Event::Edited {
|
||||
sigleton_buffer_edited,
|
||||
} => {
|
||||
self.refresh_active_diagnostics(cx);
|
||||
self.refresh_code_actions(cx);
|
||||
if self.has_active_copilot_suggestion(cx) {
|
||||
|
@ -7771,30 +7927,32 @@ impl Editor {
|
|||
}
|
||||
cx.emit(Event::BufferEdited);
|
||||
|
||||
if let Some(project) = &self.project {
|
||||
let project = project.read(cx);
|
||||
let languages_affected = multibuffer
|
||||
.read(cx)
|
||||
.all_buffers()
|
||||
.into_iter()
|
||||
.filter_map(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
let language = buffer.language()?;
|
||||
if project.is_local()
|
||||
&& project.language_servers_for_buffer(buffer, cx).count() == 0
|
||||
{
|
||||
None
|
||||
} else {
|
||||
Some(language)
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
if !languages_affected.is_empty() {
|
||||
self.refresh_inlay_hints(
|
||||
InlayHintRefreshReason::BufferEdited(languages_affected),
|
||||
cx,
|
||||
);
|
||||
if *sigleton_buffer_edited {
|
||||
if let Some(project) = &self.project {
|
||||
let project = project.read(cx);
|
||||
let languages_affected = multibuffer
|
||||
.read(cx)
|
||||
.all_buffers()
|
||||
.into_iter()
|
||||
.filter_map(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
let language = buffer.language()?;
|
||||
if project.is_local()
|
||||
&& project.language_servers_for_buffer(buffer, cx).count() == 0
|
||||
{
|
||||
None
|
||||
} else {
|
||||
Some(language)
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
if !languages_affected.is_empty() {
|
||||
self.refresh_inlay_hints(
|
||||
InlayHintRefreshReason::BufferEdited(languages_affected),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -7802,12 +7960,16 @@ impl Editor {
|
|||
buffer,
|
||||
predecessor,
|
||||
excerpts,
|
||||
} => cx.emit(Event::ExcerptsAdded {
|
||||
buffer: buffer.clone(),
|
||||
predecessor: *predecessor,
|
||||
excerpts: excerpts.clone(),
|
||||
}),
|
||||
} => {
|
||||
cx.emit(Event::ExcerptsAdded {
|
||||
buffer: buffer.clone(),
|
||||
predecessor: *predecessor,
|
||||
excerpts: excerpts.clone(),
|
||||
});
|
||||
self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
|
||||
}
|
||||
multi_buffer::Event::ExcerptsRemoved { ids } => {
|
||||
self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx);
|
||||
cx.emit(Event::ExcerptsRemoved { ids: ids.clone() })
|
||||
}
|
||||
multi_buffer::Event::Reparsed => cx.emit(Event::Reparsed),
|
||||
|
@ -7952,6 +8114,7 @@ impl Editor {
|
|||
Some(
|
||||
ranges
|
||||
.iter()
|
||||
.filter_map(|range| range.as_text_range())
|
||||
.map(move |range| {
|
||||
range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot)
|
||||
})
|
||||
|
@ -7990,9 +8153,7 @@ impl Editor {
|
|||
suggestion_accepted: bool,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let Some(project) = &self.project else {
|
||||
return
|
||||
};
|
||||
let Some(project) = &self.project else { return };
|
||||
|
||||
// If None, we are either getting suggestions in a new, unsaved file, or in a file without an extension
|
||||
let file_extension = self
|
||||
|
@ -8021,9 +8182,7 @@ impl Editor {
|
|||
file_extension: Option<String>,
|
||||
cx: &AppContext,
|
||||
) {
|
||||
let Some(project) = &self.project else {
|
||||
return
|
||||
};
|
||||
let Some(project) = &self.project else { return };
|
||||
|
||||
// If None, we are in a file without an extension
|
||||
let file = self
|
||||
|
@ -8124,7 +8283,9 @@ impl Editor {
|
|||
}
|
||||
}
|
||||
|
||||
let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else { return; };
|
||||
let Some(lines) = serde_json::to_string_pretty(&lines).log_err() else {
|
||||
return;
|
||||
};
|
||||
cx.write_to_clipboard(ClipboardItem::new(lines));
|
||||
}
|
||||
|
||||
|
@@ -8133,6 +8294,19 @@ impl Editor {
}
}

fn document_to_inlay_range(
range: &DocumentRange,
snapshot: &DisplaySnapshot,
) -> Range<InlayOffset> {
match range {
DocumentRange::Text(text_range) => {
snapshot.anchor_to_inlay_offset(text_range.start)
..snapshot.anchor_to_inlay_offset(text_range.end)
}
DocumentRange::Inlay(inlay_range) => inlay_range.highlight_start..inlay_range.highlight_end,
}
}

fn inlay_hint_settings(
location: Anchor,
snapshot: &MultiBufferSnapshot,
@ -8317,14 +8491,11 @@ impl View for Editor {
|
|||
) -> bool {
|
||||
let pending_selection = self.has_pending_selection();
|
||||
|
||||
if let Some(point) = self.link_go_to_definition_state.last_mouse_location.clone() {
|
||||
if let Some(point) = &self.link_go_to_definition_state.last_trigger_point {
|
||||
if event.cmd && !pending_selection {
|
||||
let point = point.clone();
|
||||
let snapshot = self.snapshot(cx);
|
||||
let kind = if event.shift {
|
||||
LinkDefinitionKind::Type
|
||||
} else {
|
||||
LinkDefinitionKind::Symbol
|
||||
};
|
||||
let kind = point.definition_kind(event.shift);
|
||||
|
||||
show_link_definition(kind, self, point, snapshot, cx);
|
||||
return false;
|
||||
|
@ -8408,6 +8579,7 @@ impl View for Editor {
|
|||
fn marked_text_range(&self, cx: &AppContext) -> Option<Range<usize>> {
|
||||
let snapshot = self.buffer.read(cx).read(cx);
|
||||
let range = self.text_highlights::<InputComposition>(cx)?.1.get(0)?;
|
||||
let range = range.as_text_range()?;
|
||||
Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0)
|
||||
}
|
||||
|
||||
|
|
|
@@ -9,6 +9,7 @@ pub struct EditorSettings {
pub show_completions_on_input: bool,
pub use_on_type_format: bool,
pub scrollbar: Scrollbar,
pub relative_line_numbers: bool,
}

#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -34,6 +35,7 @@ pub struct EditorSettingsContent {
pub show_completions_on_input: Option<bool>,
pub use_on_type_format: Option<bool>,
pub scrollbar: Option<ScrollbarContent>,
pub relative_line_numbers: Option<bool>,
}

#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]

@ -42,7 +42,7 @@ fn test_edit_events(cx: &mut TestAppContext) {
|
|||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buffer = language::Buffer::new(0, "123456", cx);
|
||||
let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "123456");
|
||||
buffer.set_group_interval(Duration::from_secs(1));
|
||||
buffer
|
||||
});
|
||||
|
@ -174,7 +174,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
|
|||
init_test(cx, |_| {});
|
||||
|
||||
let mut now = Instant::now();
|
||||
let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx));
|
||||
let buffer = cx.add_model(|cx| language::Buffer::new(0, cx.model_id() as u64, "123456"));
|
||||
let group_interval = buffer.read_with(cx, |buffer, _| buffer.transaction_group_interval());
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let editor = cx
|
||||
|
@ -247,7 +247,7 @@ fn test_ime_composition(cx: &mut TestAppContext) {
|
|||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buffer = language::Buffer::new(0, "abcde", cx);
|
||||
let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "abcde");
|
||||
// Ensure automatic grouping doesn't occur.
|
||||
buffer.set_group_interval(Duration::ZERO);
|
||||
buffer
|
||||
|
@ -1434,6 +1434,74 @@ async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
let line_height = cx.update_editor(|editor, cx| {
|
||||
editor.set_vertical_scroll_margin(2, cx);
|
||||
editor.style(cx).text.line_height(cx.font_cache())
|
||||
});
|
||||
|
||||
let window = cx.window;
|
||||
window.simulate_resize(vec2f(1000., 6.0 * line_height), &mut cx);
|
||||
|
||||
cx.set_state(
|
||||
&r#"ˇone
|
||||
two
|
||||
three
|
||||
four
|
||||
five
|
||||
six
|
||||
seven
|
||||
eight
|
||||
nine
|
||||
ten
|
||||
"#,
|
||||
);
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.0));
|
||||
});
|
||||
|
||||
// Add a cursor below the visible area. Since both cursors cannot fit
|
||||
// on screen, the editor autoscrolls to reveal the newest cursor, and
|
||||
// allows the vertical scroll margin below that cursor.
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
|
||||
selections.select_ranges([
|
||||
Point::new(0, 0)..Point::new(0, 0),
|
||||
Point::new(6, 0)..Point::new(6, 0),
|
||||
]);
|
||||
})
|
||||
});
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 3.0));
|
||||
});
|
||||
|
||||
// Move down. The editor cursor scrolls down to track the newest cursor.
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.move_down(&Default::default(), cx);
|
||||
});
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 4.0));
|
||||
});
|
||||
|
||||
// Add a cursor above the visible area. Since both cursors fit on screen,
|
||||
// the editor scrolls to show both.
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
|
||||
selections.select_ranges([
|
||||
Point::new(1, 0)..Point::new(1, 0),
|
||||
Point::new(6, 0)..Point::new(6, 0),
|
||||
]);
|
||||
})
|
||||
});
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 1.0));
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
@ -2213,10 +2281,12 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
|
|||
None,
|
||||
));
|
||||
|
||||
let toml_buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx));
|
||||
let toml_buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n").with_language(toml_language, cx)
|
||||
});
|
||||
let rust_buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx)
|
||||
Buffer::new(0, cx.model_id() as u64, "const c: usize = 3;\n")
|
||||
.with_language(rust_language, cx)
|
||||
});
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
|
@ -3686,7 +3756,8 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
|
@ -3849,7 +3920,8 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
|
|||
|
||||
let text = "fn a() {}";
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
editor
|
||||
|
@ -4412,7 +4484,8 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
|
@ -4560,7 +4633,8 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
editor
|
||||
|
@ -5766,7 +5840,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
|
|||
fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
|
@ -5850,7 +5924,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
|
|||
primary: None,
|
||||
}
|
||||
});
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, initial_text));
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
|
||||
|
@ -5908,7 +5982,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
|
|||
fn test_refresh_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
|
||||
let mut excerpt1_id = None;
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
|
@ -5995,7 +6069,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
|
|||
fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
|
||||
let mut excerpt1_id = None;
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
|
@ -6092,7 +6166,8 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
|
|||
"{{} }\n", //
|
||||
);
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
|
||||
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
|
@ -7092,8 +7167,8 @@ async fn test_copilot_multibuffer(
|
|||
let (copilot, copilot_lsp) = Copilot::fake(cx);
|
||||
cx.update(|cx| cx.set_global(copilot));
|
||||
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "c = 3\nd = 4\n", cx));
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n"));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "c = 3\nd = 4\n"));
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
|
|
|
@ -13,6 +13,7 @@ use crate::{
|
|||
},
|
||||
link_go_to_definition::{
|
||||
go_to_fetched_definition, go_to_fetched_type_definition, update_go_to_definition_link,
|
||||
update_inlay_link_and_hover_points, GoToDefinitionTrigger,
|
||||
},
|
||||
mouse_context_menu, EditorSettings, EditorStyle, GutterHover, UnfoldAt,
|
||||
};
|
||||
|
@ -287,13 +288,13 @@ impl EditorElement {
|
|||
return false;
|
||||
}
|
||||
|
||||
let (position, target_position) = position_map.point_for_position(text_bounds, position);
|
||||
|
||||
let point_for_position = position_map.point_for_position(text_bounds, position);
|
||||
let position = point_for_position.previous_valid;
|
||||
if shift && alt {
|
||||
editor.select(
|
||||
SelectPhase::BeginColumnar {
|
||||
position,
|
||||
goal_column: target_position.column(),
|
||||
goal_column: point_for_position.exact_unclipped.column(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
|
@ -329,9 +330,13 @@ impl EditorElement {
|
|||
if !text_bounds.contains_point(position) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let (point, _) = position_map.point_for_position(text_bounds, position);
|
||||
mouse_context_menu::deploy_context_menu(editor, position, point, cx);
|
||||
let point_for_position = position_map.point_for_position(text_bounds, position);
|
||||
mouse_context_menu::deploy_context_menu(
|
||||
editor,
|
||||
position,
|
||||
point_for_position.previous_valid,
|
||||
cx,
|
||||
);
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -353,17 +358,15 @@ impl EditorElement {
|
|||
}
|
||||
|
||||
if !pending_nonempty_selections && cmd && text_bounds.contains_point(position) {
|
||||
let (point, target_point) = position_map.point_for_position(text_bounds, position);
|
||||
|
||||
if point == target_point {
|
||||
if shift {
|
||||
go_to_fetched_type_definition(editor, point, alt, cx);
|
||||
} else {
|
||||
go_to_fetched_definition(editor, point, alt, cx);
|
||||
}
|
||||
|
||||
return true;
|
||||
let point = position_map.point_for_position(text_bounds, position);
|
||||
let could_be_inlay = point.as_valid().is_none();
|
||||
if shift || could_be_inlay {
|
||||
go_to_fetched_type_definition(editor, point, alt, cx);
|
||||
} else {
|
||||
go_to_fetched_definition(editor, point, alt, cx);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
end_selection
|
||||
|
@ -383,17 +386,20 @@ impl EditorElement {
|
|||
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
|
||||
// Don't trigger hover popover if mouse is hovering over context menu
|
||||
let point = if text_bounds.contains_point(position) {
|
||||
let (point, target_point) = position_map.point_for_position(text_bounds, position);
|
||||
if point == target_point {
|
||||
Some(point)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
position_map
|
||||
.point_for_position(text_bounds, position)
|
||||
.as_valid()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
update_go_to_definition_link(editor, point, cmd, shift, cx);
|
||||
update_go_to_definition_link(
|
||||
editor,
|
||||
point.map(GoToDefinitionTrigger::Text),
|
||||
cmd,
|
||||
shift,
|
||||
cx,
|
||||
);
|
||||
|
||||
if editor.has_pending_selection() {
|
||||
let mut scroll_delta = Vector2F::zero();
|
||||
|
@ -422,13 +428,12 @@ impl EditorElement {
|
|||
))
|
||||
}
|
||||
|
||||
let (position, target_position) =
|
||||
position_map.point_for_position(text_bounds, position);
|
||||
let point_for_position = position_map.point_for_position(text_bounds, position);
|
||||
|
||||
editor.select(
|
||||
SelectPhase::Update {
|
||||
position,
|
||||
goal_column: target_position.column(),
|
||||
position: point_for_position.previous_valid,
|
||||
goal_column: point_for_position.exact_unclipped.column(),
|
||||
scroll_position: (position_map.snapshot.scroll_position() + scroll_delta)
|
||||
.clamp(Vector2F::zero(), position_map.scroll_max),
|
||||
},
|
||||
|
@ -455,10 +460,34 @@ impl EditorElement {
|
|||
) -> bool {
|
||||
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
|
||||
// Don't trigger hover popover if mouse is hovering over context menu
|
||||
let point = position_to_display_point(position, text_bounds, position_map);
|
||||
|
||||
update_go_to_definition_link(editor, point, cmd, shift, cx);
|
||||
hover_at(editor, point, cx);
|
||||
if text_bounds.contains_point(position) {
|
||||
let point_for_position = position_map.point_for_position(text_bounds, position);
|
||||
match point_for_position.as_valid() {
|
||||
Some(point) => {
|
||||
update_go_to_definition_link(
|
||||
editor,
|
||||
Some(GoToDefinitionTrigger::Text(point)),
|
||||
cmd,
|
||||
shift,
|
||||
cx,
|
||||
);
|
||||
hover_at(editor, Some(point), cx);
|
||||
}
|
||||
None => {
|
||||
update_inlay_link_and_hover_points(
|
||||
&position_map.snapshot,
|
||||
point_for_position,
|
||||
editor,
|
||||
cmd,
|
||||
shift,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
update_go_to_definition_link(editor, None, cmd, shift, cx);
|
||||
hover_at(editor, None, cx);
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
@ -909,7 +938,7 @@ impl EditorElement {
|
|||
&text,
|
||||
cursor_row_layout.font_size(),
|
||||
&[(
|
||||
text.len(),
|
||||
text.chars().count(),
|
||||
RunStyle {
|
||||
font_id,
|
||||
color: style.background,
|
||||
|
@@ -1408,10 +1437,61 @@ impl EditorElement {
.collect()
}

fn calculate_relative_line_numbers(
&self,
snapshot: &EditorSnapshot,
rows: &Range<u32>,
relative_to: Option<u32>,
) -> HashMap<u32, u32> {
let mut relative_rows: HashMap<u32, u32> = Default::default();
let Some(relative_to) = relative_to else {
return relative_rows;
};

let start = rows.start.min(relative_to);
let end = rows.end.max(relative_to);

let buffer_rows = snapshot
.buffer_rows(start)
.take(1 + (end - start) as usize)
.collect::<Vec<_>>();

let head_idx = relative_to - start;
let mut delta = 1;
let mut i = head_idx + 1;
while i < buffer_rows.len() as u32 {
if buffer_rows[i as usize].is_some() {
if rows.contains(&(i + start)) {
relative_rows.insert(i + start, delta);
}
delta += 1;
}
i += 1;
}
delta = 1;
i = head_idx.min(buffer_rows.len() as u32 - 1);
while i > 0 && buffer_rows[i as usize].is_none() {
i -= 1;
}

while i > 0 {
i -= 1;
if buffer_rows[i as usize].is_some() {
if rows.contains(&(i + start)) {
relative_rows.insert(i + start, delta);
}
delta += 1;
}
}

relative_rows
}

fn layout_line_numbers(
|
||||
&self,
|
||||
rows: Range<u32>,
|
||||
active_rows: &BTreeMap<u32, bool>,
|
||||
newest_selection_head: DisplayPoint,
|
||||
is_singleton: bool,
|
||||
snapshot: &EditorSnapshot,
|
||||
cx: &ViewContext<Editor>,
|
||||
|
@ -1424,6 +1504,15 @@ impl EditorElement {
|
|||
let mut line_number_layouts = Vec::with_capacity(rows.len());
|
||||
let mut fold_statuses = Vec::with_capacity(rows.len());
|
||||
let mut line_number = String::new();
|
||||
let is_relative = settings::get::<EditorSettings>(cx).relative_line_numbers;
|
||||
let relative_to = if is_relative {
|
||||
Some(newest_selection_head.row())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let relative_rows = self.calculate_relative_line_numbers(&snapshot, &rows, relative_to);
|
||||
|
||||
for (ix, row) in snapshot
|
||||
.buffer_rows(rows.start)
|
||||
.take((rows.end - rows.start) as usize)
|
||||
|
@ -1438,7 +1527,11 @@ impl EditorElement {
|
|||
if let Some(buffer_row) = row {
|
||||
if include_line_numbers {
|
||||
line_number.clear();
|
||||
write!(&mut line_number, "{}", buffer_row + 1).unwrap();
|
||||
let default_number = buffer_row + 1;
|
||||
let number = relative_rows
|
||||
.get(&(ix as u32 + rows.start))
|
||||
.unwrap_or(&default_number);
|
||||
write!(&mut line_number, "{}", number).unwrap();
|
||||
line_number_layouts.push(Some(cx.text_layout_cache().layout_str(
|
||||
&line_number,
|
||||
style.text.font_size,
|
||||
|
@ -2082,14 +2175,11 @@ impl Element<Editor> for EditorElement {
|
|||
scroll_height
|
||||
.min(constraint.max_along(Axis::Vertical))
|
||||
.max(constraint.min_along(Axis::Vertical))
|
||||
.max(line_height)
|
||||
.min(line_height * max_lines as f32),
|
||||
)
|
||||
} else if let EditorMode::SingleLine = snapshot.mode {
|
||||
size.set_y(
|
||||
line_height
|
||||
.min(constraint.max_along(Axis::Vertical))
|
||||
.max(constraint.min_along(Axis::Vertical)),
|
||||
)
|
||||
size.set_y(line_height.max(constraint.min_along(Axis::Vertical)))
|
||||
} else if size.y().is_infinite() {
|
||||
size.set_y(scroll_height);
|
||||
}
|
||||
|
@ -2262,9 +2352,23 @@ impl Element<Editor> for EditorElement {
|
|||
})
|
||||
.collect();
|
||||
|
||||
let head_for_relative = newest_selection_head.unwrap_or_else(|| {
|
||||
let newest = editor.selections.newest::<Point>(cx);
|
||||
SelectionLayout::new(
|
||||
newest,
|
||||
editor.selections.line_mode,
|
||||
editor.cursor_shape,
|
||||
&snapshot.display_snapshot,
|
||||
true,
|
||||
true,
|
||||
)
|
||||
.head
|
||||
});
|
||||
|
||||
let (line_number_layouts, fold_statuses) = self.layout_line_numbers(
|
||||
start_row..end_row,
|
||||
&active_rows,
|
||||
head_for_relative,
|
||||
is_singleton,
|
||||
&snapshot,
|
||||
cx,
|
||||
|
@@ -2632,22 +2736,42 @@ struct PositionMap {
snapshot: EditorSnapshot,
}

#[derive(Debug, Copy, Clone)]
pub struct PointForPosition {
pub previous_valid: DisplayPoint,
pub next_valid: DisplayPoint,
pub exact_unclipped: DisplayPoint,
pub column_overshoot_after_line_end: u32,
}

impl PointForPosition {
#[cfg(test)]
pub fn valid(valid: DisplayPoint) -> Self {
Self {
previous_valid: valid,
next_valid: valid,
exact_unclipped: valid,
column_overshoot_after_line_end: 0,
}
}

pub fn as_valid(&self) -> Option<DisplayPoint> {
if self.previous_valid == self.exact_unclipped && self.next_valid == self.exact_unclipped {
Some(self.previous_valid)
} else {
None
}
}
}

impl PositionMap {
|
||||
/// Returns two display points:
|
||||
/// 1. The nearest *valid* position in the editor
|
||||
/// 2. An unclipped, potentially *invalid* position that maps directly to
|
||||
/// the given pixel position.
|
||||
fn point_for_position(
|
||||
&self,
|
||||
text_bounds: RectF,
|
||||
position: Vector2F,
|
||||
) -> (DisplayPoint, DisplayPoint) {
|
||||
fn point_for_position(&self, text_bounds: RectF, position: Vector2F) -> PointForPosition {
|
||||
let scroll_position = self.snapshot.scroll_position();
|
||||
let position = position - text_bounds.origin();
|
||||
let y = position.y().max(0.0).min(self.size.y());
|
||||
let x = position.x() + (scroll_position.x() * self.em_width);
|
||||
let row = (y / self.line_height + scroll_position.y()) as u32;
|
||||
let (column, x_overshoot) = if let Some(line) = self
|
||||
let (column, x_overshoot_after_line_end) = if let Some(line) = self
|
||||
.line_layouts
|
||||
.get(row as usize - scroll_position.y() as usize)
|
||||
.map(|line_with_spaces| &line_with_spaces.line)
|
||||
|
@ -2661,11 +2785,18 @@ impl PositionMap {
|
|||
(0, x)
|
||||
};
|
||||
|
||||
let mut target_point = DisplayPoint::new(row, column);
|
||||
let point = self.snapshot.clip_point(target_point, Bias::Left);
|
||||
*target_point.column_mut() += (x_overshoot / self.em_advance) as u32;
|
||||
let mut exact_unclipped = DisplayPoint::new(row, column);
|
||||
let previous_valid = self.snapshot.clip_point(exact_unclipped, Bias::Left);
|
||||
let next_valid = self.snapshot.clip_point(exact_unclipped, Bias::Right);
|
||||
|
||||
(point, target_point)
|
||||
let column_overshoot_after_line_end = (x_overshoot_after_line_end / self.em_advance) as u32;
|
||||
*exact_unclipped.column_mut() += column_overshoot_after_line_end;
|
||||
PointForPosition {
|
||||
previous_valid,
|
||||
next_valid,
|
||||
exact_unclipped,
|
||||
column_overshoot_after_line_end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2919,23 +3050,6 @@ impl HighlightedRange {
|
|||
}
|
||||
}
|
||||
|
||||
fn position_to_display_point(
|
||||
position: Vector2F,
|
||||
text_bounds: RectF,
|
||||
position_map: &PositionMap,
|
||||
) -> Option<DisplayPoint> {
|
||||
if text_bounds.contains_point(position) {
|
||||
let (point, target_point) = position_map.point_for_position(text_bounds, position);
|
||||
if point == target_point {
|
||||
Some(point)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn range_to_bounds(
|
||||
range: &Range<DisplayPoint>,
|
||||
content_origin: Vector2F,
|
||||
|
@ -3013,7 +3127,6 @@ mod tests {
|
|||
#[gpui::test]
|
||||
fn test_layout_line_numbers(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx
|
||||
.add_window(|cx| {
|
||||
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
|
||||
|
@ -3025,10 +3138,50 @@ mod tests {
|
|||
let layouts = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
element
|
||||
.layout_line_numbers(0..6, &Default::default(), false, &snapshot, cx)
|
||||
.layout_line_numbers(
|
||||
0..6,
|
||||
&Default::default(),
|
||||
DisplayPoint::new(0, 0),
|
||||
false,
|
||||
&snapshot,
|
||||
cx,
|
||||
)
|
||||
.0
|
||||
});
|
||||
assert_eq!(layouts.len(), 6);
|
||||
|
||||
let relative_rows = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
|
||||
});
|
||||
assert_eq!(relative_rows[&0], 3);
|
||||
assert_eq!(relative_rows[&1], 2);
|
||||
assert_eq!(relative_rows[&2], 1);
|
||||
// current line has no relative number
|
||||
assert_eq!(relative_rows[&4], 1);
|
||||
assert_eq!(relative_rows[&5], 2);
|
||||
|
||||
// works if cursor is before screen
|
||||
let relative_rows = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
|
||||
element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
|
||||
});
|
||||
assert_eq!(relative_rows.len(), 3);
|
||||
assert_eq!(relative_rows[&3], 2);
|
||||
assert_eq!(relative_rows[&4], 3);
|
||||
assert_eq!(relative_rows[&5], 4);
|
||||
|
||||
// works if cursor is after screen
|
||||
let relative_rows = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
|
||||
element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
|
||||
});
|
||||
assert_eq!(relative_rows.len(), 3);
|
||||
assert_eq!(relative_rows[&0], 5);
|
||||
assert_eq!(relative_rows[&1], 4);
|
||||
assert_eq!(relative_rows[&2], 3);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
use crate::{
|
||||
display_map::ToDisplayPoint, Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings,
|
||||
EditorSnapshot, EditorStyle, RangeToAnchorExt,
|
||||
display_map::{InlayOffset, ToDisplayPoint},
|
||||
link_go_to_definition::{DocumentRange, InlayRange},
|
||||
Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, EditorSnapshot, EditorStyle,
|
||||
ExcerptId, RangeToAnchorExt,
|
||||
};
|
||||
use futures::FutureExt;
|
||||
use gpui::{
|
||||
|
@ -11,7 +13,7 @@ use gpui::{
|
|||
AnyElement, AppContext, CursorRegion, Element, ModelHandle, MouseRegion, Task, ViewContext,
|
||||
};
|
||||
use language::{Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry};
|
||||
use project::{HoverBlock, HoverBlockKind, Project};
|
||||
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
|
||||
use std::{ops::Range, sync::Arc, time::Duration};
|
||||
use util::TryFutureExt;
|
||||
|
||||
|
@ -46,6 +48,106 @@ pub fn hover_at(editor: &mut Editor, point: Option<DisplayPoint>, cx: &mut ViewC
|
|||
}
|
||||
}
|
||||
|
||||
pub struct InlayHover {
    pub excerpt: ExcerptId,
    pub triggered_from: InlayOffset,
    pub range: InlayRange,
    pub tooltip: HoverBlock,
}

pub fn find_hovered_hint_part(
    label_parts: Vec<InlayHintLabelPart>,
    hint_range: Range<InlayOffset>,
    hovered_offset: InlayOffset,
) -> Option<(InlayHintLabelPart, Range<InlayOffset>)> {
    if hovered_offset >= hint_range.start && hovered_offset <= hint_range.end {
        let mut hovered_character = (hovered_offset - hint_range.start).0;
        let mut part_start = hint_range.start;
        for part in label_parts {
            let part_len = part.value.chars().count();
            if hovered_character > part_len {
                hovered_character -= part_len;
                part_start.0 += part_len;
            } else {
                let part_end = InlayOffset(part_start.0 + part_len);
                return Some((part, part_start..part_end));
            }
        }
    }
    None
}
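A worked example, not part of this commit, of the character bookkeeping in `find_hovered_hint_part`; the offsets and label parts below are invented for illustration:

// Sketch only: suppose a hint whose resolved label parts are ": " and
// "TestNewType" starts at InlayOffset(10), so the hint spans 10..23.
// Hovering at InlayOffset(14) gives hovered_character = 4; the loop consumes
// ": " (2 chars, part_start -> 12) and stops inside "TestNewType" (11 chars),
// returning that part together with its range:
//
//     let (part, range) =
//         find_hovered_hint_part(parts, InlayOffset(10)..InlayOffset(23), InlayOffset(14)).unwrap();
//     assert_eq!(part.value, "TestNewType");
//     assert_eq!(range, InlayOffset(12)..InlayOffset(23));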
|
||||
|
||||
pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut ViewContext<Editor>) {
|
||||
if settings::get::<EditorSettings>(cx).hover_popover_enabled {
|
||||
if editor.pending_rename.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(project) = editor.project.clone() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
|
||||
if let DocumentRange::Inlay(range) = symbol_range {
|
||||
if (range.highlight_start..range.highlight_end)
|
||||
.contains(&inlay_hover.triggered_from)
|
||||
{
|
||||
// Hover triggered from same location as last time. Don't show again.
|
||||
return;
|
||||
}
|
||||
}
|
||||
hide_hover(editor, cx);
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(cx);
|
||||
// Don't request again if the location is the same as the previous request
|
||||
if let Some(triggered_from) = editor.hover_state.triggered_from {
|
||||
if inlay_hover.triggered_from
|
||||
== snapshot
|
||||
.display_snapshot
|
||||
.anchor_to_inlay_offset(triggered_from)
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let task = cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
cx.background()
|
||||
.timer(Duration::from_millis(HOVER_DELAY_MILLIS))
|
||||
.await;
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.hover_state.diagnostic_popover = None;
|
||||
})?;
|
||||
|
||||
let hover_popover = InfoPopover {
|
||||
project: project.clone(),
|
||||
symbol_range: DocumentRange::Inlay(inlay_hover.range),
|
||||
blocks: vec![inlay_hover.tooltip],
|
||||
language: None,
|
||||
rendered_content: None,
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
// Highlight the selected symbol using a background highlight
|
||||
this.highlight_inlay_background::<HoverState>(
|
||||
vec![inlay_hover.range],
|
||||
|theme| theme.editor.hover_popover.highlight,
|
||||
cx,
|
||||
);
|
||||
this.hover_state.info_popover = Some(hover_popover);
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
});
|
||||
|
||||
editor.hover_state.info_task = Some(task);
|
||||
}
|
||||
}
|
||||
|
||||
/// Hides the type information popup.
|
||||
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
|
||||
/// selections changed.
|
||||
|
@ -110,8 +212,13 @@ fn show_hover(
|
|||
if !ignore_timeout {
|
||||
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
|
||||
if symbol_range
|
||||
.to_offset(&snapshot.buffer_snapshot)
|
||||
.contains(&multibuffer_offset)
|
||||
.as_text_range()
|
||||
.map(|range| {
|
||||
range
|
||||
.to_offset(&snapshot.buffer_snapshot)
|
||||
.contains(&multibuffer_offset)
|
||||
})
|
||||
.unwrap_or(false)
|
||||
{
|
||||
// Hover triggered from same location as last time. Don't show again.
|
||||
return;
|
||||
|
@ -219,7 +326,7 @@ fn show_hover(
|
|||
|
||||
Some(InfoPopover {
|
||||
project: project.clone(),
|
||||
symbol_range: range,
|
||||
symbol_range: DocumentRange::Text(range),
|
||||
blocks: hover_result.contents,
|
||||
language: hover_result.language,
|
||||
rendered_content: None,
|
||||
|
@ -227,10 +334,13 @@ fn show_hover(
|
|||
});
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(hover_popover) = hover_popover.as_ref() {
|
||||
if let Some(symbol_range) = hover_popover
|
||||
.as_ref()
|
||||
.and_then(|hover_popover| hover_popover.symbol_range.as_text_range())
|
||||
{
|
||||
// Highlight the selected symbol using a background highlight
|
||||
this.highlight_background::<HoverState>(
|
||||
vec![hover_popover.symbol_range.clone()],
|
||||
vec![symbol_range],
|
||||
|theme| theme.editor.hover_popover.highlight,
|
||||
cx,
|
||||
);
|
||||
|
@ -497,7 +607,10 @@ impl HoverState {
|
|||
.or_else(|| {
|
||||
self.info_popover
|
||||
.as_ref()
|
||||
.map(|info_popover| &info_popover.symbol_range.start)
|
||||
.map(|info_popover| match &info_popover.symbol_range {
|
||||
DocumentRange::Text(range) => &range.start,
|
||||
DocumentRange::Inlay(range) => &range.inlay_position,
|
||||
})
|
||||
})?;
|
||||
let point = anchor.to_display_point(&snapshot.display_snapshot);
|
||||
|
||||
|
@ -522,7 +635,7 @@ impl HoverState {
|
|||
#[derive(Debug, Clone)]
|
||||
pub struct InfoPopover {
|
||||
pub project: ModelHandle<Project>,
|
||||
pub symbol_range: Range<Anchor>,
|
||||
symbol_range: DocumentRange,
|
||||
pub blocks: Vec<HoverBlock>,
|
||||
language: Option<Arc<Language>>,
|
||||
rendered_content: Option<RenderedInfo>,
|
||||
|
@ -692,10 +805,17 @@ impl DiagnosticPopover {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
|
||||
use crate::{
|
||||
editor_tests::init_test,
|
||||
element::PointForPosition,
|
||||
inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
|
||||
link_go_to_definition::update_inlay_link_and_hover_points,
|
||||
test::editor_lsp_test_context::EditorLspTestContext,
|
||||
};
|
||||
use collections::BTreeSet;
|
||||
use gpui::fonts::Weight;
|
||||
use indoc::indoc;
|
||||
use language::{Diagnostic, DiagnosticSet};
|
||||
use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet};
|
||||
use lsp::LanguageServerId;
|
||||
use project::{HoverBlock, HoverBlockKind};
|
||||
use smol::stream::StreamExt;
|
||||
|
@ -1131,4 +1251,327 @@ mod tests {
|
|||
editor
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hover_inlay_label_parts(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.inlay_hints = Some(InlayHintSettings {
|
||||
enabled: true,
|
||||
show_type_hints: true,
|
||||
show_parameter_hints: true,
|
||||
show_other_hints: true,
|
||||
})
|
||||
});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
inlay_hint_provider: Some(lsp::OneOf::Right(
|
||||
lsp::InlayHintServerCapabilities::Options(lsp::InlayHintOptions {
|
||||
resolve_provider: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
)),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
struct TestStruct;
|
||||
|
||||
// ==================
|
||||
|
||||
struct TestNewType<T>(T);
|
||||
|
||||
fn main() {
|
||||
let variableˇ = TestNewType(TestStruct);
|
||||
}
|
||||
"});
|
||||
|
||||
let hint_start_offset = cx.ranges(indoc! {"
|
||||
struct TestStruct;
|
||||
|
||||
// ==================
|
||||
|
||||
struct TestNewType<T>(T);
|
||||
|
||||
fn main() {
|
||||
let variableˇ = TestNewType(TestStruct);
|
||||
}
|
||||
"})[0]
|
||||
.start;
|
||||
let hint_position = cx.to_lsp(hint_start_offset);
|
||||
let new_type_target_range = cx.lsp_range(indoc! {"
|
||||
struct TestStruct;
|
||||
|
||||
// ==================
|
||||
|
||||
struct «TestNewType»<T>(T);
|
||||
|
||||
fn main() {
|
||||
let variable = TestNewType(TestStruct);
|
||||
}
|
||||
"});
|
||||
let struct_target_range = cx.lsp_range(indoc! {"
|
||||
struct «TestStruct»;
|
||||
|
||||
// ==================
|
||||
|
||||
struct TestNewType<T>(T);
|
||||
|
||||
fn main() {
|
||||
let variable = TestNewType(TestStruct);
|
||||
}
|
||||
"});
|
||||
|
||||
let uri = cx.buffer_lsp_url.clone();
|
||||
let new_type_label = "TestNewType";
|
||||
let struct_label = "TestStruct";
|
||||
let entire_hint_label = ": TestNewType<TestStruct>";
|
||||
let closure_uri = uri.clone();
|
||||
cx.lsp
|
||||
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
|
||||
let task_uri = closure_uri.clone();
|
||||
async move {
|
||||
assert_eq!(params.text_document.uri, task_uri);
|
||||
Ok(Some(vec![lsp::InlayHint {
|
||||
position: hint_position,
|
||||
label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
|
||||
value: entire_hint_label.to_string(),
|
||||
..Default::default()
|
||||
}]),
|
||||
kind: Some(lsp::InlayHintKind::TYPE),
|
||||
text_edits: None,
|
||||
tooltip: None,
|
||||
padding_left: Some(false),
|
||||
padding_right: Some(false),
|
||||
data: None,
|
||||
}]))
|
||||
}
|
||||
})
|
||||
.next()
|
||||
.await;
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, cx| {
|
||||
let expected_layers = vec![entire_hint_label.to_string()];
|
||||
assert_eq!(expected_layers, cached_hint_labels(editor));
|
||||
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
|
||||
});
|
||||
|
||||
let inlay_range = cx
|
||||
.ranges(indoc! {"
|
||||
struct TestStruct;
|
||||
|
||||
// ==================
|
||||
|
||||
struct TestNewType<T>(T);
|
||||
|
||||
fn main() {
|
||||
let variable« »= TestNewType(TestStruct);
|
||||
}
|
||||
"})
|
||||
.get(0)
|
||||
.cloned()
|
||||
.unwrap();
|
||||
let new_type_hint_part_hover_position = cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let previous_valid = inlay_range.start.to_display_point(&snapshot);
|
||||
let next_valid = inlay_range.end.to_display_point(&snapshot);
|
||||
assert_eq!(previous_valid.row(), next_valid.row());
|
||||
assert!(previous_valid.column() < next_valid.column());
|
||||
let exact_unclipped = DisplayPoint::new(
|
||||
previous_valid.row(),
|
||||
previous_valid.column()
|
||||
+ (entire_hint_label.find(new_type_label).unwrap() + new_type_label.len() / 2)
|
||||
as u32,
|
||||
);
|
||||
PointForPosition {
|
||||
previous_valid,
|
||||
next_valid,
|
||||
exact_unclipped,
|
||||
column_overshoot_after_line_end: 0,
|
||||
}
|
||||
});
|
||||
cx.update_editor(|editor, cx| {
|
||||
update_inlay_link_and_hover_points(
|
||||
&editor.snapshot(cx),
|
||||
new_type_hint_part_hover_position,
|
||||
editor,
|
||||
true,
|
||||
false,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
let resolve_closure_uri = uri.clone();
|
||||
cx.lsp
|
||||
.handle_request::<lsp::request::InlayHintResolveRequest, _, _>(
|
||||
move |mut hint_to_resolve, _| {
|
||||
let mut resolved_hint_positions = BTreeSet::new();
|
||||
let task_uri = resolve_closure_uri.clone();
|
||||
async move {
|
||||
let inserted = resolved_hint_positions.insert(hint_to_resolve.position);
|
||||
assert!(inserted, "Hint {hint_to_resolve:?} was resolved twice");
|
||||
|
||||
// `: TestNewType<TestStruct>`
|
||||
hint_to_resolve.label = lsp::InlayHintLabel::LabelParts(vec![
|
||||
lsp::InlayHintLabelPart {
|
||||
value: ": ".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::InlayHintLabelPart {
|
||||
value: new_type_label.to_string(),
|
||||
location: Some(lsp::Location {
|
||||
uri: task_uri.clone(),
|
||||
range: new_type_target_range,
|
||||
}),
|
||||
tooltip: Some(lsp::InlayHintLabelPartTooltip::String(format!(
|
||||
"A tooltip for `{new_type_label}`"
|
||||
))),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::InlayHintLabelPart {
|
||||
value: "<".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::InlayHintLabelPart {
|
||||
value: struct_label.to_string(),
|
||||
location: Some(lsp::Location {
|
||||
uri: task_uri,
|
||||
range: struct_target_range,
|
||||
}),
|
||||
tooltip: Some(lsp::InlayHintLabelPartTooltip::MarkupContent(
|
||||
lsp::MarkupContent {
|
||||
kind: lsp::MarkupKind::Markdown,
|
||||
value: format!("A tooltip for `{struct_label}`"),
|
||||
},
|
||||
)),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::InlayHintLabelPart {
|
||||
value: ">".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
]);
|
||||
|
||||
Ok(hint_to_resolve)
|
||||
}
|
||||
},
|
||||
)
|
||||
.next()
|
||||
.await;
|
||||
cx.foreground().run_until_parked();
|
||||
|
||||
cx.update_editor(|editor, cx| {
|
||||
update_inlay_link_and_hover_points(
|
||||
&editor.snapshot(cx),
|
||||
new_type_hint_part_hover_position,
|
||||
editor,
|
||||
true,
|
||||
false,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.foreground()
|
||||
.advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let hover_state = &editor.hover_state;
|
||||
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
|
||||
let popover = hover_state.info_popover.as_ref().unwrap();
|
||||
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
let entire_inlay_start = snapshot.display_point_to_inlay_offset(
|
||||
inlay_range.start.to_display_point(&snapshot),
|
||||
Bias::Left,
|
||||
);
|
||||
|
||||
let expected_new_type_label_start = InlayOffset(entire_inlay_start.0 + ": ".len());
|
||||
assert_eq!(
|
||||
popover.symbol_range,
|
||||
DocumentRange::Inlay(InlayRange {
|
||||
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
|
||||
highlight_start: expected_new_type_label_start,
|
||||
highlight_end: InlayOffset(
|
||||
expected_new_type_label_start.0 + new_type_label.len()
|
||||
),
|
||||
}),
|
||||
"Popover range should match the new type label part"
|
||||
);
|
||||
assert_eq!(
|
||||
popover
|
||||
.rendered_content
|
||||
.as_ref()
|
||||
.expect("should have label text for new type hint")
|
||||
.text,
|
||||
format!("A tooltip for `{new_type_label}`"),
|
||||
"Rendered text should not anyhow alter backticks"
|
||||
);
|
||||
});
|
||||
|
||||
let struct_hint_part_hover_position = cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let previous_valid = inlay_range.start.to_display_point(&snapshot);
|
||||
let next_valid = inlay_range.end.to_display_point(&snapshot);
|
||||
assert_eq!(previous_valid.row(), next_valid.row());
|
||||
assert!(previous_valid.column() < next_valid.column());
|
||||
let exact_unclipped = DisplayPoint::new(
|
||||
previous_valid.row(),
|
||||
previous_valid.column()
|
||||
+ (entire_hint_label.find(struct_label).unwrap() + struct_label.len() / 2)
|
||||
as u32,
|
||||
);
|
||||
PointForPosition {
|
||||
previous_valid,
|
||||
next_valid,
|
||||
exact_unclipped,
|
||||
column_overshoot_after_line_end: 0,
|
||||
}
|
||||
});
|
||||
cx.update_editor(|editor, cx| {
|
||||
update_inlay_link_and_hover_points(
|
||||
&editor.snapshot(cx),
|
||||
struct_hint_part_hover_position,
|
||||
editor,
|
||||
true,
|
||||
false,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.foreground()
|
||||
.advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
|
||||
cx.foreground().run_until_parked();
|
||||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let hover_state = &editor.hover_state;
|
||||
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
|
||||
let popover = hover_state.info_popover.as_ref().unwrap();
|
||||
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
let entire_inlay_start = snapshot.display_point_to_inlay_offset(
|
||||
inlay_range.start.to_display_point(&snapshot),
|
||||
Bias::Left,
|
||||
);
|
||||
let expected_struct_label_start =
|
||||
InlayOffset(entire_inlay_start.0 + ": ".len() + new_type_label.len() + "<".len());
|
||||
assert_eq!(
|
||||
popover.symbol_range,
|
||||
DocumentRange::Inlay(InlayRange {
|
||||
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
|
||||
highlight_start: expected_struct_label_start,
|
||||
highlight_end: InlayOffset(expected_struct_label_start.0 + struct_label.len()),
|
||||
}),
|
||||
"Popover range should match the struct label part"
|
||||
);
|
||||
assert_eq!(
|
||||
popover
|
||||
.rendered_content
|
||||
.as_ref()
|
||||
.expect("should have label text for struct hint")
|
||||
.text,
|
||||
format!("A tooltip for {struct_label}"),
|
||||
"Rendered markdown element should remove backticks from text"
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -55,8 +55,12 @@ impl FollowableItem for Editor {
|
|||
cx: &mut AppContext,
|
||||
) -> Option<Task<Result<ViewHandle<Self>>>> {
|
||||
let project = workspace.read(cx).project().to_owned();
|
||||
let Some(proto::view::Variant::Editor(_)) = state else { return None };
|
||||
let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() };
|
||||
let Some(proto::view::Variant::Editor(_)) = state else {
|
||||
return None;
|
||||
};
|
||||
let Some(proto::view::Variant::Editor(state)) = state.take() else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
let client = project.read(cx).client();
|
||||
let replica_id = project.read(cx).replica_id();
|
||||
|
@ -341,10 +345,16 @@ async fn update_editor_from_message(
|
|||
|
||||
let mut insertions = message.inserted_excerpts.into_iter().peekable();
|
||||
while let Some(insertion) = insertions.next() {
|
||||
let Some(excerpt) = insertion.excerpt else { continue };
|
||||
let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { continue };
|
||||
let Some(excerpt) = insertion.excerpt else {
|
||||
continue;
|
||||
};
|
||||
let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
|
||||
continue;
|
||||
};
|
||||
let buffer_id = excerpt.buffer_id;
|
||||
let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { continue };
|
||||
let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let adjacent_excerpts = iter::from_fn(|| {
|
||||
let insertion = insertions.peek()?;
|
||||
|
@ -615,7 +625,7 @@ impl Item for Editor {
|
|||
|
||||
fn workspace_deactivated(&mut self, cx: &mut ViewContext<Self>) {
|
||||
hide_link_definition(self, cx);
|
||||
self.link_go_to_definition_state.last_mouse_location = None;
|
||||
self.link_go_to_definition_state.last_trigger_point = None;
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &AppContext) -> bool {
|
||||
|
|
File diff suppressed because it is too large
|
@ -756,7 +756,8 @@ mod tests {
|
|||
.select_font(family_id, &Default::default())
|
||||
.unwrap();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn"));
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
|
|
|
@ -6,7 +6,7 @@ use clock::ReplicaId;
|
|||
use collections::{BTreeMap, Bound, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, SinkExt};
|
||||
use git::diff::DiffHunk;
|
||||
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
use gpui::{AppContext, Entity, ModelContext, ModelHandle};
|
||||
pub use language::Completion;
|
||||
use language::{
|
||||
char_kind,
|
||||
|
@ -67,7 +67,9 @@ pub enum Event {
|
|||
ExcerptsEdited {
|
||||
ids: Vec<ExcerptId>,
|
||||
},
|
||||
Edited,
|
||||
Edited {
|
||||
singleton_buffer_edited: bool,
|
||||
},
|
||||
Reloaded,
|
||||
DiffBaseChanged,
|
||||
LanguageChanged,
|
||||
|
@ -836,59 +838,59 @@ impl MultiBuffer {
|
|||
|
||||
pub fn stream_excerpts_with_context_lines(
|
||||
&mut self,
|
||||
excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
ranges: Vec<Range<text::Anchor>>,
|
||||
context_line_count: u32,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
|
||||
) -> mpsc::Receiver<Range<Anchor>> {
|
||||
let (mut tx, rx) = mpsc::channel(256);
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
for (buffer, ranges) in excerpts {
|
||||
let (buffer_id, buffer_snapshot) =
|
||||
buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let (buffer_id, buffer_snapshot) =
|
||||
buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
|
||||
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_counts = Vec::new();
|
||||
cx.background()
|
||||
.scoped(|scope| {
|
||||
scope.spawn(async {
|
||||
let (ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges = ranges;
|
||||
range_counts = counts;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_counts = Vec::new();
|
||||
cx.background()
|
||||
.scoped(|scope| {
|
||||
scope.spawn(async {
|
||||
let (ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges = ranges;
|
||||
range_counts = counts;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
for (excerpt_id, range_count) in
|
||||
excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.end,
|
||||
};
|
||||
if tx.send(start..end).await.is_err() {
|
||||
break;
|
||||
}
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
});
|
||||
|
||||
for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id: excerpt_id.clone(),
|
||||
text_anchor: range.end,
|
||||
};
|
||||
if tx.send(start..end).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
(task, rx)
|
||||
})
|
||||
.detach();
|
||||
|
||||
rx
|
||||
}
|
||||
|
||||
pub fn push_excerpts<O>(
|
||||
|
@ -1070,7 +1072,9 @@ impl MultiBuffer {
|
|||
old: edit_start..edit_start,
|
||||
new: edit_start..edit_end,
|
||||
}]);
|
||||
cx.emit(Event::Edited);
|
||||
cx.emit(Event::Edited {
|
||||
singleton_buffer_edited: false,
|
||||
});
|
||||
cx.emit(Event::ExcerptsAdded {
|
||||
buffer,
|
||||
predecessor: prev_excerpt_id,
|
||||
|
@ -1094,7 +1098,9 @@ impl MultiBuffer {
|
|||
old: 0..prev_len,
|
||||
new: 0..0,
|
||||
}]);
|
||||
cx.emit(Event::Edited);
|
||||
cx.emit(Event::Edited {
|
||||
singleton_buffer_edited: false,
|
||||
});
|
||||
cx.emit(Event::ExcerptsRemoved { ids });
|
||||
cx.notify();
|
||||
}
|
||||
|
@ -1302,7 +1308,9 @@ impl MultiBuffer {
|
|||
}
|
||||
|
||||
self.subscriptions.publish_mut(edits);
|
||||
cx.emit(Event::Edited);
|
||||
cx.emit(Event::Edited {
|
||||
singleton_buffer_edited: false,
|
||||
});
|
||||
cx.emit(Event::ExcerptsRemoved { ids });
|
||||
cx.notify();
|
||||
}
|
||||
|
@ -1363,7 +1371,9 @@ impl MultiBuffer {
|
|||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
cx.emit(match event {
|
||||
language::Event::Edited => Event::Edited,
|
||||
language::Event::Edited => Event::Edited {
|
||||
singleton_buffer_edited: true,
|
||||
},
|
||||
language::Event::DirtyChanged => Event::DirtyChanged,
|
||||
language::Event::Saved => Event::Saved,
|
||||
language::Event::FileHandleChanged => Event::FileHandleChanged,
|
||||
|
@ -1608,7 +1618,7 @@ impl MultiBuffer {
|
|||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl MultiBuffer {
|
||||
pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> ModelHandle<Self> {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
cx.add_model(|cx| Self::singleton(buffer, cx))
|
||||
}
|
||||
|
||||
|
@ -1618,7 +1628,7 @@ impl MultiBuffer {
|
|||
) -> ModelHandle<Self> {
|
||||
let multi = cx.add_model(|_| Self::new(0));
|
||||
for (text, ranges) in excerpts {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange {
|
||||
context: range,
|
||||
primary: None,
|
||||
|
@ -1710,7 +1720,7 @@ impl MultiBuffer {
|
|||
if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
|
||||
let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
|
||||
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
|
||||
buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
|
||||
buffers.push(cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text)));
|
||||
let buffer = buffers.last().unwrap().read(cx);
|
||||
log::info!(
|
||||
"Creating new buffer {} with text: {:?}",
|
||||
|
@ -2814,7 +2824,9 @@ impl MultiBufferSnapshot {
|
|||
// Get the ranges of the innermost pair of brackets.
|
||||
let mut result: Option<(Range<usize>, Range<usize>)> = None;
|
||||
|
||||
let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else { return None; };
|
||||
let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else {
|
||||
return None;
|
||||
};
|
||||
|
||||
for (open, close) in enclosing_bracket_ranges {
|
||||
let len = close.end - open.start;
|
||||
|
@ -4097,7 +4109,8 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_singleton(cx: &mut AppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
|
||||
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
|
||||
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
|
@ -4124,7 +4137,7 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_remote(cx: &mut AppContext) {
|
||||
let host_buffer = cx.add_model(|cx| Buffer::new(0, "a", cx));
|
||||
let host_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a"));
|
||||
let guest_buffer = cx.add_model(|cx| {
|
||||
let state = host_buffer.read(cx).to_proto();
|
||||
let ops = cx
|
||||
|
@ -4155,15 +4168,17 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) {
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx));
|
||||
let buffer_1 =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
|
||||
let buffer_2 =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'g')));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
||||
let events = Rc::new(RefCell::new(Vec::<Event>::new()));
|
||||
multibuffer.update(cx, |_, cx| {
|
||||
let events = events.clone();
|
||||
cx.subscribe(&multibuffer, move |_, _, event, _| {
|
||||
if let Event::Edited = event {
|
||||
if let Event::Edited { .. } = event {
|
||||
events.borrow_mut().push(event.clone())
|
||||
}
|
||||
})
|
||||
|
@ -4218,7 +4233,17 @@ mod tests {
|
|||
// Adding excerpts emits an edited event.
|
||||
assert_eq!(
|
||||
events.borrow().as_slice(),
|
||||
&[Event::Edited, Event::Edited, Event::Edited]
|
||||
&[
|
||||
Event::Edited {
|
||||
singleton_buffer_edited: false
|
||||
},
|
||||
Event::Edited {
|
||||
singleton_buffer_edited: false
|
||||
},
|
||||
Event::Edited {
|
||||
singleton_buffer_edited: false
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
|
@ -4379,8 +4404,10 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_excerpt_events(cx: &mut AppContext) {
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'a'), cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'm'), cx));
|
||||
let buffer_1 =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'a')));
|
||||
let buffer_2 =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'm')));
|
||||
|
||||
let leader_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let follower_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
@ -4397,7 +4424,7 @@ mod tests {
|
|||
excerpts,
|
||||
} => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
|
||||
Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx),
|
||||
Event::Edited => {
|
||||
Event::Edited { .. } => {
|
||||
*follower_edit_event_count.borrow_mut() += 1;
|
||||
}
|
||||
_ => {}
|
||||
|
@ -4485,7 +4512,8 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_push_excerpts_with_context_lines(cx: &mut AppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.push_excerpts_with_context_lines(
|
||||
|
@ -4521,9 +4549,10 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let snapshot = buffer.read(cx);
|
||||
let ranges = vec![
|
||||
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
|
||||
|
@ -4531,12 +4560,10 @@ mod tests {
|
|||
snapshot.anchor_before(Point::new(15, 0))
|
||||
..snapshot.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
|
||||
multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx)
|
||||
});
|
||||
|
||||
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
|
||||
// Ensure task is finished when stream completes.
|
||||
task.await;
|
||||
|
||||
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
|
||||
assert_eq!(
|
||||
|
@ -4569,7 +4596,7 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_singleton_multibuffer_anchors(cx: &mut AppContext) {
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
|
||||
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
|
||||
let old_snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
|
@ -4589,8 +4616,8 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_multibuffer_anchors(cx: &mut AppContext) {
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx));
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "efghi"));
|
||||
let multibuffer = cx.add_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
|
@ -4647,8 +4674,8 @@ mod tests {
|
|||
|
||||
#[gpui::test]
|
||||
fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) {
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "ABCDEFGHIJKLMNOP", cx));
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "ABCDEFGHIJKLMNOP"));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
|
||||
// Create an insertion id in buffer 1 that doesn't exist in buffer 2.
|
||||
|
@ -5043,7 +5070,9 @@ mod tests {
|
|||
let base_text = util::RandomCharIter::new(&mut rng)
|
||||
.take(10)
|
||||
.collect::<String>();
|
||||
buffers.push(cx.add_model(|cx| Buffer::new(0, base_text, cx)));
|
||||
buffers.push(
|
||||
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text)),
|
||||
);
|
||||
buffers.last().unwrap()
|
||||
} else {
|
||||
buffers.choose(&mut rng).unwrap()
|
||||
|
@ -5384,8 +5413,8 @@ mod tests {
|
|||
fn test_history(cx: &mut AppContext) {
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "1234", cx));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "5678", cx));
|
||||
let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "1234"));
|
||||
let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "5678"));
|
||||
let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
|
||||
let group_interval = multibuffer.read(cx).history.group_interval;
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
|
|
|
@ -65,47 +65,52 @@ impl Editor {
|
|||
self.set_scroll_position(scroll_position, cx);
|
||||
}
|
||||
|
||||
let (autoscroll, local) =
|
||||
if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() {
|
||||
autoscroll
|
||||
} else {
|
||||
return false;
|
||||
};
|
||||
let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let first_cursor_top;
|
||||
let last_cursor_bottom;
|
||||
let mut target_top;
|
||||
let mut target_bottom;
|
||||
if let Some(highlighted_rows) = &self.highlighted_rows {
|
||||
first_cursor_top = highlighted_rows.start as f32;
|
||||
last_cursor_bottom = first_cursor_top + 1.;
|
||||
} else if autoscroll == Autoscroll::newest() {
|
||||
let newest_selection = self.selections.newest::<Point>(cx);
|
||||
first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
|
||||
last_cursor_bottom = first_cursor_top + 1.;
|
||||
target_top = highlighted_rows.start as f32;
|
||||
target_bottom = target_top + 1.;
|
||||
} else {
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
first_cursor_top = selections
|
||||
target_top = selections
|
||||
.first()
|
||||
.unwrap()
|
||||
.head()
|
||||
.to_display_point(&display_map)
|
||||
.row() as f32;
|
||||
last_cursor_bottom = selections
|
||||
target_bottom = selections
|
||||
.last()
|
||||
.unwrap()
|
||||
.head()
|
||||
.to_display_point(&display_map)
|
||||
.row() as f32
|
||||
+ 1.0;
|
||||
|
||||
// If the selections can't all fit on screen, scroll to the newest.
|
||||
if autoscroll == Autoscroll::newest()
|
||||
|| autoscroll == Autoscroll::fit() && target_bottom - target_top > visible_lines
|
||||
{
|
||||
let newest_selection_top = selections
|
||||
.iter()
|
||||
.max_by_key(|s| s.id)
|
||||
.unwrap()
|
||||
.head()
|
||||
.to_display_point(&display_map)
|
||||
.row() as f32;
|
||||
target_top = newest_selection_top;
|
||||
target_bottom = newest_selection_top + 1.;
|
||||
}
|
||||
}
|
||||
|
||||
let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
|
||||
0.
|
||||
} else {
|
||||
((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
|
||||
((visible_lines - (target_bottom - target_top)) / 2.0).floor()
|
||||
};
|
||||
if margin < 0.0 {
|
||||
return false;
|
||||
}
|
||||
|
||||
let strategy = match autoscroll {
|
||||
Autoscroll::Strategy(strategy) => strategy,
|
||||
|
@ -113,8 +118,8 @@ impl Editor {
|
|||
let last_autoscroll = &self.scroll_manager.last_autoscroll;
|
||||
if let Some(last_autoscroll) = last_autoscroll {
|
||||
if self.scroll_manager.anchor.offset == last_autoscroll.0
|
||||
&& first_cursor_top == last_autoscroll.1
|
||||
&& last_cursor_bottom == last_autoscroll.2
|
||||
&& target_top == last_autoscroll.1
|
||||
&& target_bottom == last_autoscroll.2
|
||||
{
|
||||
last_autoscroll.3.next()
|
||||
} else {
|
||||
|
@ -129,37 +134,41 @@ impl Editor {
|
|||
match strategy {
|
||||
AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
|
||||
let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
|
||||
let target_top = (first_cursor_top - margin).max(0.0);
|
||||
let target_bottom = last_cursor_bottom + margin;
|
||||
let target_top = (target_top - margin).max(0.0);
|
||||
let target_bottom = target_bottom + margin;
|
||||
let start_row = scroll_position.y();
|
||||
let end_row = start_row + visible_lines;
|
||||
|
||||
if target_top < start_row {
|
||||
let needs_scroll_up = target_top < start_row;
|
||||
let needs_scroll_down = target_bottom >= end_row;
|
||||
|
||||
if needs_scroll_up && !needs_scroll_down {
|
||||
scroll_position.set_y(target_top);
|
||||
self.set_scroll_position_internal(scroll_position, local, true, cx);
|
||||
} else if target_bottom >= end_row {
|
||||
}
|
||||
if !needs_scroll_up && needs_scroll_down {
|
||||
scroll_position.set_y(target_bottom - visible_lines);
|
||||
self.set_scroll_position_internal(scroll_position, local, true, cx);
|
||||
}
|
||||
}
|
||||
AutoscrollStrategy::Center => {
|
||||
scroll_position.set_y((first_cursor_top - margin).max(0.0));
|
||||
scroll_position.set_y((target_top - margin).max(0.0));
|
||||
self.set_scroll_position_internal(scroll_position, local, true, cx);
|
||||
}
|
||||
AutoscrollStrategy::Top => {
|
||||
scroll_position.set_y((first_cursor_top).max(0.0));
|
||||
scroll_position.set_y((target_top).max(0.0));
|
||||
self.set_scroll_position_internal(scroll_position, local, true, cx);
|
||||
}
|
||||
AutoscrollStrategy::Bottom => {
|
||||
scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
|
||||
scroll_position.set_y((target_bottom - visible_lines).max(0.0));
|
||||
self.set_scroll_position_internal(scroll_position, local, true, cx);
|
||||
}
|
||||
}
|
||||
|
||||
self.scroll_manager.last_autoscroll = Some((
|
||||
self.scroll_manager.anchor.offset,
|
||||
first_cursor_top,
|
||||
last_cursor_bottom,
|
||||
target_top,
|
||||
target_bottom,
|
||||
strategy,
|
||||
));
|
||||
|
||||
|
|
|
@ -225,6 +225,7 @@ impl<'a> EditorTestContext<'a> {
|
|||
.map(|h| h.1.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|range| range.as_text_range())
|
||||
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
|
||||
.collect()
|
||||
});
|
||||
|
@ -240,6 +241,7 @@ impl<'a> EditorTestContext<'a> {
|
|||
.map(|ranges| ranges.as_ref().clone().1)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|range| range.as_text_range())
|
||||
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
|
||||
.collect();
|
||||
assert_set_eq!(actual_ranges, expected_ranges);
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
[package]
|
||||
name = "staff_mode"
|
||||
name = "feature_flags"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/staff_mode.rs"
|
||||
path = "src/feature_flags.rs"
|
||||
|
||||
[dependencies]
|
||||
gpui = { path = "../gpui" }
|
79
crates/feature_flags/src/feature_flags.rs
Normal file
|
@@ -0,0 +1,79 @@
use gpui::{AppContext, Subscription, ViewContext};

#[derive(Default)]
struct FeatureFlags {
    flags: Vec<String>,
    staff: bool,
}

impl FeatureFlags {
    fn has_flag(&self, flag: &str) -> bool {
        self.staff || self.flags.iter().find(|f| f.as_str() == flag).is_some()
    }
}

pub trait FeatureFlag {
    const NAME: &'static str;
}

pub enum ChannelsAlpha {}

impl FeatureFlag for ChannelsAlpha {
    const NAME: &'static str = "channels_alpha";
}

pub trait FeatureFlagViewExt<V: 'static> {
    fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
    where
        F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static;
}

impl<V: 'static> FeatureFlagViewExt<V> for ViewContext<'_, '_, V> {
    fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
    where
        F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static,
    {
        self.observe_global::<FeatureFlags, _>(move |v, cx| {
            let feature_flags = cx.global::<FeatureFlags>();
            callback(feature_flags.has_flag(<T as FeatureFlag>::NAME), v, cx);
        })
    }
}

pub trait FeatureFlagAppExt {
    fn update_flags(&mut self, staff: bool, flags: Vec<String>);
    fn set_staff(&mut self, staff: bool);
    fn has_flag<T: FeatureFlag>(&self) -> bool;
    fn is_staff(&self) -> bool;
}

impl FeatureFlagAppExt for AppContext {
    fn update_flags(&mut self, staff: bool, flags: Vec<String>) {
        self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
            feature_flags.staff = staff;
            feature_flags.flags = flags;
        })
    }

    fn set_staff(&mut self, staff: bool) {
        self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
            feature_flags.staff = staff;
        })
    }

    fn has_flag<T: FeatureFlag>(&self) -> bool {
        if self.has_global::<FeatureFlags>() {
            self.global::<FeatureFlags>().has_flag(T::NAME)
        } else {
            false
        }
    }

    fn is_staff(&self) -> bool {
        if self.has_global::<FeatureFlags>() {
            self.global::<FeatureFlags>().staff
        } else {
            false
        }
    }
}
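A minimal usage sketch, not part of this commit, showing how another crate would define and query a flag with the traits above; `NotesAlpha` and "notes_alpha" are hypothetical names invented for illustration:

use feature_flags::{FeatureFlag, FeatureFlagAppExt};
use gpui::AppContext;

// Sketch only: a hypothetical flag gated by the feature_flags crate above.
pub enum NotesAlpha {}

impl FeatureFlag for NotesAlpha {
    const NAME: &'static str = "notes_alpha";
}

// Staff accounts pass every check because `FeatureFlags::has_flag`
// short-circuits on `self.staff`.
fn notes_enabled(cx: &AppContext) -> bool {
    cx.has_flag::<NotesAlpha>()
}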
|
|
@ -67,7 +67,9 @@ impl KeymapContextPredicate {
|
|||
}
|
||||
|
||||
pub fn eval(&self, contexts: &[KeymapContext]) -> bool {
|
||||
let Some(context) = contexts.first() else { return false };
|
||||
let Some(context) = contexts.first() else {
|
||||
return false;
|
||||
};
|
||||
match self {
|
||||
Self::Identifier(name) => (&context.set).contains(name.as_str()),
|
||||
Self::Equal(left, right) => context
|
||||
|
|
|
@ -14,7 +14,7 @@ use crate::{
|
|||
CodeLabel, LanguageScope, Outline,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
pub use clock::ReplicaId;
|
||||
use fs::LineEnding;
|
||||
use futures::FutureExt as _;
|
||||
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
|
||||
|
@ -347,13 +347,9 @@ impl CharKind {
|
|||
}
|
||||
|
||||
impl Buffer {
|
||||
pub fn new<T: Into<String>>(
|
||||
replica_id: ReplicaId,
|
||||
base_text: T,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
|
||||
Self::build(
|
||||
TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
|
||||
TextBuffer::new(replica_id, id, base_text.into()),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
|
@ -2504,7 +2500,9 @@ impl BufferSnapshot {
|
|||
|
||||
matches.advance();
|
||||
|
||||
let Some((open, close)) = open.zip(close) else { continue };
|
||||
let Some((open, close)) = open.zip(close) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let bracket_range = open.start..=close.end;
|
||||
if !bracket_range.overlaps(&range) {
|
||||
|
|
|
@ -43,8 +43,8 @@ fn test_line_endings(cx: &mut gpui::AppContext) {
|
|||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer =
|
||||
Buffer::new(0, "one\r\ntwo\rthree", cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
assert_eq!(buffer.text(), "one\ntwo\nthree");
|
||||
assert_eq!(buffer.line_ending(), LineEnding::Windows);
|
||||
|
||||
|
@ -138,8 +138,8 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||
let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
|
||||
|
||||
let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
|
||||
let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
|
||||
let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
|
||||
let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
|
||||
let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
|
||||
buffer1.update(cx, {
|
||||
let buffer1_ops = buffer1_ops.clone();
|
||||
|
@ -222,7 +222,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||
#[gpui::test]
|
||||
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
||||
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
|
||||
|
||||
let text = "a\nccc\ndddd\nffffff\n";
|
||||
|
@ -254,7 +254,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||
]
|
||||
.join("\n");
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
|
||||
// Spawn a task to format the buffer's whitespace.
|
||||
// Pause so that the formatting task starts running.
|
||||
|
@ -318,8 +318,9 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||
#[gpui::test]
|
||||
async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
||||
let text = "fn a() {}";
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
|
||||
// Wait for the initial text to parse
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
|
@ -443,7 +444,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
#[gpui::test]
|
||||
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "{}", cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.set_sync_parse_timeout(Duration::ZERO);
|
||||
buffer
|
||||
});
|
||||
|
@ -491,8 +493,9 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
@ -576,8 +579,9 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
@ -613,7 +617,9 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(language), cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
|
||||
});
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
// extra context nodes are included in the outline.
|
||||
|
@ -655,8 +661,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
// point is at the start of an item
|
||||
|
@ -877,7 +884,8 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
|
|||
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
||||
cx.add_model(|cx| {
|
||||
let text = "fn a() { b(|c| {}) }";
|
||||
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
assert_eq!(
|
||||
|
@ -917,7 +925,8 @@ fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
|
|||
|
||||
cx.add_model(|cx| {
|
||||
let text = "fn a() {}";
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||
assert_eq!(buffer.text(), "fn a() {\n \n}");
|
||||
|
@ -959,7 +968,8 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
|
|||
|
||||
cx.add_model(|cx| {
|
||||
let text = "fn a() {}";
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
|
||||
|
@ -1000,6 +1010,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
"
|
||||
fn a() {
|
||||
c;
|
||||
|
@ -1007,7 +1018,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
|
@ -1073,6 +1083,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
"
|
||||
fn a() {
|
||||
b();
|
||||
|
@ -1080,7 +1091,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
"
|
||||
.replace("|", "") // marker to preserve trailing whitespace
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
|
@ -1136,13 +1146,13 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
|
|||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
"
|
||||
fn a() {
|
||||
i
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
|
@ -1198,11 +1208,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
|||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
"
|
||||
fn a() {}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
|
@ -1254,7 +1264,8 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
|||
|
||||
cx.add_model(|cx| {
|
||||
let text = "a\nb";
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.edit(
|
||||
[(0..1, "\n"), (2..3, "\n")],
|
||||
Some(AutoindentMode::EachLine),
|
||||
|
@ -1280,7 +1291,8 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
|||
"
|
||||
.unindent();
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.edit(
|
||||
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
|
||||
Some(AutoindentMode::EachLine),
|
||||
|
@ -1317,7 +1329,8 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
|
|||
}
|
||||
"#
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// When this text was copied, both of the quotation marks were at the same
|
||||
// indent level, but the indentation of the first line was not included in
|
||||
|
@ -1402,7 +1415,8 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
|
|||
}
|
||||
"#
|
||||
.unindent();
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// The original indent columns are not known, so this text is
|
||||
// auto-indented in a block as if the first line was copied in
|
||||
|
@ -1481,7 +1495,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
|||
"
|
||||
.unindent();
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx).with_language(
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
|
||||
Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "Markdown".into(),
|
||||
|
@ -1557,7 +1571,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
|
|||
false,
|
||||
);
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
||||
buffer.set_language_registry(language_registry);
|
||||
buffer.set_language(Some(html_language), cx);
|
||||
buffer.edit(
|
||||
|
@ -1593,7 +1607,8 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
|
|||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(ruby_lang()), cx);
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
|
||||
|
||||
let text = r#"
|
||||
class C
|
||||
|
@ -1683,7 +1698,8 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
|||
|
||||
let text = r#"a["b"] = <C d="e"></C>;"#;
|
||||
|
||||
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx);
|
||||
let buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
let config = snapshot.language_scope_at(0).unwrap();
|
||||
|
@ -1762,7 +1778,8 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = Buffer::new(0, text.clone(), cx).with_language(Arc::new(language), cx);
|
||||
let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
|
||||
.with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
// By default, all brackets are enabled
|
||||
|
@ -1806,7 +1823,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
|||
language_registry.add(Arc::new(html_lang()));
|
||||
language_registry.add(Arc::new(erb_lang()));
|
||||
|
||||
let mut buffer = Buffer::new(0, text, cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
||||
buffer.set_language_registry(language_registry.clone());
|
||||
buffer.set_language(
|
||||
language_registry
|
||||
|
@ -1838,7 +1855,7 @@ fn test_serialization(cx: &mut gpui::AppContext) {
|
|||
let mut now = Instant::now();
|
||||
|
||||
let buffer1 = cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "abc", cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
|
||||
buffer.edit([(3..3, "D")], None, cx);
|
||||
|
||||
now += Duration::from_secs(1);
|
||||
|
@ -1893,7 +1910,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
let mut replica_ids = Vec::new();
|
||||
let mut buffers = Vec::new();
|
||||
let network = Rc::new(RefCell::new(Network::new(rng.clone())));
|
||||
let base_buffer = cx.add_model(|cx| Buffer::new(0, base_text.as_str(), cx));
|
||||
let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
|
||||
|
||||
for i in 0..rng.gen_range(min_peers..=max_peers) {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
|
@ -2394,7 +2411,8 @@ fn assert_bracket_pairs(
|
|||
) {
|
||||
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, expected_text.clone(), cx).with_language(Arc::new(language), cx)
|
||||
Buffer::new(0, cx.model_id() as u64, expected_text.clone())
|
||||
.with_language(Arc::new(language), cx)
|
||||
});
|
||||
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
|
||||
|
||||
@@ -18,7 +18,7 @@ use futures::{
    FutureExt, TryFutureExt as _,
};
use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
use highlight_map::HighlightMap;
pub use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinary};
use parking_lot::{Mutex, RwLock};
@@ -310,7 +310,9 @@ impl SyntaxSnapshot {
            // Ignore edits that end before the start of this layer, and don't consider them
            // for any subsequent layers at this same depth.
            loop {
                let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else { continue 'outer };
                let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
                    continue 'outer;
                };
                if edit_range.end.cmp(&layer.range.start, text).is_le() {
                    first_edit_ix_for_depth += 1;
                } else {
@@ -391,7 +393,9 @@ impl SyntaxSnapshot {
            .filter::<_, ()>(|summary| summary.contains_unknown_injections);
        cursor.next(text);
        while let Some(layer) = cursor.item() {
            let SyntaxLayerContent::Pending { language_name } = &layer.content else { unreachable!() };
            let SyntaxLayerContent::Pending { language_name } = &layer.content else {
                unreachable!()
            };
            if registry
                .language_for_name_or_extension(language_name)
                .now_or_never()
@@ -533,7 +537,9 @@ impl SyntaxSnapshot {

            let content = match step.language {
                ParseStepLanguage::Loaded { language } => {
                    let Some(grammar) = language.grammar() else { continue };
                    let Some(grammar) = language.grammar() else {
                        continue;
                    };
                    let tree;
                    let changed_ranges;
@@ -932,8 +932,12 @@ fn check_interpolation(
        .zip(new_syntax_map.layers.iter())
    {
        assert_eq!(old_layer.range, new_layer.range);
        let Some(old_tree) = old_layer.content.tree() else { continue };
        let Some(new_tree) = new_layer.content.tree() else { continue };
        let Some(old_tree) = old_layer.content.tree() else {
            continue;
        };
        let Some(new_tree) = new_layer.content.tree() else {
            continue;
        };
        let old_start_byte = old_layer.range.start.to_offset(old_buffer);
        let new_start_byte = new_layer.range.start.to_offset(new_buffer);
        let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
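The recurring edits in the hunks above and below only reflow one-line `let ... else` bindings into the multi-line form that rustfmt produces: the diverging arm (`continue`, `return`, `unreachable!()`) moves onto its own line inside the `else` block. As a hedged illustration of the pattern itself, here is a minimal, self-contained sketch; the names `first_even` and `numbers` are illustrative and not taken from this repository.

// A `let ... else` binding must diverge (return, continue, break, panic) when the
// pattern does not match, which is exactly what the reformatted hunks spell out.
fn first_even(numbers: &[i32]) -> Option<i32> {
    let Some(first) = numbers.iter().copied().find(|&n| n % 2 == 0) else {
        return None;
    };
    Some(first * 10)
}

fn main() {
    assert_eq!(first_even(&[1, 3, 4]), Some(40));
    assert_eq!(first_even(&[1, 3, 5]), None);
}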
@@ -176,7 +176,9 @@ impl LogStore {
                cx.notify();
                LanguageServerState {
                    rpc_state: None,
                    log_buffer: cx.add_model(|cx| Buffer::new(0, "", cx)).clone(),
                    log_buffer: cx
                        .add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""))
                        .clone(),
                }
            })
            .log_buffer
@ -241,7 +243,7 @@ impl LogStore {
|
|||
let rpc_state = server_state.rpc_state.get_or_insert_with(|| {
|
||||
let io_tx = self.io_tx.clone();
|
||||
let language = project.read(cx).languages().language_for_name("JSON");
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "", cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
|
||||
cx.spawn_weak({
|
||||
let buffer = buffer.clone();
|
||||
|_, mut cx| async move {
|
||||
|
@ -327,7 +329,7 @@ impl LspLogView {
|
|||
.projects
|
||||
.get(&project.downgrade())
|
||||
.and_then(|project| project.servers.keys().copied().next());
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, "", cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
|
||||
let mut this = Self {
|
||||
editor: Self::editor_for_buffer(project.clone(), buffer, cx),
|
||||
project,
|
||||
|
@ -549,7 +551,9 @@ impl View for LspLogToolbarItemView {
|
|||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
|
||||
let theme = theme::current(cx).clone();
|
||||
let Some(log_view) = self.log_view.as_ref() else { return Empty::new().into_any() };
|
||||
let Some(log_view) = self.log_view.as_ref() else {
|
||||
return Empty::new().into_any();
|
||||
};
|
||||
let log_view = log_view.read(cx);
|
||||
let menu_rows = log_view.menu_items(cx).unwrap_or_default();
@@ -77,7 +77,7 @@ pub enum Subscription {
}

#[derive(Serialize, Deserialize)]
struct Request<'a, T> {
pub struct Request<'a, T> {
    jsonrpc: &'static str,
    id: usize,
    method: &'a str,
@@ -435,7 +435,13 @@ impl LanguageServer {
            }),
            inlay_hint: Some(InlayHintClientCapabilities {
                resolve_support: Some(InlayHintResolveClientCapabilities {
                    properties: vec!["textEdits".to_string(), "tooltip".to_string()],
                    properties: vec![
                        "textEdits".to_string(),
                        "tooltip".to_string(),
                        "label.tooltip".to_string(),
                        "label.location".to_string(),
                        "label.command".to_string(),
                    ],
                }),
                dynamic_registration: Some(false),
            }),
@ -1,21 +1,23 @@
|
|||
use crate::{
|
||||
DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
|
||||
InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
|
||||
MarkupContent, Project, ProjectTransaction,
|
||||
MarkupContent, Project, ProjectTransaction, ResolveState,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use client::proto::{self, PeerId};
|
||||
use fs::LineEnding;
|
||||
use futures::future;
|
||||
use gpui::{AppContext, AsyncAppContext, ModelHandle};
|
||||
use language::{
|
||||
language_settings::{language_settings, InlayHintKind},
|
||||
point_from_lsp, point_to_lsp,
|
||||
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
|
||||
Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
|
||||
CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
|
||||
Unclipped,
|
||||
};
|
||||
use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, ServerCapabilities};
|
||||
use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, OneOf, ServerCapabilities};
|
||||
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
|
||||
|
||||
pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
|
||||
|
@ -1431,7 +1433,7 @@ impl LspCommand for GetCompletions {
|
|||
})
|
||||
});
|
||||
|
||||
Ok(futures::future::join_all(completions).await)
|
||||
Ok(future::join_all(completions).await)
|
||||
}
|
||||
|
||||
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
|
||||
|
@ -1499,7 +1501,7 @@ impl LspCommand for GetCompletions {
|
|||
let completions = message.completions.into_iter().map(|completion| {
|
||||
language::proto::deserialize_completion(completion, language.clone())
|
||||
});
|
||||
futures::future::try_join_all(completions).await
|
||||
future::try_join_all(completions).await
|
||||
}
|
||||
|
||||
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
|
||||
|
@ -1653,7 +1655,11 @@ impl LspCommand for OnTypeFormatting {
|
|||
type ProtoRequest = proto::OnTypeFormatting;
|
||||
|
||||
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
|
||||
let Some(on_type_formatting_options) = &server_capabilities.document_on_type_formatting_provider else { return false };
|
||||
let Some(on_type_formatting_options) =
|
||||
&server_capabilities.document_on_type_formatting_provider
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
on_type_formatting_options
|
||||
.first_trigger_character
|
||||
.contains(&self.trigger)
|
||||
|
@ -1767,7 +1773,9 @@ impl LspCommand for OnTypeFormatting {
|
|||
_: ModelHandle<Buffer>,
|
||||
_: AsyncAppContext,
|
||||
) -> Result<Option<Transaction>> {
|
||||
let Some(transaction) = message.transaction else { return Ok(None) };
|
||||
let Some(transaction) = message.transaction else {
|
||||
return Ok(None);
|
||||
};
|
||||
Ok(Some(language::proto::deserialize_transaction(transaction)?))
|
||||
}
|
||||
|
||||
|
@ -1776,6 +1784,377 @@ impl LspCommand for OnTypeFormatting {
|
|||
}
|
||||
}
|
||||
|
||||
impl InlayHints {
|
||||
pub async fn lsp_to_project_hint(
|
||||
lsp_hint: lsp::InlayHint,
|
||||
buffer_handle: &ModelHandle<Buffer>,
|
||||
server_id: LanguageServerId,
|
||||
resolve_state: ResolveState,
|
||||
force_no_type_left_padding: bool,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> anyhow::Result<InlayHint> {
|
||||
let kind = lsp_hint.kind.and_then(|kind| match kind {
|
||||
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
|
||||
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
|
||||
_ => None,
|
||||
});
|
||||
|
||||
let position = cx.update(|cx| {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
|
||||
if kind == Some(InlayHintKind::Parameter) {
|
||||
buffer.anchor_before(position)
|
||||
} else {
|
||||
buffer.anchor_after(position)
|
||||
}
|
||||
});
|
||||
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
|
||||
.await
|
||||
.context("lsp to project inlay hint conversion")?;
|
||||
let padding_left = if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
|
||||
false
|
||||
} else {
|
||||
lsp_hint.padding_left.unwrap_or(false)
|
||||
};
|
||||
|
||||
Ok(InlayHint {
|
||||
position,
|
||||
padding_left,
|
||||
padding_right: lsp_hint.padding_right.unwrap_or(false),
|
||||
label,
|
||||
kind,
|
||||
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
|
||||
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
|
||||
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
|
||||
InlayHintTooltip::MarkupContent(MarkupContent {
|
||||
kind: match markup_content.kind {
|
||||
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
|
||||
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
|
||||
},
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
}),
|
||||
resolve_state,
|
||||
})
|
||||
}
|
||||
|
||||
async fn lsp_inlay_label_to_project(
|
||||
lsp_label: lsp::InlayHintLabel,
|
||||
server_id: LanguageServerId,
|
||||
) -> anyhow::Result<InlayHintLabel> {
|
||||
let label = match lsp_label {
|
||||
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
|
||||
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
|
||||
let mut parts = Vec::with_capacity(lsp_parts.len());
|
||||
for lsp_part in lsp_parts {
|
||||
parts.push(InlayHintLabelPart {
|
||||
value: lsp_part.value,
|
||||
tooltip: lsp_part.tooltip.map(|tooltip| match tooltip {
|
||||
lsp::InlayHintLabelPartTooltip::String(s) => {
|
||||
InlayHintLabelPartTooltip::String(s)
|
||||
}
|
||||
lsp::InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
|
||||
InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
|
||||
kind: match markup_content.kind {
|
||||
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
|
||||
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
|
||||
},
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
}),
|
||||
location: Some(server_id).zip(lsp_part.location),
|
||||
});
|
||||
}
|
||||
InlayHintLabel::LabelParts(parts)
|
||||
}
|
||||
};
|
||||
|
||||
Ok(label)
|
||||
}
|
||||
|
||||
pub fn project_to_proto_hint(response_hint: InlayHint) -> proto::InlayHint {
|
||||
let (state, lsp_resolve_state) = match response_hint.resolve_state {
|
||||
ResolveState::Resolved => (0, None),
|
||||
ResolveState::CanResolve(server_id, resolve_data) => (
|
||||
1,
|
||||
resolve_data
|
||||
.map(|json_data| {
|
||||
serde_json::to_string(&json_data)
|
||||
.expect("failed to serialize resolve json data")
|
||||
})
|
||||
.map(|value| proto::resolve_state::LspResolveState {
|
||||
server_id: server_id.0 as u64,
|
||||
value,
|
||||
}),
|
||||
),
|
||||
ResolveState::Resolving => (2, None),
|
||||
};
|
||||
let resolve_state = Some(proto::ResolveState {
|
||||
state,
|
||||
lsp_resolve_state,
|
||||
});
|
||||
proto::InlayHint {
|
||||
position: Some(language::proto::serialize_anchor(&response_hint.position)),
|
||||
padding_left: response_hint.padding_left,
|
||||
padding_right: response_hint.padding_right,
|
||||
label: Some(proto::InlayHintLabel {
|
||||
label: Some(match response_hint.label {
|
||||
InlayHintLabel::String(s) => proto::inlay_hint_label::Label::Value(s),
|
||||
InlayHintLabel::LabelParts(label_parts) => {
|
||||
proto::inlay_hint_label::Label::LabelParts(proto::InlayHintLabelParts {
|
||||
parts: label_parts.into_iter().map(|label_part| {
|
||||
let location_url = label_part.location.as_ref().map(|(_, location)| location.uri.to_string());
|
||||
let location_range_start = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.start).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
|
||||
let location_range_end = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.end).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
|
||||
proto::InlayHintLabelPart {
|
||||
value: label_part.value,
|
||||
tooltip: label_part.tooltip.map(|tooltip| {
|
||||
let proto_tooltip = match tooltip {
|
||||
InlayHintLabelPartTooltip::String(s) => proto::inlay_hint_label_part_tooltip::Content::Value(s),
|
||||
InlayHintLabelPartTooltip::MarkupContent(markup_content) => proto::inlay_hint_label_part_tooltip::Content::MarkupContent(proto::MarkupContent {
|
||||
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
|
||||
value: markup_content.value,
|
||||
}),
|
||||
};
|
||||
proto::InlayHintLabelPartTooltip {content: Some(proto_tooltip)}
|
||||
}),
|
||||
location_url,
|
||||
location_range_start,
|
||||
location_range_end,
|
||||
language_server_id: label_part.location.as_ref().map(|(server_id, _)| server_id.0 as u64),
|
||||
}}).collect()
|
||||
})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
kind: response_hint.kind.map(|kind| kind.name().to_string()),
|
||||
tooltip: response_hint.tooltip.map(|response_tooltip| {
|
||||
let proto_tooltip = match response_tooltip {
|
||||
InlayHintTooltip::String(s) => proto::inlay_hint_tooltip::Content::Value(s),
|
||||
InlayHintTooltip::MarkupContent(markup_content) => {
|
||||
proto::inlay_hint_tooltip::Content::MarkupContent(proto::MarkupContent {
|
||||
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
};
|
||||
proto::InlayHintTooltip {
|
||||
content: Some(proto_tooltip),
|
||||
}
|
||||
}),
|
||||
resolve_state,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn proto_to_project_hint(message_hint: proto::InlayHint) -> anyhow::Result<InlayHint> {
|
||||
let resolve_state = message_hint.resolve_state.as_ref().unwrap_or_else(|| {
|
||||
panic!("incorrect proto inlay hint message: no resolve state in hint {message_hint:?}",)
|
||||
});
|
||||
let resolve_state_data = resolve_state
|
||||
.lsp_resolve_state.as_ref()
|
||||
.map(|lsp_resolve_state| {
|
||||
serde_json::from_str::<Option<lsp::LSPAny>>(&lsp_resolve_state.value)
|
||||
.with_context(|| format!("incorrect proto inlay hint message: non-json resolve state {lsp_resolve_state:?}"))
|
||||
.map(|state| (LanguageServerId(lsp_resolve_state.server_id as usize), state))
|
||||
})
|
||||
.transpose()?;
|
||||
let resolve_state = match resolve_state.state {
|
||||
0 => ResolveState::Resolved,
|
||||
1 => {
|
||||
let (server_id, lsp_resolve_state) = resolve_state_data.with_context(|| {
|
||||
format!(
|
||||
"No lsp resolve data for the hint that can be resolved: {message_hint:?}"
|
||||
)
|
||||
})?;
|
||||
ResolveState::CanResolve(server_id, lsp_resolve_state)
|
||||
}
|
||||
2 => ResolveState::Resolving,
|
||||
invalid => {
|
||||
anyhow::bail!("Unexpected resolve state {invalid} for hint {message_hint:?}")
|
||||
}
|
||||
};
|
||||
Ok(InlayHint {
|
||||
position: message_hint
|
||||
.position
|
||||
.and_then(language::proto::deserialize_anchor)
|
||||
.context("invalid position")?,
|
||||
label: match message_hint
|
||||
.label
|
||||
.and_then(|label| label.label)
|
||||
.context("missing label")?
|
||||
{
|
||||
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
|
||||
proto::inlay_hint_label::Label::LabelParts(parts) => {
|
||||
let mut label_parts = Vec::new();
|
||||
for part in parts.parts {
|
||||
label_parts.push(InlayHintLabelPart {
|
||||
value: part.value,
|
||||
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
|
||||
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => {
|
||||
InlayHintLabelPartTooltip::String(s)
|
||||
}
|
||||
Some(
|
||||
proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
|
||||
markup_content,
|
||||
),
|
||||
) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
|
||||
kind: if markup_content.is_markdown {
|
||||
HoverBlockKind::Markdown
|
||||
} else {
|
||||
HoverBlockKind::PlainText
|
||||
},
|
||||
value: markup_content.value,
|
||||
}),
|
||||
None => InlayHintLabelPartTooltip::String(String::new()),
|
||||
}),
|
||||
location: {
|
||||
match part
|
||||
.location_url
|
||||
.zip(
|
||||
part.location_range_start.and_then(|start| {
|
||||
Some(start..part.location_range_end?)
|
||||
}),
|
||||
)
|
||||
.zip(part.language_server_id)
|
||||
{
|
||||
Some(((uri, range), server_id)) => Some((
|
||||
LanguageServerId(server_id as usize),
|
||||
lsp::Location {
|
||||
uri: lsp::Url::parse(&uri)
|
||||
.context("invalid uri in hint part {part:?}")?,
|
||||
range: lsp::Range::new(
|
||||
point_to_lsp(PointUtf16::new(
|
||||
range.start.row,
|
||||
range.start.column,
|
||||
)),
|
||||
point_to_lsp(PointUtf16::new(
|
||||
range.end.row,
|
||||
range.end.column,
|
||||
)),
|
||||
),
|
||||
},
|
||||
)),
|
||||
None => None,
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
InlayHintLabel::LabelParts(label_parts)
|
||||
}
|
||||
},
|
||||
padding_left: message_hint.padding_left,
|
||||
padding_right: message_hint.padding_right,
|
||||
kind: message_hint
|
||||
.kind
|
||||
.as_deref()
|
||||
.and_then(InlayHintKind::from_name),
|
||||
tooltip: message_hint.tooltip.and_then(|tooltip| {
|
||||
Some(match tooltip.content? {
|
||||
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
|
||||
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
|
||||
InlayHintTooltip::MarkupContent(MarkupContent {
|
||||
kind: if markup_content.is_markdown {
|
||||
HoverBlockKind::Markdown
|
||||
} else {
|
||||
HoverBlockKind::PlainText
|
||||
},
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
})
|
||||
}),
|
||||
resolve_state,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp::InlayHint {
|
||||
lsp::InlayHint {
|
||||
position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
|
||||
kind: hint.kind.map(|kind| match kind {
|
||||
InlayHintKind::Type => lsp::InlayHintKind::TYPE,
|
||||
InlayHintKind::Parameter => lsp::InlayHintKind::PARAMETER,
|
||||
}),
|
||||
text_edits: None,
|
||||
tooltip: hint.tooltip.and_then(|tooltip| {
|
||||
Some(match tooltip {
|
||||
InlayHintTooltip::String(s) => lsp::InlayHintTooltip::String(s),
|
||||
InlayHintTooltip::MarkupContent(markup_content) => {
|
||||
lsp::InlayHintTooltip::MarkupContent(lsp::MarkupContent {
|
||||
kind: match markup_content.kind {
|
||||
HoverBlockKind::PlainText => lsp::MarkupKind::PlainText,
|
||||
HoverBlockKind::Markdown => lsp::MarkupKind::Markdown,
|
||||
HoverBlockKind::Code { .. } => return None,
|
||||
},
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
})
|
||||
}),
|
||||
label: match hint.label {
|
||||
InlayHintLabel::String(s) => lsp::InlayHintLabel::String(s),
|
||||
InlayHintLabel::LabelParts(label_parts) => lsp::InlayHintLabel::LabelParts(
|
||||
label_parts
|
||||
.into_iter()
|
||||
.map(|part| lsp::InlayHintLabelPart {
|
||||
value: part.value,
|
||||
tooltip: part.tooltip.and_then(|tooltip| {
|
||||
Some(match tooltip {
|
||||
InlayHintLabelPartTooltip::String(s) => {
|
||||
lsp::InlayHintLabelPartTooltip::String(s)
|
||||
}
|
||||
InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
|
||||
lsp::InlayHintLabelPartTooltip::MarkupContent(
|
||||
lsp::MarkupContent {
|
||||
kind: match markup_content.kind {
|
||||
HoverBlockKind::PlainText => {
|
||||
lsp::MarkupKind::PlainText
|
||||
}
|
||||
HoverBlockKind::Markdown => {
|
||||
lsp::MarkupKind::Markdown
|
||||
}
|
||||
HoverBlockKind::Code { .. } => return None,
|
||||
},
|
||||
value: markup_content.value,
|
||||
},
|
||||
)
|
||||
}
|
||||
})
|
||||
}),
|
||||
location: part.location.map(|(_, location)| location),
|
||||
command: None,
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
},
|
||||
padding_left: Some(hint.padding_left),
|
||||
padding_right: Some(hint.padding_right),
|
||||
data: match hint.resolve_state {
|
||||
ResolveState::CanResolve(_, data) => data,
|
||||
ResolveState::Resolving | ResolveState::Resolved => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn can_resolve_inlays(capabilities: &ServerCapabilities) -> bool {
|
||||
capabilities
|
||||
.inlay_hint_provider
|
||||
.as_ref()
|
||||
.and_then(|options| match options {
|
||||
OneOf::Left(_is_supported) => None,
|
||||
OneOf::Right(capabilities) => match capabilities {
|
||||
lsp::InlayHintServerCapabilities::Options(o) => o.resolve_provider,
|
||||
lsp::InlayHintServerCapabilities::RegistrationOptions(o) => {
|
||||
o.inlay_hint_options.resolve_provider
|
||||
}
|
||||
},
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspCommand for InlayHints {
|
||||
type Response = Vec<InlayHint>;
|
||||
|
@ -1783,7 +2162,9 @@ impl LspCommand for InlayHints {
|
|||
type ProtoRequest = proto::InlayHints;
|
||||
|
||||
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
|
||||
let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else { return false };
|
||||
let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else {
|
||||
return false;
|
||||
};
|
||||
match inlay_hint_provider {
|
||||
lsp::OneOf::Left(enabled) => *enabled,
|
||||
lsp::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities {
|
||||
|
@@ -1816,8 +2197,9 @@ impl LspCommand for InlayHints {
        buffer: ModelHandle<Buffer>,
        server_id: LanguageServerId,
        mut cx: AsyncAppContext,
    ) -> Result<Vec<InlayHint>> {
        let (lsp_adapter, _) = language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
    ) -> anyhow::Result<Vec<InlayHint>> {
        let (lsp_adapter, lsp_server) =
            language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
        // `typescript-language-server` adds padding to the left for type hints, turning
        // `const foo: boolean` into `const foo : boolean` which looks odd.
        // `rust-analyzer` does not have the padding for this case, and we have to accommodate both.
@ -1827,93 +2209,32 @@ impl LspCommand for InlayHints {
|
|||
// Hence let's use a heuristic first to handle the most awkward case and look for more.
|
||||
let force_no_type_left_padding =
|
||||
lsp_adapter.name.0.as_ref() == "typescript-language-server";
|
||||
cx.read(|cx| {
|
||||
let origin_buffer = buffer.read(cx);
|
||||
Ok(message
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|lsp_hint| {
|
||||
let kind = lsp_hint.kind.and_then(|kind| match kind {
|
||||
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
|
||||
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
|
||||
_ => None,
|
||||
});
|
||||
let position = origin_buffer
|
||||
.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
|
||||
let padding_left =
|
||||
if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
|
||||
false
|
||||
} else {
|
||||
lsp_hint.padding_left.unwrap_or(false)
|
||||
};
|
||||
InlayHint {
|
||||
buffer_id: origin_buffer.remote_id(),
|
||||
position: if kind == Some(InlayHintKind::Parameter) {
|
||||
origin_buffer.anchor_before(position)
|
||||
} else {
|
||||
origin_buffer.anchor_after(position)
|
||||
},
|
||||
padding_left,
|
||||
padding_right: lsp_hint.padding_right.unwrap_or(false),
|
||||
label: match lsp_hint.label {
|
||||
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
|
||||
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
|
||||
InlayHintLabel::LabelParts(
|
||||
lsp_parts
|
||||
.into_iter()
|
||||
.map(|label_part| InlayHintLabelPart {
|
||||
value: label_part.value,
|
||||
tooltip: label_part.tooltip.map(
|
||||
|tooltip| {
|
||||
match tooltip {
|
||||
lsp::InlayHintLabelPartTooltip::String(s) => {
|
||||
InlayHintLabelPartTooltip::String(s)
|
||||
}
|
||||
lsp::InlayHintLabelPartTooltip::MarkupContent(
|
||||
markup_content,
|
||||
) => InlayHintLabelPartTooltip::MarkupContent(
|
||||
MarkupContent {
|
||||
kind: format!("{:?}", markup_content.kind),
|
||||
value: markup_content.value,
|
||||
},
|
||||
),
|
||||
}
|
||||
},
|
||||
),
|
||||
location: label_part.location.map(|lsp_location| {
|
||||
let target_start = origin_buffer.clip_point_utf16(
|
||||
point_from_lsp(lsp_location.range.start),
|
||||
Bias::Left,
|
||||
);
|
||||
let target_end = origin_buffer.clip_point_utf16(
|
||||
point_from_lsp(lsp_location.range.end),
|
||||
Bias::Left,
|
||||
);
|
||||
Location {
|
||||
buffer: buffer.clone(),
|
||||
range: origin_buffer.anchor_after(target_start)
|
||||
..origin_buffer.anchor_before(target_end),
|
||||
}
|
||||
}),
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
},
|
||||
kind,
|
||||
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
|
||||
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
|
||||
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
|
||||
InlayHintTooltip::MarkupContent(MarkupContent {
|
||||
kind: format!("{:?}", markup_content.kind),
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
}),
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
|
||||
let hints = message.unwrap_or_default().into_iter().map(|lsp_hint| {
|
||||
let resolve_state = if InlayHints::can_resolve_inlays(lsp_server.capabilities()) {
|
||||
ResolveState::CanResolve(lsp_server.server_id(), lsp_hint.data.clone())
|
||||
} else {
|
||||
ResolveState::Resolved
|
||||
};
|
||||
|
||||
let buffer = buffer.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
InlayHints::lsp_to_project_hint(
|
||||
lsp_hint,
|
||||
&buffer,
|
||||
server_id,
|
||||
resolve_state,
|
||||
force_no_type_left_padding,
|
||||
&mut cx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
});
|
||||
future::join_all(hints)
|
||||
.await
|
||||
.into_iter()
|
||||
.collect::<anyhow::Result<_>>()
|
||||
.context("lsp to project inlay hints conversion")
|
||||
}
|
||||
|
||||
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
|
||||
|
@ -1959,23 +2280,7 @@ impl LspCommand for InlayHints {
|
|||
proto::InlayHintsResponse {
|
||||
hints: response
|
||||
.into_iter()
|
||||
.map(|response_hint| proto::InlayHint {
|
||||
position: Some(language::proto::serialize_anchor(&response_hint.position)),
|
||||
padding_left: response_hint.padding_left,
|
||||
padding_right: response_hint.padding_right,
|
||||
kind: response_hint.kind.map(|kind| kind.name().to_string()),
|
||||
// Do not pass extra data such as tooltips to clients: host can put tooltip data from the cache during resolution.
|
||||
tooltip: None,
|
||||
// Similarly, do not pass label parts to clients: host can return a detailed list during resolution.
|
||||
label: Some(proto::InlayHintLabel {
|
||||
label: Some(proto::inlay_hint_label::Label::Value(
|
||||
match response_hint.label {
|
||||
InlayHintLabel::String(s) => s,
|
||||
InlayHintLabel::LabelParts(_) => response_hint.text(),
|
||||
},
|
||||
)),
|
||||
}),
|
||||
})
|
||||
.map(|response_hint| InlayHints::project_to_proto_hint(response_hint))
|
||||
.collect(),
|
||||
version: serialize_version(buffer_version),
|
||||
}
|
||||
|
@ -1984,10 +2289,10 @@ impl LspCommand for InlayHints {
|
|||
async fn response_from_proto(
|
||||
self,
|
||||
message: proto::InlayHintsResponse,
|
||||
project: ModelHandle<Project>,
|
||||
_: ModelHandle<Project>,
|
||||
buffer: ModelHandle<Buffer>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<Vec<InlayHint>> {
|
||||
) -> anyhow::Result<Vec<InlayHint>> {
|
||||
buffer
|
||||
.update(&mut cx, |buffer, _| {
|
||||
buffer.wait_for_version(deserialize_version(&message.version))
|
||||
|
@ -1996,82 +2301,7 @@ impl LspCommand for InlayHints {
|
|||
|
||||
let mut hints = Vec::new();
|
||||
for message_hint in message.hints {
|
||||
let buffer_id = message_hint
|
||||
.position
|
||||
.as_ref()
|
||||
.and_then(|location| location.buffer_id)
|
||||
.context("missing buffer id")?;
|
||||
let hint = InlayHint {
|
||||
buffer_id,
|
||||
position: message_hint
|
||||
.position
|
||||
.and_then(language::proto::deserialize_anchor)
|
||||
.context("invalid position")?,
|
||||
label: match message_hint
|
||||
.label
|
||||
.and_then(|label| label.label)
|
||||
.context("missing label")?
|
||||
{
|
||||
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
|
||||
proto::inlay_hint_label::Label::LabelParts(parts) => {
|
||||
let mut label_parts = Vec::new();
|
||||
for part in parts.parts {
|
||||
label_parts.push(InlayHintLabelPart {
|
||||
value: part.value,
|
||||
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
|
||||
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => InlayHintLabelPartTooltip::String(s),
|
||||
Some(proto::inlay_hint_label_part_tooltip::Content::MarkupContent(markup_content)) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
|
||||
kind: markup_content.kind,
|
||||
value: markup_content.value,
|
||||
}),
|
||||
None => InlayHintLabelPartTooltip::String(String::new()),
|
||||
}),
|
||||
location: match part.location {
|
||||
Some(location) => {
|
||||
let target_buffer = project
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.wait_for_remote_buffer(location.buffer_id, cx)
|
||||
})
|
||||
.await?;
|
||||
Some(Location {
|
||||
range: location
|
||||
.start
|
||||
.and_then(language::proto::deserialize_anchor)
|
||||
.context("invalid start")?
|
||||
..location
|
||||
.end
|
||||
.and_then(language::proto::deserialize_anchor)
|
||||
.context("invalid end")?,
|
||||
buffer: target_buffer,
|
||||
})},
|
||||
None => None,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
InlayHintLabel::LabelParts(label_parts)
|
||||
}
|
||||
},
|
||||
padding_left: message_hint.padding_left,
|
||||
padding_right: message_hint.padding_right,
|
||||
kind: message_hint
|
||||
.kind
|
||||
.as_deref()
|
||||
.and_then(InlayHintKind::from_name),
|
||||
tooltip: message_hint.tooltip.and_then(|tooltip| {
|
||||
Some(match tooltip.content? {
|
||||
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
|
||||
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
|
||||
InlayHintTooltip::MarkupContent(MarkupContent {
|
||||
kind: markup_content.kind,
|
||||
value: markup_content.value,
|
||||
})
|
||||
}
|
||||
})
|
||||
}),
|
||||
};
|
||||
|
||||
hints.push(hint);
|
||||
hints.push(InlayHints::proto_to_project_hint(message_hint)?);
|
||||
}
|
||||
|
||||
Ok(hints)
|
||||
|
|
|
@ -26,8 +26,8 @@ use futures::{
|
|||
};
|
||||
use globset::{Glob, GlobSet, GlobSetBuilder};
|
||||
use gpui::{
|
||||
AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext,
|
||||
ModelHandle, Task, WeakModelHandle,
|
||||
executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
|
||||
ModelContext, ModelHandle, Task, WeakModelHandle,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
|
@ -37,11 +37,11 @@ use language::{
|
|||
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
|
||||
serialize_anchor, serialize_version,
|
||||
},
|
||||
range_from_lsp, range_to_lsp, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
|
||||
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
|
||||
Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
|
||||
Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
|
||||
ToPointUtf16, Transaction, Unclipped,
|
||||
range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
|
||||
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
|
||||
File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
|
||||
OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
|
||||
ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
};
|
||||
use log::error;
|
||||
use lsp::{
|
||||
|
@ -57,8 +57,8 @@ use serde::Serialize;
|
|||
use settings::SettingsStore;
|
||||
use sha2::{Digest, Sha256};
|
||||
use similar::{ChangeTag, TextDiff};
|
||||
use smol::channel::{Receiver, Sender};
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
cmp::{self, Ordering},
|
||||
convert::TryInto,
|
||||
hash::Hash,
|
||||
|
@ -67,7 +67,6 @@ use std::{
|
|||
ops::Range,
|
||||
path::{self, Component, Path, PathBuf},
|
||||
process::Stdio,
|
||||
rc::Rc,
|
||||
str,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
|
@@ -333,15 +332,22 @@ pub struct Location {
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub buffer_id: u64,
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    pub resolve_state: ResolveState,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}

impl InlayHint {
@ -353,34 +359,34 @@ impl InlayHint {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum InlayHintLabel {
|
||||
String(String),
|
||||
LabelParts(Vec<InlayHintLabelPart>),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct InlayHintLabelPart {
|
||||
pub value: String,
|
||||
pub tooltip: Option<InlayHintLabelPartTooltip>,
|
||||
pub location: Option<Location>,
|
||||
pub location: Option<(LanguageServerId, lsp::Location)>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum InlayHintTooltip {
|
||||
String(String),
|
||||
MarkupContent(MarkupContent),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum InlayHintLabelPartTooltip {
|
||||
String(String),
|
||||
MarkupContent(MarkupContent),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct MarkupContent {
|
||||
pub kind: String,
|
||||
pub kind: HoverBlockKind,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
|
@ -414,7 +420,7 @@ pub struct HoverBlock {
|
|||
pub kind: HoverBlockKind,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum HoverBlockKind {
|
||||
PlainText,
|
||||
Markdown,
|
||||
|
@ -518,6 +524,28 @@ impl FormatTrigger {
|
|||
}
|
||||
}
|
||||
}
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
enum SearchMatchCandidate {
|
||||
OpenBuffer {
|
||||
buffer: ModelHandle<Buffer>,
|
||||
// This might be an unnamed file without representation on filesystem
|
||||
path: Option<Arc<Path>>,
|
||||
},
|
||||
Path {
|
||||
worktree_id: WorktreeId,
|
||||
path: Arc<Path>,
|
||||
},
|
||||
}
|
||||
|
||||
type SearchMatchCandidateIndex = usize;
|
||||
impl SearchMatchCandidate {
|
||||
fn path(&self) -> Option<Arc<Path>> {
|
||||
match self {
|
||||
SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
|
||||
SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Project {
|
||||
pub fn init_settings(cx: &mut AppContext) {
|
||||
|
@ -551,6 +579,7 @@ impl Project {
|
|||
client.add_model_request_handler(Self::handle_apply_code_action);
|
||||
client.add_model_request_handler(Self::handle_on_type_formatting);
|
||||
client.add_model_request_handler(Self::handle_inlay_hints);
|
||||
client.add_model_request_handler(Self::handle_resolve_inlay_hint);
|
||||
client.add_model_request_handler(Self::handle_refresh_inlay_hints);
|
||||
client.add_model_request_handler(Self::handle_reload_buffers);
|
||||
client.add_model_request_handler(Self::handle_synchronize_buffers);
|
||||
|
@ -1539,9 +1568,9 @@ impl Project {
|
|||
if self.is_remote() {
|
||||
return Err(anyhow!("creating buffers as a guest is not supported yet"));
|
||||
}
|
||||
|
||||
let id = post_inc(&mut self.next_buffer_id);
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(self.replica_id(), text, cx)
|
||||
Buffer::new(self.replica_id(), id, text)
|
||||
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
|
||||
});
|
||||
self.register_buffer(&buffer, cx)?;
|
||||
|
@ -1679,7 +1708,7 @@ impl Project {
|
|||
}
|
||||
|
||||
/// LanguageServerName is owned, because it is inserted into a map
|
||||
fn open_local_buffer_via_lsp(
|
||||
pub fn open_local_buffer_via_lsp(
|
||||
&mut self,
|
||||
abs_path: lsp::Url,
|
||||
language_server_id: LanguageServerId,
|
||||
|
@ -4969,7 +4998,7 @@ impl Project {
|
|||
buffer_handle: ModelHandle<Buffer>,
|
||||
range: Range<T>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Vec<InlayHint>>> {
|
||||
) -> Task<anyhow::Result<Vec<InlayHint>>> {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
|
||||
let range_start = range.start;
|
||||
|
@ -5019,192 +5048,79 @@ impl Project {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn resolve_inlay_hint(
|
||||
&self,
|
||||
hint: InlayHint,
|
||||
buffer_handle: ModelHandle<Buffer>,
|
||||
server_id: LanguageServerId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<anyhow::Result<InlayHint>> {
|
||||
if self.is_local() {
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let (_, lang_server) = if let Some((adapter, server)) =
|
||||
self.language_server_for_buffer(buffer, server_id, cx)
|
||||
{
|
||||
(adapter.clone(), server.clone())
|
||||
} else {
|
||||
return Task::ready(Ok(hint));
|
||||
};
|
||||
if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
|
||||
return Task::ready(Ok(hint));
|
||||
}
|
||||
|
||||
let buffer_snapshot = buffer.snapshot();
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
|
||||
InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
|
||||
);
|
||||
let resolved_hint = resolve_task
|
||||
.await
|
||||
.context("inlay hint resolve LSP request")?;
|
||||
let resolved_hint = InlayHints::lsp_to_project_hint(
|
||||
resolved_hint,
|
||||
&buffer_handle,
|
||||
server_id,
|
||||
ResolveState::Resolved,
|
||||
false,
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
Ok(resolved_hint)
|
||||
})
|
||||
} else if let Some(project_id) = self.remote_id() {
|
||||
let client = self.client.clone();
|
||||
let request = proto::ResolveInlayHint {
|
||||
project_id,
|
||||
buffer_id: buffer_handle.read(cx).remote_id(),
|
||||
language_server_id: server_id.0 as u64,
|
||||
hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
|
||||
};
|
||||
cx.spawn(|_, _| async move {
|
||||
let response = client
|
||||
.request(request)
|
||||
.await
|
||||
.context("inlay hints proto request")?;
|
||||
match response.hint {
|
||||
Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
|
||||
.context("inlay hints proto resolve response conversion"),
|
||||
None => Ok(hint),
|
||||
}
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("project does not have a remote id")))
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn search(
|
||||
&self,
|
||||
query: SearchQuery,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
|
||||
) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
|
||||
if self.is_local() {
|
||||
let snapshots = self
|
||||
.visible_worktrees(cx)
|
||||
.filter_map(|tree| {
|
||||
let tree = tree.read(cx).as_local()?;
|
||||
Some(tree.snapshot())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let background = cx.background().clone();
|
||||
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
|
||||
if path_count == 0 {
|
||||
return Task::ready(Ok(Default::default()));
|
||||
}
|
||||
let workers = background.num_cpus().min(path_count);
|
||||
let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
|
||||
cx.background()
|
||||
.spawn({
|
||||
let fs = self.fs.clone();
|
||||
let background = cx.background().clone();
|
||||
let query = query.clone();
|
||||
async move {
|
||||
let fs = &fs;
|
||||
let query = &query;
|
||||
let matching_paths_tx = &matching_paths_tx;
|
||||
let paths_per_worker = (path_count + workers - 1) / workers;
|
||||
let snapshots = &snapshots;
|
||||
background
|
||||
.scoped(|scope| {
|
||||
for worker_ix in 0..workers {
|
||||
let worker_start_ix = worker_ix * paths_per_worker;
|
||||
let worker_end_ix = worker_start_ix + paths_per_worker;
|
||||
scope.spawn(async move {
|
||||
let mut snapshot_start_ix = 0;
|
||||
let mut abs_path = PathBuf::new();
|
||||
for snapshot in snapshots {
|
||||
let snapshot_end_ix =
|
||||
snapshot_start_ix + snapshot.visible_file_count();
|
||||
if worker_end_ix <= snapshot_start_ix {
|
||||
break;
|
||||
} else if worker_start_ix > snapshot_end_ix {
|
||||
snapshot_start_ix = snapshot_end_ix;
|
||||
continue;
|
||||
} else {
|
||||
let start_in_snapshot = worker_start_ix
|
||||
.saturating_sub(snapshot_start_ix);
|
||||
let end_in_snapshot =
|
||||
cmp::min(worker_end_ix, snapshot_end_ix)
|
||||
- snapshot_start_ix;
|
||||
|
||||
for entry in snapshot
|
||||
.files(false, start_in_snapshot)
|
||||
.take(end_in_snapshot - start_in_snapshot)
|
||||
{
|
||||
if matching_paths_tx.is_closed() {
|
||||
break;
|
||||
}
|
||||
let matches = if query
|
||||
.file_matches(Some(&entry.path))
|
||||
{
|
||||
abs_path.clear();
|
||||
abs_path.push(&snapshot.abs_path());
|
||||
abs_path.push(&entry.path);
|
||||
if let Some(file) =
|
||||
fs.open_sync(&abs_path).await.log_err()
|
||||
{
|
||||
query.detect(file).unwrap_or(false)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if matches {
|
||||
let project_path =
|
||||
(snapshot.id(), entry.path.clone());
|
||||
if matching_paths_tx
|
||||
.send(project_path)
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
snapshot_start_ix = snapshot_end_ix;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.await;
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
|
||||
let open_buffers = self
|
||||
.opened_buffers
|
||||
.values()
|
||||
.filter_map(|b| b.upgrade(cx))
|
||||
.collect::<HashSet<_>>();
|
||||
cx.spawn(|this, cx| async move {
|
||||
for buffer in &open_buffers {
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
buffers_tx.send((buffer.clone(), snapshot)).await?;
|
||||
}
|
||||
|
||||
let open_buffers = Rc::new(RefCell::new(open_buffers));
|
||||
while let Some(project_path) = matching_paths_rx.next().await {
|
||||
if buffers_tx.is_closed() {
|
||||
break;
|
||||
}
|
||||
|
||||
let this = this.clone();
|
||||
let open_buffers = open_buffers.clone();
|
||||
let buffers_tx = buffers_tx.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
if let Some(buffer) = this
|
||||
.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
if open_buffers.borrow_mut().insert(buffer.clone()) {
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
buffers_tx.send((buffer, snapshot)).await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let background = cx.background().clone();
|
||||
cx.background().spawn(async move {
|
||||
let query = &query;
|
||||
let mut matched_buffers = Vec::new();
|
||||
for _ in 0..workers {
|
||||
matched_buffers.push(HashMap::default());
|
||||
}
|
||||
background
|
||||
.scoped(|scope| {
|
||||
for worker_matched_buffers in matched_buffers.iter_mut() {
|
||||
let mut buffers_rx = buffers_rx.clone();
|
||||
scope.spawn(async move {
|
||||
while let Some((buffer, snapshot)) = buffers_rx.next().await {
|
||||
let buffer_matches = if query.file_matches(
|
||||
snapshot.file().map(|file| file.path().as_ref()),
|
||||
) {
|
||||
query
|
||||
.search(&snapshot, None)
|
||||
.await
|
||||
.iter()
|
||||
.map(|range| {
|
||||
snapshot.anchor_before(range.start)
|
||||
..snapshot.anchor_after(range.end)
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
if !buffer_matches.is_empty() {
|
||||
worker_matched_buffers
|
||||
.insert(buffer.clone(), buffer_matches);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.await;
|
||||
Ok(matched_buffers.into_iter().flatten().collect())
|
||||
})
|
||||
self.search_local(query, cx)
|
||||
} else if let Some(project_id) = self.remote_id() {
|
||||
let (tx, rx) = smol::channel::unbounded();
|
||||
let request = self.client.request(query.to_proto(project_id));
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
|
@ -5228,13 +5144,303 @@ impl Project {
|
|||
.or_insert(Vec::new())
|
||||
.push(start..end)
|
||||
}
|
||||
Ok(result)
|
||||
for (buffer, ranges) in result {
|
||||
let _ = tx.send((buffer, ranges)).await;
|
||||
}
|
||||
Result::<(), anyhow::Error>::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
rx
|
||||
} else {
|
||||
Task::ready(Ok(Default::default()))
|
||||
unimplemented!();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn search_local(
|
||||
&self,
|
||||
query: SearchQuery,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // TL;DR is that we do 2 passes; an initial pass to pick files which contain at least one match
        // and a second phase that finds the positions of all the matches found in the candidate files.
        // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
        //
        // It gets a bit hairy though, because we must account for files that do not have a persistent representation
        // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        // Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
        // of the FS version for that file altogether - after all, what we have in memory is more up-to-date than what's on FS.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        // We sort that list by buffer path - this list is retained for later use.
        // We ensure that all buffers are now opened and available in the project.
        // 3. We run a scan over all the candidate buffers on multiple background threads.
        // We cannot assume that there will even be a match - while at least one match
        // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
        // There is also an auxiliary background thread responsible for result gathering.
        // This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
        // it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
        // As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        // entry - which might already be available thanks to out-of-order processing.
        //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // This however would mean that project search (which is the main user of this function) would have to do the sorting itself, on the go.
        // This isn't as straightforward as running an insertion sort, sadly, and would also mean that it would have to care about maintaining the match index
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
let snapshots = self
|
||||
.visible_worktrees(cx)
|
||||
.filter_map(|tree| {
|
||||
let tree = tree.read(cx).as_local()?;
|
||||
Some(tree.snapshot())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let background = cx.background().clone();
|
||||
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
|
||||
if path_count == 0 {
|
||||
let (_, rx) = smol::channel::bounded(1024);
|
||||
return rx;
|
||||
}
|
||||
let workers = background.num_cpus().min(path_count);
|
||||
let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
|
||||
let mut unnamed_files = vec![];
|
||||
let opened_buffers = self
|
||||
.opened_buffers
|
||||
.iter()
|
||||
.filter_map(|(_, b)| {
|
||||
let buffer = b.upgrade(cx)?;
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
if let Some(path) = snapshot.file().map(|file| file.path()) {
|
||||
Some((path.clone(), (buffer, snapshot)))
|
||||
} else {
|
||||
unnamed_files.push(buffer);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
cx.background()
|
||||
.spawn(Self::background_search(
|
||||
unnamed_files,
|
||||
opened_buffers,
|
||||
cx.background().clone(),
|
||||
self.fs.clone(),
|
||||
workers,
|
||||
query.clone(),
|
||||
path_count,
|
||||
snapshots,
|
||||
matching_paths_tx,
|
||||
))
|
||||
.detach();
|
||||
|
||||
let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
|
||||
let background = cx.background().clone();
|
||||
let (result_tx, result_rx) = smol::channel::bounded(1024);
|
||||
cx.background()
|
||||
.spawn(async move {
|
||||
let Ok(buffers) = buffers.await else {
|
||||
return;
|
||||
};
|
||||
|
||||
let buffers_len = buffers.len();
|
||||
if buffers_len == 0 {
|
||||
return;
|
||||
}
|
||||
let query = &query;
|
||||
let (finished_tx, mut finished_rx) = smol::channel::unbounded();
|
||||
background
|
||||
.scoped(|scope| {
|
||||
#[derive(Clone)]
|
||||
struct FinishedStatus {
|
||||
entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
|
||||
buffer_index: SearchMatchCandidateIndex,
|
||||
}
|
||||
|
||||
for _ in 0..workers {
|
||||
let finished_tx = finished_tx.clone();
|
||||
let mut buffers_rx = buffers_rx.clone();
|
||||
scope.spawn(async move {
|
||||
while let Some((entry, buffer_index)) = buffers_rx.next().await {
|
||||
let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
|
||||
{
|
||||
if query.file_matches(
|
||||
snapshot.file().map(|file| file.path().as_ref()),
|
||||
) {
|
||||
query
|
||||
.search(&snapshot, None)
|
||||
.await
|
||||
.iter()
|
||||
.map(|range| {
|
||||
snapshot.anchor_before(range.start)
|
||||
..snapshot.anchor_after(range.end)
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let status = if !buffer_matches.is_empty() {
|
||||
let entry = if let Some((buffer, _)) = entry.as_ref() {
|
||||
Some((buffer.clone(), buffer_matches))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
FinishedStatus {
|
||||
entry,
|
||||
buffer_index,
|
||||
}
|
||||
} else {
|
||||
FinishedStatus {
|
||||
entry: None,
|
||||
buffer_index,
|
||||
}
|
||||
};
|
||||
if finished_tx.send(status).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
// Report sorted matches
|
||||
scope.spawn(async move {
|
||||
let mut current_index = 0;
|
||||
let mut scratch = vec![None; buffers_len];
|
||||
while let Some(status) = finished_rx.next().await {
|
||||
debug_assert!(
|
||||
scratch[status.buffer_index].is_none(),
|
||||
"Got match status of position {} twice",
|
||||
status.buffer_index
|
||||
);
|
||||
let index = status.buffer_index;
|
||||
scratch[index] = Some(status);
|
||||
while current_index < buffers_len {
|
||||
let Some(current_entry) = scratch[current_index].take() else {
|
||||
// We intentionally **do not** increment `current_index` here. When next element arrives
|
||||
// from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
|
||||
// this time.
|
||||
break;
|
||||
};
|
||||
if let Some(entry) = current_entry.entry {
|
||||
result_tx.send(entry).await.log_err();
|
||||
}
|
||||
current_index += 1;
|
||||
}
|
||||
if current_index == buffers_len {
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
.await;
|
||||
})
|
||||
.detach();
|
||||
result_rx
|
||||
}
|
||||
/// Pick paths that might potentially contain a match of a given search query.
|
||||
async fn background_search(
|
||||
unnamed_buffers: Vec<ModelHandle<Buffer>>,
|
||||
opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
|
||||
background: Arc<Background>,
|
||||
fs: Arc<dyn Fs>,
|
||||
workers: usize,
|
||||
query: SearchQuery,
|
||||
path_count: usize,
|
||||
snapshots: Vec<LocalSnapshot>,
|
||||
matching_paths_tx: Sender<SearchMatchCandidate>,
|
||||
) {
|
||||
let fs = &fs;
|
||||
let query = &query;
|
||||
let matching_paths_tx = &matching_paths_tx;
|
||||
let snapshots = &snapshots;
|
||||
let paths_per_worker = (path_count + workers - 1) / workers;
|
||||
for buffer in unnamed_buffers {
|
||||
matching_paths_tx
|
||||
.send(SearchMatchCandidate::OpenBuffer {
|
||||
buffer: buffer.clone(),
|
||||
path: None,
|
||||
})
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
for (path, (buffer, _)) in opened_buffers.iter() {
|
||||
matching_paths_tx
|
||||
.send(SearchMatchCandidate::OpenBuffer {
|
||||
buffer: buffer.clone(),
|
||||
path: Some(path.clone()),
|
||||
})
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
background
|
||||
.scoped(|scope| {
|
||||
for worker_ix in 0..workers {
|
||||
let worker_start_ix = worker_ix * paths_per_worker;
|
||||
let worker_end_ix = worker_start_ix + paths_per_worker;
|
||||
let unnamed_buffers = opened_buffers.clone();
|
||||
scope.spawn(async move {
|
||||
let mut snapshot_start_ix = 0;
|
||||
let mut abs_path = PathBuf::new();
|
||||
for snapshot in snapshots {
|
||||
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
|
||||
if worker_end_ix <= snapshot_start_ix {
|
||||
break;
|
||||
} else if worker_start_ix > snapshot_end_ix {
|
||||
snapshot_start_ix = snapshot_end_ix;
|
||||
continue;
|
||||
} else {
|
||||
let start_in_snapshot =
|
||||
worker_start_ix.saturating_sub(snapshot_start_ix);
|
||||
let end_in_snapshot =
|
||||
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
|
||||
|
||||
for entry in snapshot
|
||||
.files(false, start_in_snapshot)
|
||||
.take(end_in_snapshot - start_in_snapshot)
|
||||
{
|
||||
if matching_paths_tx.is_closed() {
|
||||
break;
|
||||
}
|
||||
if unnamed_buffers.contains_key(&entry.path) {
|
||||
continue;
|
||||
}
|
||||
let matches = if query.file_matches(Some(&entry.path)) {
|
||||
abs_path.clear();
|
||||
abs_path.push(&snapshot.abs_path());
|
||||
abs_path.push(&entry.path);
|
||||
if let Some(file) = fs.open_sync(&abs_path).await.log_err()
|
||||
{
|
||||
query.detect(file).unwrap_or(false)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if matches {
|
||||
let project_path = SearchMatchCandidate::Path {
|
||||
worktree_id: snapshot.id(),
|
||||
path: entry.path.clone(),
|
||||
};
|
||||
if matching_paths_tx.send(project_path).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
snapshot_start_ix = snapshot_end_ix;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
})
|
||||
.await;
|
||||
}
|
||||
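// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this diff): how the worker partitioning in
// `background_search` maps a worker's global path range onto per-snapshot
// ranges. Ceiling division assigns each worker a contiguous chunk of the total
// path count, and each snapshot contributes its overlap with that chunk. The
// `Snapshot` struct below is a stand-in for the real worktree snapshot.
struct Snapshot {
    visible_file_count: usize,
}

fn worker_slices(
    snapshots: &[Snapshot],
    worker_ix: usize,
    workers: usize,
    path_count: usize,
) -> Vec<(usize, std::ops::Range<usize>)> {
    let paths_per_worker = (path_count + workers - 1) / workers; // ceiling division
    let worker_start = worker_ix * paths_per_worker;
    let worker_end = worker_start + paths_per_worker;

    let mut slices = Vec::new();
    let mut snapshot_start = 0;
    for (ix, snapshot) in snapshots.iter().enumerate() {
        let snapshot_end = snapshot_start + snapshot.visible_file_count;
        if worker_end <= snapshot_start {
            break; // the worker's chunk ends before this snapshot begins
        }
        if worker_start < snapshot_end {
            let start_in_snapshot = worker_start.saturating_sub(snapshot_start);
            let end_in_snapshot = worker_end.min(snapshot_end) - snapshot_start;
            slices.push((ix, start_in_snapshot..end_in_snapshot));
        }
        snapshot_start = snapshot_end;
    }
    slices
}

fn main() {
    let snapshots = [
        Snapshot { visible_file_count: 5 },
        Snapshot { visible_file_count: 7 },
    ];
    // 12 paths split across 3 workers => 4 paths per worker, covering every file exactly once.
    for worker_ix in 0..3 {
        println!("{:?}", worker_slices(&snapshots, worker_ix, 3, 12));
    }
}
// ---------------------------------------------------------------------------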
|
||||
// TODO: Wire this up to allow selecting a server?
|
||||
fn request_lsp<R: LspCommand>(
|
||||
&self,
|
||||
|
@ -5309,6 +5515,61 @@ impl Project {
|
|||
Task::ready(Ok(Default::default()))
|
||||
}
|
||||
|
||||
fn sort_candidates_and_open_buffers(
|
||||
mut matching_paths_rx: Receiver<SearchMatchCandidate>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> (
|
||||
futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
|
||||
Receiver<(
|
||||
Option<(ModelHandle<Buffer>, BufferSnapshot)>,
|
||||
SearchMatchCandidateIndex,
|
||||
)>,
|
||||
) {
|
||||
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
|
||||
let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
|
||||
cx.spawn(|this, cx| async move {
|
||||
let mut buffers = vec![];
|
||||
while let Some(entry) = matching_paths_rx.next().await {
|
||||
buffers.push(entry);
|
||||
}
|
||||
buffers.sort_by_key(|candidate| candidate.path());
|
||||
let matching_paths = buffers.clone();
|
||||
let _ = sorted_buffers_tx.send(buffers);
|
||||
for (index, candidate) in matching_paths.into_iter().enumerate() {
|
||||
if buffers_tx.is_closed() {
|
||||
break;
|
||||
}
|
||||
let this = this.clone();
|
||||
let buffers_tx = buffers_tx.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
let buffer = match candidate {
|
||||
SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
|
||||
SearchMatchCandidate::Path { worktree_id, path } => this
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.open_buffer((worktree_id, path), cx)
|
||||
})
|
||||
.await
|
||||
.log_err(),
|
||||
};
|
||||
if let Some(buffer) = buffer {
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
|
||||
buffers_tx
|
||||
.send((Some((buffer, snapshot)), index))
|
||||
.await
|
||||
.log_err();
|
||||
} else {
|
||||
buffers_tx.send((None, index)).await.log_err();
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
(sorted_buffers_rx, buffers_rx)
|
||||
}
|
||||
|
||||
pub fn find_or_create_local_worktree(
|
||||
&mut self,
|
||||
abs_path: impl AsRef<Path>,
|
||||
|
@ -6816,6 +7077,40 @@ impl Project {
|
|||
}))
|
||||
}
|
||||
|
||||
async fn handle_resolve_inlay_hint(
|
||||
this: ModelHandle<Self>,
|
||||
envelope: TypedEnvelope<proto::ResolveInlayHint>,
|
||||
_: Arc<Client>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::ResolveInlayHintResponse> {
|
||||
let proto_hint = envelope
|
||||
.payload
|
||||
.hint
|
||||
.expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
|
||||
let hint = InlayHints::proto_to_project_hint(proto_hint)
|
||||
.context("resolved proto inlay hint conversion")?;
|
||||
let buffer = this.update(&mut cx, |this, cx| {
|
||||
this.opened_buffers
|
||||
.get(&envelope.payload.buffer_id)
|
||||
.and_then(|buffer| buffer.upgrade(cx))
|
||||
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
|
||||
})?;
|
||||
let response_hint = this
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.resolve_inlay_hint(
|
||||
hint,
|
||||
buffer,
|
||||
LanguageServerId(envelope.payload.language_server_id as usize),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.context("inlay hints fetch")?;
|
||||
Ok(proto::ResolveInlayHintResponse {
|
||||
hint: Some(InlayHints::project_to_proto_hint(response_hint)),
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_refresh_inlay_hints(
|
||||
this: ModelHandle<Self>,
|
||||
_: TypedEnvelope<proto::RefreshInlayHints>,
|
||||
|
@ -6894,17 +7189,17 @@ impl Project {
|
|||
) -> Result<proto::SearchProjectResponse> {
|
||||
let peer_id = envelope.original_sender_id()?;
|
||||
let query = SearchQuery::from_proto(envelope.payload)?;
|
||||
let result = this
|
||||
.update(&mut cx, |this, cx| this.search(query, cx))
|
||||
.await?;
|
||||
let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut locations = Vec::new();
|
||||
for (buffer, ranges) in result {
|
||||
while let Some((buffer, ranges)) = result.next().await {
|
||||
for range in ranges {
|
||||
let start = serialize_anchor(&range.start);
|
||||
let end = serialize_anchor(&range.end);
|
||||
let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
|
||||
let buffer_id = this.update(&mut cx, |this, cx| {
|
||||
this.create_buffer_for_peer(&buffer, peer_id, cx)
|
||||
});
|
||||
locations.push(proto::Location {
|
||||
buffer_id,
|
||||
start: Some(start),
|
||||
|
@ -6914,6 +7209,7 @@ impl Project {
|
|||
}
|
||||
Ok(proto::SearchProjectResponse { locations })
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn handle_open_buffer_for_symbol(
|
||||
|
@ -7579,7 +7875,7 @@ impl Project {
|
|||
self.language_servers_for_buffer(buffer, cx).next()
|
||||
}
|
||||
|
||||
fn language_server_for_buffer(
|
||||
pub fn language_server_for_buffer(
|
||||
&self,
|
||||
buffer: &Buffer,
|
||||
server_id: LanguageServerId,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
|
||||
use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
|
||||
use fs::{FakeFs, LineEnding, RealFs};
|
||||
use futures::{future, StreamExt};
|
||||
use gpui::{executor::Deterministic, test::subscribe, AppContext};
|
||||
|
@ -3953,11 +3953,12 @@ async fn search(
|
|||
query: SearchQuery,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> Result<HashMap<String, Vec<Range<usize>>>> {
|
||||
let results = project
|
||||
.update(cx, |project, cx| project.search(query, cx))
|
||||
.await?;
|
||||
|
||||
Ok(results
|
||||
let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
|
||||
let mut result = HashMap::default();
|
||||
while let Some((buffer, range)) = search_rx.next().await {
|
||||
result.entry(buffer).or_insert(range);
|
||||
}
|
||||
Ok(result
|
||||
.into_iter()
|
||||
.map(|(buffer, ranges)| {
|
||||
buffer.read_with(cx, |buffer, _| {
|
||||
|
|
|
@ -1,7 +1,13 @@
|
|||
use crate::Project;
|
||||
use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
|
||||
use std::path::PathBuf;
|
||||
use terminal::{Terminal, TerminalBuilder, TerminalSettings};
|
||||
use std::path::{Path, PathBuf};
|
||||
use terminal::{
|
||||
terminal_settings::{self, TerminalSettings, VenvSettingsContent},
|
||||
Terminal, TerminalBuilder,
|
||||
};
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
pub struct Terminals {
|
||||
pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
|
||||
|
@ -20,10 +26,12 @@ impl Project {
|
|||
));
|
||||
} else {
|
||||
let settings = settings::get::<TerminalSettings>(cx);
|
||||
let python_settings = settings.detect_venv.clone();
|
||||
let shell = settings.shell.clone();
|
||||
|
||||
let terminal = TerminalBuilder::new(
|
||||
working_directory.clone(),
|
||||
settings.shell.clone(),
|
||||
shell.clone(),
|
||||
settings.env.clone(),
|
||||
Some(settings.blinking.clone()),
|
||||
settings.alternate_scroll,
|
||||
|
@ -47,6 +55,15 @@ impl Project {
|
|||
})
|
||||
.detach();
|
||||
|
||||
if let Some(python_settings) = &python_settings.as_option() {
|
||||
let activate_script_path =
|
||||
self.find_activate_script_path(&python_settings, working_directory);
|
||||
self.activate_python_virtual_environment(
|
||||
activate_script_path,
|
||||
&terminal_handle,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
terminal_handle
|
||||
});
|
||||
|
||||
|
@ -54,6 +71,50 @@ impl Project {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn find_activate_script_path(
|
||||
&mut self,
|
||||
settings: &VenvSettingsContent,
|
||||
working_directory: Option<PathBuf>,
|
||||
) -> Option<PathBuf> {
|
||||
// When we are unable to resolve the working directory, the terminal builder
|
||||
// defaults to '/'. We should probably encode this directly somewhere, but for
|
||||
// now, let's just hard code it here.
|
||||
let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
|
||||
let activate_script_name = match settings.activate_script {
|
||||
terminal_settings::ActivateScript::Default => "activate",
|
||||
terminal_settings::ActivateScript::Csh => "activate.csh",
|
||||
terminal_settings::ActivateScript::Fish => "activate.fish",
|
||||
};
|
||||
|
||||
for virtual_environment_name in settings.directories {
|
||||
let mut path = working_directory.join(virtual_environment_name);
|
||||
path.push("bin/");
|
||||
path.push(activate_script_name);
|
||||
|
||||
if path.exists() {
|
||||
return Some(path);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn activate_python_virtual_environment(
|
||||
&mut self,
|
||||
activate_script: Option<PathBuf>,
|
||||
terminal_handle: &ModelHandle<Terminal>,
|
||||
cx: &mut ModelContext<Project>,
|
||||
) {
|
||||
if let Some(activate_script) = activate_script {
|
||||
// Paths are not strings so we need to jump through some hoops to format the command without `format!`
|
||||
let mut command = Vec::from("source ".as_bytes());
|
||||
command.extend_from_slice(activate_script.as_os_str().as_bytes());
|
||||
command.push(b'\n');
|
||||
|
||||
terminal_handle.update(cx, |this, _| this.input_bytes(command));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
|
||||
&self.terminals.local_handles
|
||||
}
|
||||
|
|
|
@ -2317,9 +2317,10 @@ impl BackgroundScannerState {
|
|||
for changed_path in changed_paths {
|
||||
let Some(dot_git_dir) = changed_path
|
||||
.ancestors()
|
||||
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT)) else {
|
||||
continue;
|
||||
};
|
||||
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
// Avoid processing the same repository multiple times, if multiple paths
|
||||
// within it have changed.
|
||||
|
@ -2348,7 +2349,10 @@ impl BackgroundScannerState {
|
|||
let Some(work_dir) = self
|
||||
.snapshot
|
||||
.entry_for_id(entry_id)
|
||||
.map(|entry| RepositoryWorkDirectory(entry.path.clone())) else { continue };
|
||||
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
log::info!("reload git repository {:?}", dot_git_dir);
|
||||
let repository = repository.repo_ptr.lock();
|
||||
|
@ -4026,7 +4030,7 @@ struct UpdateIgnoreStatusJob {
|
|||
scan_queue: Sender<ScanJob>,
|
||||
}
|
||||
|
||||
pub trait WorktreeHandle {
|
||||
pub trait WorktreeModelHandle {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
fn flush_fs_events<'a>(
|
||||
&self,
|
||||
|
@ -4034,7 +4038,7 @@ pub trait WorktreeHandle {
|
|||
) -> futures::future::LocalBoxFuture<'a, ()>;
|
||||
}
|
||||
|
||||
impl WorktreeHandle for ModelHandle<Worktree> {
|
||||
impl WorktreeModelHandle for ModelHandle<Worktree> {
|
||||
// The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
|
||||
// occurred before the worktree was constructed. These events can cause the worktree to perform
|
||||
// extra directory scans, and emit extra scan-state notifications.
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::{
|
||||
worktree::{Event, Snapshot, WorktreeHandle},
|
||||
worktree::{Event, Snapshot, WorktreeModelHandle},
|
||||
Entry, EntryKind, PathChange, Worktree,
|
||||
};
|
||||
use anyhow::Result;
|
||||
|
|
|
@ -44,7 +44,9 @@ impl View for QuickActionBar {
|
|||
}
|
||||
|
||||
fn render(&mut self, cx: &mut gpui::ViewContext<'_, '_, Self>) -> gpui::AnyElement<Self> {
|
||||
let Some(editor) = self.active_editor() else { return Empty::new().into_any(); };
|
||||
let Some(editor) = self.active_editor() else {
|
||||
return Empty::new().into_any();
|
||||
};
|
||||
|
||||
let inlay_hints_enabled = editor.read(cx).inlay_hints_enabled();
|
||||
let mut bar = Flex::row().with_child(render_quick_action_bar_button(
|
||||
|
|
|
@ -128,6 +128,8 @@ message Envelope {
|
|||
|
||||
InlayHints inlay_hints = 116;
|
||||
InlayHintsResponse inlay_hints_response = 117;
|
||||
ResolveInlayHint resolve_inlay_hint = 137;
|
||||
ResolveInlayHintResponse resolve_inlay_hint_response = 138;
|
||||
RefreshInlayHints refresh_inlay_hints = 118;
|
||||
|
||||
CreateChannel create_channel = 119;
|
||||
|
@ -754,6 +756,7 @@ message InlayHint {
|
|||
bool padding_left = 4;
|
||||
bool padding_right = 5;
|
||||
InlayHintTooltip tooltip = 6;
|
||||
ResolveState resolve_state = 7;
|
||||
}
|
||||
|
||||
message InlayHintLabel {
|
||||
|
@ -770,7 +773,10 @@ message InlayHintLabelParts {
|
|||
message InlayHintLabelPart {
|
||||
string value = 1;
|
||||
InlayHintLabelPartTooltip tooltip = 2;
|
||||
Location location = 3;
|
||||
optional string location_url = 3;
|
||||
PointUtf16 location_range_start = 4;
|
||||
PointUtf16 location_range_end = 5;
|
||||
optional uint64 language_server_id = 6;
|
||||
}
|
||||
|
||||
message InlayHintTooltip {
|
||||
|
@ -787,12 +793,39 @@ message InlayHintLabelPartTooltip {
|
|||
}
|
||||
}
|
||||
|
||||
message ResolveState {
|
||||
State state = 1;
|
||||
LspResolveState lsp_resolve_state = 2;
|
||||
|
||||
enum State {
|
||||
Resolved = 0;
|
||||
CanResolve = 1;
|
||||
Resolving = 2;
|
||||
}
|
||||
|
||||
message LspResolveState {
|
||||
string value = 1;
|
||||
uint64 server_id = 2;
|
||||
}
|
||||
}
|
||||
|
||||
message ResolveInlayHint {
|
||||
uint64 project_id = 1;
|
||||
uint64 buffer_id = 2;
|
||||
uint64 language_server_id = 3;
|
||||
InlayHint hint = 4;
|
||||
}
|
||||
|
||||
message ResolveInlayHintResponse {
|
||||
InlayHint hint = 1;
|
||||
}
|
||||
|
||||
message RefreshInlayHints {
|
||||
uint64 project_id = 1;
|
||||
}
|
||||
|
||||
message MarkupContent {
|
||||
string kind = 1;
|
||||
bool is_markdown = 1;
|
||||
string value = 2;
|
||||
}
|
||||
|
||||
|
@ -1081,6 +1114,7 @@ message GetPrivateUserInfo {}
|
|||
message GetPrivateUserInfoResponse {
|
||||
string metrics_id = 1;
|
||||
bool staff = 2;
|
||||
repeated string flags = 3;
|
||||
}
|
||||
|
||||
// Entities
|
||||
|
|
|
@ -197,6 +197,8 @@ messages!(
|
|||
(OnTypeFormattingResponse, Background),
|
||||
(InlayHints, Background),
|
||||
(InlayHintsResponse, Background),
|
||||
(ResolveInlayHint, Background),
|
||||
(ResolveInlayHintResponse, Background),
|
||||
(RefreshInlayHints, Foreground),
|
||||
(Ping, Foreground),
|
||||
(PrepareRename, Background),
|
||||
|
@ -299,6 +301,7 @@ request_messages!(
|
|||
(PrepareRename, PrepareRenameResponse),
|
||||
(OnTypeFormatting, OnTypeFormattingResponse),
|
||||
(InlayHints, InlayHintsResponse),
|
||||
(ResolveInlayHint, ResolveInlayHintResponse),
|
||||
(RefreshInlayHints, Ack),
|
||||
(ReloadBuffers, ReloadBuffersResponse),
|
||||
(RequestContact, Ack),
|
||||
|
@ -355,6 +358,7 @@ entity_messages!(
|
|||
PerformRename,
|
||||
OnTypeFormatting,
|
||||
InlayHints,
|
||||
ResolveInlayHint,
|
||||
RefreshInlayHints,
|
||||
PrepareRename,
|
||||
ReloadBuffers,
|
||||
|
|
|
@ -6,4 +6,4 @@ pub use conn::Connection;
|
|||
pub use peer::*;
|
||||
mod macros;
|
||||
|
||||
pub const PROTOCOL_VERSION: u32 = 60;
|
||||
pub const PROTOCOL_VERSION: u32 = 61;
|
||||
|
|
|
@ -837,6 +837,7 @@ mod tests {
|
|||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
r#"
|
||||
A regular expression (shortened as regex or regexp;[1] also referred to as
|
||||
rational expression[2][3]) is a sequence of characters that specifies a search
|
||||
|
@ -844,7 +845,6 @@ mod tests {
|
|||
for "find" or "find and replace" operations on strings, or for input validation.
|
||||
"#
|
||||
.unindent(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let window = cx.add_window(|_| EmptyView);
|
||||
|
@ -1225,7 +1225,7 @@ mod tests {
|
|||
expected_query_matches_count > 1,
|
||||
"Should pick a query with multiple results"
|
||||
);
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, buffer_text));
|
||||
let window = cx.add_window(|_| EmptyView);
|
||||
let editor = window.add_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx));
|
||||
|
||||
|
@ -1412,7 +1412,7 @@ mod tests {
|
|||
for "find" or "find and replace" operations on strings, or for input validation.
|
||||
"#
|
||||
.unindent();
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, buffer_text));
|
||||
let window = cx.add_window(|_| EmptyView);
|
||||
|
||||
let editor = window.add_view(cx, |cx| Editor::for_buffer(buffer.clone(), None, cx));
|
||||
|
|
|
@ -185,28 +185,26 @@ impl ProjectSearch {
|
|||
self.active_query = Some(query);
|
||||
self.match_ranges.clear();
|
||||
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
|
||||
let matches = search.await.log_err()?;
|
||||
let mut matches = search;
|
||||
let this = this.upgrade(&cx)?;
|
||||
let mut matches = matches.into_iter().collect::<Vec<_>>();
|
||||
let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.clear();
|
||||
this.excerpts.update(cx, |this, cx| this.clear(cx));
|
||||
this.no_results = Some(true);
|
||||
matches.sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.clear(cx);
|
||||
excerpts.stream_excerpts_with_context_lines(matches, 1, cx)
|
||||
})
|
||||
});
|
||||
|
||||
while let Some(match_range) = match_ranges.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.push(match_range);
|
||||
while let Ok(Some(match_range)) = match_ranges.try_next() {
|
||||
this.match_ranges.push(match_range);
|
||||
}
|
||||
while let Some((buffer, anchors)) = matches.next().await {
|
||||
let mut ranges = this.update(&mut cx, |this, cx| {
|
||||
this.no_results = Some(false);
|
||||
cx.notify();
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.stream_excerpts_with_context_lines(buffer, anchors, 1, cx)
|
||||
})
|
||||
});
|
||||
|
||||
while let Some(range) = ranges.next().await {
|
||||
this.update(&mut cx, |this, _| this.match_ranges.push(range));
|
||||
}
|
||||
this.update(&mut cx, |_, cx| cx.notify());
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
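// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this diff): the `try_next` idiom used above.
// Inside a synchronous update we drain whatever match ranges are already
// queued without awaiting, then keep awaiting the stream for the rest. This
// stand-alone example uses futures::channel::mpsc directly.
use futures::channel::mpsc;
use futures::StreamExt;

fn main() {
    futures::executor::block_on(async {
        let (tx, mut rx) = mpsc::unbounded::<u32>();
        // Two results are already queued by the time we start consuming.
        tx.unbounded_send(1).unwrap();
        tx.unbounded_send(2).unwrap();

        let mut collected = Vec::new();
        // Drain everything that is immediately available, without awaiting.
        while let Ok(Some(value)) = rx.try_next() {
            collected.push(value);
        }
        drop(tx);
        // Fall back to awaiting whatever arrives later (nothing more here).
        while let Some(value) = rx.next().await {
            collected.push(value);
        }
        assert_eq!(collected, vec![1, 2]);
    });
}
// ---------------------------------------------------------------------------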
|
@ -238,29 +236,31 @@ impl ProjectSearch {
|
|||
self.no_results = Some(true);
|
||||
self.pending_search = Some(cx.spawn(|this, mut cx| async move {
|
||||
let results = search?.await.log_err()?;
|
||||
let matches = results
|
||||
.into_iter()
|
||||
.map(|result| (result.buffer, vec![result.range.start..result.range.start]));
|
||||
|
||||
let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.clear(cx);
|
||||
|
||||
let matches = results
|
||||
.into_iter()
|
||||
.map(|result| (result.buffer, vec![result.range.start..result.range.start]))
|
||||
.collect();
|
||||
|
||||
excerpts.stream_excerpts_with_context_lines(matches, 3, cx)
|
||||
})
|
||||
});
|
||||
|
||||
while let Some(match_range) = match_ranges.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.push(match_range);
|
||||
while let Ok(Some(match_range)) = match_ranges.try_next() {
|
||||
this.match_ranges.push(match_range);
|
||||
}
|
||||
for (buffer, ranges) in matches {
|
||||
let mut match_ranges = this.update(&mut cx, |this, cx| {
|
||||
this.no_results = Some(false);
|
||||
cx.notify();
|
||||
this.excerpts.update(cx, |excerpts, cx| {
|
||||
excerpts.stream_excerpts_with_context_lines(buffer, ranges, 3, cx)
|
||||
})
|
||||
});
|
||||
while let Some(match_range) = match_ranges.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.match_ranges.push(match_range);
|
||||
while let Ok(Some(match_range)) = match_ranges.try_next() {
|
||||
this.match_ranges.push(match_range);
|
||||
}
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
|
@ -885,7 +885,9 @@ impl ProjectSearchView {
|
|||
if !dir_entry.is_dir() {
|
||||
return;
|
||||
}
|
||||
let Some(filter_str) = dir_entry.path.to_str() else { return; };
|
||||
let Some(filter_str) = dir_entry.path.to_str() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
|
||||
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
|
||||
|
|
|
@ -57,7 +57,9 @@ pub fn init(
|
|||
|
||||
cx.subscribe_global::<WorkspaceCreated, _>({
|
||||
move |event, cx| {
|
||||
let Some(semantic_index) = SemanticIndex::global(cx) else { return; };
|
||||
let Some(semantic_index) = SemanticIndex::global(cx) else {
|
||||
return;
|
||||
};
|
||||
let workspace = &event.0;
|
||||
if let Some(workspace) = workspace.upgrade(cx) {
|
||||
let project = workspace.read(cx).project().clone();
|
||||
|
|
|
@ -16,7 +16,7 @@ collections = { path = "../collections" }
|
|||
gpui = { path = "../gpui" }
|
||||
sqlez = { path = "../sqlez" }
|
||||
fs = { path = "../fs" }
|
||||
staff_mode = { path = "../staff_mode" }
|
||||
feature_flags = { path = "../feature_flags" }
|
||||
util = { path = "../util" }
|
||||
|
||||
anyhow.workspace = true
|
||||
|
|
|
@ -63,20 +63,23 @@ impl KeymapFile {
|
|||
// string. But `RawValue` currently does not work inside of an untagged enum.
|
||||
match action {
|
||||
Value::Array(items) => {
|
||||
let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else {
|
||||
let Ok([name, data]): Result<[serde_json::Value; 2], _> =
|
||||
items.try_into()
|
||||
else {
|
||||
return Some(Err(anyhow!("Expected array of length 2")));
|
||||
};
|
||||
let serde_json::Value::String(name) = name else {
|
||||
return Some(Err(anyhow!("Expected first item in array to be a string.")))
|
||||
return Some(Err(anyhow!(
|
||||
"Expected first item in array to be a string."
|
||||
)));
|
||||
};
|
||||
cx.deserialize_action(
|
||||
&name,
|
||||
Some(data),
|
||||
)
|
||||
},
|
||||
cx.deserialize_action(&name, Some(data))
|
||||
}
|
||||
Value::String(name) => cx.deserialize_action(&name, None),
|
||||
Value::Null => Ok(no_action()),
|
||||
_ => return Some(Err(anyhow!("Expected two-element array, got {action:?}"))),
|
||||
_ => {
|
||||
return Some(Err(anyhow!("Expected two-element array, got {action:?}")))
|
||||
}
|
||||
}
|
||||
.with_context(|| {
|
||||
format!(
|
||||
|
|
|
@ -1,36 +0,0 @@
|
|||
use gpui::AppContext;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct StaffMode(pub bool);
|
||||
|
||||
impl std::ops::Deref for StaffMode {
|
||||
type Target = bool;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Despite what the type system requires me to tell you, the init function will only be called once
|
||||
/// as soon as we know that the staff mode is enabled.
|
||||
pub fn staff_mode<F: FnMut(&mut AppContext) + 'static>(cx: &mut AppContext, mut init: F) {
|
||||
if **cx.default_global::<StaffMode>() {
|
||||
init(cx)
|
||||
} else {
|
||||
let mut once = Some(());
|
||||
cx.observe_global::<StaffMode, _>(move |cx| {
|
||||
if **cx.global::<StaffMode>() && once.take().is_some() {
|
||||
init(cx);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
/// Immediately checks and runs the init function if the staff mode is not enabled.
|
||||
/// This is only included for symmetry with staff_mode() above
|
||||
pub fn not_staff_mode<F: FnOnce(&mut AppContext) + 'static>(cx: &mut AppContext, init: F) {
|
||||
if !**cx.default_global::<StaffMode>() {
|
||||
init(cx)
|
||||
}
|
||||
}
|
|
@ -1,5 +1,6 @@
|
|||
pub mod mappings;
|
||||
pub use alacritty_terminal;
|
||||
pub mod terminal_settings;
|
||||
|
||||
use alacritty_terminal::{
|
||||
ansi::{ClearMode, Handler},
|
||||
|
@ -31,8 +32,8 @@ use mappings::mouse::{
|
|||
};
|
||||
|
||||
use procinfo::LocalProcessInfo;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use terminal_settings::{AlternateScroll, Shell, TerminalBlink, TerminalSettings};
|
||||
use util::truncate_and_trailoff;
|
||||
|
||||
use std::{
|
||||
|
@ -48,7 +49,6 @@ use std::{
|
|||
use thiserror::Error;
|
||||
|
||||
use gpui::{
|
||||
fonts,
|
||||
geometry::vector::{vec2f, Vector2F},
|
||||
keymap_matcher::Keystroke,
|
||||
platform::{Modifiers, MouseButton, MouseMovedEvent, TouchPhase},
|
||||
|
@ -78,7 +78,7 @@ lazy_static! {
|
|||
// * use more strict regex for `file://` protocol matching: original regex has `file:` inside, but we want to avoid matching `some::file::module` strings.
|
||||
static ref URL_REGEX: RegexSearch = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
|
||||
|
||||
static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.:/@\-~]+"#).unwrap();
|
||||
static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.\[\]:/@\-~]+"#).unwrap();
|
||||
}
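// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this diff): what adding `\[\]` to the word
// character class buys. With brackets included, an indexed path such as
// `vec[0].push` is matched as a single word instead of being split at the
// brackets. This example uses the `regex` crate rather than alacritty's
// RegexSearch, purely for demonstration.
use regex::Regex;

fn main() {
    let old = Regex::new(r"[\w.:/@\-~]+").unwrap();
    let new = Regex::new(r"[\w.\[\]:/@\-~]+").unwrap();
    let text = "vec[0].push(item)";

    let first_old = old.find(text).map(|m| m.as_str());
    let first_new = new.find(text).map(|m| m.as_str());

    assert_eq!(first_old, Some("vec")); // split at the opening bracket
    assert_eq!(first_new, Some("vec[0].push")); // brackets kept in the word
}
// ---------------------------------------------------------------------------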
|
||||
|
||||
///Upward flowing events, for changing the title and such
|
||||
|
@ -134,122 +134,6 @@ pub fn init(cx: &mut AppContext) {
|
|||
settings::register::<TerminalSettings>(cx);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalDockPosition {
|
||||
Left,
|
||||
Bottom,
|
||||
Right,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct TerminalSettings {
|
||||
pub shell: Shell,
|
||||
pub working_directory: WorkingDirectory,
|
||||
font_size: Option<f32>,
|
||||
pub font_family: Option<String>,
|
||||
pub line_height: TerminalLineHeight,
|
||||
pub font_features: Option<fonts::Features>,
|
||||
pub env: HashMap<String, String>,
|
||||
pub blinking: TerminalBlink,
|
||||
pub alternate_scroll: AlternateScroll,
|
||||
pub option_as_meta: bool,
|
||||
pub copy_on_select: bool,
|
||||
pub dock: TerminalDockPosition,
|
||||
pub default_width: f32,
|
||||
pub default_height: f32,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TerminalSettingsContent {
|
||||
pub shell: Option<Shell>,
|
||||
pub working_directory: Option<WorkingDirectory>,
|
||||
pub font_size: Option<f32>,
|
||||
pub font_family: Option<String>,
|
||||
pub line_height: Option<TerminalLineHeight>,
|
||||
pub font_features: Option<fonts::Features>,
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
pub blinking: Option<TerminalBlink>,
|
||||
pub alternate_scroll: Option<AlternateScroll>,
|
||||
pub option_as_meta: Option<bool>,
|
||||
pub copy_on_select: Option<bool>,
|
||||
pub dock: Option<TerminalDockPosition>,
|
||||
pub default_width: Option<f32>,
|
||||
pub default_height: Option<f32>,
|
||||
}
|
||||
|
||||
impl TerminalSettings {
|
||||
pub fn font_size(&self, cx: &AppContext) -> Option<f32> {
|
||||
self.font_size
|
||||
.map(|size| theme::adjusted_font_size(size, cx))
|
||||
}
|
||||
}
|
||||
|
||||
impl settings::Setting for TerminalSettings {
|
||||
const KEY: Option<&'static str> = Some("terminal");
|
||||
|
||||
type FileContent = TerminalSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &AppContext,
|
||||
) -> Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalLineHeight {
|
||||
#[default]
|
||||
Comfortable,
|
||||
Standard,
|
||||
Custom(f32),
|
||||
}
|
||||
|
||||
impl TerminalLineHeight {
|
||||
pub fn value(&self) -> f32 {
|
||||
match self {
|
||||
TerminalLineHeight::Comfortable => 1.618,
|
||||
TerminalLineHeight::Standard => 1.3,
|
||||
TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalBlink {
|
||||
Off,
|
||||
TerminalControlled,
|
||||
On,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum Shell {
|
||||
System,
|
||||
Program(String),
|
||||
WithArguments { program: String, args: Vec<String> },
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum AlternateScroll {
|
||||
On,
|
||||
Off,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum WorkingDirectory {
|
||||
CurrentProjectDirectory,
|
||||
FirstProjectDirectory,
|
||||
AlwaysHome,
|
||||
Always { directory: String },
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
|
||||
pub struct TerminalSize {
|
||||
pub cell_width: f32,
|
||||
|
@ -1018,6 +902,10 @@ impl Terminal {
|
|||
self.pty_tx.notify(input.into_bytes());
|
||||
}
|
||||
|
||||
fn write_bytes_to_pty(&self, input: Vec<u8>) {
|
||||
self.pty_tx.notify(input);
|
||||
}
|
||||
|
||||
pub fn input(&mut self, input: String) {
|
||||
self.events
|
||||
.push_back(InternalEvent::Scroll(AlacScroll::Bottom));
|
||||
|
@ -1026,6 +914,14 @@ impl Terminal {
|
|||
self.write_to_pty(input);
|
||||
}
|
||||
|
||||
pub fn input_bytes(&mut self, input: Vec<u8>) {
|
||||
self.events
|
||||
.push_back(InternalEvent::Scroll(AlacScroll::Bottom));
|
||||
self.events.push_back(InternalEvent::SetSelection(None));
|
||||
|
||||
self.write_bytes_to_pty(input);
|
||||
}
|
||||
|
||||
pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool {
|
||||
let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
|
||||
if let Some(esc) = esc {
|
||||
|
|
163
crates/terminal/src/terminal_settings.rs
Normal file
|
@ -0,0 +1,163 @@
|
|||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
use gpui::{fonts, AppContext};
|
||||
use schemars::JsonSchema;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalDockPosition {
|
||||
Left,
|
||||
Bottom,
|
||||
Right,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct TerminalSettings {
|
||||
pub shell: Shell,
|
||||
pub working_directory: WorkingDirectory,
|
||||
font_size: Option<f32>,
|
||||
pub font_family: Option<String>,
|
||||
pub line_height: TerminalLineHeight,
|
||||
pub font_features: Option<fonts::Features>,
|
||||
pub env: HashMap<String, String>,
|
||||
pub blinking: TerminalBlink,
|
||||
pub alternate_scroll: AlternateScroll,
|
||||
pub option_as_meta: bool,
|
||||
pub copy_on_select: bool,
|
||||
pub dock: TerminalDockPosition,
|
||||
pub default_width: f32,
|
||||
pub default_height: f32,
|
||||
pub detect_venv: VenvSettings,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum VenvSettings {
|
||||
#[default]
|
||||
Off,
|
||||
On {
|
||||
activate_script: Option<ActivateScript>,
|
||||
directories: Option<Vec<PathBuf>>,
|
||||
},
|
||||
}
|
||||
|
||||
pub struct VenvSettingsContent<'a> {
|
||||
pub activate_script: ActivateScript,
|
||||
pub directories: &'a [PathBuf],
|
||||
}
|
||||
|
||||
impl VenvSettings {
|
||||
pub fn as_option(&self) -> Option<VenvSettingsContent> {
|
||||
match self {
|
||||
VenvSettings::Off => None,
|
||||
VenvSettings::On {
|
||||
activate_script,
|
||||
directories,
|
||||
} => Some(VenvSettingsContent {
|
||||
activate_script: activate_script.unwrap_or(ActivateScript::Default),
|
||||
directories: directories.as_deref().unwrap_or(&[]),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum ActivateScript {
|
||||
#[default]
|
||||
Default,
|
||||
Csh,
|
||||
Fish,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct TerminalSettingsContent {
|
||||
pub shell: Option<Shell>,
|
||||
pub working_directory: Option<WorkingDirectory>,
|
||||
pub font_size: Option<f32>,
|
||||
pub font_family: Option<String>,
|
||||
pub line_height: Option<TerminalLineHeight>,
|
||||
pub font_features: Option<fonts::Features>,
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
pub blinking: Option<TerminalBlink>,
|
||||
pub alternate_scroll: Option<AlternateScroll>,
|
||||
pub option_as_meta: Option<bool>,
|
||||
pub copy_on_select: Option<bool>,
|
||||
pub dock: Option<TerminalDockPosition>,
|
||||
pub default_width: Option<f32>,
|
||||
pub default_height: Option<f32>,
|
||||
pub detect_venv: Option<VenvSettings>,
|
||||
}
|
||||
|
||||
impl TerminalSettings {
|
||||
pub fn font_size(&self, cx: &AppContext) -> Option<f32> {
|
||||
self.font_size
|
||||
.map(|size| theme::adjusted_font_size(size, cx))
|
||||
}
|
||||
}
|
||||
|
||||
impl settings::Setting for TerminalSettings {
|
||||
const KEY: Option<&'static str> = Some("terminal");
|
||||
|
||||
type FileContent = TerminalSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalLineHeight {
|
||||
#[default]
|
||||
Comfortable,
|
||||
Standard,
|
||||
Custom(f32),
|
||||
}
|
||||
|
||||
impl TerminalLineHeight {
|
||||
pub fn value(&self) -> f32 {
|
||||
match self {
|
||||
TerminalLineHeight::Comfortable => 1.618,
|
||||
TerminalLineHeight::Standard => 1.3,
|
||||
TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TerminalBlink {
|
||||
Off,
|
||||
TerminalControlled,
|
||||
On,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum Shell {
|
||||
System,
|
||||
Program(String),
|
||||
WithArguments { program: String, args: Vec<String> },
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum AlternateScroll {
|
||||
On,
|
||||
Off,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum WorkingDirectory {
|
||||
CurrentProjectDirectory,
|
||||
FirstProjectDirectory,
|
||||
AlwaysHome,
|
||||
Always { directory: String },
|
||||
}
|
|
@ -25,7 +25,8 @@ use terminal::{
|
|||
term::{cell::Flags, TermMode},
|
||||
},
|
||||
mappings::colors::convert_color,
|
||||
IndexedCell, Terminal, TerminalContent, TerminalSettings, TerminalSize,
|
||||
terminal_settings::TerminalSettings,
|
||||
IndexedCell, Terminal, TerminalContent, TerminalSize,
|
||||
};
|
||||
use theme::{TerminalStyle, ThemeSettings};
|
||||
use util::ResultExt;
|
||||
|
|
|
@ -9,7 +9,7 @@ use gpui::{
|
|||
use project::Fs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::SettingsStore;
|
||||
use terminal::{TerminalDockPosition, TerminalSettings};
|
||||
use terminal::terminal_settings::{TerminalDockPosition, TerminalSettings};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use workspace::{
|
||||
dock::{DockPosition, Panel},
|
||||
|
|
|
@ -33,7 +33,8 @@ use terminal::{
|
|||
index::Point,
|
||||
term::{search::RegexSearch, TermMode},
|
||||
},
|
||||
Event, MaybeNavigationTarget, Terminal, TerminalBlink, WorkingDirectory,
|
||||
terminal_settings::{TerminalBlink, TerminalSettings, WorkingDirectory},
|
||||
Event, MaybeNavigationTarget, Terminal,
|
||||
};
|
||||
use util::{paths::PathLikeWithPosition, ResultExt};
|
||||
use workspace::{
|
||||
|
@ -44,8 +45,6 @@ use workspace::{
|
|||
NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
|
||||
};
|
||||
|
||||
pub use terminal::TerminalSettings;
|
||||
|
||||
const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
|
||||
|
||||
///Event to transmit the scroll from the element to the view
|
||||
|
|
|
@ -16,7 +16,7 @@ gpui = { path = "../gpui" }
|
|||
picker = { path = "../picker" }
|
||||
theme = { path = "../theme" }
|
||||
settings = { path = "../settings" }
|
||||
staff_mode = { path = "../staff_mode" }
|
||||
feature_flags = { path = "../feature_flags" }
|
||||
workspace = { path = "../workspace" }
|
||||
util = { path = "../util" }
|
||||
log.workspace = true
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
use feature_flags::FeatureFlagAppExt;
|
||||
use fs::Fs;
|
||||
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
|
||||
use gpui::{actions, elements::*, AnyElement, AppContext, Element, MouseState, ViewContext};
|
||||
use picker::{Picker, PickerDelegate, PickerEvent};
|
||||
use settings::{update_settings_file, SettingsStore};
|
||||
use staff_mode::StaffMode;
|
||||
use std::sync::Arc;
|
||||
use theme::{Theme, ThemeMeta, ThemeRegistry, ThemeSettings};
|
||||
use util::ResultExt;
|
||||
|
@ -54,7 +54,7 @@ impl ThemeSelectorDelegate {
|
|||
fn new(fs: Arc<dyn Fs>, cx: &mut ViewContext<ThemeSelector>) -> Self {
|
||||
let original_theme = theme::current(cx).clone();
|
||||
|
||||
let staff_mode = **cx.default_global::<StaffMode>();
|
||||
let staff_mode = cx.is_staff();
|
||||
let registry = cx.global::<Arc<ThemeRegistry>>();
|
||||
let mut theme_names = registry.list(staff_mode).collect::<Vec<_>>();
|
||||
theme_names.sort_unstable_by(|a, b| a.is_light.cmp(&b.is_light).then(a.name.cmp(&b.name)));
|
||||
|
|
|
@ -107,20 +107,15 @@ impl PickerDelegate for BranchListDelegate {
|
|||
let delegate = view.delegate();
|
||||
let project = delegate.workspace.read(cx).project().read(&cx);
|
||||
|
||||
let Some(worktree) = project
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
else {
|
||||
let Some(worktree) = project.visible_worktrees(cx).next() else {
|
||||
bail!("Cannot update branch list as there are no visible worktrees")
|
||||
};
|
||||
let mut cwd = worktree .read(cx)
|
||||
.abs_path()
|
||||
.to_path_buf();
|
||||
let mut cwd = worktree.read(cx).abs_path().to_path_buf();
|
||||
cwd.push(".git");
|
||||
let Some(repo) = project.fs().open_repo(&cwd) else {bail!("Project does not have associated git repository.")};
|
||||
let mut branches = repo
|
||||
.lock()
|
||||
.branches()?;
|
||||
let Some(repo) = project.fs().open_repo(&cwd) else {
|
||||
bail!("Project does not have associated git repository.")
|
||||
};
|
||||
let mut branches = repo.lock().branches()?;
|
||||
const RECENT_BRANCHES_COUNT: usize = 10;
|
||||
if query.is_empty() && branches.len() > RECENT_BRANCHES_COUNT {
|
||||
// Truncate list of recent branches
|
||||
|
@ -142,8 +137,13 @@ impl PickerDelegate for BranchListDelegate {
|
|||
})
|
||||
.collect::<Vec<_>>())
|
||||
})
|
||||
.log_err() else { return; };
|
||||
let Some(candidates) = candidates.log_err() else {return;};
|
||||
.log_err()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let Some(candidates) = candidates.log_err() else {
|
||||
return;
|
||||
};
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
|
@ -184,7 +184,11 @@ impl PickerDelegate for BranchListDelegate {
|
|||
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
let current_pick = self.selected_index();
|
||||
let Some(current_pick) = self.matches.get(current_pick).map(|pick| pick.string.clone()) else {
|
||||
let Some(current_pick) = self
|
||||
.matches
|
||||
.get(current_pick)
|
||||
.map(|pick| pick.string.clone())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
|
|
|
@ -33,7 +33,7 @@ fn paste(_: &mut Workspace, action: &Paste, cx: &mut ViewContext<Workspace>) {
|
|||
editor.set_clip_at_line_ends(false, cx);
|
||||
|
||||
let Some(item) = cx.read_from_clipboard() else {
|
||||
return
|
||||
return;
|
||||
};
|
||||
let clipboard_text = Cow::Borrowed(item.text());
|
||||
if clipboard_text.is_empty() {
|
||||
|
|
|
@ -77,7 +77,10 @@ pub fn visual_motion(motion: Motion, times: Option<usize>, cx: &mut WindowContex
|
|||
}
|
||||
|
||||
let Some((new_head, goal)) =
|
||||
motion.move_point(map, current_head, selection.goal, times) else { return };
|
||||
motion.move_point(map, current_head, selection.goal, times)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
selection.set_head(new_head, goal);
|
||||
|
||||
|
@ -123,16 +126,21 @@ pub fn visual_block_motion(
|
|||
let map = &s.display_map();
|
||||
let mut head = s.newest_anchor().head().to_display_point(map);
|
||||
let mut tail = s.oldest_anchor().tail().to_display_point(map);
|
||||
let mut goal = s.newest_anchor().goal;
|
||||
|
||||
let (start, end) = match s.newest_anchor().goal {
|
||||
SelectionGoal::ColumnRange { start, end } if preserve_goal => (start, end),
|
||||
SelectionGoal::Column(start) if preserve_goal => (start, start + 1),
|
||||
_ => (tail.column(), head.column()),
|
||||
};
|
||||
let goal = SelectionGoal::ColumnRange { start, end };
|
||||
|
||||
let was_reversed = tail.column() > head.column();
|
||||
|
||||
if !was_reversed && !preserve_goal {
|
||||
head = movement::saturating_left(map, head);
|
||||
}
|
||||
|
||||
let Some((new_head, _)) = move_selection(&map, head, goal) else {
|
||||
return
|
||||
return;
|
||||
};
|
||||
head = new_head;
|
||||
|
||||
|
@ -146,13 +154,6 @@ pub fn visual_block_motion(
|
|||
head = movement::saturating_right(map, head)
|
||||
}
|
||||
|
||||
let (start, end) = match goal {
|
||||
SelectionGoal::ColumnRange { start, end } if preserve_goal => (start, end),
|
||||
SelectionGoal::Column(start) if preserve_goal => (start, start + 1),
|
||||
_ => (tail.column(), head.column()),
|
||||
};
|
||||
goal = SelectionGoal::ColumnRange { start, end };
|
||||
|
||||
let columns = if is_reversed {
|
||||
head.column()..tail.column()
|
||||
} else if head.column() == tail.column() {
|
||||
|
@ -788,6 +789,26 @@ mod test {
|
|||
"
|
||||
})
|
||||
.await;
|
||||
|
||||
//https://github.com/zed-industries/community/issues/1950
|
||||
cx.set_shared_state(indoc! {
|
||||
"Theˇ quick brown
|
||||
|
||||
fox jumps over
|
||||
the lazy dog
|
||||
"
|
||||
})
|
||||
.await;
|
||||
cx.simulate_shared_keystrokes(["l", "ctrl-v", "j", "j"])
|
||||
.await;
|
||||
cx.assert_shared_state(indoc! {
|
||||
"The «qˇ»uick brown
|
||||
|
||||
fox «jˇ»umps over
|
||||
the lazy dog
|
||||
"
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
|
|
@ -30,3 +30,9 @@
|
|||
{"Key":"o"}
|
||||
{"Key":"escape"}
|
||||
{"Get":{"state":"Theˇouick\nbroo\nfoxo\njumo over the\n\nlazy dog\n","mode":"Normal"}}
|
||||
{"Put":{"state":"Theˇ quick brown\n\nfox jumps over\nthe lazy dog\n"}}
|
||||
{"Key":"l"}
|
||||
{"Key":"ctrl-v"}
|
||||
{"Key":"j"}
|
||||
{"Key":"j"}
|
||||
{"Get":{"state":"The «qˇ»uick brown\n\nfox «jˇ»umps over\nthe lazy dog\n","mode":"VisualBlock"}}
|
||||
|
|
|
@ -742,8 +742,8 @@ mod element {
|
|||
|
||||
while proposed_current_pixel_change.abs() > 0. {
|
||||
let Some(current_ix) = successors.next() else {
|
||||
break;
|
||||
};
|
||||
break;
|
||||
};
|
||||
|
||||
let next_target_size = f32::max(
|
||||
size(current_ix + 1, flexes.as_slice()) - proposed_current_pixel_change,
|
||||
|
|
|
@ -2314,8 +2314,12 @@ impl Workspace {
|
|||
item_id_to_move: usize,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let Some(pane_to_split) = pane_to_split.upgrade(cx) else { return; };
|
||||
let Some(from) = from.upgrade(cx) else { return; };
|
||||
let Some(pane_to_split) = pane_to_split.upgrade(cx) else {
|
||||
return;
|
||||
};
|
||||
let Some(from) = from.upgrade(cx) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let new_pane = self.add_pane(cx);
|
||||
self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
|
||||
|
|
|
@ -60,7 +60,7 @@ quick_action_bar = { path = "../quick_action_bar" }
|
|||
recent_projects = { path = "../recent_projects" }
|
||||
rpc = { path = "../rpc" }
|
||||
settings = { path = "../settings" }
|
||||
staff_mode = { path = "../staff_mode" }
|
||||
feature_flags = { path = "../feature_flags" }
|
||||
sum_tree = { path = "../sum_tree" }
|
||||
text = { path = "../text" }
|
||||
terminal_view = { path = "../terminal_view" }
|
||||
|
|
|
@ -289,7 +289,7 @@ mod tests {
|
|||
let language = crate::languages::language("c", tree_sitter_c::language(), None).await;
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
|
||||
|
||||
// empty function
|
||||
buffer.edit([(0..0, "int main() {}")], None, cx);
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||
use gpui::AppContext;
|
||||
use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||
|
@ -9,7 +10,6 @@ use node_runtime::NodeRuntime;
|
|||
use serde_json::json;
|
||||
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
|
||||
use smol::fs;
|
||||
use staff_mode::StaffMode;
|
||||
use std::{
|
||||
any::Any,
|
||||
ffi::OsString,
|
||||
|
@ -104,7 +104,7 @@ impl LspAdapter for JsonLspAdapter {
|
|||
cx: &mut AppContext,
|
||||
) -> Option<BoxFuture<'static, serde_json::Value>> {
|
||||
let action_names = cx.all_action_names().collect::<Vec<_>>();
|
||||
let staff_mode = cx.default_global::<StaffMode>().0;
|
||||
let staff_mode = cx.is_staff();
|
||||
let language_names = &self.languages.language_names();
|
||||
let settings_schema = cx.global::<SettingsStore>().json_schema(
|
||||
&SettingsJsonSchemaParams {
|
||||
|
|
|
@ -89,7 +89,9 @@ impl LspAdapter for PythonLspAdapter {
|
|||
// to allow our own fuzzy score to be used to break ties.
|
||||
//
|
||||
// see https://github.com/microsoft/pyright/blob/95ef4e103b9b2f129c9320427e51b73ea7cf78bd/packages/pyright-internal/src/languageService/completionProvider.ts#LL2873
|
||||
let Some(sort_text) = &mut item.sort_text else { return };
|
||||
let Some(sort_text) = &mut item.sort_text else {
|
||||
return;
|
||||
};
|
||||
let mut parts = sort_text.split('.');
|
||||
let Some(first) = parts.next() else { return };
|
||||
let Some(second) = parts.next() else { return };
|
||||
|
@ -208,7 +210,7 @@ mod tests {
|
|||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
|
||||
let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| {
|
||||
let ix = buffer.len();
|
||||
buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx);
|
||||
|
|
|
@ -474,7 +474,7 @@ mod tests {
|
|||
let language = crate::languages::language("rust", tree_sitter_rust::language(), None).await;
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "").with_language(language, cx);
|
||||
|
||||
// indent between braces
|
||||
buffer.set_text("fn a() {}", cx);
|
||||
|
|
|
@ -356,8 +356,9 @@ mod tests {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer =
|
||||
cx.add_model(|cx| language::Buffer::new(0, text, cx).with_language(language, cx));
|
||||
let buffer = cx.add_model(|cx| {
|
||||
language::Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx)
|
||||
});
|
||||
let outline = buffer.read_with(cx, |buffer, _| buffer.snapshot().outline(None).unwrap());
|
||||
assert_eq!(
|
||||
outline
|
||||
|
|
|
@ -53,8 +53,6 @@ use uuid::Uuid;
|
|||
use welcome::{show_welcome_experience, FIRST_OPEN};
|
||||
|
||||
use fs::RealFs;
|
||||
#[cfg(debug_assertions)]
|
||||
use staff_mode::StaffMode;
|
||||
use util::{channel::RELEASE_CHANNEL, paths, ResultExt, TryFutureExt};
|
||||
use workspace::AppState;
|
||||
use zed::{
|
||||
|
@ -122,7 +120,10 @@ fn main() {
|
|||
cx.set_global(*RELEASE_CHANNEL);
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
cx.set_global(StaffMode(true));
|
||||
{
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
cx.set_staff(true);
|
||||
}
|
||||
|
||||
let mut store = SettingsStore::default();
|
||||
store
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
[toolchain]
|
||||
channel = "1.71"
|
||||
channel = "1.72"
|
||||
components = [ "rustfmt" ]
|
||||
targets = [ "x86_64-apple-darwin", "aarch64-apple-darwin", "wasm32-wasi" ]
|
||||
|
|