update clippy
commit c5002d85a9
149 changed files with 2565 additions and 2361 deletions
.cargo/config.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
+[target.'cfg(all())']
+rustflags = [
+"-Aclippy::reversed_empty_ranges",
+"-Aclippy::missing_safety_doc",
+"-Aclippy::let_unit_value",
+]
.github/workflows/.ci.yml.swp (new binary file)
Binary file not shown.
.github/workflows/ci.yml (2 lines changed)
@@ -42,7 +42,7 @@ jobs:
 clean: false

 - name: Run clippy
-run: cargo clippy
+run: cargo clippy --workspace -- -D warnings -A clippy::reversed_empty_ranges -A clippy::missing_safety_doc -A clippy::let_unit_value

 - name: Run tests
 run: cargo test --workspace --no-fail-fast
.gitignore (1 line changed)
@@ -4,7 +4,6 @@
 /plugins/bin
 /script/node_modules
 /styles/node_modules
-/crates/collab/.env.toml
 /crates/collab/static/styles.css
 /vendor/bin
 /assets/themes/*.json
---
@@ -15,9 +15,9 @@ use workspace::{ItemHandle, StatusItemView, Workspace};

 actions!(lsp_status, [ShowErrorMessage]);

-const DOWNLOAD_ICON: &'static str = "icons/download_12.svg";
-const WARNING_ICON: &'static str = "icons/triangle_exclamation_12.svg";
-const DONE_ICON: &'static str = "icons/circle_check_12.svg";
+const DOWNLOAD_ICON: &str = "icons/download_12.svg";
+const WARNING_ICON: &str = "icons/triangle_exclamation_12.svg";
+const DONE_ICON: &str = "icons/circle_check_12.svg";

 pub enum Event {
 ShowError { lsp_name: Arc<str>, error: String },
@@ -76,7 +76,7 @@ impl ActivityIndicator {
 cx.subscribe(&this, move |workspace, _, event, cx| match event {
 Event::ShowError { lsp_name, error } => {
 if let Some(buffer) = project
-.update(cx, |project, cx| project.create_buffer(&error, None, cx))
+.update(cx, |project, cx| project.create_buffer(error, None, cx))
 .log_err()
 {
 buffer.update(cx, |buffer, cx| {
---
@@ -13,8 +13,7 @@ use std::{env, ffi::OsString, path::PathBuf, sync::Arc, time::Duration};
 use update_notification::UpdateNotification;
 use workspace::Workspace;

-const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &'static str =
-"auto-updater-should-show-updated-notification";
+const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
 const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);

 lazy_static! {
@@ -61,7 +60,7 @@ pub fn init(
 server_url: String,
 cx: &mut MutableAppContext,
 ) {
-if let Some(version) = ZED_APP_VERSION.clone().or(cx.platform().app_version().ok()) {
+if let Some(version) = (*ZED_APP_VERSION).or_else(|| cx.platform().app_version().ok()) {
 let auto_updater = cx.add_model(|cx| {
 let updater = AutoUpdater::new(version, db.clone(), http_client, server_url.clone());
 updater.start_polling(cx).detach();
---
@@ -8,8 +8,8 @@ use gpui::{
 elements::*,
 platform::CursorStyle,
 views::{ItemType, Select, SelectStyle},
-AppContext, Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription,
-Task, View, ViewContext, ViewHandle,
+AnyViewHandle, AppContext, Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext,
+Subscription, Task, View, ViewContext, ViewHandle,
 };
 use menu::Confirm;
 use postage::prelude::Stream;
@@ -70,7 +70,7 @@ impl ChatPanel {
 let theme = &cx.global::<Settings>().theme.chat_panel.channel_select;
 SelectStyle {
 header: theme.header.container,
-menu: theme.menu.clone(),
+menu: theme.menu,
 }
 })
 });
@@ -91,7 +91,7 @@ impl ChatPanel {
 let _observe_status = cx.spawn_weak(|this, mut cx| {
 let mut status = rpc.status();
 async move {
-while let Some(_) = status.recv().await {
+while (status.recv().await).is_some() {
 if let Some(this) = this.upgrade(&cx) {
 this.update(&mut cx, |_, cx| cx.notify());
 } else {
@@ -397,7 +397,7 @@ impl View for ChatPanel {
 .boxed()
 }

-fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
+fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
 if matches!(
 *self.rpc.status().borrow(),
 client::Status::Connected { .. }
---
@@ -145,7 +145,7 @@ impl ChannelList {
 }

 pub fn available_channels(&self) -> Option<&[ChannelDetails]> {
-self.available_channels.as_ref().map(Vec::as_slice)
+self.available_channels.as_deref()
 }

 pub fn get_channel(
@@ -601,8 +601,8 @@ mod tests {

 let user_id = 5;
 let http_client = FakeHttpClient::with_404_response();
-let mut client = Client::new(http_client.clone());
-let server = FakeServer::for_client(user_id, &mut client, &cx).await;
+let client = Client::new(http_client.clone());
+let server = FakeServer::for_client(user_id, &client, cx).await;

 Channel::init(&client);
 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
@@ -623,7 +623,7 @@ mod tests {
 },
 )
 .await;
-channel_list.next_notification(&cx).await;
+channel_list.next_notification(cx).await;
 channel_list.read_with(cx, |list, _| {
 assert_eq!(
 list.available_channels().unwrap(),
@@ -701,7 +701,7 @@ mod tests {
 .await;

 assert_eq!(
-channel.next_event(&cx).await,
+channel.next_event(cx).await,
 ChannelEvent::MessagesUpdated {
 old_range: 0..0,
 new_count: 2,
@@ -749,7 +749,7 @@ mod tests {
 .await;

 assert_eq!(
-channel.next_event(&cx).await,
+channel.next_event(cx).await,
 ChannelEvent::MessagesUpdated {
 old_range: 2..2,
 new_count: 1,
@@ -798,7 +798,7 @@ mod tests {
 .await;

 assert_eq!(
-channel.next_event(&cx).await,
+channel.next_event(cx).await,
 ChannelEvent::MessagesUpdated {
 old_range: 0..0,
 new_count: 2,
---
@@ -41,13 +41,13 @@ pub use user::*;

 lazy_static! {
 pub static ref ZED_SERVER_URL: String =
-std::env::var("ZED_SERVER_URL").unwrap_or("https://zed.dev".to_string());
+std::env::var("ZED_SERVER_URL").unwrap_or_else(|_| "https://zed.dev".to_string());
 pub static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
 .ok()
 .and_then(|s| if s.is_empty() { None } else { Some(s) });
 }

-pub const ZED_SECRET_CLIENT_TOKEN: &'static str = "618033988749894";
+pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";

 actions!(client, [Authenticate]);
@@ -65,10 +65,13 @@ pub struct Client {
 http: Arc<dyn HttpClient>,
 state: RwLock<ClientState>,
+
+#[allow(clippy::type_complexity)]
 #[cfg(any(test, feature = "test-support"))]
 authenticate: RwLock<
 Option<Box<dyn 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>>>,
 >,

+#[allow(clippy::type_complexity)]
 #[cfg(any(test, feature = "test-support"))]
 establish_connection: RwLock<
 Option<
@@ -149,6 +152,7 @@ struct ClientState {
 entities_by_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakEntityHandle>,
 models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
 entity_types_by_message_type: HashMap<TypeId, TypeId>,
+#[allow(clippy::type_complexity)]
 message_handlers: HashMap<
 TypeId,
 Arc<
@@ -596,7 +600,7 @@ impl Client {
 let mut status_rx = self.status();
 let _ = status_rx.next().await;
 futures::select_biased! {
-authenticate = self.authenticate(&cx).fuse() => {
+authenticate = self.authenticate(cx).fuse() => {
 match authenticate {
 Ok(creds) => credentials = Some(creds),
 Err(err) => {
@@ -819,7 +823,7 @@ impl Client {
 .get("Location")
 .ok_or_else(|| anyhow!("missing location header in /rpc response"))?
 .to_str()
-.map_err(|error| EstablishConnectionError::other(error))?
+.map_err(EstablishConnectionError::other)?
 .to_string();
 }
 // Until we switch the zed.dev domain to point to the new Next.js app, there
@@ -1051,7 +1055,7 @@ fn write_credentials_to_keychain(credentials: &Credentials, cx: &AsyncAppContext
 )
 }

-const WORKTREE_URL_PREFIX: &'static str = "zed://worktrees/";
+const WORKTREE_URL_PREFIX: &str = "zed://worktrees/";

 pub fn encode_worktree_url(id: u64, access_token: &str) -> String {
 format!("{}{}/{}", WORKTREE_URL_PREFIX, id, access_token)
@@ -1081,8 +1085,8 @@ mod tests {
 cx.foreground().forbid_parking();

 let user_id = 5;
-let mut client = Client::new(FakeHttpClient::with_404_response());
-let server = FakeServer::for_client(user_id, &mut client, &cx).await;
+let client = Client::new(FakeHttpClient::with_404_response());
+let server = FakeServer::for_client(user_id, &client, cx).await;
 let mut status = client.status();
 assert!(matches!(
 status.next().await,
@@ -1169,8 +1173,8 @@ mod tests {
 cx.foreground().forbid_parking();

 let user_id = 5;
-let mut client = Client::new(FakeHttpClient::with_404_response());
-let server = FakeServer::for_client(user_id, &mut client, &cx).await;
+let client = Client::new(FakeHttpClient::with_404_response());
+let server = FakeServer::for_client(user_id, &client, cx).await;

 let (done_tx1, mut done_rx1) = smol::channel::unbounded();
 let (done_tx2, mut done_rx2) = smol::channel::unbounded();
@@ -1215,8 +1219,8 @@ mod tests {
 cx.foreground().forbid_parking();

 let user_id = 5;
-let mut client = Client::new(FakeHttpClient::with_404_response());
-let server = FakeServer::for_client(user_id, &mut client, &cx).await;
+let client = Client::new(FakeHttpClient::with_404_response());
+let server = FakeServer::for_client(user_id, &client, cx).await;

 let model = cx.add_model(|_| Model::default());
 let (done_tx1, _done_rx1) = smol::channel::unbounded();
@@ -1243,8 +1247,8 @@ mod tests {
 cx.foreground().forbid_parking();

 let user_id = 5;
-let mut client = Client::new(FakeHttpClient::with_404_response());
-let server = FakeServer::for_client(user_id, &mut client, &cx).await;
+let client = Client::new(FakeHttpClient::with_404_response());
+let server = FakeServer::for_client(user_id, &client, cx).await;

 let model = cx.add_model(|_| Model::default());
 let (done_tx, mut done_rx) = smol::channel::unbounded();
---
@@ -16,7 +16,7 @@ pub type Request = isahc::Request<AsyncBody>;
 pub type Response = isahc::Response<AsyncBody>;

 pub trait HttpClient: Send + Sync {
-fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response, Error>>;
+fn send(&self, req: Request) -> BoxFuture<Result<Response, Error>>;

 fn get<'a>(
 &'a self,
@@ -45,7 +45,7 @@ pub fn client() -> Arc<dyn HttpClient> {
 }

 impl HttpClient for isahc::HttpClient {
-fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response, Error>> {
+fn send(&self, req: Request) -> BoxFuture<Result<Response, Error>> {
 Box::pin(async move { self.send_async(req).await })
 }
 }
---
@@ -56,7 +56,7 @@ impl FakeServer {
 }
 })
 .override_establish_connection({
-let peer = Arc::downgrade(&server.peer).clone();
+let peer = Arc::downgrade(&server.peer);
 let state = Arc::downgrade(&server.state);
 move |credentials, cx| {
 let peer = peer.clone();
@@ -123,6 +123,7 @@ impl FakeServer {
 self.peer.send(self.connection_id(), message).unwrap();
 }

+#[allow(clippy::await_holding_lock)]
 pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
 self.executor.start_waiting();
 let message = self
@@ -194,7 +195,7 @@ pub struct FakeHttpClient {
 }

 impl FakeHttpClient {
-pub fn new<Fut, F>(handler: F) -> Arc<dyn HttpClient>
+pub fn create<Fut, F>(handler: F) -> Arc<dyn HttpClient>
 where
 Fut: 'static + Send + Future<Output = Result<Response, http::Error>>,
 F: 'static + Send + Sync + Fn(Request) -> Fut,
@@ -205,7 +206,7 @@ impl FakeHttpClient {
 }

 pub fn with_404_response() -> Arc<dyn HttpClient> {
-Self::new(|_| async move {
+Self::create(|_| async move {
 Ok(isahc::Response::builder()
 .status(404)
 .body(Default::default())
@@ -221,7 +222,7 @@ impl fmt::Debug for FakeHttpClient {
 }

 impl HttpClient for FakeHttpClient {
-fn send<'a>(&'a self, req: Request) -> BoxFuture<'a, Result<Response, crate::http::Error>> {
+fn send(&self, req: Request) -> BoxFuture<Result<Response, crate::http::Error>> {
 let future = (self.handler)(req);
 Box::pin(async move { future.await.map(Into::into) })
 }
---
@@ -17,7 +17,7 @@ pub struct User {

 impl PartialOrd for User {
 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-Some(self.cmp(&other))
+Some(self.cmp(other))
 }
 }
---
@@ -54,14 +54,14 @@ impl<'a> Add<&'a Self> for Local {
 type Output = Local;

 fn add(self, other: &'a Self) -> Self::Output {
-cmp::max(&self, other).clone()
+*cmp::max(&self, other)
 }
 }

 impl<'a> AddAssign<&'a Local> for Local {
 fn add_assign(&mut self, other: &Self) {
 if *self < *other {
-*self = other.clone();
+*self = *other;
 }
 }
 }
@@ -177,7 +177,7 @@ impl Global {
 false
 }

-pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = Local> {
+pub fn iter(&self) -> impl Iterator<Item = Local> + '_ {
 self.0.iter().enumerate().map(|(replica_id, seq)| Local {
 replica_id: replica_id as ReplicaId,
 value: *seq,
---
@@ -394,7 +394,7 @@ async fn create_access_token(
 } else {
 return Err(Error::Http(
 StatusCode::UNAUTHORIZED,
-format!("you do not have permission to impersonate other users"),
+"you do not have permission to impersonate other users".to_string(),
 ));
 }
 }
---
@@ -44,7 +44,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
 let state = req.extensions().get::<Arc<AppState>>().unwrap();
 let mut credentials_valid = false;
 for password_hash in state.db.get_access_token_hashes(user_id).await? {
-if verify_access_token(&access_token, &password_hash)? {
+if verify_access_token(access_token, &password_hash)? {
 credentials_valid = true;
 break;
 }
@@ -100,7 +100,7 @@ pub fn encrypt_access_token(access_token: &str, public_key: String) -> Result<St
 let native_app_public_key =
 rpc::auth::PublicKey::try_from(public_key).context("failed to parse app public key")?;
 let encrypted_access_token = native_app_public_key
-.encrypt_string(&access_token)
+.encrypt_string(access_token)
 .context("failed to encrypt access token with public key")?;
 Ok(encrypted_access_token)
 }
---
@@ -154,7 +154,7 @@ pub trait Db: Send + Sync {
 #[cfg(test)]
 async fn teardown(&self, url: &str);
 #[cfg(test)]
-fn as_fake<'a>(&'a self) -> Option<&'a tests::FakeDb>;
+fn as_fake(&self) -> Option<&tests::FakeDb>;
 }

 pub struct PostgresDb {
@@ -165,7 +165,7 @@ impl PostgresDb {
 pub async fn new(url: &str, max_connections: u32) -> Result<Self> {
 let pool = DbOptions::new()
 .max_connections(max_connections)
-.connect(&url)
+.connect(url)
 .await
 .context("failed to connect to postgres database")?;
 Ok(Self { pool })
@@ -568,7 +568,7 @@ impl Db for PostgresDb {
 for count in counts {
 extension_counts
 .entry(count.worktree_id as u64)
-.or_insert(HashMap::default())
+.or_insert_with(HashMap::default)
 .insert(count.extension, count.count as usize);
 }
 Ok(extension_counts)
@@ -863,20 +863,18 @@ impl Db for PostgresDb {
 should_notify,
 });
 }
+} else if accepted {
+contacts.push(Contact::Accepted {
+user_id: user_id_a,
+should_notify: should_notify && !a_to_b,
+});
+} else if a_to_b {
+contacts.push(Contact::Incoming {
+user_id: user_id_a,
+should_notify,
+});
 } else {
-if accepted {
-contacts.push(Contact::Accepted {
-user_id: user_id_a,
-should_notify: should_notify && !a_to_b,
-});
-} else if a_to_b {
-contacts.push(Contact::Incoming {
-user_id: user_id_a,
-should_notify,
-});
-} else {
-contacts.push(Contact::Outgoing { user_id: user_id_a });
-}
+contacts.push(Contact::Outgoing { user_id: user_id_a });
 }
 }
@@ -1331,7 +1329,7 @@ macro_rules! id_type {
 }

 #[allow(unused)]
-pub fn to_proto(&self) -> u64 {
+pub fn to_proto(self) -> u64 {
 self.0 as u64
 }
 }
@@ -2408,6 +2406,7 @@ pub mod tests {
 }

 impl TestDb {
+#[allow(clippy::await_holding_lock)]
 pub async fn postgres() -> Self {
 lazy_static! {
 static ref LOCK: Mutex<()> = Mutex::new(());
---
@@ -18,6 +18,7 @@ use futures::{channel::mpsc, Future, StreamExt as _};
 use gpui::{
 executor::{self, Deterministic},
 geometry::vector::vec2f,
+test::EmptyView,
 ModelHandle, Task, TestAppContext, ViewHandle,
 };
 use language::{
@@ -67,7 +68,7 @@ async fn test_share_project(
 cx_b2: &mut TestAppContext,
 ) {
 cx_a.foreground().forbid_parking();
-let (window_b, _) = cx_b.add_window(|_| EmptyView);
+let (_, window_b) = cx_b.add_window(|_| EmptyView);
 let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
 let client_a = server.create_client(cx_a, "user_a").await;
 let client_b = server.create_client(cx_b, "user_b").await;
@@ -145,7 +146,7 @@ async fn test_share_project(
 .await
 .unwrap();

-let editor_b = cx_b.add_view(window_b, |cx| Editor::for_buffer(buffer_b, None, cx));
+let editor_b = cx_b.add_view(&window_b, |cx| Editor::for_buffer(buffer_b, None, cx));

 // TODO
 // // Create a selection set as client B and see that selection set as client A.
@@ -156,7 +157,7 @@ async fn test_share_project(
 // Edit the buffer as client B and see that edit as client A.
 editor_b.update(cx_b, |editor, cx| editor.handle_input("ok, ", cx));
 buffer_a
-.condition(&cx_a, |buffer, _| buffer.text() == "ok, b-contents")
+.condition(cx_a, |buffer, _| buffer.text() == "ok, b-contents")
 .await;

 // TODO
@@ -481,7 +482,7 @@ async fn test_cancel_join_request(
 client_b.client.clone(),
 client_b.user_store.clone(),
 client_b.project_store.clone(),
-client_b.language_registry.clone().clone(),
+client_b.language_registry.clone(),
 FakeFs::new(cx.background()),
 cx,
 )
@@ -503,7 +504,7 @@ async fn test_cancel_join_request(
 deterministic.run_until_parked();
 assert_eq!(
 &*project_a_events.borrow(),
-&[project::Event::ContactCancelledJoinRequest(user_b.clone())]
+&[project::Event::ContactCancelledJoinRequest(user_b)]
 );
 }

@@ -553,17 +554,17 @@ async fn test_offline_projects(
 user_store: ModelHandle<UserStore>,
 cx: &mut gpui::MutableAppContext,
 ) {
-let open_project_ids = project_store
-.read(cx)
-.projects(cx)
-.filter_map(|project| project.read(cx).remote_id())
-.collect::<Vec<_>>();
-
 let user_store = user_store.read(cx);
 for contact in user_store.contacts() {
 if contact.user.id == user_store.current_user().unwrap().id {
 for project in &contact.projects {
-if !open_project_ids.contains(&project.id) {
+let store_contains_project = project_store
+.read(cx)
+.projects(cx)
+.filter_map(|project| project.read(cx).remote_id())
+.any(|x| x == project.id);
+
+if !store_contains_project {
 panic!(
 concat!(
 "current user's contact data has a project",
@@ -902,7 +903,7 @@ async fn test_propagate_saves_and_fs_changes(
 client_a.fs.insert_file("/a/file4", "4".into()).await;

 worktree_a
-.condition(&cx_a, |tree, _| {
+.condition(cx_a, |tree, _| {
 tree.paths()
 .map(|p| p.to_string_lossy())
 .collect::<Vec<_>>()
@@ -910,7 +911,7 @@ async fn test_propagate_saves_and_fs_changes(
 })
 .await;
 worktree_b
-.condition(&cx_b, |tree, _| {
+.condition(cx_b, |tree, _| {
 tree.paths()
 .map(|p| p.to_string_lossy())
 .collect::<Vec<_>>()
@@ -918,7 +919,7 @@ async fn test_propagate_saves_and_fs_changes(
 })
 .await;
 worktree_c
-.condition(&cx_c, |tree, _| {
+.condition(cx_c, |tree, _| {
 tree.paths()
 .map(|p| p.to_string_lossy())
 .collect::<Vec<_>>()
@@ -928,17 +929,17 @@ async fn test_propagate_saves_and_fs_changes(

 // Ensure buffer files are updated as well.
 buffer_a
-.condition(&cx_a, |buf, _| {
+.condition(cx_a, |buf, _| {
 buf.file().unwrap().path().to_str() == Some("file1-renamed")
 })
 .await;
 buffer_b
-.condition(&cx_b, |buf, _| {
+.condition(cx_b, |buf, _| {
 buf.file().unwrap().path().to_str() == Some("file1-renamed")
 })
 .await;
 buffer_c
-.condition(&cx_c, |buf, _| {
+.condition(cx_c, |buf, _| {
 buf.file().unwrap().path().to_str() == Some("file1-renamed")
 })
 .await;
@@ -1245,7 +1246,7 @@ async fn test_buffer_conflict_after_save(cx_a: &mut TestAppContext, cx_b: &mut T

 buffer_b.update(cx_b, |buf, cx| buf.save(cx)).await.unwrap();
 buffer_b
-.condition(&cx_b, |buffer_b, _| !buffer_b.is_dirty())
+.condition(cx_b, |buffer_b, _| !buffer_b.is_dirty())
 .await;
 buffer_b.read_with(cx_b, |buf, _| {
 assert!(!buf.has_conflict());
@@ -1298,7 +1299,7 @@ async fn test_buffer_reloading(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
 .await
 .unwrap();
 buffer_b
-.condition(&cx_b, |buf, _| {
+.condition(cx_b, |buf, _| {
 buf.text() == new_contents.to_string() && !buf.is_dirty()
 })
 .await;
@@ -1348,7 +1349,7 @@ async fn test_editing_while_guest_opens_buffer(

 let text = buffer_a.read_with(cx_a, |buf, _| buf.text());
 let buffer_b = buffer_b.await.unwrap();
-buffer_b.condition(&cx_b, |buf, _| buf.text() == text).await;
+buffer_b.condition(cx_b, |buf, _| buf.text() == text).await;
 }

 #[gpui::test(iterations = 10)]
@@ -1373,7 +1374,7 @@ async fn test_leaving_worktree_while_opening_buffer(

 // See that a guest has joined as client A.
 project_a
-.condition(&cx_a, |p, _| p.collaborators().len() == 1)
+.condition(cx_a, |p, _| p.collaborators().len() == 1)
 .await;

 // Begin opening a buffer as client B, but leave the project before the open completes.
@@ -1385,7 +1386,7 @@ async fn test_leaving_worktree_while_opening_buffer(

 // See that the guest has left.
 project_a
-.condition(&cx_a, |p, _| p.collaborators().len() == 0)
+.condition(cx_a, |p, _| p.collaborators().is_empty())
 .await;
 }

@@ -1420,7 +1421,7 @@ async fn test_leaving_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
 // Drop client B's connection and ensure client A observes client B leaving the project.
 client_b.disconnect(&cx_b.to_async()).unwrap();
 project_a
-.condition(cx_a, |p, _| p.collaborators().len() == 0)
+.condition(cx_a, |p, _| p.collaborators().is_empty())
 .await;

 // Rejoin the project as client B
@@ -1436,7 +1437,7 @@ async fn test_leaving_project(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
 server.disconnect_client(client_b.current_user_id(cx_b));
 cx_a.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
 project_a
-.condition(cx_a, |p, _| p.collaborators().len() == 0)
+.condition(cx_a, |p, _| p.collaborators().is_empty())
 .await;
 }

@@ -1638,7 +1639,6 @@ async fn test_collaborating_with_diagnostics(
 buffer
 .snapshot()
 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
-.map(|entry| entry)
 .collect::<Vec<_>>(),
 &[
 DiagnosticEntry {
@@ -1736,14 +1736,14 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
 .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx))
 .await
 .unwrap();
-let (window_b, _) = cx_b.add_window(|_| EmptyView);
-let editor_b = cx_b.add_view(window_b, |cx| {
+let (_, window_b) = cx_b.add_window(|_| EmptyView);
+let editor_b = cx_b.add_view(&window_b, |cx| {
 Editor::for_buffer(buffer_b.clone(), Some(project_b.clone()), cx)
 });

 let fake_language_server = fake_language_servers.next().await.unwrap();
 buffer_b
-.condition(&cx_b, |buffer, _| !buffer.completion_triggers().is_empty())
+.condition(cx_b, |buffer, _| !buffer.completion_triggers().is_empty())
 .await;

 // Type a completion trigger character as the guest.
@@ -1807,12 +1807,12 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
 .await
 .unwrap();
 buffer_a
-.condition(&cx_a, |buffer, _| buffer.text() == "fn main() { a. }")
+.condition(cx_a, |buffer, _| buffer.text() == "fn main() { a. }")
 .await;

 // Confirm a completion on the guest.
 editor_b
-.condition(&cx_b, |editor, _| editor.context_menu_visible())
+.condition(cx_b, |editor, _| editor.context_menu_visible())
 .await;
 editor_b.update(cx_b, |editor, cx| {
 editor.confirm_completion(&ConfirmCompletion { item_ix: Some(0) }, cx);
@@ -1843,12 +1843,12 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu

 // The additional edit is applied.
 buffer_a
-.condition(&cx_a, |buffer, _| {
+.condition(cx_a, |buffer, _| {
 buffer.text() == "use d::SomeTrait;\nfn main() { a.first_method() }"
 })
 .await;
 buffer_b
-.condition(&cx_b, |buffer, _| {
+.condition(cx_b, |buffer, _| {
 buffer.text() == "use d::SomeTrait;\nfn main() { a.first_method() }"
 })
 .await;
@@ -2255,9 +2255,9 @@ async fn test_references(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
 Path::new("three.rs")
 );

-assert_eq!(references[0].range.to_offset(&two_buffer), 24..27);
-assert_eq!(references[1].range.to_offset(&two_buffer), 35..38);
-assert_eq!(references[2].range.to_offset(&three_buffer), 37..40);
+assert_eq!(references[0].range.to_offset(two_buffer), 24..27);
+assert_eq!(references[1].range.to_offset(two_buffer), 35..38);
+assert_eq!(references[2].range.to_offset(three_buffer), 37..40);
 });
 }

@@ -2706,7 +2706,7 @@ async fn test_collaborating_with_code_actions(
 cx_b: &mut TestAppContext,
 ) {
 cx_a.foreground().forbid_parking();
-cx_b.update(|cx| editor::init(cx));
+cx_b.update(editor::init);
 let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
 let client_a = server.create_client(cx_a, "user_a").await;
 let client_b = server.create_client(cx_b, "user_b").await;
@@ -2838,7 +2838,7 @@ async fn test_collaborating_with_code_actions(
 );
 });
 editor_b
-.condition(&cx_b, |editor, _| editor.context_menu_visible())
+.condition(cx_b, |editor, _| editor.context_menu_visible())
 .await;

 fake_language_server.remove_request_handler::<lsp::request::CodeActionRequest>();
@@ -2911,7 +2911,7 @@ async fn test_collaborating_with_code_actions(
 #[gpui::test(iterations = 10)]
 async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
 cx_a.foreground().forbid_parking();
-cx_b.update(|cx| editor::init(cx));
+cx_b.update(editor::init);
 let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
 let client_a = server.create_client(cx_a, "user_a").await;
 let client_b = server.create_client(cx_b, "user_b").await;
@@ -3097,7 +3097,7 @@ async fn test_language_server_statuses(
 ) {
 deterministic.forbid_parking();

-cx_b.update(|cx| editor::init(cx));
+cx_b.update(editor::init);
 let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
 let client_a = server.create_client(cx_a, "user_a").await;
 let client_b = server.create_client(cx_b, "user_b").await;
@ -3250,7 +3250,7 @@ async fn test_basic_chat(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
|||
});
|
||||
channel_a.read_with(cx_a, |channel, _| assert!(channel.messages().is_empty()));
|
||||
channel_a
|
||||
.condition(&cx_a, |channel, _| {
|
||||
.condition(cx_a, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [("user_b".to_string(), "hello A, it's B.".to_string(), false)]
|
||||
})
|
||||
|
@ -3276,7 +3276,7 @@ async fn test_basic_chat(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
|||
});
|
||||
channel_b.read_with(cx_b, |channel, _| assert!(channel.messages().is_empty()));
|
||||
channel_b
|
||||
.condition(&cx_b, |channel, _| {
|
||||
.condition(cx_b, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [("user_b".to_string(), "hello A, it's B.".to_string(), false)]
|
||||
})
|
||||
|
@ -3303,7 +3303,7 @@ async fn test_basic_chat(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
|||
.unwrap();
|
||||
|
||||
channel_b
|
||||
.condition(&cx_b, |channel, _| {
|
||||
.condition(cx_b, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [
|
||||
("user_b".to_string(), "hello A, it's B.".to_string(), false),
|
||||
|
@ -3343,10 +3343,10 @@ async fn test_chat_message_validation(cx_a: &mut TestAppContext) {
|
|||
let db = &server.app_state.db;
|
||||
let org_id = db.create_org("Test Org", "test-org").await.unwrap();
|
||||
let channel_id = db.create_org_channel(org_id, "test-channel").await.unwrap();
|
||||
db.add_org_member(org_id, client_a.current_user_id(&cx_a), false)
|
||||
db.add_org_member(org_id, client_a.current_user_id(cx_a), false)
|
||||
.await
|
||||
.unwrap();
|
||||
db.add_channel_member(channel_id, client_a.current_user_id(&cx_a), false)
|
||||
db.add_channel_member(channel_id, client_a.current_user_id(cx_a), false)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -3405,24 +3405,24 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
// Create an org that includes these 2 users.
|
||||
let db = &server.app_state.db;
|
||||
let org_id = db.create_org("Test Org", "test-org").await.unwrap();
|
||||
db.add_org_member(org_id, client_a.current_user_id(&cx_a), false)
|
||||
db.add_org_member(org_id, client_a.current_user_id(cx_a), false)
|
||||
.await
|
||||
.unwrap();
|
||||
db.add_org_member(org_id, client_b.current_user_id(&cx_b), false)
|
||||
db.add_org_member(org_id, client_b.current_user_id(cx_b), false)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Create a channel that includes all the users.
|
||||
let channel_id = db.create_org_channel(org_id, "test-channel").await.unwrap();
|
||||
db.add_channel_member(channel_id, client_a.current_user_id(&cx_a), false)
|
||||
db.add_channel_member(channel_id, client_a.current_user_id(cx_a), false)
|
||||
.await
|
||||
.unwrap();
|
||||
db.add_channel_member(channel_id, client_b.current_user_id(&cx_b), false)
|
||||
db.add_channel_member(channel_id, client_b.current_user_id(cx_b), false)
|
||||
.await
|
||||
.unwrap();
|
||||
db.create_channel_message(
|
||||
channel_id,
|
||||
client_b.current_user_id(&cx_b),
|
||||
client_b.current_user_id(cx_b),
|
||||
"hello A, it's B.",
|
||||
OffsetDateTime::now_utc(),
|
||||
2,
|
||||
|
@ -3450,7 +3450,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
});
|
||||
channel_a.read_with(cx_a, |channel, _| assert!(channel.messages().is_empty()));
|
||||
channel_a
|
||||
.condition(&cx_a, |channel, _| {
|
||||
.condition(cx_a, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [("user_b".to_string(), "hello A, it's B.".to_string(), false)]
|
||||
})
|
||||
|
@ -3476,7 +3476,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
});
|
||||
channel_b.read_with(cx_b, |channel, _| assert!(channel.messages().is_empty()));
|
||||
channel_b
|
||||
.condition(&cx_b, |channel, _| {
|
||||
.condition(cx_b, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [("user_b".to_string(), "hello A, it's B.".to_string(), false)]
|
||||
})
|
||||
|
@ -3484,7 +3484,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
|
||||
// Disconnect client B, ensuring we can still access its cached channel data.
|
||||
server.forbid_connections();
|
||||
server.disconnect_client(client_b.current_user_id(&cx_b));
|
||||
server.disconnect_client(client_b.current_user_id(cx_b));
|
||||
cx_b.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
|
||||
while !matches!(
|
||||
status_b.next().await,
|
||||
|
@ -3553,7 +3553,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
// Verify that B sees the new messages upon reconnection, as well as the message client B
|
||||
// sent while offline.
|
||||
channel_b
|
||||
.condition(&cx_b, |channel, _| {
|
||||
.condition(cx_b, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [
|
||||
("user_b".to_string(), "hello A, it's B.".to_string(), false),
|
||||
|
@ -3572,7 +3572,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
.await
|
||||
.unwrap();
|
||||
channel_b
|
||||
.condition(&cx_b, |channel, _| {
|
||||
.condition(cx_b, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [
|
||||
("user_b".to_string(), "hello A, it's B.".to_string(), false),
|
||||
|
@ -3591,7 +3591,7 @@ async fn test_chat_reconnection(cx_a: &mut TestAppContext, cx_b: &mut TestAppCon
|
|||
.await
|
||||
.unwrap();
|
||||
channel_a
|
||||
.condition(&cx_a, |channel, _| {
|
||||
.condition(cx_a, |channel, _| {
|
||||
channel_messages(channel)
|
||||
== [
|
||||
("user_b".to_string(), "hello A, it's B.".to_string(), false),
|
||||
|
@ -3700,7 +3700,7 @@ async fn test_contacts(
|
|||
}
|
||||
|
||||
project_a
|
||||
.condition(&cx_a, |project, _| {
|
||||
.condition(cx_a, |project, _| {
|
||||
project.collaborators().contains_key(&client_b.peer_id)
|
||||
})
|
||||
.await;
|
||||
|
@ -3765,6 +3765,7 @@ async fn test_contacts(
|
|||
});
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn contacts(user_store: &UserStore) -> Vec<(&str, bool, Vec<(&str, Vec<&str>)>)> {
|
||||
user_store
|
||||
.contacts()
|
||||
|
@ -3830,27 +3831,27 @@ async fn test_contact_requests(
|
|||
|
||||
// All users see the pending request appear in all their clients.
|
||||
assert_eq!(
|
||||
client_a.summarize_contacts(&cx_a).outgoing_requests,
|
||||
client_a.summarize_contacts(cx_a).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_a2.summarize_contacts(&cx_a2).outgoing_requests,
|
||||
client_a2.summarize_contacts(cx_a2).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b.summarize_contacts(&cx_b).incoming_requests,
|
||||
client_b.summarize_contacts(cx_b).incoming_requests,
|
||||
&["user_a", "user_c"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b2.summarize_contacts(&cx_b2).incoming_requests,
|
||||
client_b2.summarize_contacts(cx_b2).incoming_requests,
|
||||
&["user_a", "user_c"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_c.summarize_contacts(&cx_c).outgoing_requests,
|
||||
client_c.summarize_contacts(cx_c).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_c2.summarize_contacts(&cx_c2).outgoing_requests,
|
||||
client_c2.summarize_contacts(cx_c2).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
|
||||
|
@ -3860,15 +3861,15 @@ async fn test_contact_requests(
|
|||
disconnect_and_reconnect(&client_c, cx_c).await;
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
client_a.summarize_contacts(&cx_a).outgoing_requests,
|
||||
client_a.summarize_contacts(cx_a).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b.summarize_contacts(&cx_b).incoming_requests,
|
||||
client_b.summarize_contacts(cx_b).incoming_requests,
|
||||
&["user_a", "user_c"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_c.summarize_contacts(&cx_c).outgoing_requests,
|
||||
client_c.summarize_contacts(cx_c).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
|
||||
|
@ -3884,18 +3885,18 @@ async fn test_contact_requests(
|
|||
executor.run_until_parked();
|
||||
|
||||
// User B sees user A as their contact now in all client, and the incoming request from them is removed.
|
||||
let contacts_b = client_b.summarize_contacts(&cx_b);
|
||||
let contacts_b = client_b.summarize_contacts(cx_b);
|
||||
assert_eq!(contacts_b.current, &["user_a", "user_b"]);
|
||||
assert_eq!(contacts_b.incoming_requests, &["user_c"]);
|
||||
let contacts_b2 = client_b2.summarize_contacts(&cx_b2);
|
||||
let contacts_b2 = client_b2.summarize_contacts(cx_b2);
|
||||
assert_eq!(contacts_b2.current, &["user_a", "user_b"]);
|
||||
assert_eq!(contacts_b2.incoming_requests, &["user_c"]);
|
||||
|
||||
// User A sees user B as their contact now in all clients, and the outgoing request to them is removed.
|
||||
let contacts_a = client_a.summarize_contacts(&cx_a);
|
||||
let contacts_a = client_a.summarize_contacts(cx_a);
|
||||
assert_eq!(contacts_a.current, &["user_a", "user_b"]);
|
||||
assert!(contacts_a.outgoing_requests.is_empty());
|
||||
let contacts_a2 = client_a2.summarize_contacts(&cx_a2);
|
||||
let contacts_a2 = client_a2.summarize_contacts(cx_a2);
|
||||
assert_eq!(contacts_a2.current, &["user_a", "user_b"]);
|
||||
assert!(contacts_a2.outgoing_requests.is_empty());
|
||||
|
||||
|
@ -3905,20 +3906,20 @@ async fn test_contact_requests(
|
|||
disconnect_and_reconnect(&client_c, cx_c).await;
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
client_a.summarize_contacts(&cx_a).current,
|
||||
client_a.summarize_contacts(cx_a).current,
|
||||
&["user_a", "user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b.summarize_contacts(&cx_b).current,
|
||||
client_b.summarize_contacts(cx_b).current,
|
||||
&["user_a", "user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b.summarize_contacts(&cx_b).incoming_requests,
|
||||
client_b.summarize_contacts(cx_b).incoming_requests,
|
||||
&["user_c"]
|
||||
);
|
||||
assert_eq!(client_c.summarize_contacts(&cx_c).current, &["user_c"]);
|
||||
assert_eq!(client_c.summarize_contacts(cx_c).current, &["user_c"]);
|
||||
assert_eq!(
|
||||
client_c.summarize_contacts(&cx_c).outgoing_requests,
|
||||
client_c.summarize_contacts(cx_c).outgoing_requests,
|
||||
&["user_b"]
|
||||
);
|
||||
|
||||
|
@ -3934,18 +3935,18 @@ async fn test_contact_requests(
|
|||
executor.run_until_parked();
|
||||
|
||||
// User B doesn't see user C as their contact, and the incoming request from them is removed.
|
||||
let contacts_b = client_b.summarize_contacts(&cx_b);
|
||||
let contacts_b = client_b.summarize_contacts(cx_b);
|
||||
assert_eq!(contacts_b.current, &["user_a", "user_b"]);
|
||||
assert!(contacts_b.incoming_requests.is_empty());
|
||||
let contacts_b2 = client_b2.summarize_contacts(&cx_b2);
|
||||
let contacts_b2 = client_b2.summarize_contacts(cx_b2);
|
||||
assert_eq!(contacts_b2.current, &["user_a", "user_b"]);
|
||||
assert!(contacts_b2.incoming_requests.is_empty());
|
||||
|
||||
// User C doesn't see user B as their contact, and the outgoing request to them is removed.
|
||||
let contacts_c = client_c.summarize_contacts(&cx_c);
|
||||
let contacts_c = client_c.summarize_contacts(cx_c);
|
||||
assert_eq!(contacts_c.current, &["user_c"]);
|
||||
assert!(contacts_c.outgoing_requests.is_empty());
|
||||
let contacts_c2 = client_c2.summarize_contacts(&cx_c2);
|
||||
let contacts_c2 = client_c2.summarize_contacts(cx_c2);
|
||||
assert_eq!(contacts_c2.current, &["user_c"]);
|
||||
assert!(contacts_c2.outgoing_requests.is_empty());
|
||||
|
||||
|
@ -3955,20 +3956,20 @@ async fn test_contact_requests(
|
|||
disconnect_and_reconnect(&client_c, cx_c).await;
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
client_a.summarize_contacts(&cx_a).current,
|
||||
client_a.summarize_contacts(cx_a).current,
|
||||
&["user_a", "user_b"]
|
||||
);
|
||||
assert_eq!(
|
||||
client_b.summarize_contacts(&cx_b).current,
|
||||
client_b.summarize_contacts(cx_b).current,
|
||||
&["user_a", "user_b"]
|
||||
);
|
||||
assert!(client_b
|
||||
.summarize_contacts(&cx_b)
|
||||
.summarize_contacts(cx_b)
|
||||
.incoming_requests
|
||||
.is_empty());
|
||||
assert_eq!(client_c.summarize_contacts(&cx_c).current, &["user_c"]);
|
||||
assert_eq!(client_c.summarize_contacts(cx_c).current, &["user_c"]);
|
||||
assert!(client_c
|
||||
.summarize_contacts(&cx_c)
|
||||
.summarize_contacts(cx_c)
|
||||
.outgoing_requests
|
||||
.is_empty());
|
||||
|
||||
|
@@ -4245,7 +4246,10 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
 // Clients A and B follow each other in split panes
 workspace_a.update(cx_a, |workspace, cx| {
 workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
-assert_ne!(*workspace.active_pane(), pane_a1);
+let pane_a1 = pane_a1.clone();
+cx.defer(move |workspace, _| {
+assert_ne!(*workspace.active_pane(), pane_a1);
+});
 });
 workspace_a
 .update(cx_a, |workspace, cx| {
@@ -4258,7 +4262,10 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
 .unwrap();
 workspace_b.update(cx_b, |workspace, cx| {
 workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
-assert_ne!(*workspace.active_pane(), pane_b1);
+let pane_b1 = pane_b1.clone();
+cx.defer(move |workspace, _| {
+assert_ne!(*workspace.active_pane(), pane_b1);
+});
 });
 workspace_b
 .update(cx_b, |workspace, cx| {
@@ -4270,17 +4277,26 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
 .await
 .unwrap();

+workspace_a.update(cx_a, |workspace, cx| {
+workspace.activate_next_pane(cx);
+});
+// Wait for focus effects to be fully flushed
+workspace_a.update(cx_a, |workspace, _| {
+assert_eq!(*workspace.active_pane(), pane_a1);
+});
+
 workspace_a
 .update(cx_a, |workspace, cx| {
-workspace.activate_next_pane(cx);
-assert_eq!(*workspace.active_pane(), pane_a1);
 workspace.open_path((worktree_id, "3.txt"), true, cx)
 })
 .await
 .unwrap();
+workspace_b.update(cx_b, |workspace, cx| {
+workspace.activate_next_pane(cx);
+});
+
 workspace_b
 .update(cx_b, |workspace, cx| {
-workspace.activate_next_pane(cx);
 assert_eq!(*workspace.active_pane(), pane_b1);
 workspace.open_path((worktree_id, "4.txt"), true, cx)
 })
@@ -4310,17 +4326,24 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
 Some((worktree_id, "3.txt").into())
 );
 workspace.activate_next_pane(cx);
 });

+workspace_a.update(cx_a, |workspace, cx| {
+assert_eq!(
+workspace.active_item(cx).unwrap().project_path(cx),
+Some((worktree_id, "4.txt").into())
+);
+});
+
 workspace_b.update(cx_b, |workspace, cx| {
 assert_eq!(
 workspace.active_item(cx).unwrap().project_path(cx),
 Some((worktree_id, "4.txt").into())
 );
 workspace.activate_next_pane(cx);
 });

 workspace_b.update(cx_b, |workspace, cx| {
 assert_eq!(
 workspace.active_item(cx).unwrap().project_path(cx),
 Some((worktree_id, "3.txt").into())
@@ -4530,13 +4553,13 @@ async fn test_peers_simultaneously_following_each_other(
 futures::try_join!(a_follow_b, b_follow_a).unwrap();
 workspace_a.read_with(cx_a, |workspace, _| {
 assert_eq!(
-workspace.leader_for_pane(&workspace.active_pane()),
+workspace.leader_for_pane(workspace.active_pane()),
 Some(client_b_id)
 );
 });
 workspace_b.read_with(cx_b, |workspace, _| {
 assert_eq!(
-workspace.leader_for_pane(&workspace.active_pane()),
+workspace.leader_for_pane(workspace.active_pane()),
 Some(client_a_id)
 );
 });
@@ -4717,7 +4740,7 @@ async fn test_random_collaboration(

 fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>({
 let rng = rng.clone();
-let project = project.clone();
+let project = project;
 move |params, mut cx| {
 let highlights = if let Some(project) = project.upgrade(&cx) {
 project.update(&mut cx, |project, cx| {
@@ -5004,10 +5027,12 @@ async fn test_random_collaboration(
 for guest_buffer in &guest_client.buffers {
 let buffer_id = guest_buffer.read_with(&guest_cx, |buffer, _| buffer.remote_id());
 let host_buffer = host_project.read_with(&host_cx, |project, cx| {
-project.buffer_for_id(buffer_id, cx).expect(&format!(
-"host does not have buffer for guest:{}, peer:{}, id:{}",
-guest_client.username, guest_client.peer_id, buffer_id
-))
+project.buffer_for_id(buffer_id, cx).unwrap_or_else(|| {
+panic!(
+"host does not have buffer for guest:{}, peer:{}, id:{}",
+guest_client.username, guest_client.peer_id, buffer_id
+)
+})
 });
 let path =
 host_buffer.read_with(&host_cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
@@ -5151,7 +5176,7 @@ impl TestServer {
 languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
 themes: ThemeRegistry::new((), cx.font_cache()),
 fs: fs.clone(),
-build_window_options: || Default::default(),
+build_window_options: Default::default,
 initialize_workspace: |_, _, _| unimplemented!(),
 });

@@ -5387,8 +5412,8 @@ impl TestClient {
 project: &ModelHandle<Project>,
 cx: &mut TestAppContext,
 ) -> ViewHandle<Workspace> {
-let (window_id, _) = cx.add_window(|_| EmptyView);
-cx.add_view(window_id, |cx| Workspace::new(project.clone(), cx))
+let (_, root_view) = cx.add_window(|_| EmptyView);
+cx.add_view(&root_view, |cx| Workspace::new(project.clone(), cx))
 }

 async fn simulate_host(
@@ -5517,7 +5542,7 @@ impl TestClient {

 log::info!("Host: creating file {:?}", path,);

-if fs.create_dir(&parent_path).await.is_ok()
+if fs.create_dir(parent_path).await.is_ok()
 && fs.create_file(&path, Default::default()).await.is_ok()
 {
 break;
@@ -5564,7 +5589,7 @@ impl TestClient {
 let buffer = if client.buffers.is_empty() || rng.lock().gen() {
 let worktree = if let Some(worktree) = project.read_with(cx, |project, cx| {
 project
-.worktrees(&cx)
+.worktrees(cx)
 .filter(|worktree| {
 let worktree = worktree.read(cx);
 worktree.is_visible()
@@ -5797,7 +5822,7 @@ impl TestClient {
 let worktree = project
 .read_with(cx, |project, cx| {
 project
-.worktrees(&cx)
+.worktrees(cx)
 .filter(|worktree| {
 let worktree = worktree.read(cx);
 worktree.is_visible()
@@ -5901,19 +5926,3 @@ fn channel_messages(channel: &Channel) -> Vec<(String, String, bool)> {
 })
 .collect()
 }
-
-struct EmptyView;
-
-impl gpui::Entity for EmptyView {
-type Event = ();
-}
-
-impl gpui::View for EmptyView {
-fn ui_name() -> &'static str {
-"empty view"
-}
-
-fn render(&mut self, _: &mut gpui::RenderContext<Self>) -> gpui::ElementBox {
-gpui::Element::boxed(gpui::elements::Empty::new())
-}
-}
---
@@ -832,7 +832,7 @@ impl Server {
 // First, we send the metadata associated with each worktree.
 for (receipt, replica_id) in &receipts_with_replica_ids {
 self.peer.respond(
-receipt.clone(),
+*receipt,
 proto::JoinProjectResponse {
 variant: Some(proto::join_project_response::Variant::Accept(
 proto::join_project_response::Accept {
@@ -1711,7 +1711,7 @@ impl Server {
 Ok(())
 }

-pub(crate) async fn store<'a>(&'a self) -> StoreGuard<'a> {
+pub(crate) async fn store(&self) -> StoreGuard<'_> {
 #[cfg(test)]
 tokio::task::yield_now().await;
 let guard = self.store.lock().await;
@@ -1796,7 +1796,7 @@ impl Header for ProtocolVersion {
 {
 let version = values
 .next()
-.ok_or_else(|| axum::headers::Error::invalid())?
+.ok_or_else(axum::headers::Error::invalid)?
 .to_str()
 .map_err(|_| axum::headers::Error::invalid())?
 .parse()
---
@@ -159,8 +159,10 @@ impl Store {
 let connection_projects = mem::take(&mut connection.projects);
 let connection_channels = mem::take(&mut connection.channels);

-let mut result = RemovedConnectionState::default();
-result.user_id = user_id;
+let mut result = RemovedConnectionState {
+user_id,
+..Default::default()
+};

 // Leave all channels.
 for channel_id in connection_channels {
@@ -223,10 +225,10 @@ impl Store {
 .user_id)
 }

-pub fn connection_ids_for_user<'a>(
-&'a self,
+pub fn connection_ids_for_user(
+&self,
 user_id: UserId,
-) -> impl 'a + Iterator<Item = ConnectionId> {
+) -> impl Iterator<Item = ConnectionId> + '_ {
 self.connections_by_user_id
 .get(&user_id)
 .into_iter()
@@ -425,14 +427,14 @@ impl Store {
 }

 for guest_connection in project.guests.keys() {
-if let Some(connection) = self.connections.get_mut(&guest_connection) {
+if let Some(connection) = self.connections.get_mut(guest_connection) {
 connection.projects.remove(&project_id);
 }
 }

 for requester_user_id in project.join_requests.keys() {
 if let Some(requester_connection_ids) =
-self.connections_by_user_id.get_mut(&requester_user_id)
+self.connections_by_user_id.get_mut(requester_user_id)
 {
 for requester_connection_id in requester_connection_ids.iter() {
 if let Some(requester_connection) =
@@ -544,6 +546,7 @@ impl Store {
 Some(receipts)
 }

+#[allow(clippy::type_complexity)]
 pub fn accept_join_project_request(
 &mut self,
 responder_connection_id: ConnectionId,
@@ -638,6 +641,7 @@ impl Store {
 })
 }

+#[allow(clippy::too_many_arguments)]
 pub fn update_worktree(
 &mut self,
 connection_id: ConnectionId,
@@ -660,7 +664,7 @@ impl Store {
 worktree.root_name = worktree_root_name.to_string();

 for entry_id in removed_entries {
-worktree.entries.remove(&entry_id);
+worktree.entries.remove(entry_id);
 }

 for entry in updated_entries {
@@ -760,7 +764,7 @@ impl Store {
 pub fn check_invariants(&self) {
 for (connection_id, connection) in &self.connections {
 for project_id in &connection.projects {
-let project = &self.projects.get(&project_id).unwrap();
+let project = &self.projects.get(project_id).unwrap();
 if project.host_connection_id != *connection_id {
 assert!(project.guests.contains_key(connection_id));
 }
---
@@ -4,7 +4,8 @@ use gpui::{
 actions,
 elements::{ChildView, Flex, Label, ParentElement},
 keymap::Keystroke,
-Action, Element, Entity, MouseState, MutableAppContext, View, ViewContext, ViewHandle,
+Action, AnyViewHandle, Element, Entity, MouseState, MutableAppContext, View, ViewContext,
+ViewHandle,
 };
 use picker::{Picker, PickerDelegate};
 use settings::Settings;
@@ -82,10 +83,12 @@ impl CommandPalette {
 fn toggle(_: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
 let workspace = cx.handle();
 let window_id = cx.window_id();
-let focused_view_id = cx.focused_view_id(window_id).unwrap_or(workspace.id());
+let focused_view_id = cx
+.focused_view_id(window_id)
+.unwrap_or_else(|| workspace.id());

 cx.as_mut().defer(move |cx| {
-let this = cx.add_view(window_id, |cx| Self::new(focused_view_id, cx));
+let this = cx.add_view(workspace.clone(), |cx| Self::new(focused_view_id, cx));
 workspace.update(cx, |workspace, cx| {
 workspace.toggle_modal(cx, |_, cx| {
 cx.subscribe(&this, Self::on_event).detach();
@@ -110,10 +113,10 @@ impl CommandPalette {
 } => {
 let window_id = *window_id;
 let focused_view_id = *focused_view_id;
-let action = (*action).boxed_clone();
+let action = action.boxed_clone();
 workspace.dismiss_modal(cx);
 cx.as_mut()
-.defer(move |cx| cx.dispatch_action_at(window_id, focused_view_id, &*action))
+.defer(move |cx| cx.dispatch_any_action_at(window_id, focused_view_id, action))
 }
 }
 }
@@ -132,8 +135,10 @@ impl View for CommandPalette {
 ChildView::new(self.picker.clone()).boxed()
 }

-fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
-cx.focus(&self.picker);
+fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
+if cx.is_self_focused() {
+cx.focus(&self.picker);
+}
 }
 }

@@ -345,8 +350,8 @@ mod tests {
 });

 let project = Project::test(app_state.fs.clone(), [], cx).await;
-let (window_id, workspace) = cx.add_window(|cx| Workspace::new(project, cx));
-let editor = cx.add_view(window_id, |cx| {
+let (_, workspace) = cx.add_window(|cx| Workspace::new(project, cx));
+let editor = cx.add_view(&workspace, |cx| {
 let mut editor = Editor::single_line(None, cx);
 editor.set_text("abc", cx);
 editor
@ -1,7 +1,7 @@
|
|||
use client::{ContactRequestStatus, User, UserStore};
|
||||
use gpui::{
|
||||
actions, elements::*, Entity, ModelHandle, MouseState, MutableAppContext, RenderContext, Task,
|
||||
View, ViewContext, ViewHandle,
|
||||
actions, elements::*, AnyViewHandle, Entity, ModelHandle, MouseState, MutableAppContext,
|
||||
RenderContext, Task, View, ViewContext, ViewHandle,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::Settings;
|
||||
|
@ -42,7 +42,7 @@ impl View for ContactFinder {
|
|||
ChildView::new(self.picker.clone()).boxed()
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
cx.focus(&self.picker);
|
||||
}
|
||||
}
|
||||
|
@ -111,7 +111,7 @@ impl PickerDelegate for ContactFinder {
|
|||
) -> ElementBox {
|
||||
let theme = &cx.global::<Settings>().theme;
|
||||
let user = &self.potential_contacts[ix];
|
||||
let request_status = self.user_store.read(cx).contact_request_status(&user);
|
||||
let request_status = self.user_store.read(cx).contact_request_status(user);
|
||||
|
||||
let icon_path = match request_status {
|
||||
ContactRequestStatus::None | ContactRequestStatus::RequestReceived => {
|
||||
|
@ -121,7 +121,7 @@ impl PickerDelegate for ContactFinder {
|
|||
"icons/x_mark_8.svg"
|
||||
}
|
||||
};
|
||||
let button_style = if self.user_store.read(cx).is_contact_request_pending(&user) {
|
||||
let button_style = if self.user_store.read(cx).is_contact_request_pending(user) {
|
||||
&theme.contact_finder.disabled_contact_button
|
||||
} else {
|
||||
&theme.contact_finder.contact_button
|
||||
|
|
|
@ -13,9 +13,9 @@ use gpui::{
|
|||
geometry::{rect::RectF, vector::vec2f},
|
||||
impl_actions, impl_internal_actions,
|
||||
platform::CursorStyle,
|
||||
AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, MouseButton,
|
||||
MutableAppContext, RenderContext, Subscription, View, ViewContext, ViewHandle, WeakModelHandle,
|
||||
WeakViewHandle,
|
||||
AnyViewHandle, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle,
|
||||
MouseButton, MutableAppContext, RenderContext, Subscription, View, ViewContext, ViewHandle,
|
||||
WeakModelHandle, WeakViewHandle,
|
||||
};
|
||||
use join_project_notification::JoinProjectNotification;
|
||||
use menu::{Confirm, SelectNext, SelectPrev};
|
||||
|
@ -131,9 +131,9 @@ impl ContactsPanel {
|
|||
move |_, cx| {
|
||||
if let Some(workspace_handle) = workspace.upgrade(cx) {
|
||||
cx.subscribe(&workspace_handle.read(cx).project().clone(), {
|
||||
let workspace = workspace.clone();
|
||||
move |_, project, event, cx| match event {
|
||||
project::Event::ContactRequestedJoin(user) => {
|
||||
let workspace = workspace;
|
||||
move |_, project, event, cx| {
|
||||
if let project::Event::ContactRequestedJoin(user) = event {
|
||||
if let Some(workspace) = workspace.upgrade(cx) {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_notification(user.id as usize, cx, |cx| {
|
||||
|
@ -148,7 +148,6 @@ impl ContactsPanel {
|
|||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
@ -161,17 +160,16 @@ impl ContactsPanel {
|
|||
|
||||
cx.subscribe(&user_store, move |_, user_store, event, cx| {
|
||||
if let Some(workspace) = workspace.upgrade(cx) {
|
||||
workspace.update(cx, |workspace, cx| match event {
|
||||
client::Event::Contact { user, kind } => match kind {
|
||||
ContactEventKind::Requested | ContactEventKind::Accepted => workspace
|
||||
.show_notification(user.id as usize, cx, |cx| {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let client::Event::Contact { user, kind } = event {
|
||||
if let ContactEventKind::Requested | ContactEventKind::Accepted = kind {
|
||||
workspace.show_notification(user.id as usize, cx, |cx| {
|
||||
cx.add_view(|cx| {
|
||||
ContactNotification::new(user.clone(), *kind, user_store, cx)
|
||||
})
|
||||
}),
|
||||
_ => {}
|
||||
},
|
||||
_ => {}
|
||||
})
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -188,7 +186,7 @@ impl ContactsPanel {
|
|||
|
||||
match &this.entries[ix] {
|
||||
ContactEntry::Header(section) => {
|
||||
let is_collapsed = this.collapsed_sections.contains(§ion);
|
||||
let is_collapsed = this.collapsed_sections.contains(section);
|
||||
Self::render_header(
|
||||
*section,
|
||||
&theme.contacts_panel,
|
||||
|
@ -229,7 +227,7 @@ impl ContactsPanel {
|
|||
contact.clone(),
|
||||
current_user_id,
|
||||
*project_ix,
|
||||
open_project.clone(),
|
||||
*open_project,
|
||||
&theme.contacts_panel,
|
||||
&theme.tooltip,
|
||||
is_last_project_for_contact,
|
||||
|
@ -238,7 +236,7 @@ impl ContactsPanel {
|
|||
)
|
||||
}
|
||||
ContactEntry::OfflineProject(project) => Self::render_offline_project(
|
||||
project.clone(),
|
||||
*project,
|
||||
&theme.contacts_panel,
|
||||
&theme.tooltip,
|
||||
is_selected,
|
||||
|
@ -345,6 +343,7 @@ impl ContactsPanel {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_project(
|
||||
contact: Arc<Contact>,
|
||||
current_user_id: Option<u64>,
|
||||
|
@ -370,7 +369,7 @@ impl ContactsPanel {
|
|||
.or(theme.contact_avatar.height)
|
||||
.unwrap_or(0.);
|
||||
let row = &theme.project_row.default;
|
||||
let tree_branch = theme.tree_branch.clone();
|
||||
let tree_branch = theme.tree_branch;
|
||||
let line_height = row.name.text.line_height(font_cache);
|
||||
let cap_height = row.name.text.cap_height(font_cache);
|
||||
let baseline_offset =
|
||||
|
@ -641,7 +640,7 @@ impl ContactsPanel {
|
|||
let button_style = if is_contact_request_pending {
|
||||
&theme.disabled_button
|
||||
} else {
|
||||
&theme.contact_button.style_for(mouse_state, false)
|
||||
theme.contact_button.style_for(mouse_state, false)
|
||||
};
|
||||
render_icon_button(button_style, "icons/x_mark_8.svg")
|
||||
.aligned()
|
||||
|
@ -663,7 +662,7 @@ impl ContactsPanel {
|
|||
let button_style = if is_contact_request_pending {
|
||||
&theme.disabled_button
|
||||
} else {
|
||||
&theme.contact_button.style_for(mouse_state, false)
|
||||
theme.contact_button.style_for(mouse_state, false)
|
||||
};
|
||||
render_icon_button(button_style, "icons/check_8.svg")
|
||||
.aligned()
|
||||
|
@ -685,7 +684,7 @@ impl ContactsPanel {
|
|||
let button_style = if is_contact_request_pending {
|
||||
&theme.disabled_button
|
||||
} else {
|
||||
&theme.contact_button.style_for(mouse_state, false)
|
||||
theme.contact_button.style_for(mouse_state, false)
|
||||
};
|
||||
render_icon_button(button_style, "icons/x_mark_8.svg")
|
||||
.aligned()
|
||||
|
@ -1152,7 +1151,7 @@ impl View for ContactsPanel {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
cx.focus(&self.filter_editor);
|
||||
}
|
||||
|
||||
|
@ -1224,7 +1223,7 @@ mod tests {
|
|||
let client = Client::new(http_client.clone());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
|
||||
let project_store = cx.add_model(|_| ProjectStore::new(project::Db::open_fake()));
|
||||
let server = FakeServer::for_client(current_user_id, &client, &cx).await;
|
||||
let server = FakeServer::for_client(current_user_id, &client, cx).await;
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree("/private_dir", json!({ "one.rs": "" }))
|
||||
.await;
|
||||
|
@ -1248,8 +1247,8 @@ mod tests {
|
|||
.0
|
||||
.read_with(cx, |worktree, _| worktree.id().to_proto());
|
||||
|
||||
let workspace = cx.add_view(0, |cx| Workspace::new(project.clone(), cx));
|
||||
let panel = cx.add_view(0, |cx| {
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(project.clone(), cx));
|
||||
let panel = cx.add_view(&workspace, |cx| {
|
||||
ContactsPanel::new(
|
||||
user_store.clone(),
|
||||
project_store.clone(),
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use gpui::{
|
||||
elements::*, geometry::vector::Vector2F, impl_internal_actions, keymap, platform::CursorStyle,
|
||||
Action, AppContext, Axis, Entity, MouseButton, MutableAppContext, RenderContext,
|
||||
Action, AnyViewHandle, AppContext, Axis, Entity, MouseButton, MutableAppContext, RenderContext,
|
||||
SizeConstraint, Subscription, View, ViewContext,
|
||||
};
|
||||
use menu::*;
|
||||
|
@ -106,7 +106,7 @@ impl View for ContextMenu {
|
|||
.boxed()
|
||||
}
|
||||
|
||||
fn on_blur(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_out(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
self.reset(cx);
|
||||
}
|
||||
}
|
||||
|
@ -156,9 +156,7 @@ impl ContextMenu {
|
|||
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
|
||||
if let Some(ix) = self.selected_index {
|
||||
if let Some(ContextMenuItem::Item { action, .. }) = self.items.get(ix) {
|
||||
let window_id = cx.window_id();
|
||||
let view_id = cx.view_id();
|
||||
cx.dispatch_action_at(window_id, view_id, action.as_ref());
|
||||
cx.dispatch_any_action(action.boxed_clone());
|
||||
self.reset(cx);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ impl View for ProjectDiagnosticsEditor {
|
|||
}
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if !self.path_states.is_empty() {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
|
@ -365,7 +365,7 @@ impl ProjectDiagnosticsEditor {
|
|||
if !diagnostic.message.is_empty() {
|
||||
group_state.block_count += 1;
|
||||
blocks_to_add.push(BlockProperties {
|
||||
position: (excerpt_id.clone(), entry.range.start.clone()),
|
||||
position: (excerpt_id.clone(), entry.range.start),
|
||||
height: diagnostic.message.matches('\n').count() as u8 + 1,
|
||||
style: BlockStyle::Fixed,
|
||||
render: diagnostic_block_renderer(diagnostic, true),
|
||||
|
@ -460,7 +460,7 @@ impl ProjectDiagnosticsEditor {
|
|||
for selection in &mut selections {
|
||||
if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
|
||||
let group_ix = match groups.binary_search_by(|probe| {
|
||||
probe.excerpts.last().unwrap().cmp(&new_excerpt_id)
|
||||
probe.excerpts.last().unwrap().cmp(new_excerpt_id)
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
|
@ -468,7 +468,7 @@ impl ProjectDiagnosticsEditor {
|
|||
let offset = excerpts_snapshot
|
||||
.anchor_in_excerpt(
|
||||
group.excerpts[group.primary_excerpt_ix].clone(),
|
||||
group.primary_diagnostic.range.start.clone(),
|
||||
group.primary_diagnostic.range.start,
|
||||
)
|
||||
.to_offset(&excerpts_snapshot);
|
||||
selection.start = offset;
|
||||
|
@ -486,10 +486,8 @@ impl ProjectDiagnosticsEditor {
|
|||
if self.editor.is_focused(cx) {
|
||||
cx.focus_self();
|
||||
}
|
||||
} else {
|
||||
if cx.handle().is_focused(cx) {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
} else if cx.handle().is_focused(cx) {
|
||||
cx.focus(&self.editor);
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
|
@ -568,10 +566,6 @@ impl workspace::Item for ProjectDiagnosticsEditor {
|
|||
unreachable!()
|
||||
}
|
||||
|
||||
fn should_activate_item_on_event(event: &Self::Event) -> bool {
|
||||
Editor::should_activate_item_on_event(event)
|
||||
}
|
||||
|
||||
fn should_update_tab_on_event(event: &Event) -> bool {
|
||||
Editor::should_update_tab_on_event(event)
|
||||
}
|
||||
|
@ -729,12 +723,12 @@ fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
|
|||
) -> Ordering {
|
||||
lhs.range
|
||||
.start
|
||||
.to_offset(&snapshot)
|
||||
.to_offset(snapshot)
|
||||
.cmp(&rhs.range.start.to_offset(snapshot))
|
||||
.then_with(|| {
|
||||
lhs.range
|
||||
.end
|
||||
.to_offset(&snapshot)
|
||||
.to_offset(snapshot)
|
||||
.cmp(&rhs.range.end.to_offset(snapshot))
|
||||
})
|
||||
.then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message))
|
||||
|
@ -786,7 +780,7 @@ mod tests {
|
|||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
|
||||
let workspace = cx.add_view(0, |cx| Workspace::new(project.clone(), cx));
|
||||
let (_, workspace) = cx.add_window(|cx| Workspace::new(project.clone(), cx));
|
||||
|
||||
// Create some diagnostics
|
||||
project.update(cx, |project, cx| {
|
||||
|
@ -873,11 +867,11 @@ mod tests {
|
|||
});
|
||||
|
||||
// Open the project diagnostics view while there are already diagnostics.
|
||||
let view = cx.add_view(0, |cx| {
|
||||
let view = cx.add_view(&workspace, |cx| {
|
||||
ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
|
||||
});
|
||||
|
||||
view.next_notification(&cx).await;
|
||||
view.next_notification(cx).await;
|
||||
view.update(cx, |view, cx| {
|
||||
assert_eq!(
|
||||
editor_blocks(&view.editor, cx),
|
||||
|
@ -964,7 +958,7 @@ mod tests {
|
|||
project.disk_based_diagnostics_finished(0, cx);
|
||||
});
|
||||
|
||||
view.next_notification(&cx).await;
|
||||
view.next_notification(cx).await;
|
||||
view.update(cx, |view, cx| {
|
||||
assert_eq!(
|
||||
editor_blocks(&view.editor, cx),
|
||||
|
@ -1078,7 +1072,7 @@ mod tests {
|
|||
project.disk_based_diagnostics_finished(0, cx);
|
||||
});
|
||||
|
||||
view.next_notification(&cx).await;
|
||||
view.next_notification(cx).await;
|
||||
view.update(cx, |view, cx| {
|
||||
assert_eq!(
|
||||
editor_blocks(&view.editor, cx),
|
||||
|
|
|
@ -253,7 +253,7 @@ impl DisplaySnapshot {
|
|||
self.buffer_snapshot.len() == 0
|
||||
}
|
||||
|
||||
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> DisplayBufferRows<'a> {
|
||||
pub fn buffer_rows(&self, start_row: u32) -> DisplayBufferRows {
|
||||
self.blocks_snapshot.buffer_rows(start_row)
|
||||
}
|
||||
|
||||
|
@ -313,7 +313,7 @@ impl DisplaySnapshot {
|
|||
fn point_to_display_point(&self, point: Point, bias: Bias) -> DisplayPoint {
|
||||
let fold_point = self.folds_snapshot.to_fold_point(point, bias);
|
||||
let tab_point = self.tabs_snapshot.to_tab_point(fold_point);
|
||||
let wrap_point = self.wraps_snapshot.from_tab_point(tab_point);
|
||||
let wrap_point = self.wraps_snapshot.tab_point_to_wrap_point(tab_point);
|
||||
let block_point = self.blocks_snapshot.to_block_point(wrap_point);
|
||||
DisplayPoint(block_point)
|
||||
}
|
||||
|
@ -336,16 +336,12 @@ impl DisplaySnapshot {
|
|||
.map(|h| h.text)
|
||||
}
|
||||
|
||||
pub fn chunks<'a>(
|
||||
&'a self,
|
||||
display_rows: Range<u32>,
|
||||
language_aware: bool,
|
||||
) -> DisplayChunks<'a> {
|
||||
pub fn chunks(&self, display_rows: Range<u32>, language_aware: bool) -> DisplayChunks<'_> {
|
||||
self.blocks_snapshot
|
||||
.chunks(display_rows, language_aware, Some(&self.text_highlights))
|
||||
}
|
||||
|
||||
pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator<Item = char> + 'a {
|
||||
pub fn chars_at(&self, point: DisplayPoint) -> impl Iterator<Item = char> + '_ {
|
||||
let mut column = 0;
|
||||
let mut chars = self.text_chunks(point.row()).flat_map(str::chars);
|
||||
while column < point.column() {
|
||||
|
@ -372,15 +368,15 @@ impl DisplaySnapshot {
|
|||
}
|
||||
|
||||
pub fn column_from_chars(&self, display_row: u32, char_count: u32) -> u32 {
|
||||
let mut count = 0;
|
||||
let mut column = 0;
|
||||
for c in self.chars_at(DisplayPoint::new(display_row, 0)) {
|
||||
if c == '\n' || count >= char_count {
|
||||
|
||||
for (count, c) in self.chars_at(DisplayPoint::new(display_row, 0)).enumerate() {
|
||||
if c == '\n' || count >= char_count as usize {
|
||||
break;
|
||||
}
|
||||
count += 1;
|
||||
column += c.len_utf8() as u32;
|
||||
}
|
||||
|
||||
column
|
||||
}
|
||||
|
||||
|
@ -401,20 +397,17 @@ impl DisplaySnapshot {
|
|||
DisplayPoint(point)
|
||||
}
|
||||
|
||||
pub fn folds_in_range<'a, T>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
) -> impl Iterator<Item = &'a Range<Anchor>>
|
||||
pub fn folds_in_range<T>(&self, range: Range<T>) -> impl Iterator<Item = &Range<Anchor>>
|
||||
where
|
||||
T: ToOffset,
|
||||
{
|
||||
self.folds_snapshot.folds_in_range(range)
|
||||
}
|
||||
|
||||
pub fn blocks_in_range<'a>(
|
||||
&'a self,
|
||||
pub fn blocks_in_range(
|
||||
&self,
|
||||
rows: Range<u32>,
|
||||
) -> impl Iterator<Item = (u32, &'a TransformBlock)> {
|
||||
) -> impl Iterator<Item = (u32, &TransformBlock)> {
|
||||
self.blocks_snapshot.blocks_in_range(rows)
|
||||
}
|
||||
|
||||
|
@ -1015,7 +1008,7 @@ pub mod tests {
|
|||
});
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
|
||||
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
let font_cache = cx.font_cache();
|
||||
|
@ -1102,7 +1095,7 @@ pub mod tests {
|
|||
cx.update(|cx| cx.set_global(Settings::test(cx)));
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
|
||||
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
let font_cache = cx.font_cache();
|
||||
|
@ -1173,7 +1166,7 @@ pub mod tests {
|
|||
let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
|
||||
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
|
||||
buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
|
||||
|
||||
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
|
|
|
@ -20,7 +20,7 @@ use std::{
|
|||
use sum_tree::{Bias, SumTree};
|
||||
use text::{Edit, Point};
|
||||
|
||||
const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
|
||||
const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize];
|
||||
|
||||
pub struct BlockMap {
|
||||
next_block_id: AtomicUsize,
|
||||
|
@ -102,6 +102,7 @@ struct Transform {
|
|||
block: Option<TransformBlock>,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Clone)]
|
||||
pub enum TransformBlock {
|
||||
Custom(Arc<Block>),
|
||||
|
@ -317,7 +318,7 @@ impl BlockMap {
|
|||
let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| {
|
||||
probe
|
||||
.position
|
||||
.to_point(&buffer)
|
||||
.to_point(buffer)
|
||||
.cmp(&new_buffer_start)
|
||||
.then(Ordering::Greater)
|
||||
}) {
|
||||
|
@ -335,7 +336,7 @@ impl BlockMap {
|
|||
match self.blocks[start_block_ix..].binary_search_by(|probe| {
|
||||
probe
|
||||
.position
|
||||
.to_point(&buffer)
|
||||
.to_point(buffer)
|
||||
.cmp(&new_buffer_end)
|
||||
.then(Ordering::Greater)
|
||||
}) {
|
||||
|
@ -349,14 +350,14 @@ impl BlockMap {
|
|||
self.blocks[start_block_ix..end_block_ix]
|
||||
.iter()
|
||||
.map(|block| {
|
||||
let mut position = block.position.to_point(&buffer);
|
||||
let mut position = block.position.to_point(buffer);
|
||||
match block.disposition {
|
||||
BlockDisposition::Above => position.column = 0,
|
||||
BlockDisposition::Below => {
|
||||
position.column = buffer.line_len(position.row)
|
||||
}
|
||||
}
|
||||
let position = wrap_snapshot.from_point(position, Bias::Left);
|
||||
let position = wrap_snapshot.make_wrap_point(position, Bias::Left);
|
||||
(position.row(), TransformBlock::Custom(block.clone()))
|
||||
}),
|
||||
);
|
||||
|
@ -366,7 +367,7 @@ impl BlockMap {
|
|||
.map(|excerpt_boundary| {
|
||||
(
|
||||
wrap_snapshot
|
||||
.from_point(Point::new(excerpt_boundary.row, 0), Bias::Left)
|
||||
.make_wrap_point(Point::new(excerpt_boundary.row, 0), Bias::Left)
|
||||
.row(),
|
||||
TransformBlock::ExcerptHeader {
|
||||
key: excerpt_boundary.key,
|
||||
|
@ -385,7 +386,7 @@ impl BlockMap {
|
|||
|
||||
// Place excerpt headers above custom blocks on the same row.
|
||||
blocks_in_edit.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| {
|
||||
row_a.cmp(&row_b).then_with(|| match (block_a, block_b) {
|
||||
row_a.cmp(row_b).then_with(|| match (block_a, block_b) {
|
||||
(
|
||||
TransformBlock::ExcerptHeader { .. },
|
||||
TransformBlock::ExcerptHeader { .. },
|
||||
|
@ -498,9 +499,9 @@ impl<'a> BlockMapWriter<'a> {
|
|||
ids.push(id);
|
||||
|
||||
let position = block.position;
|
||||
let point = position.to_point(&buffer);
|
||||
let point = position.to_point(buffer);
|
||||
let wrap_row = wrap_snapshot
|
||||
.from_point(Point::new(point.row, 0), Bias::Left)
|
||||
.make_wrap_point(Point::new(point.row, 0), Bias::Left)
|
||||
.row();
|
||||
let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
|
||||
let end_row = wrap_snapshot
|
||||
|
@ -510,7 +511,7 @@ impl<'a> BlockMapWriter<'a> {
|
|||
let block_ix = match self
|
||||
.0
|
||||
.blocks
|
||||
.binary_search_by(|probe| probe.position.cmp(&position, &buffer))
|
||||
.binary_search_by(|probe| probe.position.cmp(&position, buffer))
|
||||
{
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
|
@ -543,11 +544,11 @@ impl<'a> BlockMapWriter<'a> {
|
|||
let mut last_block_buffer_row = None;
|
||||
self.0.blocks.retain(|block| {
|
||||
if block_ids.contains(&block.id) {
|
||||
let buffer_row = block.position.to_point(&buffer).row;
|
||||
let buffer_row = block.position.to_point(buffer).row;
|
||||
if last_block_buffer_row != Some(buffer_row) {
|
||||
last_block_buffer_row = Some(buffer_row);
|
||||
let wrap_row = wrap_snapshot
|
||||
.from_point(Point::new(buffer_row, 0), Bias::Left)
|
||||
.make_wrap_point(Point::new(buffer_row, 0), Bias::Left)
|
||||
.row();
|
||||
let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
|
||||
let end_row = wrap_snapshot
|
||||
|
@ -620,7 +621,7 @@ impl BlockSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BlockBufferRows<'a> {
|
||||
pub fn buffer_rows(&self, start_row: u32) -> BlockBufferRows {
|
||||
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
|
||||
cursor.seek(&BlockRow(start_row), Bias::Right, &());
|
||||
let (output_start, input_start) = cursor.start();
|
||||
|
@ -638,10 +639,10 @@ impl BlockSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn blocks_in_range<'a>(
|
||||
&'a self,
|
||||
pub fn blocks_in_range(
|
||||
&self,
|
||||
rows: Range<u32>,
|
||||
) -> impl Iterator<Item = (u32, &'a TransformBlock)> {
|
||||
) -> impl Iterator<Item = (u32, &TransformBlock)> {
|
||||
let mut cursor = self.transforms.cursor::<BlockRow>();
|
||||
cursor.seek(&BlockRow(rows.start), Bias::Right, &());
|
||||
std::iter::from_fn(move || {
|
||||
|
@ -1025,7 +1026,7 @@ mod tests {
|
|||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
|
||||
let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1.try_into().unwrap());
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot, 1.try_into().unwrap());
|
||||
let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
|
||||
|
||||
|
@ -1194,7 +1195,7 @@ mod tests {
|
|||
let buffer = MultiBuffer::build_simple(text, cx);
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (_, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
|
||||
let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1.try_into().unwrap());
|
||||
let (_, tabs_snapshot) = TabMap::new(folds_snapshot, 1.try_into().unwrap());
|
||||
let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
|
||||
let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
|
||||
|
||||
|
@ -1262,11 +1263,11 @@ mod tests {
|
|||
|
||||
let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot, tab_size);
|
||||
let (wrap_map, wraps_snapshot) =
|
||||
WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
|
||||
let mut block_map = BlockMap::new(
|
||||
wraps_snapshot.clone(),
|
||||
wraps_snapshot,
|
||||
buffer_start_header_height,
|
||||
excerpt_header_height,
|
||||
);
|
||||
|
@ -1383,7 +1384,7 @@ mod tests {
|
|||
position.column = buffer_snapshot.line_len(position.row);
|
||||
}
|
||||
};
|
||||
let row = wraps_snapshot.from_point(position, Bias::Left).row();
|
||||
let row = wraps_snapshot.make_wrap_point(position, Bias::Left).row();
|
||||
(
|
||||
row,
|
||||
ExpectedBlock::Custom {
|
||||
|
@ -1396,7 +1397,7 @@ mod tests {
|
|||
expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map(
|
||||
|boundary| {
|
||||
let position =
|
||||
wraps_snapshot.from_point(Point::new(boundary.row, 0), Bias::Left);
|
||||
wraps_snapshot.make_wrap_point(Point::new(boundary.row, 0), Bias::Left);
|
||||
(
|
||||
position.row(),
|
||||
ExpectedBlock::ExcerptHeader {
|
||||
|
|
|
@ -41,27 +41,27 @@ impl FoldPoint {
|
|||
&mut self.0.column
|
||||
}
|
||||
|
||||
pub fn to_buffer_point(&self, snapshot: &FoldSnapshot) -> Point {
|
||||
pub fn to_buffer_point(self, snapshot: &FoldSnapshot) -> Point {
|
||||
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
|
||||
cursor.seek(self, Bias::Right, &());
|
||||
cursor.seek(&self, Bias::Right, &());
|
||||
let overshoot = self.0 - cursor.start().0 .0;
|
||||
cursor.start().1 + overshoot
|
||||
}
|
||||
|
||||
pub fn to_buffer_offset(&self, snapshot: &FoldSnapshot) -> usize {
|
||||
pub fn to_buffer_offset(self, snapshot: &FoldSnapshot) -> usize {
|
||||
let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
|
||||
cursor.seek(self, Bias::Right, &());
|
||||
cursor.seek(&self, Bias::Right, &());
|
||||
let overshoot = self.0 - cursor.start().0 .0;
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.point_to_offset(cursor.start().1 + overshoot)
|
||||
}
|
||||
|
||||
pub fn to_offset(&self, snapshot: &FoldSnapshot) -> FoldOffset {
|
||||
pub fn to_offset(self, snapshot: &FoldSnapshot) -> FoldOffset {
|
||||
let mut cursor = snapshot
|
||||
.transforms
|
||||
.cursor::<(FoldPoint, TransformSummary)>();
|
||||
cursor.seek(self, Bias::Right, &());
|
||||
cursor.seek(&self, Bias::Right, &());
|
||||
let overshoot = self.0 - cursor.start().1.output.lines;
|
||||
let mut offset = cursor.start().1.output.len;
|
||||
if !overshoot.is_zero() {
|
||||
|
@ -600,10 +600,7 @@ impl FoldSnapshot {
|
|||
self.transforms.summary().output.longest_row
|
||||
}
|
||||
|
||||
pub fn folds_in_range<'a, T>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
) -> impl Iterator<Item = &'a Range<Anchor>>
|
||||
pub fn folds_in_range<T>(&self, range: Range<T>) -> impl Iterator<Item = &Range<Anchor>>
|
||||
where
|
||||
T: ToOffset,
|
||||
{
|
||||
|
@ -689,7 +686,7 @@ impl FoldSnapshot {
|
|||
let ranges = &highlights.1;
|
||||
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&transform_start, &self.buffer_snapshot());
|
||||
let cmp = probe.end.cmp(&transform_start, self.buffer_snapshot());
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -1040,11 +1037,7 @@ impl<'a> Iterator for FoldChunks<'a> {
|
|||
return None;
|
||||
}
|
||||
|
||||
let transform = if let Some(item) = self.transform_cursor.item() {
|
||||
item
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
let transform = self.transform_cursor.item()?;
|
||||
|
||||
// If we're in a fold, then return the fold's display text and
|
||||
// advance the transform and buffer cursors to the end of the fold.
|
||||
|
@ -1150,11 +1143,11 @@ impl Ord for HighlightEndpoint {
|
|||
pub struct FoldOffset(pub usize);
|
||||
|
||||
impl FoldOffset {
|
||||
pub fn to_point(&self, snapshot: &FoldSnapshot) -> FoldPoint {
|
||||
pub fn to_point(self, snapshot: &FoldSnapshot) -> FoldPoint {
|
||||
let mut cursor = snapshot
|
||||
.transforms
|
||||
.cursor::<(FoldOffset, TransformSummary)>();
|
||||
cursor.seek(self, Bias::Right, &());
|
||||
cursor.seek(&self, Bias::Right, &());
|
||||
let overshoot = if cursor.item().map_or(true, |t| t.is_fold()) {
|
||||
Point::new(0, (self.0 - cursor.start().0 .0) as u32)
|
||||
} else {
|
||||
|
@ -1214,7 +1207,7 @@ mod tests {
|
|||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let mut map = FoldMap::new(buffer_snapshot.clone()).0;
|
||||
|
||||
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
|
||||
let (mut writer, _, _) = map.write(buffer_snapshot, vec![]);
|
||||
let (snapshot2, edits) = writer.fold(vec![
|
||||
Point::new(0, 2)..Point::new(2, 2),
|
||||
Point::new(2, 4)..Point::new(4, 1),
|
||||
|
@ -1245,8 +1238,7 @@ mod tests {
|
|||
);
|
||||
buffer.snapshot(cx)
|
||||
});
|
||||
let (snapshot3, edits) =
|
||||
map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
|
||||
let (snapshot3, edits) = map.read(buffer_snapshot, subscription.consume().into_inner());
|
||||
assert_eq!(snapshot3.text(), "123a…c123c…eeeee");
|
||||
assert_eq!(
|
||||
edits,
|
||||
|
@ -1276,7 +1268,7 @@ mod tests {
|
|||
|
||||
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
|
||||
writer.unfold(Some(Point::new(0, 4)..Point::new(0, 4)), true);
|
||||
let (snapshot6, _) = map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (snapshot6, _) = map.read(buffer_snapshot, vec![]);
|
||||
assert_eq!(snapshot6.text(), "123aaaaa\nbbbbbb\nccc123456eee");
|
||||
}
|
||||
|
||||
|
@ -1314,7 +1306,7 @@ mod tests {
|
|||
// Create two adjacent folds.
|
||||
let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
|
||||
writer.fold(vec![0..2, 2..5]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot, vec![]);
|
||||
assert_eq!(snapshot.text(), "…fghijkl");
|
||||
|
||||
// Edit within one of the folds.
|
||||
|
@ -1322,8 +1314,7 @@ mod tests {
|
|||
buffer.edit([(0..1, "12345")], None, cx);
|
||||
buffer.snapshot(cx)
|
||||
});
|
||||
let (snapshot, _) =
|
||||
map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
|
||||
let (snapshot, _) = map.read(buffer_snapshot, subscription.consume().into_inner());
|
||||
assert_eq!(snapshot.text(), "12345…fghijkl");
|
||||
}
|
||||
}
|
||||
|
@ -1340,7 +1331,7 @@ mod tests {
|
|||
Point::new(1, 2)..Point::new(3, 2),
|
||||
Point::new(3, 1)..Point::new(4, 1),
|
||||
]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot, vec![]);
|
||||
assert_eq!(snapshot.text(), "aa…eeeee");
|
||||
}
|
||||
|
||||
|
@ -1357,14 +1348,14 @@ mod tests {
|
|||
Point::new(0, 2)..Point::new(2, 2),
|
||||
Point::new(3, 1)..Point::new(4, 1),
|
||||
]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot, vec![]);
|
||||
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee");
|
||||
|
||||
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(Point::new(2, 2)..Point::new(3, 1), "")], None, cx);
|
||||
buffer.snapshot(cx)
|
||||
});
|
||||
let (snapshot, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
|
||||
let (snapshot, _) = map.read(buffer_snapshot, subscription.consume().into_inner());
|
||||
assert_eq!(snapshot.text(), "aa…eeeee");
|
||||
}
|
||||
|
||||
|
@ -1661,7 +1652,7 @@ mod tests {
|
|||
Point::new(3, 1)..Point::new(4, 1),
|
||||
]);
|
||||
|
||||
let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (snapshot, _) = map.read(buffer_snapshot, vec![]);
|
||||
assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n");
|
||||
assert_eq!(
|
||||
snapshot.buffer_rows(0).collect::<Vec<_>>(),
|
||||
|
|
|
@ -253,7 +253,7 @@ impl TabSnapshot {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn from_point(&self, point: Point, bias: Bias) -> TabPoint {
|
||||
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
|
||||
self.to_tab_point(self.fold_snapshot.to_fold_point(point, bias))
|
||||
}
|
||||
|
||||
|
@ -290,7 +290,7 @@ impl TabSnapshot {
|
|||
}
|
||||
|
||||
fn collapse_tabs(
|
||||
mut chars: impl Iterator<Item = char>,
|
||||
chars: impl Iterator<Item = char>,
|
||||
column: usize,
|
||||
bias: Bias,
|
||||
tab_size: NonZeroU32,
|
||||
|
@ -298,7 +298,7 @@ impl TabSnapshot {
|
|||
let mut expanded_bytes = 0;
|
||||
let mut expanded_chars = 0;
|
||||
let mut collapsed_bytes = 0;
|
||||
while let Some(c) = chars.next() {
|
||||
for c in chars {
|
||||
if expanded_bytes >= column {
|
||||
break;
|
||||
}
|
||||
|
@ -410,7 +410,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
|
|||
}
|
||||
|
||||
// Handles a tab width <= 16
|
||||
const SPACES: &'static str = " ";
|
||||
const SPACES: &str = " ";
|
||||
|
||||
pub struct TabChunks<'a> {
|
||||
fold_chunks: fold_map::FoldChunks<'a>,
|
||||
|
@ -518,7 +518,7 @@ mod tests {
|
|||
|
||||
let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone());
|
||||
fold_map.randomly_mutate(&mut rng);
|
||||
let (folds_snapshot, _) = fold_map.read(buffer_snapshot.clone(), vec![]);
|
||||
let (folds_snapshot, _) = fold_map.read(buffer_snapshot, vec![]);
|
||||
log::info!("FoldMap text: {:?}", folds_snapshot.text());
|
||||
|
||||
let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
|
||||
|
|
|
@ -285,7 +285,7 @@ impl WrapMap {
|
|||
if tab_snapshot.version <= self.snapshot.tab_snapshot.version {
|
||||
to_remove_len += 1;
|
||||
} else {
|
||||
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), &edits);
|
||||
let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits);
|
||||
self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits);
|
||||
self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits);
|
||||
}
|
||||
|
@ -394,7 +394,7 @@ impl WrapSnapshot {
|
|||
new_rows: Range<u32>,
|
||||
}
|
||||
|
||||
let mut tab_edits_iter = tab_edits.into_iter().peekable();
|
||||
let mut tab_edits_iter = tab_edits.iter().peekable();
|
||||
let mut row_edits = Vec::new();
|
||||
while let Some(edit) = tab_edits_iter.next() {
|
||||
let mut row_edit = RowEdit {
|
||||
|
@ -671,11 +671,11 @@ impl WrapSnapshot {
|
|||
self.tab_snapshot.to_point(self.to_tab_point(point), bias)
|
||||
}
|
||||
|
||||
pub fn from_point(&self, point: Point, bias: Bias) -> WrapPoint {
|
||||
self.from_tab_point(self.tab_snapshot.from_point(point, bias))
|
||||
pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint {
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias))
|
||||
}
|
||||
|
||||
pub fn from_tab_point(&self, point: TabPoint) -> WrapPoint {
|
||||
pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint {
|
||||
let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>();
|
||||
cursor.seek(&point, Bias::Right, &());
|
||||
WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
|
||||
|
@ -691,7 +691,7 @@ impl WrapSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
self.from_tab_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
|
||||
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
|
||||
}
|
||||
|
||||
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
|
||||
|
@ -1301,7 +1301,7 @@ mod tests {
|
|||
end_row += 1;
|
||||
|
||||
let mut expected_text = self.text_chunks(start_row).collect::<String>();
|
||||
if expected_text.ends_with("\n") {
|
||||
if expected_text.ends_with('\n') {
|
||||
expected_text.push('\n');
|
||||
}
|
||||
let mut expected_text = expected_text
|
||||
|
|
|
@ -29,8 +29,8 @@ use gpui::{
|
|||
geometry::vector::{vec2f, Vector2F},
|
||||
impl_actions, impl_internal_actions,
|
||||
platform::CursorStyle,
|
||||
text_layout, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox, Entity,
|
||||
ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View,
|
||||
text_layout, AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox,
|
||||
Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View,
|
||||
ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
|
@ -425,6 +425,7 @@ pub struct Editor {
|
|||
vertical_scroll_margin: f32,
|
||||
placeholder_text: Option<Arc<str>>,
|
||||
highlighted_rows: Option<Range<u32>>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
background_highlights: BTreeMap<TypeId, (fn(&Theme) -> Color, Vec<Range<Anchor>>)>,
|
||||
nav_history: Option<ItemNavHistory>,
|
||||
context_menu: Option<ContextMenu>,
|
||||
|
@ -475,6 +476,7 @@ impl Default for SelectionHistoryMode {
|
|||
|
||||
#[derive(Default)]
|
||||
struct SelectionHistory {
|
||||
#[allow(clippy::type_complexity)]
|
||||
selections_by_transaction:
|
||||
HashMap<TransactionId, (Arc<[Selection<Anchor>]>, Option<Arc<[Selection<Anchor>]>>)>,
|
||||
mode: SelectionHistoryMode,
|
||||
|
@ -492,6 +494,7 @@ impl SelectionHistory {
|
|||
.insert(transaction_id, (selections, None));
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn transaction(
|
||||
&self,
|
||||
transaction_id: TransactionId,
|
||||
|
@ -499,6 +502,7 @@ impl SelectionHistory {
|
|||
self.selections_by_transaction.get(&transaction_id)
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn transaction_mut(
|
||||
&mut self,
|
||||
transaction_id: TransactionId,
|
||||
|
@ -1023,7 +1027,7 @@ impl Editor {
|
|||
background_highlights: Default::default(),
|
||||
nav_history: None,
|
||||
context_menu: None,
|
||||
mouse_context_menu: cx.add_view(|cx| context_menu::ContextMenu::new(cx)),
|
||||
mouse_context_menu: cx.add_view(context_menu::ContextMenu::new),
|
||||
completion_tasks: Default::default(),
|
||||
next_completion_id: 0,
|
||||
available_code_actions: Default::default(),
|
||||
|
@ -1561,7 +1565,6 @@ impl Editor {
|
|||
) {
|
||||
if !self.focused {
|
||||
cx.focus_self();
|
||||
cx.emit(Event::Activate);
|
||||
}
|
||||
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
@ -1623,7 +1626,6 @@ impl Editor {
|
|||
) {
|
||||
if !self.focused {
|
||||
cx.focus_self();
|
||||
cx.emit(Event::Activate);
|
||||
}
|
||||
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
@ -1651,7 +1653,7 @@ impl Editor {
|
|||
if let Some(tail) = self.columnar_selection_tail.as_ref() {
|
||||
let tail = tail.to_display_point(&display_map);
|
||||
self.select_columns(tail, position, goal_column, &display_map, cx);
|
||||
} else if let Some(mut pending) = self.selections.pending_anchor().clone() {
|
||||
} else if let Some(mut pending) = self.selections.pending_anchor() {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let head;
|
||||
let tail;
|
||||
|
@ -1766,10 +1768,10 @@ impl Editor {
|
|||
if start_column <= display_map.line_len(row) && !display_map.is_block_line(row) {
|
||||
let start = display_map
|
||||
.clip_point(DisplayPoint::new(row, start_column), Bias::Left)
|
||||
.to_point(&display_map);
|
||||
.to_point(display_map);
|
||||
let end = display_map
|
||||
.clip_point(DisplayPoint::new(row, end_column), Bias::Right)
|
||||
.to_point(&display_map);
|
||||
.to_point(display_map);
|
||||
if reversed {
|
||||
Some(end..start)
|
||||
} else {
|
||||
|
@ -1787,6 +1789,14 @@ impl Editor {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn are_selections_empty(&self) -> bool {
|
||||
let pending_empty = match self.selections.pending_anchor() {
|
||||
Some(Selection { start, end, .. }) => start == end,
|
||||
None => true,
|
||||
};
|
||||
pending_empty && self.columnar_selection_tail.is_none()
|
||||
}
|
||||
|
||||
pub fn is_selecting(&self) -> bool {
|
||||
self.selections.pending_anchor().is_some() || self.columnar_selection_tail.is_some()
|
||||
}
|
||||
|
@ -1909,7 +1919,7 @@ impl Editor {
|
|||
cursor.row -= 1;
|
||||
cursor.column = buffer.line_len(cursor.row);
|
||||
}
|
||||
new_selection.map(|_| cursor.clone())
|
||||
new_selection.map(|_| cursor)
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -2273,7 +2283,7 @@ impl Editor {
|
|||
|
||||
let query = Self::completion_query(&self.buffer.read(cx).read(cx), position.clone());
|
||||
let completions = project.update(cx, |project, cx| {
|
||||
project.completions(&buffer, buffer_position.clone(), cx)
|
||||
project.completions(&buffer, buffer_position, cx)
|
||||
});
|
||||
|
||||
let id = post_inc(&mut self.next_completion_id);
|
||||
|
@ -2363,7 +2373,7 @@ impl Editor {
|
|||
};
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let old_range = completion.old_range.to_offset(&buffer);
|
||||
let old_range = completion.old_range.to_offset(buffer);
|
||||
let old_text = buffer.text_for_range(old_range.clone()).collect::<String>();
|
||||
|
||||
let newest_selection = self.selections.newest_anchor();
|
||||
|
@ -2801,7 +2811,7 @@ impl Editor {
|
|||
let mut tabstop_ranges = tabstop
|
||||
.iter()
|
||||
.flat_map(|tabstop_range| {
|
||||
let mut delta = 0 as isize;
|
||||
let mut delta = 0_isize;
|
||||
insertion_ranges.iter().map(move |insertion_range| {
|
||||
let insertion_start = insertion_range.start as isize + delta;
|
||||
delta +=
|
||||
|
@ -2865,7 +2875,7 @@ impl Editor {
|
|||
}
|
||||
if let Some(current_ranges) = snippet.ranges.get(snippet.active_index) {
|
||||
self.change_selections(Some(Autoscroll::Fit), cx, |s| {
|
||||
s.select_anchor_ranges(current_ranges.into_iter().cloned())
|
||||
s.select_anchor_ranges(current_ranges.iter().cloned())
|
||||
});
|
||||
// If snippet state is not at the last tabstop, push it back on the stack
|
||||
if snippet.active_index + 1 < snippet.ranges.len() {
|
||||
|
@ -2947,7 +2957,7 @@ impl Editor {
|
|||
}
|
||||
})
|
||||
});
|
||||
this.insert(&"", cx);
|
||||
this.insert("", cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -3214,13 +3224,13 @@ impl Editor {
|
|||
}
|
||||
|
||||
let buffer = &display_map.buffer_snapshot;
|
||||
let mut edit_start = Point::new(rows.start, 0).to_offset(&buffer);
|
||||
let mut edit_start = Point::new(rows.start, 0).to_offset(buffer);
|
||||
let edit_end;
|
||||
let cursor_buffer_row;
|
||||
if buffer.max_point().row >= rows.end {
|
||||
// If there's a line after the range, delete the \n from the end of the row range
|
||||
// and position the cursor on the next line.
|
||||
edit_end = Point::new(rows.end, 0).to_offset(&buffer);
|
||||
edit_end = Point::new(rows.end, 0).to_offset(buffer);
|
||||
cursor_buffer_row = rows.end;
|
||||
} else {
|
||||
// If there isn't a line after the range, delete the \n from the line before the
|
||||
|
@ -3286,7 +3296,7 @@ impl Editor {
|
|||
|
||||
while let Some(next_selection) = selections_iter.peek() {
|
||||
let next_rows = next_selection.spanned_rows(false, &display_map);
|
||||
if next_rows.start <= rows.end - 1 {
|
||||
if next_rows.start < rows.end {
|
||||
rows.end = next_rows.end;
|
||||
selections_iter.next().unwrap();
|
||||
} else {
|
||||
|
@ -3408,7 +3418,7 @@ impl Editor {
|
|||
}
|
||||
|
||||
// If we didn't move line(s), preserve the existing selections
|
||||
new_selections.extend(contiguous_row_selections.drain(..));
|
||||
new_selections.append(&mut contiguous_row_selections);
|
||||
}
|
||||
|
||||
self.transact(cx, |this, cx| {
|
||||
|
@ -3513,7 +3523,7 @@ impl Editor {
|
|||
}
|
||||
|
||||
// If we didn't move line(s), preserve the existing selections
|
||||
new_selections.extend(contiguous_row_selections.drain(..));
|
||||
new_selections.append(&mut contiguous_row_selections);
|
||||
}
|
||||
|
||||
self.transact(cx, |this, cx| {
|
||||
|
@ -3825,7 +3835,7 @@ impl Editor {
|
|||
if !selection.is_empty() && !line_mode {
|
||||
selection.goal = SelectionGoal::None;
|
||||
}
|
||||
let (cursor, goal) = movement::up(&map, selection.start, selection.goal, false);
|
||||
let (cursor, goal) = movement::up(map, selection.start, selection.goal, false);
|
||||
selection.collapse_to(cursor, goal);
|
||||
});
|
||||
})
|
||||
|
@ -3857,7 +3867,7 @@ impl Editor {
|
|||
if !selection.is_empty() && !line_mode {
|
||||
selection.goal = SelectionGoal::None;
|
||||
}
|
||||
let (cursor, goal) = movement::down(&map, selection.end, selection.goal, false);
|
||||
let (cursor, goal) = movement::down(map, selection.end, selection.goal, false);
|
||||
selection.collapse_to(cursor, goal);
|
||||
});
|
||||
});
|
||||
|
@ -4773,12 +4783,10 @@ impl Editor {
|
|||
} else {
|
||||
search_start = buffer.len();
|
||||
}
|
||||
} else if search_start == 0 {
|
||||
break;
|
||||
} else {
|
||||
if search_start == 0 {
|
||||
break;
|
||||
} else {
|
||||
search_start = 0;
|
||||
}
|
||||
search_start = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5101,13 +5109,7 @@ impl Editor {
|
|||
})?;
|
||||
|
||||
let rename = workspace.project().clone().update(cx, |project, cx| {
|
||||
project.perform_rename(
|
||||
buffer.clone(),
|
||||
range.start.clone(),
|
||||
new_name.clone(),
|
||||
true,
|
||||
cx,
|
||||
)
|
||||
project.perform_rename(buffer.clone(), range.start, new_name.clone(), true, cx)
|
||||
});
|
||||
|
||||
Some(cx.spawn(|workspace, mut cx| async move {
|
||||
|
@ -5286,7 +5288,7 @@ impl Editor {
|
|||
|
||||
fn push_to_selection_history(&mut self) {
|
||||
self.selection_history.push(SelectionHistoryEntry {
|
||||
selections: self.selections.disjoint_anchors().clone(),
|
||||
selections: self.selections.disjoint_anchors(),
|
||||
select_next_state: self.select_next_state.clone(),
|
||||
add_selections_state: self.add_selections_state.clone(),
|
||||
});
|
||||
|
@ -5319,7 +5321,7 @@ impl Editor {
|
|||
.update(cx, |buffer, cx| buffer.start_transaction_at(now, cx))
|
||||
{
|
||||
self.selection_history
|
||||
.insert_transaction(tx_id, self.selections.disjoint_anchors().clone());
|
||||
.insert_transaction(tx_id, self.selections.disjoint_anchors());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5333,7 +5335,7 @@ impl Editor {
|
|||
.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx))
|
||||
{
|
||||
if let Some((_, end_selections)) = self.selection_history.transaction_mut(tx_id) {
|
||||
*end_selections = Some(self.selections.disjoint_anchors().clone());
|
||||
*end_selections = Some(self.selections.disjoint_anchors());
|
||||
} else {
|
||||
log::error!("unexpectedly ended a transaction that wasn't started by this editor");
|
||||
}
|
||||
|
@ -5435,7 +5437,7 @@ impl Editor {
|
|||
}
|
||||
|
||||
let end = end.unwrap_or(max_point);
|
||||
return start.to_point(display_map)..end.to_point(display_map);
|
||||
start.to_point(display_map)..end.to_point(display_map)
|
||||
}
|
||||
|
||||
pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext<Self>) {
|
||||
|
@ -5582,6 +5584,7 @@ impl Editor {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn clear_background_highlights<T: 'static>(
|
||||
&mut self,
|
||||
cx: &mut ViewContext<Self>,
|
||||
|
@ -5623,7 +5626,7 @@ impl Editor {
|
|||
.chain(write_highlights)
|
||||
.flat_map(move |ranges| {
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&left_position, &buffer);
|
||||
let cmp = probe.end.cmp(&left_position, buffer);
|
||||
if cmp.is_ge() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -5636,7 +5639,7 @@ impl Editor {
|
|||
let right_position = right_position.clone();
|
||||
ranges[start_ix..]
|
||||
.iter()
|
||||
.take_while(move |range| range.start.cmp(&right_position, &buffer).is_le())
|
||||
.take_while(move |range| range.start.cmp(&right_position, buffer).is_le())
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -5651,7 +5654,7 @@ impl Editor {
|
|||
for (color_fetcher, ranges) in self.background_highlights.values() {
|
||||
let color = color_fetcher(theme);
|
||||
let start_ix = match ranges.binary_search_by(|probe| {
|
||||
let cmp = probe.end.cmp(&search_range.start, &buffer);
|
||||
let cmp = probe.end.cmp(&search_range.start, buffer);
|
||||
if cmp.is_gt() {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
|
@ -5661,7 +5664,7 @@ impl Editor {
|
|||
Ok(i) | Err(i) => i,
|
||||
};
|
||||
for range in &ranges[start_ix..] {
|
||||
if range.start.cmp(&search_range.end, &buffer).is_ge() {
|
||||
if range.start.cmp(&search_range.end, buffer).is_ge() {
|
||||
break;
|
||||
}
|
||||
let start = range
|
||||
|
@ -5893,7 +5896,7 @@ impl Editor {
|
|||
let (_, ranges) = self.text_highlights::<InputComposition>(cx)?;
|
||||
Some(
|
||||
ranges
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(move |range| {
|
||||
range.start.to_offset_utf16(&snapshot)..range.end.to_offset_utf16(&snapshot)
|
||||
})
|
||||
|
@ -5969,7 +5972,6 @@ fn compute_scroll_position(
|
|||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Event {
|
||||
Activate,
|
||||
BufferEdited,
|
||||
Edited,
|
||||
Reparsed,
|
||||
|
@ -6025,7 +6027,7 @@ impl View for Editor {
|
|||
"Editor"
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
let focused_event = EditorFocused(cx.handle());
|
||||
cx.emit_global(focused_event);
|
||||
if let Some(rename) = self.pending_rename.as_ref() {
|
||||
|
@ -6046,7 +6048,7 @@ impl View for Editor {
|
|||
}
|
||||
}
|
||||
|
||||
fn on_blur(&mut self, cx: &mut ViewContext<Self>) {
|
||||
fn on_focus_out(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
let blurred_event = EditorBlurred(cx.handle());
|
||||
cx.emit_global(blurred_event);
|
||||
self.focused = false;
|
||||
|
@ -6133,10 +6135,8 @@ impl View for Editor {
|
|||
let new_selected_ranges = if let Some(range_utf16) = range_utf16 {
|
||||
let range_utf16 = OffsetUtf16(range_utf16.start)..OffsetUtf16(range_utf16.end);
|
||||
Some(this.selection_replacement_ranges(range_utf16, cx))
|
||||
} else if let Some(marked_ranges) = this.marked_text_ranges(cx) {
|
||||
Some(marked_ranges)
|
||||
} else {
|
||||
None
|
||||
this.marked_text_ranges(cx)
|
||||
};
|
||||
|
||||
if let Some(new_selected_ranges) = new_selected_ranges {
|
||||
|
@ -6197,7 +6197,7 @@ impl View for Editor {
|
|||
let snapshot = this.buffer.read(cx).read(cx);
|
||||
this.selections
|
||||
.disjoint_anchors()
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
selection.start.bias_left(&*snapshot)..selection.end.bias_right(&*snapshot)
|
||||
})
|
||||
|
@ -6380,9 +6380,9 @@ impl<T: InvalidationRegion> InvalidationStack<T> {
|
|||
if selections.len() == region.ranges().len() {
|
||||
selections
|
||||
.iter()
|
||||
.zip(region.ranges().iter().map(|r| r.to_offset(&buffer)))
|
||||
.zip(region.ranges().iter().map(|r| r.to_offset(buffer)))
|
||||
.all(|(selection, invalidation_range)| {
|
||||
let head = selection.head().to_offset(&buffer);
|
||||
let head = selection.head().to_offset(buffer);
|
||||
invalidation_range.start <= head && invalidation_range.end >= head
|
||||
})
|
||||
} else {
|
||||
|
@ -6597,7 +6597,7 @@ pub fn styled_runs_for_code_label<'a>(
|
|||
} else {
|
||||
return Default::default();
|
||||
};
|
||||
let mut muted_style = style.clone();
|
||||
let mut muted_style = style;
|
||||
muted_style.highlight(fade_out);
|
||||
|
||||
let mut runs = SmallVec::<[(Range<usize>, HighlightStyle); 3]>::new();
|
||||
|
@ -7099,10 +7099,10 @@ mod tests {
|
|||
fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
use workspace::Item;
|
||||
let pane = cx.add_view(Default::default(), |cx| Pane::new(cx));
|
||||
let (_, pane) = cx.add_window(Default::default(), Pane::new);
|
||||
let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
|
||||
|
||||
cx.add_window(Default::default(), |cx| {
|
||||
cx.add_view(&pane, |cx| {
|
||||
let mut editor = build_editor(buffer.clone(), cx);
|
||||
let handle = cx.handle();
|
||||
editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
|
||||
|
@ -7189,7 +7189,7 @@ mod tests {
|
|||
Box::new(NavigationData {
|
||||
cursor_anchor: invalid_anchor.clone(),
|
||||
cursor_position: invalid_point,
|
||||
scroll_top_anchor: invalid_anchor.clone(),
|
||||
scroll_top_anchor: invalid_anchor,
|
||||
scroll_top_row: invalid_point.row,
|
||||
scroll_position: Default::default(),
|
||||
}),
|
||||
|
@ -8699,98 +8699,102 @@ mod tests {
|
|||
fn test_transpose(cx: &mut gpui::MutableAppContext) {
|
||||
cx.set_global(Settings::test(cx));
|
||||
|
||||
cx.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx);
|
||||
_ = cx
|
||||
.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx);
|
||||
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([1..1]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bac");
|
||||
assert_eq!(editor.selections.ranges(cx), [2..2]);
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([1..1]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bac");
|
||||
assert_eq!(editor.selections.ranges(cx), [2..2]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bca");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bca");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bac");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bac");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
|
||||
cx.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
|
||||
_ = cx
|
||||
.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
|
||||
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acb\nde");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acb\nde");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3]);
|
||||
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [5..5]);
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [5..5]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbde\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbde\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [6..6]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "acbd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [6..6]);
|
||||
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
|
||||
cx.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
|
||||
_ = cx
|
||||
.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
|
||||
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bacd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]);
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bacd\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcade\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcade\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcda\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcda\ne");
|
||||
assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcade\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcade\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcaed\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "bcaed\n");
|
||||
assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]);
|
||||
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
|
||||
cx.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx);
|
||||
_ = cx
|
||||
.add_window(Default::default(), |cx| {
|
||||
let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx);
|
||||
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀🍐✋");
|
||||
assert_eq!(editor.selections.ranges(cx), [8..8]);
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀🍐✋");
|
||||
assert_eq!(editor.selections.ranges(cx), [8..8]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀✋🍐");
|
||||
assert_eq!(editor.selections.ranges(cx), [11..11]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀✋🍐");
|
||||
assert_eq!(editor.selections.ranges(cx), [11..11]);
|
||||
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀🍐✋");
|
||||
assert_eq!(editor.selections.ranges(cx), [11..11]);
|
||||
editor.transpose(&Default::default(), cx);
|
||||
assert_eq!(editor.text(cx), "🏀🍐✋");
|
||||
assert_eq!(editor.selections.ranges(cx), [11..11]);
|
||||
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
editor
|
||||
})
|
||||
.1;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -9342,7 +9346,7 @@ mod tests {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

view.update(cx, |view, cx| {
@ -9501,7 +9505,7 @@ mod tests {
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
editor
.condition(&cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
.condition(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
.await;

editor.update(cx, |editor, cx| {
@ -9561,7 +9565,7 @@ mod tests {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

view.update(cx, |view, cx| {
@ -9737,7 +9741,7 @@ mod tests {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

view.update(cx, |view, cx| {
@ -9819,7 +9823,7 @@ mod tests {
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
editor
.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

editor.update(cx, |editor, cx| {
@ -10023,7 +10027,7 @@ mod tests {
}))
.await;

let fs = FakeFs::new(cx.background().clone());
let fs = FakeFs::new(cx.background());
fs.insert_file("/file.rs", Default::default()).await;

let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
@ -10137,7 +10141,7 @@ mod tests {
}))
.await;

let fs = FakeFs::new(cx.background().clone());
let fs = FakeFs::new(cx.background());
fs.insert_file("/file.rs", Default::default()).await;

let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
@ -10420,7 +10424,7 @@ mod tests {
.map(|completion_text| lsp::CompletionItem {
label: completion_text.to_string(),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: replace_range.clone(),
range: replace_range,
new_text: completion_text.to_string(),
})),
..Default::default()
@ -10851,7 +10855,7 @@ mod tests {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

view.update(cx, |view, cx| {
@ -11079,7 +11083,7 @@ mod tests {
let match_indices = [4, 6, 7, 8];
assert_eq!(
combine_syntax_and_fuzzy_match_highlights(
&string,
string,
Default::default(),
syntax_ranges.into_iter(),
&match_indices,
@ -110,10 +110,10 @@ impl EditorElement {
self.update_view(cx, |view, cx| view.snapshot(cx))
}

#[allow(clippy::too_many_arguments)]
fn mouse_down(
&self,
position: Vector2F,
cmd: bool,
alt: bool,
shift: bool,
mut click_count: usize,
@ -121,20 +121,6 @@ impl EditorElement {
paint: &mut PaintState,
cx: &mut EventContext,
) -> bool {
if cmd && paint.text_bounds.contains_point(position) {
let (point, target_point) =
paint.point_for_position(&self.snapshot(cx), layout, position);
if point == target_point {
if shift {
cx.dispatch_action(GoToFetchedTypeDefinition { point });
} else {
cx.dispatch_action(GoToFetchedDefinition { point });
}

return true;
}
}

if paint.gutter_bounds.contains_point(position) {
click_count = 3; // Simulate triple-click when clicking the gutter to select lines
} else if !paint.text_bounds.contains_point(position) {
@ -183,13 +169,39 @@ impl EditorElement {
true
}

fn mouse_up(&self, _position: Vector2F, cx: &mut EventContext) -> bool {
if self.view(cx.app.as_ref()).is_selecting() {
fn mouse_up(
&self,
position: Vector2F,
cmd: bool,
shift: bool,
layout: &mut LayoutState,
paint: &mut PaintState,
cx: &mut EventContext,
) -> bool {
let view = self.view(cx.app.as_ref());
let end_selection = view.is_selecting();
let selections_empty = view.are_selections_empty();

if end_selection {
cx.dispatch_action(Select(SelectPhase::End));
true
} else {
false
}

if selections_empty && cmd && paint.text_bounds.contains_point(position) {
let (point, target_point) =
paint.point_for_position(&self.snapshot(cx), layout, position);

if point == target_point {
if shift {
cx.dispatch_action(GoToFetchedTypeDefinition { point });
} else {
cx.dispatch_action(GoToFetchedDefinition { point });
}

return true;
}
}

end_selection
}

fn mouse_dragged(
@ -685,6 +697,7 @@ impl EditorElement {
cx.scene.pop_layer();
}

#[allow(clippy::too_many_arguments)]
fn paint_highlighted_range(
&self,
range: Range<DisplayPoint>,
@ -846,7 +859,7 @@ impl EditorElement {
.style
.placeholder_text
.as_ref()
.unwrap_or_else(|| &self.style.text);
.unwrap_or(&self.style.text);
let placeholder_text = snapshot.placeholder_text();
let placeholder_lines = placeholder_text
.as_ref()
@ -855,7 +868,7 @@ impl EditorElement {
.skip(rows.start as usize)
.chain(iter::repeat(""))
.take(rows.len());
return placeholder_lines
placeholder_lines
.map(|line| {
cx.text_layout_cache.layout_str(
line,
@ -870,7 +883,7 @@ impl EditorElement {
)],
)
})
.collect();
.collect()
} else {
let style = &self.style;
let chunks = snapshot.chunks(rows.clone(), true).map(|chunk| {
@ -915,14 +928,15 @@ impl EditorElement {
layout_highlighted_chunks(
chunks,
&style.text,
&cx.text_layout_cache,
&cx.font_cache,
cx.text_layout_cache,
cx.font_cache,
MAX_LINE_LEN,
rows.len() as usize,
)
}
}

#[allow(clippy::too_many_arguments)]
fn layout_blocks(
&mut self,
rows: Range<u32>,
@ -1288,7 +1302,9 @@ impl Element for EditorElement {
}

// Render the local selections in the leader's color when following.
let local_replica_id = view.leader_replica_id.unwrap_or(view.replica_id(cx));
let local_replica_id = view
.leader_replica_id
.unwrap_or_else(|| view.replica_id(cx));

selections.push((
local_replica_id,
@ -1346,19 +1362,19 @@ impl Element for EditorElement {

self.update_view(cx.app, |view, cx| {
let clamped = view.clamp_scroll_left(scroll_max.x());
let autoscrolled;
if autoscroll_horizontally {
autoscrolled = view.autoscroll_horizontally(

let autoscrolled = if autoscroll_horizontally {
view.autoscroll_horizontally(
start_row,
text_size.x(),
scroll_width,
em_width,
&line_layouts,
cx,
);
)
} else {
autoscrolled = false;
}
false
};

if clamped || autoscrolled {
snapshot = view.snapshot(cx);
@ -1533,36 +1549,28 @@ impl Element for EditorElement {
}

match event {
Event::MouseDown(MouseButtonEvent {
&Event::MouseDown(MouseButtonEvent {
button: MouseButton::Left,
position,
cmd,
alt,
shift,
click_count,
..
}) => self.mouse_down(
*position,
*cmd,
*alt,
*shift,
*click_count,
layout,
paint,
cx,
),
}) => self.mouse_down(position, alt, shift, click_count, layout, paint, cx),

Event::MouseDown(MouseButtonEvent {
&Event::MouseDown(MouseButtonEvent {
button: MouseButton::Right,
position,
..
}) => self.mouse_right_down(*position, layout, paint, cx),
}) => self.mouse_right_down(position, layout, paint, cx),

Event::MouseUp(MouseButtonEvent {
&Event::MouseUp(MouseButtonEvent {
button: MouseButton::Left,
position,
cmd,
shift,
..
}) => self.mouse_up(*position, cx),
}) => self.mouse_up(position, cmd, shift, layout, paint, cx),

Event::MouseMoved(MouseMovedEvent {
pressed_button: Some(MouseButton::Left),
@ -1988,8 +1996,8 @@ mod tests {
let layouts = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let mut presenter = cx.build_presenter(window_id, 30.);
let mut layout_cx = presenter.build_layout_context(Vector2F::zero(), false, cx);
element.layout_line_numbers(0..6, &Default::default(), &snapshot, &mut layout_cx)
let layout_cx = presenter.build_layout_context(Vector2F::zero(), false, cx);
element.layout_line_numbers(0..6, &Default::default(), &snapshot, &layout_cx)
});
assert_eq!(layouts.len(), 6);
}
@ -158,7 +158,7 @@ fn show_hover(
// query the LSP for hover info
let hover_request = cx.update(|cx| {
project.update(cx, |project, cx| {
project.hover(&buffer, buffer_position.clone(), cx)
project.hover(&buffer, buffer_position, cx)
})
});

@ -222,7 +222,7 @@ fn show_hover(

Some(InfoPopover {
project: project.clone(),
symbol_range: range.clone(),
symbol_range: range,
contents: hover_result.contents,
})
});
@ -54,8 +54,8 @@ impl FollowableItem for Editor {
})
})
.unwrap_or_else(|| {
cx.add_view(pane.window_id(), |cx| {
Editor::for_buffer(buffer, Some(project), cx)
pane.update(&mut cx, |_, cx| {
cx.add_view(|cx| Editor::for_buffer(buffer, Some(project), cx))
})
});
editor.update(&mut cx, |editor, cx| {
@ -76,14 +76,14 @@ impl FollowableItem for Editor {
})
.collect::<Result<Vec<_>>>()?;
if !selections.is_empty() {
editor.set_selections_from_remote(selections.into(), cx);
editor.set_selections_from_remote(selections, cx);
}

if let Some(anchor) = state.scroll_top_anchor {
editor.set_scroll_top_anchor(
Anchor {
buffer_id: Some(state.buffer_id as usize),
excerpt_id: excerpt_id.clone(),
excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor)
.ok_or_else(|| anyhow!("invalid scroll top"))?,
},
@ -198,19 +198,17 @@ impl FollowableItem for Editor {
if !selections.is_empty() {
self.set_selections_from_remote(selections, cx);
self.request_autoscroll_remotely(Autoscroll::Newest, cx);
} else {
if let Some(anchor) = message.scroll_top_anchor {
self.set_scroll_top_anchor(
Anchor {
buffer_id: Some(buffer_id),
excerpt_id: excerpt_id.clone(),
text_anchor: language::proto::deserialize_anchor(anchor)
.ok_or_else(|| anyhow!("invalid scroll top"))?,
},
vec2f(message.scroll_x, message.scroll_y),
cx,
);
}
} else if let Some(anchor) = message.scroll_top_anchor {
self.set_scroll_top_anchor(
Anchor {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor)
.ok_or_else(|| anyhow!("invalid scroll top"))?,
},
vec2f(message.scroll_x, message.scroll_y),
cx,
);
}
}
}
@ -436,8 +434,7 @@ impl Item for Editor {
.buffer()
.read(cx)
.as_singleton()
.expect("cannot call save_as on an excerpt list")
.clone();
.expect("cannot call save_as on an excerpt list");

project.update(cx, |project, cx| {
project.save_buffer_as(buffer, abs_path, cx)
@ -469,10 +466,6 @@ impl Item for Editor {
})
}

fn should_activate_item_on_event(event: &Event) -> bool {
matches!(event, Event::Activate)
}

fn should_close_item_on_event(event: &Event) -> bool {
matches!(event, Event::Closed)
}
@ -507,6 +500,12 @@ pub struct CursorPosition {
_observe_active_editor: Option<Subscription>,
}

impl Default for CursorPosition {
fn default() -> Self {
Self::new()
}
}

impl CursorPosition {
pub fn new() -> Self {
Self {
@ -8,8 +8,8 @@ use util::TryFutureExt;
use workspace::Workspace;

use crate::{
Anchor, DisplayPoint, Editor, EditorSnapshot, Event, GoToDefinition, GoToTypeDefinition,
Select, SelectPhase,
Anchor, DisplayPoint, Editor, EditorSnapshot, GoToDefinition, GoToTypeDefinition, Select,
SelectPhase,
};

#[derive(Clone, PartialEq)]
@ -83,7 +83,7 @@ pub fn update_go_to_definition_link(
&point,
&editor.link_go_to_definition_state.last_mouse_location,
) {
if a.cmp(&b, &snapshot.buffer_snapshot).is_eq() {
if a.cmp(b, &snapshot.buffer_snapshot).is_eq() {
return;
}
}
@ -126,7 +126,7 @@ pub fn cmd_shift_changed(
LinkDefinitionKind::Symbol
};

show_link_definition(kind, editor, point.clone(), snapshot, cx);
show_link_definition(kind, editor, point, snapshot, cx);
} else {
hide_link_definition(editor, cx)
}
@ -204,12 +204,10 @@ pub fn show_link_definition(
// query the LSP for definition info
let definition_request = cx.update(|cx| {
project.update(cx, |project, cx| match definition_kind {
LinkDefinitionKind::Symbol => {
project.definition(&buffer, buffer_position.clone(), cx)
}
LinkDefinitionKind::Symbol => project.definition(&buffer, buffer_position, cx),

LinkDefinitionKind::Type => {
project.type_definition(&buffer, buffer_position.clone(), cx)
project.type_definition(&buffer, buffer_position, cx)
}
})
});
@ -355,7 +353,6 @@ fn go_to_fetched_definition_of_kind(
editor_handle.update(cx, |editor, cx| {
if !editor.focused {
cx.focus_self();
cx.emit(Event::Activate);
}
});

@ -364,7 +361,7 @@ fn go_to_fetched_definition_of_kind(
editor_handle.update(cx, |editor, cx| {
editor.select(
&Select(SelectPhase::Begin {
position: point.clone(),
position: point,
add: false,
click_count: 1,
}),
@ -2,7 +2,7 @@ use context_menu::ContextMenuItem;
use gpui::{geometry::vector::Vector2F, impl_internal_actions, MutableAppContext, ViewContext};

use crate::{
DisplayPoint, Editor, EditorMode, Event, FindAllReferences, GoToDefinition, GoToTypeDefinition,
DisplayPoint, Editor, EditorMode, FindAllReferences, GoToDefinition, GoToTypeDefinition,
Rename, SelectMode, ToggleCodeActions,
};

@ -25,7 +25,6 @@ pub fn deploy_context_menu(
) {
if !editor.focused {
cx.focus_self();
cx.emit(Event::Activate);
}

// Don't show context menu for inline editors
@ -33,7 +33,7 @@ use text::{
use theme::SyntaxTheme;
use util::post_inc;

const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize];

pub type ExcerptId = Locator;

@ -289,10 +289,16 @@ impl MultiBuffer {
self.read(cx).has_conflict()
}

// The `is_empty` signature doesn't match what clippy expects
#[allow(clippy::len_without_is_empty)]
pub fn len(&self, cx: &AppContext) -> usize {
self.read(cx).len()
}

pub fn is_empty(&self, cx: &AppContext) -> bool {
self.len(cx) != 0
}

pub fn symbols_containing<T: ToOffset>(
&self,
offset: T,
@ -338,6 +344,7 @@ impl MultiBuffer {
_ => Default::default(),
};

#[allow(clippy::type_complexity)]
let mut buffer_edits: HashMap<usize, Vec<(Range<usize>, Arc<str>, bool, u32)>> =
Default::default();
let mut cursor = snapshot.excerpts.cursor::<usize>();
@ -594,13 +601,13 @@ impl MultiBuffer {
break;
}

let mut start = excerpt.range.context.start.clone();
let mut end = excerpt.range.context.end.clone();
let mut start = excerpt.range.context.start;
let mut end = excerpt.range.context.end;
if excerpt.id == selection.start.excerpt_id {
start = selection.start.text_anchor.clone();
start = selection.start.text_anchor;
}
if excerpt.id == selection.end.excerpt_id {
end = selection.end.text_anchor.clone();
end = selection.end.text_anchor;
}
selections_by_buffer
.entry(excerpt.buffer_id)
@ -666,7 +673,7 @@ impl MultiBuffer {
|
|||
while let Some(transaction) = self.history.pop_undo() {
|
||||
let mut undone = false;
|
||||
for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
|
||||
if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
|
||||
if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) {
|
||||
undone |= buffer.update(cx, |buffer, cx| {
|
||||
let undo_to = *buffer_transaction_id;
|
||||
if let Some(entry) = buffer.peek_undo_stack() {
|
||||
|
@ -693,7 +700,7 @@ impl MultiBuffer {
|
|||
while let Some(transaction) = self.history.pop_redo() {
|
||||
let mut redone = false;
|
||||
for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
|
||||
if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
|
||||
if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) {
|
||||
redone |= buffer.update(cx, |buffer, cx| {
|
||||
let redo_to = *buffer_transaction_id;
|
||||
if let Some(entry) = buffer.peek_redo_stack() {
|
||||
|
@ -982,8 +989,8 @@ impl MultiBuffer {
|
|||
}
|
||||
|
||||
// If point is at the end of the buffer, the last excerpt is returned
|
||||
pub fn point_to_buffer_offset<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
pub fn point_to_buffer_offset<T: ToOffset>(
|
||||
&self,
|
||||
point: T,
|
||||
cx: &AppContext,
|
||||
) -> Option<(ModelHandle<Buffer>, usize)> {
|
||||
|
@ -1004,8 +1011,8 @@ impl MultiBuffer {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn range_to_buffer_ranges<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
pub fn range_to_buffer_ranges<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
cx: &AppContext,
|
||||
) -> Vec<(ModelHandle<Buffer>, Range<usize>)> {
|
||||
|
@ -1112,8 +1119,8 @@ impl MultiBuffer {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn text_anchor_for_position<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
pub fn text_anchor_for_position<T: ToOffset>(
|
||||
&self,
|
||||
position: T,
|
||||
cx: &AppContext,
|
||||
) -> Option<(ModelHandle<Buffer>, language::Anchor)> {
|
||||
|
@ -1439,7 +1446,7 @@ impl MultiBuffer {
|
|||
.flat_map(|b| &b.excerpts)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
if excerpt_ids.len() == 0 || (rng.gen() && excerpt_ids.len() < max_excerpts) {
|
||||
if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
|
||||
let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
|
||||
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
|
||||
buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
|
||||
|
@ -1514,8 +1521,8 @@ impl MultiBuffer {
|
|||
.choose(rng)
|
||||
.map(|state| state.buffer.clone());
|
||||
|
||||
if rng.gen() && buffer.is_some() {
|
||||
buffer.unwrap().update(cx, |buffer, cx| {
|
||||
if let Some(buffer) = buffer {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
if rng.gen() {
|
||||
buffer.randomly_edit(rng, mutation_count, cx);
|
||||
} else {
|
||||
|
@ -1542,10 +1549,7 @@ impl MultiBufferSnapshot {
|
|||
.collect()
|
||||
}
|
||||
|
||||
pub fn reversed_chars_at<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
position: T,
|
||||
) -> impl Iterator<Item = char> + 'a {
|
||||
pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
|
||||
let mut offset = position.to_offset(self);
|
||||
let mut cursor = self.excerpts.cursor::<usize>();
|
||||
cursor.seek(&offset, Bias::Left, &());
|
||||
|
@ -1579,16 +1583,13 @@ impl MultiBufferSnapshot {
|
|||
.flat_map(|c| c.chars().rev())
|
||||
}
|
||||
|
||||
pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator<Item = char> + 'a {
|
||||
pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
|
||||
let offset = position.to_offset(self);
|
||||
self.text_for_range(offset..self.len())
|
||||
.flat_map(|chunk| chunk.chars())
|
||||
}
|
||||
|
||||
pub fn text_for_range<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
) -> impl Iterator<Item = &'a str> {
|
||||
pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = &str> + '_ {
|
||||
self.chunks(range, false).map(|chunk| chunk.text)
|
||||
}
|
||||
|
||||
|
@ -1655,6 +1656,10 @@ impl MultiBufferSnapshot {
self.excerpts.summary().text.len
}

pub fn is_empty(&self) -> bool {
self.excerpts.summary().text.len == 0
}

pub fn max_buffer_row(&self) -> u32 {
self.excerpts.summary().max_buffer_row
}
@ -1737,7 +1742,7 @@ impl MultiBufferSnapshot {
|
|||
*cursor.start() + overshoot
|
||||
}
|
||||
|
||||
pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferBytes<'a> {
|
||||
pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> MultiBufferBytes {
|
||||
let range = range.start.to_offset(self)..range.end.to_offset(self);
|
||||
let mut excerpts = self.excerpts.cursor::<usize>();
|
||||
excerpts.seek(&range.start, Bias::Right, &());
|
||||
|
@ -1760,7 +1765,7 @@ impl MultiBufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> {
|
||||
pub fn buffer_rows(&self, start_row: u32) -> MultiBufferRows {
|
||||
let mut result = MultiBufferRows {
|
||||
buffer_row_range: 0..0,
|
||||
excerpts: self.excerpts.cursor(),
|
||||
|
@ -1769,11 +1774,7 @@ impl MultiBufferSnapshot {
|
|||
result
|
||||
}
|
||||
|
||||
pub fn chunks<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
language_aware: bool,
|
||||
) -> MultiBufferChunks<'a> {
|
||||
pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> MultiBufferChunks {
|
||||
let range = range.start.to_offset(self)..range.end.to_offset(self);
|
||||
let mut chunks = MultiBufferChunks {
|
||||
range: range.clone(),
|
||||
|
@ -2033,7 +2034,7 @@ impl MultiBufferSnapshot {
|
|||
self.excerpts.summary().text.clone()
|
||||
}
|
||||
|
||||
pub fn text_summary_for_range<'a, D, O>(&'a self, range: Range<O>) -> D
|
||||
pub fn text_summary_for_range<D, O>(&self, range: Range<O>) -> D
|
||||
where
|
||||
D: TextDimension,
|
||||
O: ToOffset,
|
||||
|
@ -2204,15 +2205,15 @@ impl MultiBufferSnapshot {
|
|||
let (anchor_ix, anchor) = anchors.next().unwrap();
|
||||
let mut anchor = anchor.clone();
|
||||
|
||||
// Leave min and max anchors unchanged.
|
||||
if *old_excerpt_id == ExcerptId::max() || *old_excerpt_id == ExcerptId::min() {
|
||||
kept_position = true;
|
||||
}
|
||||
// If the old excerpt still exists at this location, then leave
|
||||
// the anchor unchanged.
|
||||
else if next_excerpt.map_or(false, |excerpt| {
|
||||
let id_invalid =
|
||||
*old_excerpt_id == ExcerptId::max() || *old_excerpt_id == ExcerptId::min();
|
||||
let still_exists = next_excerpt.map_or(false, |excerpt| {
|
||||
excerpt.id == *old_excerpt_id && excerpt.contains(&anchor)
|
||||
}) {
|
||||
});
|
||||
|
||||
// Leave min and max anchors unchanged if invalid or
|
||||
// if the old excerpt still exists at this location
|
||||
if id_invalid || still_exists {
|
||||
kept_position = true;
|
||||
}
|
||||
// If the old excerpt no longer exists at this location, then attempt to
|
||||
|
@ -2239,7 +2240,7 @@ impl MultiBufferSnapshot {
|
|||
.cmp(&excerpt.range.context.end, &excerpt.buffer)
|
||||
.is_gt()
|
||||
{
|
||||
text_anchor = excerpt.range.context.end.clone();
|
||||
text_anchor = excerpt.range.context.end;
|
||||
}
|
||||
Anchor {
|
||||
buffer_id: Some(excerpt.buffer_id),
|
||||
|
@ -2256,7 +2257,7 @@ impl MultiBufferSnapshot {
|
|||
.cmp(&excerpt.range.context.start, &excerpt.buffer)
|
||||
.is_lt()
|
||||
{
|
||||
text_anchor = excerpt.range.context.start.clone();
|
||||
text_anchor = excerpt.range.context.start;
|
||||
}
|
||||
Anchor {
|
||||
buffer_id: Some(excerpt.buffer_id),
|
||||
|
@ -2349,10 +2350,10 @@ impl MultiBufferSnapshot {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn excerpt_boundaries_in_range<'a, R, T>(
|
||||
&'a self,
|
||||
pub fn excerpt_boundaries_in_range<R, T>(
|
||||
&self,
|
||||
range: R,
|
||||
) -> impl Iterator<Item = ExcerptBoundary> + 'a
|
||||
) -> impl Iterator<Item = ExcerptBoundary> + '_
|
||||
where
|
||||
R: RangeBounds<T>,
|
||||
T: ToOffset,
|
||||
|
@ -2635,13 +2636,12 @@ impl MultiBufferSnapshot {
|
|||
cursor
|
||||
.take_while(move |excerpt| excerpt.id <= range.end.excerpt_id)
|
||||
.flat_map(move |excerpt| {
|
||||
let mut query_range =
|
||||
excerpt.range.context.start.clone()..excerpt.range.context.end.clone();
|
||||
let mut query_range = excerpt.range.context.start..excerpt.range.context.end;
|
||||
if excerpt.id == range.start.excerpt_id {
|
||||
query_range.start = range.start.text_anchor.clone();
|
||||
query_range.start = range.start.text_anchor;
|
||||
}
|
||||
if excerpt.id == range.end.excerpt_id {
|
||||
query_range.end = range.end.text_anchor.clone();
|
||||
query_range.end = range.end.text_anchor;
|
||||
}
|
||||
|
||||
excerpt
|
||||
|
@ -2652,12 +2652,12 @@ impl MultiBufferSnapshot {
|
|||
let mut start = Anchor {
|
||||
buffer_id: Some(excerpt.buffer_id),
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: selection.start.clone(),
|
||||
text_anchor: selection.start,
|
||||
};
|
||||
let mut end = Anchor {
|
||||
buffer_id: Some(excerpt.buffer_id),
|
||||
excerpt_id: excerpt.id.clone(),
|
||||
text_anchor: selection.end.clone(),
|
||||
text_anchor: selection.end,
|
||||
};
|
||||
if range.start.cmp(&start, self).is_gt() {
|
||||
start = range.start.clone();
|
||||
|
@ -2862,11 +2862,7 @@ impl Excerpt {
|
|||
}
|
||||
}
|
||||
|
||||
fn chunks_in_range<'a>(
|
||||
&'a self,
|
||||
range: Range<usize>,
|
||||
language_aware: bool,
|
||||
) -> ExcerptChunks<'a> {
|
||||
fn chunks_in_range(&self, range: Range<usize>, language_aware: bool) -> ExcerptChunks {
|
||||
let content_start = self.range.context.start.to_offset(&self.buffer);
|
||||
let chunks_start = content_start + range.start;
|
||||
let chunks_end = content_start + cmp::min(range.end, self.text_summary.len);
|
||||
|
@ -2913,12 +2909,12 @@ impl Excerpt {
|
|||
.cmp(&self.range.context.start, &self.buffer)
|
||||
.is_lt()
|
||||
{
|
||||
self.range.context.start.clone()
|
||||
self.range.context.start
|
||||
} else if text_anchor
|
||||
.cmp(&self.range.context.end, &self.buffer)
|
||||
.is_gt()
|
||||
{
|
||||
self.range.context.end.clone()
|
||||
self.range.context.end
|
||||
} else {
|
||||
text_anchor
|
||||
}
|
||||
|
@ -3835,7 +3831,7 @@ mod tests {
"Removing excerpt {}: {:?}",
ix,
buffer
.text_for_range(range.to_offset(&buffer))
.text_for_range(range.to_offset(buffer))
.collect::<String>(),
);
}
@ -3851,7 +3847,7 @@ mod tests {
let bias = if rng.gen() { Bias::Left } else { Bias::Right };
log::info!("Creating anchor at {} with bias {:?}", offset, bias);
anchors.push(multibuffer.anchor_at(offset, bias));
anchors.sort_by(|a, b| a.cmp(&b, &multibuffer));
anchors.sort_by(|a, b| a.cmp(b, &multibuffer));
}
40..=44 if !anchors.is_empty() => {
let multibuffer = multibuffer.read(cx).read(cx);
@ -3896,7 +3892,7 @@ mod tests {
let prev_excerpt_id = excerpt_ids
.get(prev_excerpt_ix)
.cloned()
.unwrap_or(ExcerptId::max());
.unwrap_or_else(ExcerptId::max);
let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len());

log::info!(
@ -3984,11 +3980,7 @@ mod tests {

assert_eq!(
snapshot.max_buffer_row(),
expected_buffer_rows
.into_iter()
.filter_map(|r| r)
.max()
.unwrap()
expected_buffer_rows.into_iter().flatten().max().unwrap()
);

let mut excerpt_starts = excerpt_starts.into_iter();
@ -34,7 +34,7 @@ impl Anchor {
&self.excerpt_id
}

pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
if excerpt_id_cmp.is_eq() {
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
@ -111,15 +111,15 @@ impl AnchorRangeExt for Range<Anchor> {
fn cmp(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Ordering {
match self.start.cmp(&other.start, buffer) {
Ordering::Equal => other.end.cmp(&self.end, buffer),
ord @ _ => ord,
ord => ord,
}
}

fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize> {
self.start.to_offset(&content)..self.end.to_offset(&content)
self.start.to_offset(content)..self.end.to_offset(content)
}

fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
self.start.to_point(&content)..self.end.to_point(&content)
self.start.to_point(content)..self.end.to_point(content)
}
}
@ -68,7 +68,7 @@ impl SelectionsCollection {
|
|||
self.pending = other.pending.clone();
|
||||
}
|
||||
|
||||
pub fn count<'a>(&self) -> usize {
|
||||
pub fn count(&self) -> usize {
|
||||
let mut count = self.disjoint.len();
|
||||
if self.pending.is_some() {
|
||||
count += 1;
|
||||
|
@ -365,7 +365,7 @@ impl<'a> MutableSelectionsCollection<'a> {
|
|||
let mut changed = false;
|
||||
self.collection.disjoint = self
|
||||
.disjoint
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter(|selection| {
|
||||
let found = selection.id == selection_id;
|
||||
changed |= found;
|
||||
|
@ -464,7 +464,7 @@ impl<'a> MutableSelectionsCollection<'a> {
|
|||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
false
|
||||
}
|
||||
|
||||
pub fn insert_range<T>(&mut self, range: Range<T>)
|
||||
|
@ -729,8 +729,7 @@ impl<'a> MutableSelectionsCollection<'a> {
|
|||
kept_end
|
||||
};
|
||||
if !kept_head {
|
||||
selections_with_lost_position
|
||||
.insert(selection.id, selection.head().excerpt_id.clone());
|
||||
selections_with_lost_position.insert(selection.id, selection.head().excerpt_id);
|
||||
}
|
||||
|
||||
Selection {
|
||||
|
@ -761,10 +760,8 @@ impl<'a> MutableSelectionsCollection<'a> {
|
|||
kept_end
|
||||
};
|
||||
if !kept_head {
|
||||
selections_with_lost_position.insert(
|
||||
pending.selection.id,
|
||||
pending.selection.head().excerpt_id.clone(),
|
||||
);
|
||||
selections_with_lost_position
|
||||
.insert(pending.selection.id, pending.selection.head().excerpt_id);
|
||||
}
|
||||
|
||||
pending.selection.start = start;
|
||||
|
@ -814,5 +811,5 @@ fn resolve<D: TextDimension + Ord + Sub<D, Output = D>>(
selection: &Selection<Anchor>,
buffer: &MultiBufferSnapshot,
) -> Selection<D> {
selection.map(|p| p.summary::<D>(&buffer))
selection.map(|p| p.summary::<D>(buffer))
}
@ -1,7 +1,7 @@
use fuzzy::PathMatch;
use gpui::{
actions, elements::*, AppContext, Entity, ModelHandle, MouseState, MutableAppContext,
RenderContext, Task, View, ViewContext, ViewHandle,
actions, elements::*, AnyViewHandle, AppContext, Entity, ModelHandle, MouseState,
MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle,
};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
@ -53,8 +53,10 @@ impl View for FileFinder {
ChildView::new(self.picker.clone()).boxed()
}

fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
cx.focus(&self.picker);
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
if cx.is_self_focused() {
cx.focus(&self.picker);
}
}
}

@ -260,7 +262,7 @@ impl PickerDelegate for FileFinder {
|
|||
self.labels_for_match(path_match);
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Label::new(file_name.to_string(), style.label.clone())
|
||||
Label::new(file_name, style.label.clone())
|
||||
.with_highlights(file_name_positions)
|
||||
.boxed(),
|
||||
)
|
||||
|
@ -331,7 +333,7 @@ mod tests {
|
|||
cx.dispatch_action(window_id, SelectNext);
|
||||
cx.dispatch_action(window_id, Confirm);
|
||||
active_pane
|
||||
.condition(&cx, |pane, _| pane.active_item().is_some())
|
||||
.condition(cx, |pane, _| pane.active_item().is_some())
|
||||
.await;
|
||||
cx.read(|cx| {
|
||||
let active_item = active_pane.read(cx).active_item().unwrap();
|
||||
|
|
|
@ -4,7 +4,7 @@ use std::{env::args, path::Path, time::Duration};
fn main() {
let paths = args().skip(1).collect::<Vec<_>>();
let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
assert!(paths.len() > 0, "Must pass 1 or more paths as arguments");
assert!(!paths.is_empty(), "Must pass 1 or more paths as arguments");
let (stream, _handle) = EventStream::new(&paths, Duration::from_millis(100));
stream.run(|events| {
eprintln!("event batch");
@ -233,11 +233,9 @@ impl EventStream {
}
}

if !events.is_empty() {
if !callback(events) {
fs::FSEventStreamStop(stream_ref);
cf::CFRunLoopStop(cf::CFRunLoopGetCurrent());
}
if !events.is_empty() && !callback(events) {
fs::FSEventStreamStop(stream_ref);
cf::CFRunLoopStop(cf::CFRunLoopGetCurrent());
}
}
}
@ -10,15 +10,15 @@ impl CharBag {

fn insert(&mut self, c: char) {
let c = c.to_ascii_lowercase();
if c >= 'a' && c <= 'z' {
if ('a'..='z').contains(&c) {
let mut count = self.0;
let idx = c as u8 - 'a' as u8;
count = count >> (idx * 2);
let idx = c as u8 - b'a';
count >>= idx * 2;
count = ((count << 1) | 1) & 3;
count = count << idx * 2;
count <<= idx * 2;
self.0 |= count;
} else if c >= '0' && c <= '9' {
let idx = c as u8 - '0' as u8;
} else if ('0'..='9').contains(&c) {
let idx = c as u8 - b'0';
self.0 |= 1 << (idx + 52);
} else if c == '-' {
self.0 |= 1 << 62;
@ -35,7 +35,7 @@ trait Match: Ord {

trait MatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool;
fn to_string<'a>(&'a self) -> Cow<'a, str>;
fn to_string(&self) -> Cow<'_, str>;
}

#[derive(Clone, Debug)]
@ -64,6 +64,9 @@ pub trait PathMatchCandidateSet<'a>: Send + Sync {
|
|||
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
|
||||
fn id(&self) -> usize;
|
||||
fn len(&self) -> usize;
|
||||
fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
fn prefix(&self) -> Arc<str>;
|
||||
fn candidates(&'a self, start: usize) -> Self::Candidates;
|
||||
}
|
||||
|
@ -239,7 +242,7 @@ pub async fn match_strings(
|
|||
if results.is_empty() {
|
||||
results = segment_result;
|
||||
} else {
|
||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(&a));
|
||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
|
||||
}
|
||||
}
|
||||
results
|
||||
|
@ -299,7 +302,7 @@ pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
|
|||
candidate_set.prefix(),
|
||||
candidates,
|
||||
results,
|
||||
&cancel_flag,
|
||||
cancel_flag,
|
||||
);
|
||||
}
|
||||
if tree_end >= segment_end {
|
||||
|
@ -317,7 +320,7 @@ pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
|
|||
if results.is_empty() {
|
||||
results = segment_result;
|
||||
} else {
|
||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(&a));
|
||||
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
|
||||
}
|
||||
}
|
||||
results
|
||||
|
@ -426,7 +429,7 @@ impl<'a> Matcher<'a> {
|
|||
lowercase_candidate_chars.push(c.to_ascii_lowercase());
|
||||
}
|
||||
|
||||
if !self.find_last_positions(&lowercase_prefix, &lowercase_candidate_chars) {
|
||||
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -439,13 +442,13 @@ impl<'a> Matcher<'a> {
|
|||
let score = self.score_match(
|
||||
&candidate_chars,
|
||||
&lowercase_candidate_chars,
|
||||
&prefix,
|
||||
&lowercase_prefix,
|
||||
prefix,
|
||||
lowercase_prefix,
|
||||
);
|
||||
|
||||
if score > 0.0 {
|
||||
let mut mat = build_match(&candidate, score);
|
||||
if let Err(i) = results.binary_search_by(|m| mat.cmp(&m)) {
|
||||
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
|
||||
if results.len() < self.max_results {
|
||||
mat.set_positions(self.match_positions.clone());
|
||||
results.insert(i, mat);
|
||||
|
@ -523,6 +526,7 @@ impl<'a> Matcher<'a> {
|
|||
score
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn recursive_score_match(
|
||||
&mut self,
|
||||
path: &[char],
|
||||
|
@ -579,9 +583,9 @@ impl<'a> Matcher<'a> {
|
|||
|
||||
if last == '/' {
|
||||
char_score = 0.9;
|
||||
} else if last == '-' || last == '_' || last == ' ' || last.is_numeric() {
|
||||
char_score = 0.8;
|
||||
} else if last.is_lowercase() && curr.is_uppercase() {
|
||||
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|
||||
|| (last.is_lowercase() && curr.is_uppercase())
|
||||
{
|
||||
char_score = 0.8;
|
||||
} else if last == '.' {
|
||||
char_score = 0.7;
|
||||
|
@ -662,18 +666,18 @@ mod tests {
let mut query: &[char] = &['d', 'c'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert_eq!(result, false);
assert!(!result);

query = &['c', 'd'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert_eq!(result, true);
assert!(result);
assert_eq!(matcher.last_positions, vec![2, 4]);

query = &['z', '/', 'z', 'f'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
assert_eq!(result, true);
assert!(result);
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}

@ -741,7 +745,7 @@ mod tests {
fn match_query<'a>(
query: &str,
smart_case: bool,
paths: &Vec<&'a str>,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
@ -1,7 +1,7 @@
use editor::{display_map::ToDisplayPoint, Autoscroll, DisplayPoint, Editor};
use gpui::{
actions, elements::*, geometry::vector::Vector2F, Axis, Entity, MutableAppContext,
RenderContext, View, ViewContext, ViewHandle,
actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, Axis, Entity,
MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
};
use menu::{Cancel, Confirm};
use settings::Settings;
@ -183,7 +183,7 @@ impl View for GoToLine {
.named("go to line")
}

fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
cx.focus(&self.line_editor);
}
}
@ -41,7 +41,7 @@ fn compile_context_predicate_parser() {
.compile("tree_sitter_context_predicate");
}

const SHADER_HEADER_PATH: &'static str = "./src/platform/mac/shaders/shaders.h";
const SHADER_HEADER_PATH: &str = "./src/platform/mac/shaders/shaders.h";

fn compile_metal_shaders() {
let shader_path = "./src/platform/mac/shaders/shaders.metal";
@ -85,11 +85,11 @@ impl gpui::Element for TextElement {
text,
font_size,
&[
(1, normal.clone()),
(1, bold.clone()),
(1, normal.clone()),
(1, bold.clone()),
(text.len() - 4, normal.clone()),
(1, normal),
(1, bold),
(1, normal),
(1, bold),
(text.len() - 4, normal),
],
);

@ -35,115 +35,132 @@ enum {
|
|||
sym_parenthesized = 16,
|
||||
};
|
||||
|
||||
static const char * const ts_symbol_names[] = {
|
||||
[ts_builtin_sym_end] = "end",
|
||||
[sym_identifier] = "identifier",
|
||||
[anon_sym_BANG] = "!",
|
||||
[anon_sym_AMP_AMP] = "&&",
|
||||
[anon_sym_PIPE_PIPE] = "||",
|
||||
[anon_sym_EQ_EQ] = "==",
|
||||
[anon_sym_BANG_EQ] = "!=",
|
||||
[anon_sym_LPAREN] = "(",
|
||||
[anon_sym_RPAREN] = ")",
|
||||
[sym_source] = "source",
|
||||
[sym__expression] = "_expression",
|
||||
[sym_not] = "not",
|
||||
[sym_and] = "and",
|
||||
[sym_or] = "or",
|
||||
[sym_equal] = "equal",
|
||||
[sym_not_equal] = "not_equal",
|
||||
[sym_parenthesized] = "parenthesized",
|
||||
static const char *const ts_symbol_names[] = {
|
||||
[ts_builtin_sym_end] = "end",
|
||||
[sym_identifier] = "identifier",
|
||||
[anon_sym_BANG] = "!",
|
||||
[anon_sym_AMP_AMP] = "&&",
|
||||
[anon_sym_PIPE_PIPE] = "||",
|
||||
[anon_sym_EQ_EQ] = "==",
|
||||
[anon_sym_BANG_EQ] = "!=",
|
||||
[anon_sym_LPAREN] = "(",
|
||||
[anon_sym_RPAREN] = ")",
|
||||
[sym_source] = "source",
|
||||
[sym__expression] = "_expression",
|
||||
[sym_not] = "not",
|
||||
[sym_and] = "and",
|
||||
[sym_or] = "or",
|
||||
[sym_equal] = "equal",
|
||||
[sym_not_equal] = "not_equal",
|
||||
[sym_parenthesized] = "parenthesized",
|
||||
};
|
||||
|
||||
static const TSSymbol ts_symbol_map[] = {
|
||||
[ts_builtin_sym_end] = ts_builtin_sym_end,
|
||||
[sym_identifier] = sym_identifier,
|
||||
[anon_sym_BANG] = anon_sym_BANG,
|
||||
[anon_sym_AMP_AMP] = anon_sym_AMP_AMP,
|
||||
[anon_sym_PIPE_PIPE] = anon_sym_PIPE_PIPE,
|
||||
[anon_sym_EQ_EQ] = anon_sym_EQ_EQ,
|
||||
[anon_sym_BANG_EQ] = anon_sym_BANG_EQ,
|
||||
[anon_sym_LPAREN] = anon_sym_LPAREN,
|
||||
[anon_sym_RPAREN] = anon_sym_RPAREN,
|
||||
[sym_source] = sym_source,
|
||||
[sym__expression] = sym__expression,
|
||||
[sym_not] = sym_not,
|
||||
[sym_and] = sym_and,
|
||||
[sym_or] = sym_or,
|
||||
[sym_equal] = sym_equal,
|
||||
[sym_not_equal] = sym_not_equal,
|
||||
[sym_parenthesized] = sym_parenthesized,
|
||||
[ts_builtin_sym_end] = ts_builtin_sym_end,
|
||||
[sym_identifier] = sym_identifier,
|
||||
[anon_sym_BANG] = anon_sym_BANG,
|
||||
[anon_sym_AMP_AMP] = anon_sym_AMP_AMP,
|
||||
[anon_sym_PIPE_PIPE] = anon_sym_PIPE_PIPE,
|
||||
[anon_sym_EQ_EQ] = anon_sym_EQ_EQ,
|
||||
[anon_sym_BANG_EQ] = anon_sym_BANG_EQ,
|
||||
[anon_sym_LPAREN] = anon_sym_LPAREN,
|
||||
[anon_sym_RPAREN] = anon_sym_RPAREN,
|
||||
[sym_source] = sym_source,
|
||||
[sym__expression] = sym__expression,
|
||||
[sym_not] = sym_not,
|
||||
[sym_and] = sym_and,
|
||||
[sym_or] = sym_or,
|
||||
[sym_equal] = sym_equal,
|
||||
[sym_not_equal] = sym_not_equal,
|
||||
[sym_parenthesized] = sym_parenthesized,
|
||||
};
|
||||
|
||||
static const TSSymbolMetadata ts_symbol_metadata[] = {
|
||||
[ts_builtin_sym_end] = {
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_identifier] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[anon_sym_BANG] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_AMP_AMP] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_PIPE_PIPE] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_EQ_EQ] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_BANG_EQ] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_LPAREN] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_RPAREN] = {
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[sym_source] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym__expression] = {
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_and] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_or] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_equal] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not_equal] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_parenthesized] = {
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[ts_builtin_sym_end] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_identifier] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[anon_sym_BANG] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_AMP_AMP] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_PIPE_PIPE] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_EQ_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_BANG_EQ] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_LPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[anon_sym_RPAREN] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = false,
|
||||
},
|
||||
[sym_source] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym__expression] =
|
||||
{
|
||||
.visible = false,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_and] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_or] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_not_equal] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
[sym_parenthesized] =
|
||||
{
|
||||
.visible = true,
|
||||
.named = true,
|
||||
},
|
||||
};
|
||||
|
||||
enum {
|
||||
|
@ -152,340 +169,378 @@ enum {
|
|||
field_right = 3,
|
||||
};
|
||||
|
||||
static const char * const ts_field_names[] = {
|
||||
[0] = NULL,
|
||||
[field_expression] = "expression",
|
||||
[field_left] = "left",
|
||||
[field_right] = "right",
|
||||
static const char *const ts_field_names[] = {
|
||||
[0] = NULL,
|
||||
[field_expression] = "expression",
|
||||
[field_left] = "left",
|
||||
[field_right] = "right",
|
||||
};
|
||||
|
||||
static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = {
|
||||
[1] = {.index = 0, .length = 1},
|
||||
[2] = {.index = 1, .length = 2},
|
||||
[1] = {.index = 0, .length = 1},
|
||||
[2] = {.index = 1, .length = 2},
|
||||
};
|
||||
|
||||
static const TSFieldMapEntry ts_field_map_entries[] = {
|
||||
[0] =
|
||||
{field_expression, 1},
|
||||
[1] =
|
||||
{field_left, 0},
|
||||
[0] = {field_expression, 1},
|
||||
[1] = {field_left, 0},
|
||||
{field_right, 2},
|
||||
};
|
||||
|
||||
static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = {
|
||||
[0] = {0},
|
||||
static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT]
|
||||
[MAX_ALIAS_SEQUENCE_LENGTH] = {
|
||||
[0] = {0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_non_terminal_alias_map[] = {
|
||||
0,
|
||||
0,
|
||||
};
|
||||
|
||||
static bool ts_lex(TSLexer *lexer, TSStateId state) {
|
||||
START_LEXER();
|
||||
eof = lexer->eof(lexer);
|
||||
switch (state) {
|
||||
case 0:
|
||||
if (eof) ADVANCE(7);
|
||||
if (lookahead == '!') ADVANCE(10);
|
||||
if (lookahead == '&') ADVANCE(2);
|
||||
if (lookahead == '(') ADVANCE(15);
|
||||
if (lookahead == ')') ADVANCE(16);
|
||||
if (lookahead == '=') ADVANCE(4);
|
||||
if (lookahead == '|') ADVANCE(5);
|
||||
if (lookahead == '\t' ||
|
||||
lookahead == '\n' ||
|
||||
lookahead == '\r' ||
|
||||
lookahead == ' ') SKIP(0)
|
||||
if (lookahead == '-' ||
|
||||
('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') ||
|
||||
lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z')) ADVANCE(8);
|
||||
END_STATE();
|
||||
case 1:
|
||||
if (lookahead == '!') ADVANCE(9);
|
||||
if (lookahead == '(') ADVANCE(15);
|
||||
if (lookahead == '\t' ||
|
||||
lookahead == '\n' ||
|
||||
lookahead == '\r' ||
|
||||
lookahead == ' ') SKIP(1)
|
||||
if (lookahead == '-' ||
|
||||
('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') ||
|
||||
lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z')) ADVANCE(8);
|
||||
END_STATE();
|
||||
case 2:
|
||||
if (lookahead == '&') ADVANCE(11);
|
||||
END_STATE();
|
||||
case 3:
|
||||
if (lookahead == '=') ADVANCE(14);
|
||||
END_STATE();
|
||||
case 4:
|
||||
if (lookahead == '=') ADVANCE(13);
|
||||
END_STATE();
|
||||
case 5:
|
||||
if (lookahead == '|') ADVANCE(12);
|
||||
END_STATE();
|
||||
case 6:
|
||||
if (eof) ADVANCE(7);
|
||||
if (lookahead == '!') ADVANCE(3);
|
||||
if (lookahead == '&') ADVANCE(2);
|
||||
if (lookahead == ')') ADVANCE(16);
|
||||
if (lookahead == '=') ADVANCE(4);
|
||||
if (lookahead == '|') ADVANCE(5);
|
||||
if (lookahead == '\t' ||
|
||||
lookahead == '\n' ||
|
||||
lookahead == '\r' ||
|
||||
lookahead == ' ') SKIP(6)
|
||||
END_STATE();
|
||||
case 7:
|
||||
ACCEPT_TOKEN(ts_builtin_sym_end);
|
||||
END_STATE();
|
||||
case 8:
|
||||
ACCEPT_TOKEN(sym_identifier);
|
||||
if (lookahead == '-' ||
|
||||
('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') ||
|
||||
lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z')) ADVANCE(8);
|
||||
END_STATE();
|
||||
case 9:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
END_STATE();
|
||||
case 10:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
if (lookahead == '=') ADVANCE(14);
|
||||
END_STATE();
|
||||
case 11:
|
||||
ACCEPT_TOKEN(anon_sym_AMP_AMP);
|
||||
END_STATE();
|
||||
case 12:
|
||||
ACCEPT_TOKEN(anon_sym_PIPE_PIPE);
|
||||
END_STATE();
|
||||
case 13:
|
||||
ACCEPT_TOKEN(anon_sym_EQ_EQ);
|
||||
END_STATE();
|
||||
case 14:
|
||||
ACCEPT_TOKEN(anon_sym_BANG_EQ);
|
||||
END_STATE();
|
||||
case 15:
|
||||
ACCEPT_TOKEN(anon_sym_LPAREN);
|
||||
END_STATE();
|
||||
case 16:
|
||||
ACCEPT_TOKEN(anon_sym_RPAREN);
|
||||
END_STATE();
|
||||
default:
|
||||
return false;
|
||||
case 0:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(10);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(0)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 1:
|
||||
if (lookahead == '!')
|
||||
ADVANCE(9);
|
||||
if (lookahead == '(')
|
||||
ADVANCE(15);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(1)
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 2:
|
||||
if (lookahead == '&')
|
||||
ADVANCE(11);
|
||||
END_STATE();
|
||||
case 3:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 4:
|
||||
if (lookahead == '=')
|
||||
ADVANCE(13);
|
||||
END_STATE();
|
||||
case 5:
|
||||
if (lookahead == '|')
|
||||
ADVANCE(12);
|
||||
END_STATE();
|
||||
case 6:
|
||||
if (eof)
|
||||
ADVANCE(7);
|
||||
if (lookahead == '!')
|
||||
ADVANCE(3);
|
||||
if (lookahead == '&')
|
||||
ADVANCE(2);
|
||||
if (lookahead == ')')
|
||||
ADVANCE(16);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(4);
|
||||
if (lookahead == '|')
|
||||
ADVANCE(5);
|
||||
if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' ||
|
||||
lookahead == ' ')
|
||||
SKIP(6)
|
||||
END_STATE();
|
||||
case 7:
|
||||
ACCEPT_TOKEN(ts_builtin_sym_end);
|
||||
END_STATE();
|
||||
case 8:
|
||||
ACCEPT_TOKEN(sym_identifier);
|
||||
if (lookahead == '-' || ('0' <= lookahead && lookahead <= '9') ||
|
||||
('A' <= lookahead && lookahead <= 'Z') || lookahead == '_' ||
|
||||
('a' <= lookahead && lookahead <= 'z'))
|
||||
ADVANCE(8);
|
||||
END_STATE();
|
||||
case 9:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
END_STATE();
|
||||
case 10:
|
||||
ACCEPT_TOKEN(anon_sym_BANG);
|
||||
if (lookahead == '=')
|
||||
ADVANCE(14);
|
||||
END_STATE();
|
||||
case 11:
|
||||
ACCEPT_TOKEN(anon_sym_AMP_AMP);
|
||||
END_STATE();
|
||||
case 12:
|
||||
ACCEPT_TOKEN(anon_sym_PIPE_PIPE);
|
||||
END_STATE();
|
||||
case 13:
|
||||
ACCEPT_TOKEN(anon_sym_EQ_EQ);
|
||||
END_STATE();
|
||||
case 14:
|
||||
ACCEPT_TOKEN(anon_sym_BANG_EQ);
|
||||
END_STATE();
|
||||
case 15:
|
||||
ACCEPT_TOKEN(anon_sym_LPAREN);
|
||||
END_STATE();
|
||||
case 16:
|
||||
ACCEPT_TOKEN(anon_sym_RPAREN);
|
||||
END_STATE();
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static const TSLexMode ts_lex_modes[STATE_COUNT] = {
|
||||
[0] = {.lex_state = 0},
|
||||
[1] = {.lex_state = 1},
|
||||
[2] = {.lex_state = 1},
|
||||
[3] = {.lex_state = 1},
|
||||
[4] = {.lex_state = 1},
|
||||
[5] = {.lex_state = 1},
|
||||
[6] = {.lex_state = 6},
|
||||
[7] = {.lex_state = 0},
|
||||
[8] = {.lex_state = 0},
|
||||
[9] = {.lex_state = 0},
|
||||
[10] = {.lex_state = 0},
|
||||
[11] = {.lex_state = 0},
|
||||
[12] = {.lex_state = 0},
|
||||
[13] = {.lex_state = 0},
|
||||
[14] = {.lex_state = 0},
|
||||
[15] = {.lex_state = 0},
|
||||
[16] = {.lex_state = 0},
|
||||
[17] = {.lex_state = 0},
|
||||
[0] = {.lex_state = 0}, [1] = {.lex_state = 1}, [2] = {.lex_state = 1},
|
||||
[3] = {.lex_state = 1}, [4] = {.lex_state = 1}, [5] = {.lex_state = 1},
|
||||
[6] = {.lex_state = 6}, [7] = {.lex_state = 0}, [8] = {.lex_state = 0},
|
||||
[9] = {.lex_state = 0}, [10] = {.lex_state = 0}, [11] = {.lex_state = 0},
|
||||
[12] = {.lex_state = 0}, [13] = {.lex_state = 0}, [14] = {.lex_state = 0},
|
||||
[15] = {.lex_state = 0}, [16] = {.lex_state = 0}, [17] = {.lex_state = 0},
|
||||
};
|
||||
|
||||
static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = {
|
||||
[0] = {
|
||||
[ts_builtin_sym_end] = ACTIONS(1),
|
||||
[sym_identifier] = ACTIONS(1),
|
||||
[anon_sym_BANG] = ACTIONS(1),
|
||||
[anon_sym_AMP_AMP] = ACTIONS(1),
|
||||
[anon_sym_PIPE_PIPE] = ACTIONS(1),
|
||||
[anon_sym_EQ_EQ] = ACTIONS(1),
|
||||
[anon_sym_BANG_EQ] = ACTIONS(1),
|
||||
[anon_sym_LPAREN] = ACTIONS(1),
|
||||
[anon_sym_RPAREN] = ACTIONS(1),
|
||||
},
|
||||
[1] = {
|
||||
[sym_source] = STATE(15),
|
||||
[sym__expression] = STATE(13),
|
||||
[sym_not] = STATE(13),
|
||||
[sym_and] = STATE(13),
|
||||
[sym_or] = STATE(13),
|
||||
[sym_equal] = STATE(13),
|
||||
[sym_not_equal] = STATE(13),
|
||||
[sym_parenthesized] = STATE(13),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[2] = {
|
||||
[sym__expression] = STATE(7),
|
||||
[sym_not] = STATE(7),
|
||||
[sym_and] = STATE(7),
|
||||
[sym_or] = STATE(7),
|
||||
[sym_equal] = STATE(7),
|
||||
[sym_not_equal] = STATE(7),
|
||||
[sym_parenthesized] = STATE(7),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[3] = {
|
||||
[sym__expression] = STATE(14),
|
||||
[sym_not] = STATE(14),
|
||||
[sym_and] = STATE(14),
|
||||
[sym_or] = STATE(14),
|
||||
[sym_equal] = STATE(14),
|
||||
[sym_not_equal] = STATE(14),
|
||||
[sym_parenthesized] = STATE(14),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[4] = {
|
||||
[sym__expression] = STATE(11),
|
||||
[sym_not] = STATE(11),
|
||||
[sym_and] = STATE(11),
|
||||
[sym_or] = STATE(11),
|
||||
[sym_equal] = STATE(11),
|
||||
[sym_not_equal] = STATE(11),
|
||||
[sym_parenthesized] = STATE(11),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[5] = {
|
||||
[sym__expression] = STATE(12),
|
||||
[sym_not] = STATE(12),
|
||||
[sym_and] = STATE(12),
|
||||
[sym_or] = STATE(12),
|
||||
[sym_equal] = STATE(12),
|
||||
[sym_not_equal] = STATE(12),
|
||||
[sym_parenthesized] = STATE(12),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[0] =
|
||||
{
|
||||
[ts_builtin_sym_end] = ACTIONS(1),
|
||||
[sym_identifier] = ACTIONS(1),
|
||||
[anon_sym_BANG] = ACTIONS(1),
|
||||
[anon_sym_AMP_AMP] = ACTIONS(1),
|
||||
[anon_sym_PIPE_PIPE] = ACTIONS(1),
|
||||
[anon_sym_EQ_EQ] = ACTIONS(1),
|
||||
[anon_sym_BANG_EQ] = ACTIONS(1),
|
||||
[anon_sym_LPAREN] = ACTIONS(1),
|
||||
[anon_sym_RPAREN] = ACTIONS(1),
|
||||
},
|
||||
[1] =
|
||||
{
|
||||
[sym_source] = STATE(15),
|
||||
[sym__expression] = STATE(13),
|
||||
[sym_not] = STATE(13),
|
||||
[sym_and] = STATE(13),
|
||||
[sym_or] = STATE(13),
|
||||
[sym_equal] = STATE(13),
|
||||
[sym_not_equal] = STATE(13),
|
||||
[sym_parenthesized] = STATE(13),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[2] =
|
||||
{
|
||||
[sym__expression] = STATE(7),
|
||||
[sym_not] = STATE(7),
|
||||
[sym_and] = STATE(7),
|
||||
[sym_or] = STATE(7),
|
||||
[sym_equal] = STATE(7),
|
||||
[sym_not_equal] = STATE(7),
|
||||
[sym_parenthesized] = STATE(7),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[3] =
|
||||
{
|
||||
[sym__expression] = STATE(14),
|
||||
[sym_not] = STATE(14),
|
||||
[sym_and] = STATE(14),
|
||||
[sym_or] = STATE(14),
|
||||
[sym_equal] = STATE(14),
|
||||
[sym_not_equal] = STATE(14),
|
||||
[sym_parenthesized] = STATE(14),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[4] =
|
||||
{
|
||||
[sym__expression] = STATE(11),
|
||||
[sym_not] = STATE(11),
|
||||
[sym_and] = STATE(11),
|
||||
[sym_or] = STATE(11),
|
||||
[sym_equal] = STATE(11),
|
||||
[sym_not_equal] = STATE(11),
|
||||
[sym_parenthesized] = STATE(11),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
[5] =
|
||||
{
|
||||
[sym__expression] = STATE(12),
|
||||
[sym_not] = STATE(12),
|
||||
[sym_and] = STATE(12),
|
||||
[sym_or] = STATE(12),
|
||||
[sym_equal] = STATE(12),
|
||||
[sym_not_equal] = STATE(12),
|
||||
[sym_parenthesized] = STATE(12),
|
||||
[sym_identifier] = ACTIONS(3),
|
||||
[anon_sym_BANG] = ACTIONS(5),
|
||||
[anon_sym_LPAREN] = ACTIONS(7),
|
||||
},
|
||||
};
|
||||
|
||||
static const uint16_t ts_small_parse_table[] = {
|
||||
[0] = 3,
|
||||
ACTIONS(11), 1,
|
||||
anon_sym_EQ_EQ,
|
||||
ACTIONS(13), 1,
|
||||
anon_sym_BANG_EQ,
|
||||
ACTIONS(9), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[13] = 1,
|
||||
ACTIONS(15), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[20] = 1,
|
||||
ACTIONS(17), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[27] = 1,
|
||||
ACTIONS(19), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[34] = 1,
|
||||
ACTIONS(21), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[41] = 1,
|
||||
ACTIONS(23), 4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[48] = 2,
|
||||
ACTIONS(27), 1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(25), 3,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[57] = 3,
|
||||
ACTIONS(27), 1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(29), 1,
|
||||
ts_builtin_sym_end,
|
||||
ACTIONS(31), 1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
[67] = 3,
|
||||
ACTIONS(27), 1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(31), 1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
ACTIONS(33), 1,
|
||||
anon_sym_RPAREN,
|
||||
[77] = 1,
|
||||
ACTIONS(35), 1,
|
||||
ts_builtin_sym_end,
|
||||
[81] = 1,
|
||||
ACTIONS(37), 1,
|
||||
sym_identifier,
|
||||
[85] = 1,
|
||||
ACTIONS(39), 1,
|
||||
sym_identifier,
|
||||
[0] = 3,
|
||||
ACTIONS(11),
|
||||
1,
|
||||
anon_sym_EQ_EQ,
|
||||
ACTIONS(13),
|
||||
1,
|
||||
anon_sym_BANG_EQ,
|
||||
ACTIONS(9),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[13] = 1,
|
||||
ACTIONS(15),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[20] = 1,
|
||||
ACTIONS(17),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[27] = 1,
|
||||
ACTIONS(19),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[34] = 1,
|
||||
ACTIONS(21),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[41] = 1,
|
||||
ACTIONS(23),
|
||||
4,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_AMP_AMP,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[48] = 2,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(25),
|
||||
3,
|
||||
ts_builtin_sym_end,
|
||||
anon_sym_PIPE_PIPE,
|
||||
anon_sym_RPAREN,
|
||||
[57] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(29),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
[67] = 3,
|
||||
ACTIONS(27),
|
||||
1,
|
||||
anon_sym_AMP_AMP,
|
||||
ACTIONS(31),
|
||||
1,
|
||||
anon_sym_PIPE_PIPE,
|
||||
ACTIONS(33),
|
||||
1,
|
||||
anon_sym_RPAREN,
|
||||
[77] = 1,
|
||||
ACTIONS(35),
|
||||
1,
|
||||
ts_builtin_sym_end,
|
||||
[81] = 1,
|
||||
ACTIONS(37),
|
||||
1,
|
||||
sym_identifier,
|
||||
[85] = 1,
|
||||
ACTIONS(39),
|
||||
1,
|
||||
sym_identifier,
|
||||
};
|
||||
|
||||
static const uint32_t ts_small_parse_table_map[] = {
|
||||
[SMALL_STATE(6)] = 0,
|
||||
[SMALL_STATE(7)] = 13,
|
||||
[SMALL_STATE(8)] = 20,
|
||||
[SMALL_STATE(9)] = 27,
|
||||
[SMALL_STATE(10)] = 34,
|
||||
[SMALL_STATE(11)] = 41,
|
||||
[SMALL_STATE(12)] = 48,
|
||||
[SMALL_STATE(13)] = 57,
|
||||
[SMALL_STATE(14)] = 67,
|
||||
[SMALL_STATE(15)] = 77,
|
||||
[SMALL_STATE(16)] = 81,
|
||||
[SMALL_STATE(17)] = 85,
|
||||
[SMALL_STATE(6)] = 0, [SMALL_STATE(7)] = 13, [SMALL_STATE(8)] = 20,
|
||||
[SMALL_STATE(9)] = 27, [SMALL_STATE(10)] = 34, [SMALL_STATE(11)] = 41,
|
||||
[SMALL_STATE(12)] = 48, [SMALL_STATE(13)] = 57, [SMALL_STATE(14)] = 67,
|
||||
[SMALL_STATE(15)] = 77, [SMALL_STATE(16)] = 81, [SMALL_STATE(17)] = 85,
|
||||
};
|
||||
|
||||
static const TSParseActionEntry ts_parse_actions[] = {
|
||||
[0] = {.entry = {.count = 0, .reusable = false}},
|
||||
[1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(),
|
||||
[3] = {.entry = {.count = 1, .reusable = true}}, SHIFT(6),
|
||||
[5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(2),
|
||||
[7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(3),
|
||||
[9] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym__expression, 1),
|
||||
[11] = {.entry = {.count = 1, .reusable = true}}, SHIFT(16),
|
||||
[13] = {.entry = {.count = 1, .reusable = true}}, SHIFT(17),
|
||||
[15] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_not, 2, .production_id = 1),
|
||||
[17] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_equal, 3, .production_id = 2),
|
||||
[19] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_not_equal, 3, .production_id = 2),
|
||||
[21] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_parenthesized, 3, .production_id = 1),
|
||||
[23] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_and, 3, .production_id = 2),
|
||||
[25] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_or, 3, .production_id = 2),
|
||||
[27] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4),
|
||||
[29] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_source, 1),
|
||||
[31] = {.entry = {.count = 1, .reusable = true}}, SHIFT(5),
|
||||
[33] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10),
|
||||
[35] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(),
|
||||
[37] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8),
|
||||
[39] = {.entry = {.count = 1, .reusable = true}}, SHIFT(9),
|
||||
[0] = {.entry = {.count = 0, .reusable = false}},
|
||||
[1] = {.entry = {.count = 1, .reusable = false}},
|
||||
RECOVER(),
|
||||
[3] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(6),
|
||||
[5] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(2),
|
||||
[7] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(3),
|
||||
[9] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym__expression, 1),
|
||||
[11] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(16),
|
||||
[13] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(17),
|
||||
[15] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not, 2, .production_id = 1),
|
||||
[17] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_equal, 3, .production_id = 2),
|
||||
[19] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_not_equal, 3, .production_id = 2),
|
||||
[21] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_parenthesized, 3, .production_id = 1),
|
||||
[23] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_and, 3, .production_id = 2),
|
||||
[25] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_or, 3, .production_id = 2),
|
||||
[27] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(4),
|
||||
[29] = {.entry = {.count = 1, .reusable = true}},
|
||||
REDUCE(sym_source, 1),
|
||||
[31] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(5),
|
||||
[33] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(10),
|
||||
[35] = {.entry = {.count = 1, .reusable = true}},
|
||||
ACCEPT_INPUT(),
|
||||
[37] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(8),
|
||||
[39] = {.entry = {.count = 1, .reusable = true}},
|
||||
SHIFT(9),
|
||||
};
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
@@ -497,30 +552,30 @@ extern "C" {
|
|||
|
||||
extern const TSLanguage *tree_sitter_context_predicate(void) {
|
||||
static const TSLanguage language = {
|
||||
.version = LANGUAGE_VERSION,
|
||||
.symbol_count = SYMBOL_COUNT,
|
||||
.alias_count = ALIAS_COUNT,
|
||||
.token_count = TOKEN_COUNT,
|
||||
.external_token_count = EXTERNAL_TOKEN_COUNT,
|
||||
.state_count = STATE_COUNT,
|
||||
.large_state_count = LARGE_STATE_COUNT,
|
||||
.production_id_count = PRODUCTION_ID_COUNT,
|
||||
.field_count = FIELD_COUNT,
|
||||
.max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
|
||||
.parse_table = &ts_parse_table[0][0],
|
||||
.small_parse_table = ts_small_parse_table,
|
||||
.small_parse_table_map = ts_small_parse_table_map,
|
||||
.parse_actions = ts_parse_actions,
|
||||
.symbol_names = ts_symbol_names,
|
||||
.field_names = ts_field_names,
|
||||
.field_map_slices = ts_field_map_slices,
|
||||
.field_map_entries = ts_field_map_entries,
|
||||
.symbol_metadata = ts_symbol_metadata,
|
||||
.public_symbol_map = ts_symbol_map,
|
||||
.alias_map = ts_non_terminal_alias_map,
|
||||
.alias_sequences = &ts_alias_sequences[0][0],
|
||||
.lex_modes = ts_lex_modes,
|
||||
.lex_fn = ts_lex,
|
||||
.version = LANGUAGE_VERSION,
|
||||
.symbol_count = SYMBOL_COUNT,
|
||||
.alias_count = ALIAS_COUNT,
|
||||
.token_count = TOKEN_COUNT,
|
||||
.external_token_count = EXTERNAL_TOKEN_COUNT,
|
||||
.state_count = STATE_COUNT,
|
||||
.large_state_count = LARGE_STATE_COUNT,
|
||||
.production_id_count = PRODUCTION_ID_COUNT,
|
||||
.field_count = FIELD_COUNT,
|
||||
.max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH,
|
||||
.parse_table = &ts_parse_table[0][0],
|
||||
.small_parse_table = ts_small_parse_table,
|
||||
.small_parse_table_map = ts_small_parse_table_map,
|
||||
.parse_actions = ts_parse_actions,
|
||||
.symbol_names = ts_symbol_names,
|
||||
.field_names = ts_field_names,
|
||||
.field_map_slices = ts_field_map_slices,
|
||||
.field_map_entries = ts_field_map_entries,
|
||||
.symbol_metadata = ts_symbol_metadata,
|
||||
.public_symbol_map = ts_symbol_map,
|
||||
.alias_map = ts_non_terminal_alias_map,
|
||||
.alias_sequences = &ts_alias_sequences[0][0],
|
||||
.lex_modes = ts_lex_modes,
|
||||
.lex_fn = ts_lex,
|
||||
};
|
||||
return &language;
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@@ -389,9 +389,9 @@ impl ElementBox {
|
|||
}
|
||||
}
|
||||
|
||||
impl Into<ElementRc> for ElementBox {
|
||||
fn into(self) -> ElementRc {
|
||||
self.0
|
||||
impl From<ElementBox> for ElementRc {
|
||||
fn from(val: ElementBox) -> Self {
|
||||
val.0
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -11,13 +11,14 @@ use crate::{
|
|||
};
|
||||
use crate::{Element, Event, EventContext, LayoutContext, PaintContext, SizeConstraint};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Empty {
|
||||
collapsed: bool,
|
||||
}
|
||||
|
||||
impl Empty {
|
||||
pub fn new() -> Self {
|
||||
Self { collapsed: false }
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn collapsed(mut self) -> Self {
|
||||
|
|
|
@@ -24,13 +24,13 @@ impl Expanded {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn to_full_width(mut self) -> Self {
|
||||
pub fn full_width(mut self) -> Self {
|
||||
self.full_width = true;
|
||||
self.full_height = false;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn to_full_height(mut self) -> Self {
|
||||
pub fn full_height(mut self) -> Self {
|
||||
self.full_width = false;
|
||||
self.full_height = true;
|
||||
self
|
||||
|
|
|
@@ -53,8 +53,8 @@ impl Element for Image {
|
|||
_: &mut LayoutContext,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
let desired_size = vec2f(
|
||||
self.style.width.unwrap_or(constraint.max.x()),
|
||||
self.style.height.unwrap_or(constraint.max.y()),
|
||||
self.style.width.unwrap_or_else(|| constraint.max.x()),
|
||||
self.style.height.unwrap_or_else(|| constraint.max.y()),
|
||||
);
|
||||
let size = constrain_size_preserving_aspect_ratio(
|
||||
constraint.constrain(desired_size),
|
||||
|
|
|
@@ -33,6 +33,7 @@ struct StateInner {
|
|||
logical_scroll_top: Option<ListOffset>,
|
||||
orientation: Orientation,
|
||||
overdraw: f32,
|
||||
#[allow(clippy::type_complexity)]
|
||||
scroll_handler: Option<Box<dyn FnMut(Range<usize>, &mut EventContext)>>,
|
||||
}
|
||||
|
||||
|
@@ -311,19 +312,17 @@ impl Element for List {
|
|||
drop(cursor);
|
||||
state.items = new_items;
|
||||
|
||||
match event {
|
||||
Event::ScrollWheel(ScrollWheelEvent {
|
||||
position,
|
||||
delta,
|
||||
precise,
|
||||
}) => {
|
||||
if bounds.contains_point(*position) {
|
||||
if state.scroll(scroll_top, bounds.height(), *delta, *precise, cx) {
|
||||
handled = true;
|
||||
}
|
||||
}
|
||||
if let Event::ScrollWheel(ScrollWheelEvent {
|
||||
position,
|
||||
delta,
|
||||
precise,
|
||||
}) = event
|
||||
{
|
||||
if bounds.contains_point(*position)
|
||||
&& state.scroll(scroll_top, bounds.height(), *delta, *precise, cx)
|
||||
{
|
||||
handled = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
handled
|
||||
|
|
|
@@ -129,7 +129,7 @@ impl Element for MouseEventHandler {
|
|||
|
||||
cx.scene.push_mouse_region(MouseRegion::from_handlers(
|
||||
cx.current_view_id(),
|
||||
Some(self.discriminant.clone()),
|
||||
Some(self.discriminant),
|
||||
hit_bounds,
|
||||
self.handlers.clone(),
|
||||
));
|
||||
|
|
|
@@ -74,7 +74,7 @@ impl Element for Overlay {
|
|||
size: &mut Self::LayoutState,
|
||||
cx: &mut PaintContext,
|
||||
) {
|
||||
let mut bounds = RectF::new(self.abs_position.unwrap_or(bounds.origin()), *size);
|
||||
let mut bounds = RectF::new(self.abs_position.unwrap_or_else(|| bounds.origin()), *size);
|
||||
cx.scene.push_stacking_context(None);
|
||||
|
||||
if self.hoverable {
|
||||
|
|
|
@@ -8,15 +8,14 @@ use crate::{
|
|||
SizeConstraint,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Stack {
|
||||
children: Vec<ElementBox>,
|
||||
}
|
||||
|
||||
impl Stack {
|
||||
pub fn new() -> Self {
|
||||
Stack {
|
||||
children: Vec::new(),
|
||||
}
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -100,7 +100,7 @@ impl Element for Text {
|
|||
chunks,
|
||||
&self.style,
|
||||
cx.text_layout_cache,
|
||||
&cx.font_cache,
|
||||
cx.font_cache,
|
||||
usize::MAX,
|
||||
self.text.matches('\n').count() + 1,
|
||||
);
|
||||
|
|
|
@@ -45,6 +45,7 @@ pub struct LayoutState {
|
|||
pub struct UniformList {
|
||||
state: UniformListState,
|
||||
item_count: usize,
|
||||
#[allow(clippy::type_complexity)]
|
||||
append_items: Box<dyn Fn(Range<usize>, &mut Vec<ElementBox>, &mut LayoutContext)>,
|
||||
padding_top: f32,
|
||||
padding_bottom: f32,
|
||||
|
@@ -310,19 +311,17 @@ impl Element for UniformList {
|
|||
handled = item.dispatch_event(event, cx) || handled;
|
||||
}
|
||||
|
||||
match event {
|
||||
Event::ScrollWheel(ScrollWheelEvent {
|
||||
position,
|
||||
delta,
|
||||
precise,
|
||||
}) => {
|
||||
if bounds.contains_point(*position) {
|
||||
if self.scroll(*position, *delta, *precise, layout.scroll_max, cx) {
|
||||
handled = true;
|
||||
}
|
||||
}
|
||||
if let Event::ScrollWheel(ScrollWheelEvent {
|
||||
position,
|
||||
delta,
|
||||
precise,
|
||||
}) = event
|
||||
{
|
||||
if bounds.contains_point(*position)
|
||||
&& self.scroll(*position, *delta, *precise, layout.scroll_max, cx)
|
||||
{
|
||||
handled = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
handled
|
||||
|
|
|
@@ -332,7 +332,7 @@ impl Deterministic {
|
|||
|
||||
pub fn now(&self) -> std::time::Instant {
|
||||
let state = self.state.lock();
|
||||
state.now.clone()
|
||||
state.now
|
||||
}
|
||||
|
||||
pub fn advance_clock(&self, duration: Duration) {
|
||||
|
@@ -681,6 +681,12 @@ impl Background {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for Background {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Scope<'a> {
|
||||
executor: Arc<Background>,
|
||||
futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
|
||||
|
|
|
@@ -117,7 +117,7 @@ impl FontCache {
|
|||
.font_selections
|
||||
.entry(family_id)
|
||||
.or_default()
|
||||
.insert(properties.clone(), font_id);
|
||||
.insert(*properties, font_id);
|
||||
Ok(font_id)
|
||||
}
|
||||
}
|
||||
|
@@ -257,10 +257,10 @@ mod tests {
|
|||
let arial = fonts.load_family(&["Arial"]).unwrap();
|
||||
let arial_regular = fonts.select_font(arial, &Properties::new()).unwrap();
|
||||
let arial_italic = fonts
|
||||
.select_font(arial, &Properties::new().style(Style::Italic))
|
||||
.select_font(arial, Properties::new().style(Style::Italic))
|
||||
.unwrap();
|
||||
let arial_bold = fonts
|
||||
.select_font(arial, &Properties::new().weight(Weight::BOLD))
|
||||
.select_font(arial, Properties::new().weight(Weight::BOLD))
|
||||
.unwrap();
|
||||
assert_ne!(arial_regular, arial_italic);
|
||||
assert_ne!(arial_regular, arial_bold);
|
||||
|
|
|
@@ -332,8 +332,7 @@ impl<'de> Deserialize<'de> for TextStyle {
|
|||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(Self::from_json(TextStyleJson::deserialize(deserializer)?)
|
||||
.map_err(|e| de::Error::custom(e))?)
|
||||
Self::from_json(TextStyleJson::deserialize(deserializer)?).map_err(de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -19,6 +19,12 @@ enum PathVertexKind {
|
|||
Quadratic,
|
||||
}
|
||||
|
||||
impl Default for PathBuilder {
|
||||
fn default() -> Self {
|
||||
PathBuilder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PathBuilder {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
|
@@ -58,10 +64,7 @@ impl PathBuilder {
|
|||
|
||||
pub fn build(mut self, color: Color, clip_bounds: Option<RectF>) -> Path {
|
||||
if let Some(clip_bounds) = clip_bounds {
|
||||
self.bounds = self
|
||||
.bounds
|
||||
.intersection(clip_bounds)
|
||||
.unwrap_or(RectF::default());
|
||||
self.bounds = self.bounds.intersection(clip_bounds).unwrap_or_default();
|
||||
}
|
||||
Path {
|
||||
bounds: self.bounds,
|
||||
|
|
|
@@ -202,7 +202,7 @@ impl Keymap {
|
|||
for (ix, binding) in bindings.iter().enumerate() {
|
||||
binding_indices_by_action_type
|
||||
.entry(binding.action.as_any().type_id())
|
||||
.or_insert_with(|| SmallVec::new())
|
||||
.or_insert_with(SmallVec::new)
|
||||
.push(ix);
|
||||
}
|
||||
Self {
|
||||
|
@@ -211,10 +211,7 @@ impl Keymap {
|
|||
}
|
||||
}
|
||||
|
||||
fn bindings_for_action_type<'a>(
|
||||
&'a self,
|
||||
action_type: TypeId,
|
||||
) -> impl Iterator<Item = &'a Binding> {
|
||||
fn bindings_for_action_type(&self, action_type: TypeId) -> impl Iterator<Item = &'_ Binding> {
|
||||
self.binding_indices_by_action_type
|
||||
.get(&action_type)
|
||||
.map(SmallVec::as_slice)
|
||||
|
@@ -253,7 +250,7 @@ impl Binding {
|
|||
|
||||
let keystrokes = keystrokes
|
||||
.split_whitespace()
|
||||
.map(|key| Keystroke::parse(key))
|
||||
.map(Keystroke::parse)
|
||||
.collect::<Result<_>>()?;
|
||||
|
||||
Ok(Self {
|
||||
|
@@ -281,7 +278,7 @@ impl Keystroke {
|
|||
let mut function = false;
|
||||
let mut key = None;
|
||||
|
||||
let mut components = source.split("-").peekable();
|
||||
let mut components = source.split('-').peekable();
|
||||
while let Some(component) = components.next() {
|
||||
match component {
|
||||
"ctrl" => ctrl = true,
|
||||
|
@@ -379,12 +376,12 @@ impl ContextPredicate {
|
|||
let kind = node.kind();
|
||||
|
||||
match kind {
|
||||
"source" => Self::from_node(node.child(0).ok_or(anyhow!(parse_error))?, source),
|
||||
"source" => Self::from_node(node.child(0).ok_or_else(|| anyhow!(parse_error))?, source),
|
||||
"identifier" => Ok(Self::Identifier(node.utf8_text(source)?.into())),
|
||||
"not" => {
|
||||
let child = Self::from_node(
|
||||
node.child_by_field_name("expression")
|
||||
.ok_or(anyhow!(parse_error))?,
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?;
|
||||
Ok(Self::Not(Box::new(child)))
|
||||
|
@@ -392,12 +389,12 @@ impl ContextPredicate {
|
|||
"and" | "or" => {
|
||||
let left = Box::new(Self::from_node(
|
||||
node.child_by_field_name("left")
|
||||
.ok_or(anyhow!(parse_error))?,
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?);
|
||||
let right = Box::new(Self::from_node(
|
||||
node.child_by_field_name("right")
|
||||
.ok_or(anyhow!(parse_error))?,
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
)?);
|
||||
if kind == "and" {
|
||||
|
@@ -409,12 +406,12 @@ impl ContextPredicate {
|
|||
"equal" | "not_equal" => {
|
||||
let left = node
|
||||
.child_by_field_name("left")
|
||||
.ok_or(anyhow!(parse_error))?
|
||||
.ok_or_else(|| anyhow!(parse_error))?
|
||||
.utf8_text(source)?
|
||||
.into();
|
||||
let right = node
|
||||
.child_by_field_name("right")
|
||||
.ok_or(anyhow!(parse_error))?
|
||||
.ok_or_else(|| anyhow!(parse_error))?
|
||||
.utf8_text(source)?
|
||||
.into();
|
||||
if kind == "equal" {
|
||||
|
@@ -425,7 +422,7 @@ impl ContextPredicate {
|
|||
}
|
||||
"parenthesized" => Self::from_node(
|
||||
node.child_by_field_name("expression")
|
||||
.ok_or(anyhow!(parse_error))?,
|
||||
.ok_or_else(|| anyhow!(parse_error))?,
|
||||
source,
|
||||
),
|
||||
_ => Err(anyhow!(parse_error)),
|
||||
|
@@ -604,7 +601,7 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn downcast<'a, A: Action>(action: &'a Option<Box<dyn Action>>) -> Option<&'a A> {
|
||||
fn downcast<A: Action>(action: &Option<Box<dyn Action>>) -> Option<&A> {
|
||||
action
|
||||
.as_ref()
|
||||
.and_then(|action| action.as_any().downcast_ref())
|
||||
|
|
|
@@ -74,7 +74,7 @@ pub(crate) trait ForegroundPlatform {
|
|||
fn on_quit(&self, callback: Box<dyn FnMut()>);
|
||||
fn on_event(&self, callback: Box<dyn FnMut(Event) -> bool>);
|
||||
fn on_open_urls(&self, callback: Box<dyn FnMut(Vec<String>)>);
|
||||
fn run(&self, on_finish_launching: Box<dyn FnOnce() -> ()>);
|
||||
fn run(&self, on_finish_launching: Box<dyn FnOnce()>);
|
||||
|
||||
fn on_menu_command(&self, callback: Box<dyn FnMut(&dyn Action)>);
|
||||
fn on_validate_menu_command(&self, callback: Box<dyn FnMut(&dyn Action) -> bool>);
|
||||
|
|
|
@@ -223,7 +223,7 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
|
|||
let cmd = modifiers.contains(NSEventModifierFlags::NSCommandKeyMask);
|
||||
let function = modifiers.contains(NSEventModifierFlags::NSFunctionKeyMask)
|
||||
&& first_char.map_or(true, |ch| {
|
||||
ch < NSUpArrowFunctionKey || ch > NSModeSwitchFunctionKey
|
||||
!(NSUpArrowFunctionKey..=NSModeSwitchFunctionKey).contains(&ch)
|
||||
});
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
|
|
|
@@ -53,6 +53,12 @@ impl FontSystem {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for FontSystem {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl platform::FontSystem for FontSystem {
|
||||
fn add_fonts(&self, fonts: &[Arc<Vec<u8>>]) -> anyhow::Result<()> {
|
||||
self.0.write().add_fonts(fonts)
|
||||
|
@@ -402,7 +408,7 @@ impl FontSystemState {
|
|||
fn wrap_line(&self, text: &str, font_id: FontId, font_size: f32, width: f32) -> Vec<usize> {
|
||||
let mut string = CFMutableAttributedString::new();
|
||||
string.replace_str(&CFString::new(text), CFRange::init(0, 0));
|
||||
let cf_range = CFRange::init(0 as isize, text.encode_utf16().count() as isize);
|
||||
let cf_range = CFRange::init(0, text.encode_utf16().count() as isize);
|
||||
let font = &self.fonts[font_id.0];
|
||||
unsafe {
|
||||
string.set_attribute(
|
||||
|
@@ -505,14 +511,14 @@ mod tests {
|
|||
};
|
||||
let menlo_italic = RunStyle {
|
||||
font_id: fonts
|
||||
.select_font(&menlo, &Properties::new().style(Style::Italic))
|
||||
.select_font(&menlo, Properties::new().style(Style::Italic))
|
||||
.unwrap(),
|
||||
color: Default::default(),
|
||||
underline: Default::default(),
|
||||
};
|
||||
let menlo_bold = RunStyle {
|
||||
font_id: fonts
|
||||
.select_font(&menlo, &Properties::new().weight(Weight::BOLD))
|
||||
.select_font(&menlo, Properties::new().weight(Weight::BOLD))
|
||||
.unwrap(),
|
||||
color: Default::default(),
|
||||
underline: Default::default(),
|
||||
|
@@ -599,7 +605,7 @@ mod tests {
|
|||
let name = format!("/Users/as-cii/Desktop/twog-{}.png", i);
|
||||
let path = Path::new(&name);
|
||||
let file = File::create(path).unwrap();
|
||||
let ref mut w = BufWriter::new(file);
|
||||
let w = &mut BufWriter::new(file);
|
||||
|
||||
let mut encoder = png::Encoder::new(w, bounds.width() as u32, bounds.height() as u32);
|
||||
encoder.set_color(png::ColorType::Grayscale);
|
||||
|
|
|
@@ -50,7 +50,7 @@ use time::UtcOffset;
|
|||
#[allow(non_upper_case_globals)]
|
||||
const NSUTF8StringEncoding: NSUInteger = 4;
|
||||
|
||||
const MAC_PLATFORM_IVAR: &'static str = "platform";
|
||||
const MAC_PLATFORM_IVAR: &str = "platform";
|
||||
static mut APP_CLASS: *const Class = ptr::null();
|
||||
static mut APP_DELEGATE_CLASS: *const Class = ptr::null();
|
||||
|
||||
|
@@ -118,7 +118,7 @@ pub struct MacForegroundPlatformState {
|
|||
validate_menu_command: Option<Box<dyn FnMut(&dyn Action) -> bool>>,
|
||||
will_open_menu: Option<Box<dyn FnMut()>>,
|
||||
open_urls: Option<Box<dyn FnMut(Vec<String>)>>,
|
||||
finish_launching: Option<Box<dyn FnOnce() -> ()>>,
|
||||
finish_launching: Option<Box<dyn FnOnce()>>,
|
||||
menu_actions: Vec<Box<dyn Action>>,
|
||||
}
|
||||
|
||||
|
@@ -277,7 +277,7 @@ impl platform::ForegroundPlatform for MacForegroundPlatform {
|
|||
self.0.borrow_mut().open_urls = Some(callback);
|
||||
}
|
||||
|
||||
fn run(&self, on_finish_launching: Box<dyn FnOnce() -> ()>) {
|
||||
fn run(&self, on_finish_launching: Box<dyn FnOnce()>) {
|
||||
self.0.borrow_mut().finish_launching = Some(on_finish_launching);
|
||||
|
||||
unsafe {
|
||||
|
@@ -533,7 +533,7 @@ impl platform::Platform for MacPlatform {
|
|||
fn read_from_clipboard(&self) -> Option<ClipboardItem> {
|
||||
unsafe {
|
||||
if let Some(text_bytes) = self.read_from_pasteboard(NSPasteboardTypeString) {
|
||||
let text = String::from_utf8_lossy(&text_bytes).to_string();
|
||||
let text = String::from_utf8_lossy(text_bytes).to_string();
|
||||
let hash_bytes = self
|
||||
.read_from_pasteboard(self.text_hash_pasteboard_type)
|
||||
.and_then(|bytes| bytes.try_into().ok())
|
||||
|
|
|
@@ -14,8 +14,7 @@ use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
|
|||
use shaders::ToFloat2 as _;
|
||||
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
|
||||
|
||||
const SHADERS_METALLIB: &'static [u8] =
|
||||
include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
|
||||
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
|
||||
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
|
||||
|
||||
pub struct Renderer {
|
||||
|
@@ -385,10 +384,10 @@ impl Renderer {
|
|||
drawable_size: Vector2F,
|
||||
command_encoder: &metal::RenderCommandEncoderRef,
|
||||
) {
|
||||
let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
|
||||
vec2f(0., 0.),
|
||||
drawable_size / scene.scale_factor(),
|
||||
)) * scene.scale_factor())
|
||||
let clip_bounds = (layer
|
||||
.clip_bounds()
|
||||
.unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
|
||||
* scene.scale_factor())
|
||||
.round();
|
||||
command_encoder.set_scissor_rect(metal::MTLScissorRect {
|
||||
x: clip_bounds.origin_x() as NSUInteger,
|
||||
|
@@ -438,8 +437,7 @@ impl Renderer {
|
|||
);
|
||||
|
||||
let buffer_contents = unsafe {
|
||||
(self.instances.contents() as *mut u8).offset(*offset as isize)
|
||||
as *mut shaders::GPUIShadow
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
|
||||
};
|
||||
for (ix, shadow) in shadows.iter().enumerate() {
|
||||
let shape_bounds = shadow.bounds * scale_factor;
|
||||
|
@@ -451,7 +449,7 @@ impl Renderer {
|
|||
color: shadow.color.to_uchar4(),
|
||||
};
|
||||
unsafe {
|
||||
*(buffer_contents.offset(ix as isize)) = shader_shadow;
|
||||
*(buffer_contents.add(ix)) = shader_shadow;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -503,8 +501,7 @@ impl Renderer {
|
|||
);
|
||||
|
||||
let buffer_contents = unsafe {
|
||||
(self.instances.contents() as *mut u8).offset(*offset as isize)
|
||||
as *mut shaders::GPUIQuad
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
|
||||
};
|
||||
for (ix, quad) in quads.iter().enumerate() {
|
||||
let bounds = quad.bounds * scale_factor;
|
||||
|
@@ -514,7 +511,7 @@ impl Renderer {
|
|||
size: bounds.size().round().to_float2(),
|
||||
background_color: quad
|
||||
.background
|
||||
.unwrap_or(Color::transparent_black())
|
||||
.unwrap_or_else(Color::transparent_black)
|
||||
.to_uchar4(),
|
||||
border_top: border_width * (quad.border.top as usize as f32),
|
||||
border_right: border_width * (quad.border.right as usize as f32),
|
||||
|
@@ -524,7 +521,7 @@ impl Renderer {
|
|||
corner_radius: quad.corner_radius * scale_factor,
|
||||
};
|
||||
unsafe {
|
||||
*(buffer_contents.offset(ix as isize)) = shader_quad;
|
||||
*(buffer_contents.add(ix)) = shader_quad;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -641,9 +638,8 @@ impl Renderer {
|
|||
);
|
||||
|
||||
unsafe {
|
||||
let buffer_contents = (self.instances.contents() as *mut u8)
|
||||
.offset(*offset as isize)
|
||||
as *mut shaders::GPUISprite;
|
||||
let buffer_contents =
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
|
||||
std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
|
||||
}
|
||||
|
||||
|
@@ -757,9 +753,8 @@ impl Renderer {
|
|||
);
|
||||
|
||||
unsafe {
|
||||
let buffer_contents = (self.instances.contents() as *mut u8)
|
||||
.offset(*offset as isize)
|
||||
as *mut shaders::GPUIImage;
|
||||
let buffer_contents =
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
|
||||
std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
|
||||
}
|
||||
|
||||
|
@@ -821,10 +816,9 @@ impl Renderer {
|
|||
}
|
||||
|
||||
unsafe {
|
||||
let buffer_contents = (self.instances.contents() as *mut u8)
|
||||
.offset(*offset as isize)
|
||||
as *mut shaders::GPUISprite;
|
||||
*buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
|
||||
let buffer_contents =
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
|
||||
*buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
|
||||
}
|
||||
|
||||
atlas_sprite_count += 1;
|
||||
|
@@ -917,8 +911,7 @@ impl Renderer {
|
|||
);
|
||||
|
||||
let buffer_contents = unsafe {
|
||||
(self.instances.contents() as *mut u8).offset(*offset as isize)
|
||||
as *mut shaders::GPUIUnderline
|
||||
(self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
|
||||
};
|
||||
for (ix, underline) in underlines.iter().enumerate() {
|
||||
let origin = underline.origin * scale_factor;
|
||||
|
@@ -935,7 +928,7 @@ impl Renderer {
|
|||
squiggly: underline.squiggly as u8,
|
||||
};
|
||||
unsafe {
|
||||
*(buffer_contents.offset(ix as isize)) = shader_underline;
|
||||
*(buffer_contents.add(ix)) = shader_underline;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -48,7 +48,7 @@ use std::{
|
|||
time::Duration,
|
||||
};
|
||||
|
||||
const WINDOW_STATE_IVAR: &'static str = "windowState";
|
||||
const WINDOW_STATE_IVAR: &str = "windowState";
|
||||
|
||||
static mut WINDOW_CLASS: *const Class = ptr::null();
|
||||
static mut VIEW_CLASS: *const Class = ptr::null();
|
||||
|
@@ -72,7 +72,7 @@ impl NSRange {
|
|||
self.location != NSNotFound as NSUInteger
|
||||
}
|
||||
|
||||
fn to_range(&self) -> Option<Range<usize>> {
|
||||
fn to_range(self) -> Option<Range<usize>> {
|
||||
if self.is_valid() {
|
||||
let start = self.location as usize;
|
||||
let end = start + self.length as usize;
|
||||
|
@@ -513,7 +513,7 @@ impl platform::Window for Window {
|
|||
};
|
||||
let _: () = msg_send![alert, setAlertStyle: alert_style];
|
||||
let _: () = msg_send![alert, setMessageText: ns_string(msg)];
|
||||
for (ix, answer) in answers.into_iter().enumerate() {
|
||||
for (ix, answer) in answers.iter().enumerate() {
|
||||
let button: id = msg_send![alert, addButtonWithTitle: ns_string(answer)];
|
||||
let _: () = msg_send![button, setTag: ix as NSInteger];
|
||||
}
|
||||
|
@@ -721,14 +721,14 @@ extern "C" fn yes(_: &Object, _: Sel) -> BOOL {
|
|||
extern "C" fn dealloc_window(this: &Object, _: Sel) {
|
||||
unsafe {
|
||||
drop_window_state(this);
|
||||
let () = msg_send![super(this, class!(NSWindow)), dealloc];
|
||||
let _: () = msg_send![super(this, class!(NSWindow)), dealloc];
|
||||
}
|
||||
}
|
||||
|
||||
extern "C" fn dealloc_view(this: &Object, _: Sel) {
|
||||
unsafe {
|
||||
drop_window_state(this);
|
||||
let () = msg_send![super(this, class!(NSView)), dealloc];
|
||||
let _: () = msg_send![super(this, class!(NSView)), dealloc];
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -912,7 +912,7 @@ extern "C" fn cancel_operation(this: &Object, _sel: Sel, _sender: id) {
|
|||
|
||||
extern "C" fn send_event(this: &Object, _: Sel, native_event: id) {
|
||||
unsafe {
|
||||
let () = msg_send![super(this, class!(NSWindow)), sendEvent: native_event];
|
||||
let _: () = msg_send![super(this, class!(NSWindow)), sendEvent: native_event];
|
||||
get_window_state(this).borrow_mut().performed_key_equivalent = false;
|
||||
}
|
||||
}
|
||||
|
@@ -991,7 +991,7 @@ extern "C" fn close_window(this: &Object, _: Sel) {
|
|||
callback();
|
||||
}
|
||||
|
||||
let () = msg_send![super(this, class!(NSWindow)), close];
|
||||
let _: () = msg_send![super(this, class!(NSWindow)), close];
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1157,17 +1157,22 @@ extern "C" fn insert_text(this: &Object, _: Sel, text: id, replacement_range: NS
|
|||
.flatten()
|
||||
.is_some();
|
||||
|
||||
if is_composing || text.chars().count() > 1 || pending_key_down.is_none() {
|
||||
with_input_handler(this, |input_handler| {
|
||||
input_handler.replace_text_in_range(replacement_range, text)
|
||||
});
|
||||
} else {
|
||||
let mut pending_key_down = pending_key_down.unwrap();
|
||||
pending_key_down.1 = Some(InsertText {
|
||||
replacement_range,
|
||||
text: text.to_string(),
|
||||
});
|
||||
window_state.borrow_mut().pending_key_down = Some(pending_key_down);
|
||||
match pending_key_down {
|
||||
None | Some(_) if is_composing || text.chars().count() > 1 => {
|
||||
with_input_handler(this, |input_handler| {
|
||||
input_handler.replace_text_in_range(replacement_range, text)
|
||||
});
|
||||
}
|
||||
|
||||
Some(mut pending_key_down) => {
|
||||
pending_key_down.1 = Some(InsertText {
|
||||
replacement_range,
|
||||
text: text.to_string(),
|
||||
});
|
||||
window_state.borrow_mut().pending_key_down = Some(pending_key_down);
|
||||
}
|
||||
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -74,7 +74,7 @@ impl super::ForegroundPlatform for ForegroundPlatform {
|
|||
|
||||
fn on_open_urls(&self, _: Box<dyn FnMut(Vec<String>)>) {}
|
||||
|
||||
fn run(&self, _on_finish_launching: Box<dyn FnOnce() -> ()>) {
|
||||
fn run(&self, _on_finish_launching: Box<dyn FnOnce()>) {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
|
|
|
@@ -10,8 +10,8 @@ use crate::{
|
|||
text_layout::TextLayoutCache,
|
||||
Action, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AssetCache, ElementBox, Entity,
|
||||
FontSystem, ModelHandle, MouseButtonEvent, MouseMovedEvent, MouseRegion, MouseRegionId,
|
||||
ReadModel, ReadView, RenderContext, RenderParams, Scene, UpgradeModelHandle, UpgradeViewHandle,
|
||||
View, ViewHandle, WeakModelHandle, WeakViewHandle,
|
||||
ParentId, ReadModel, ReadView, RenderContext, RenderParams, Scene, UpgradeModelHandle,
|
||||
UpgradeViewHandle, View, ViewHandle, WeakModelHandle, WeakViewHandle,
|
||||
};
|
||||
use collections::{HashMap, HashSet};
|
||||
use pathfinder_geometry::vector::{vec2f, Vector2F};
|
||||
|
@@ -26,7 +26,6 @@ use std::{
|
|||
pub struct Presenter {
|
||||
window_id: usize,
|
||||
pub(crate) rendered_views: HashMap<usize, ElementBox>,
|
||||
parents: HashMap<usize, usize>,
|
||||
cursor_regions: Vec<CursorRegion>,
|
||||
mouse_regions: Vec<(MouseRegion, usize)>,
|
||||
font_cache: Arc<FontCache>,
|
||||
|
@@ -52,7 +51,6 @@ impl Presenter {
|
|||
Self {
|
||||
window_id,
|
||||
rendered_views: cx.render_views(window_id, titlebar_height),
|
||||
parents: Default::default(),
|
||||
cursor_regions: Default::default(),
|
||||
mouse_regions: Default::default(),
|
||||
font_cache,
|
||||
|
@@ -67,22 +65,22 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn dispatch_path(&self, app: &AppContext) -> Vec<usize> {
|
||||
let mut path = Vec::new();
|
||||
if let Some(view_id) = app.focused_view_id(self.window_id) {
|
||||
self.compute_dispatch_path_from(view_id, &mut path)
|
||||
}
|
||||
path
|
||||
}
|
||||
// pub fn dispatch_path(&self, app: &AppContext) -> Vec<usize> {
|
||||
// let mut path = Vec::new();
|
||||
// if let Some(view_id) = app.focused_view_id(self.window_id) {
|
||||
// self.compute_dispatch_path_from(view_id, &mut path)
|
||||
// }
|
||||
// path
|
||||
// }
|
||||
|
||||
pub(crate) fn compute_dispatch_path_from(&self, mut view_id: usize, path: &mut Vec<usize>) {
|
||||
path.push(view_id);
|
||||
while let Some(parent_id) = self.parents.get(&view_id).copied() {
|
||||
path.push(parent_id);
|
||||
view_id = parent_id;
|
||||
}
|
||||
path.reverse();
|
||||
}
|
||||
// pub(crate) fn compute_dispatch_path_from(&self, mut view_id: usize, path: &mut Vec<usize>) {
|
||||
// path.push(view_id);
|
||||
// while let Some(parent_id) = self.parents.get(&view_id).copied() {
|
||||
// path.push(parent_id);
|
||||
// view_id = parent_id;
|
||||
// }
|
||||
// path.reverse();
|
||||
// }
|
||||
|
||||
pub fn invalidate(
|
||||
&mut self,
|
||||
|
@@ -91,9 +89,8 @@ impl Presenter {
|
|||
) {
|
||||
cx.start_frame();
|
||||
for view_id in &invalidation.removed {
|
||||
invalidation.updated.remove(&view_id);
|
||||
self.rendered_views.remove(&view_id);
|
||||
self.parents.remove(&view_id);
|
||||
invalidation.updated.remove(view_id);
|
||||
self.rendered_views.remove(view_id);
|
||||
}
|
||||
for view_id in &invalidation.updated {
|
||||
self.rendered_views.insert(
|
||||
|
@@ -191,7 +188,6 @@ impl Presenter {
|
|||
LayoutContext {
|
||||
window_id: self.window_id,
|
||||
rendered_views: &mut self.rendered_views,
|
||||
parents: &mut self.parents,
|
||||
font_cache: &self.font_cache,
|
||||
font_system: cx.platform().fonts(),
|
||||
text_layout_cache: &self.text_layout_cache,
|
||||
|
@@ -289,7 +285,7 @@ impl Presenter {
|
|||
{
|
||||
dragged_region = Some((
|
||||
clicked_region.clone(),
|
||||
MouseRegionEvent::Drag(*prev_drag_position, e.clone()),
|
||||
MouseRegionEvent::Drag(*prev_drag_position, *e),
|
||||
));
|
||||
*prev_drag_position = *position;
|
||||
}
|
||||
|
@@ -344,21 +340,11 @@ impl Presenter {
|
|||
}
|
||||
|
||||
invalidated_views.extend(event_cx.invalidated_views);
|
||||
let dispatch_directives = event_cx.dispatched_actions;
|
||||
|
||||
for view_id in invalidated_views {
|
||||
cx.notify_view(self.window_id, view_id);
|
||||
}
|
||||
|
||||
let mut dispatch_path = Vec::new();
|
||||
for directive in dispatch_directives {
|
||||
dispatch_path.clear();
|
||||
if let Some(view_id) = directive.dispatcher_view_id {
|
||||
self.compute_dispatch_path_from(view_id, &mut dispatch_path);
|
||||
}
|
||||
cx.dispatch_action_any(self.window_id, &dispatch_path, directive.action.as_ref());
|
||||
}
|
||||
|
||||
handled
|
||||
} else {
|
||||
false
|
||||
|
@@ -372,9 +358,6 @@ impl Presenter {
|
|||
cx: &'a mut MutableAppContext,
|
||||
) -> (bool, EventContext<'a>) {
|
||||
let mut hover_regions = Vec::new();
|
||||
// let mut unhovered_regions = Vec::new();
|
||||
// let mut hovered_regions = Vec::new();
|
||||
|
||||
if let Event::MouseMoved(
|
||||
e @ MouseMovedEvent {
|
||||
position,
|
||||
|
@@ -383,7 +366,7 @@ impl Presenter {
|
|||
},
|
||||
) = event
|
||||
{
|
||||
if let None = pressed_button {
|
||||
if pressed_button.is_none() {
|
||||
let mut style_to_assign = CursorStyle::Arrow;
|
||||
for region in self.cursor_regions.iter().rev() {
|
||||
if region.bounds.contains_point(*position) {
|
||||
|
@@ -402,23 +385,17 @@ impl Presenter {
|
|||
if let Some(region_id) = region.id() {
|
||||
if !self.hovered_region_ids.contains(®ion_id) {
|
||||
invalidated_views.push(region.view_id);
|
||||
hover_regions.push((
|
||||
region.clone(),
|
||||
MouseRegionEvent::Hover(true, e.clone()),
|
||||
));
|
||||
hover_regions
|
||||
.push((region.clone(), MouseRegionEvent::Hover(true, *e)));
|
||||
self.hovered_region_ids.insert(region_id);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let Some(region_id) = region.id() {
|
||||
if self.hovered_region_ids.contains(®ion_id) {
|
||||
invalidated_views.push(region.view_id);
|
||||
hover_regions.push((
|
||||
region.clone(),
|
||||
MouseRegionEvent::Hover(false, e.clone()),
|
||||
));
|
||||
self.hovered_region_ids.remove(®ion_id);
|
||||
}
|
||||
} else if let Some(region_id) = region.id() {
|
||||
if self.hovered_region_ids.contains(®ion_id) {
|
||||
invalidated_views.push(region.view_id);
|
||||
hover_regions
|
||||
.push((region.clone(), MouseRegionEvent::Hover(false, *e)));
|
||||
self.hovered_region_ids.remove(®ion_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -446,7 +423,6 @@ impl Presenter {
|
|||
) -> EventContext<'a> {
|
||||
EventContext {
|
||||
rendered_views: &mut self.rendered_views,
|
||||
dispatched_actions: Default::default(),
|
||||
font_cache: &self.font_cache,
|
||||
text_layout_cache: &self.text_layout_cache,
|
||||
view_stack: Default::default(),
|
||||
|
@@ -473,15 +449,9 @@ impl Presenter {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct DispatchDirective {
|
||||
pub dispatcher_view_id: Option<usize>,
|
||||
pub action: Box<dyn Action>,
|
||||
}
|
||||
|
||||
pub struct LayoutContext<'a> {
|
||||
window_id: usize,
|
||||
rendered_views: &'a mut HashMap<usize, ElementBox>,
|
||||
parents: &'a mut HashMap<usize, usize>,
|
||||
view_stack: Vec<usize>,
|
||||
pub font_cache: &'a Arc<FontCache>,
|
||||
pub font_system: Arc<dyn FontSystem>,
|
||||
|
@@ -506,9 +476,43 @@ impl<'a> LayoutContext<'a> {
|
|||
}
|
||||
|
||||
fn layout(&mut self, view_id: usize, constraint: SizeConstraint) -> Vector2F {
|
||||
if let Some(parent_id) = self.view_stack.last() {
|
||||
self.parents.insert(view_id, *parent_id);
|
||||
let print_error = |view_id| {
|
||||
format!(
|
||||
"{} with id {}",
|
||||
self.app.name_for_view(self.window_id, view_id).unwrap(),
|
||||
view_id,
|
||||
)
|
||||
};
|
||||
match (
|
||||
self.view_stack.last(),
|
||||
self.app.parents.get(&(self.window_id, view_id)),
|
||||
) {
|
||||
(Some(layout_parent), Some(ParentId::View(app_parent))) => {
|
||||
if layout_parent != app_parent {
|
||||
panic!(
|
||||
"View {} was laid out with parent {} when it was constructed with parent {}",
|
||||
print_error(view_id),
|
||||
print_error(*layout_parent),
|
||||
print_error(*app_parent))
|
||||
}
|
||||
}
|
||||
(None, Some(ParentId::View(app_parent))) => panic!(
|
||||
"View {} was laid out without a parent when it was constructed with parent {}",
|
||||
print_error(view_id),
|
||||
print_error(*app_parent)
|
||||
),
|
||||
(Some(layout_parent), Some(ParentId::Root)) => panic!(
|
||||
"View {} was laid out with parent {} when it was constructed as a window root",
|
||||
print_error(view_id),
|
||||
print_error(*layout_parent),
|
||||
),
|
||||
(_, None) => panic!(
|
||||
"View {} did not have a registered parent in the app context",
|
||||
print_error(view_id),
|
||||
),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.view_stack.push(view_id);
|
||||
let mut rendered_view = self.rendered_views.remove(&view_id).unwrap();
|
||||
let size = rendered_view.layout(constraint, self);
|
||||
|
@@ -615,7 +619,7 @@ impl<'a> PaintContext<'a> {
|
|||
#[inline]
|
||||
pub fn paint_layer<F>(&mut self, clip_bounds: Option<RectF>, f: F)
|
||||
where
|
||||
F: FnOnce(&mut Self) -> (),
|
||||
F: FnOnce(&mut Self),
|
||||
{
|
||||
self.scene.push_layer(clip_bounds);
|
||||
f(self);
|
||||
|
@@ -637,7 +641,6 @@ impl<'a> Deref for PaintContext<'a> {
|
|||
|
||||
pub struct EventContext<'a> {
|
||||
rendered_views: &'a mut HashMap<usize, ElementBox>,
|
||||
dispatched_actions: Vec<DispatchDirective>,
|
||||
pub font_cache: &'a FontCache,
|
||||
pub text_layout_cache: &'a TextLayoutCache,
|
||||
pub app: &'a mut MutableAppContext,
|
||||
|
@@ -692,10 +695,8 @@ impl<'a> EventContext<'a> {
|
|||
}
|
||||
|
||||
pub fn dispatch_any_action(&mut self, action: Box<dyn Action>) {
|
||||
self.dispatched_actions.push(DispatchDirective {
|
||||
dispatcher_view_id: self.view_stack.last().copied(),
|
||||
action,
|
||||
});
|
||||
self.app
|
||||
.dispatch_any_action_at(self.window_id, *self.view_stack.last().unwrap(), action)
|
||||
}
|
||||
|
||||
pub fn dispatch_action<A: Action>(&mut self, action: A) {
|
||||
|
|
|
@@ -107,6 +107,7 @@ pub struct MouseRegionId {
|
|||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct HandlerSet {
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub set: HashMap<
|
||||
(Discriminant<MouseRegionEvent>, Option<MouseButton>),
|
||||
Rc<dyn Fn(MouseRegionEvent, &mut EventContext)>,
|
||||
|
@@ -115,6 +116,7 @@ pub struct HandlerSet {
|
|||
|
||||
impl HandlerSet {
|
||||
pub fn handle_all() -> Self {
|
||||
#[allow(clippy::type_complexity)]
|
||||
let mut set: HashMap<
|
||||
(Discriminant<MouseRegionEvent>, Option<MouseButton>),
|
||||
Rc<dyn Fn(MouseRegionEvent, &mut EventContext)>,
|
||||
|
|
|
@@ -1,6 +1,6 @@
|
|||
use crate::{
|
||||
executor, platform, Entity, FontCache, Handle, LeakDetector, MutableAppContext, Platform,
|
||||
Subscription, TestAppContext,
|
||||
elements::Empty, executor, platform, Element, ElementBox, Entity, FontCache, Handle,
|
||||
LeakDetector, MutableAppContext, Platform, RenderContext, Subscription, TestAppContext, View,
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use parking_lot::Mutex;
|
||||
|
@@ -162,3 +162,19 @@ where
|
|||
|
||||
Observation { rx, _subscription }
|
||||
}
|
||||
|
||||
pub struct EmptyView;
|
||||
|
||||
impl Entity for EmptyView {
|
||||
type Event = ();
|
||||
}
|
||||
|
||||
impl View for EmptyView {
|
||||
fn ui_name() -> &'static str {
|
||||
"empty view"
|
||||
}
|
||||
|
||||
fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
|
||||
Element::boxed(Empty::new())
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -66,7 +66,7 @@ impl TextLayoutCache {
|
|||
let mut curr_frame = RwLockUpgradableReadGuard::upgrade(curr_frame);
|
||||
if let Some((key, layout)) = self.prev_frame.lock().remove_entry(key) {
|
||||
curr_frame.insert(key, layout.clone());
|
||||
Line::new(layout.clone(), runs)
|
||||
Line::new(layout, runs)
|
||||
} else {
|
||||
let layout = Arc::new(self.fonts.layout_line(text, font_size, runs));
|
||||
let key = CacheKeyValue {
|
||||
|
@@ -81,7 +81,7 @@ impl TextLayoutCache {
|
|||
}
|
||||
|
||||
trait CacheKey {
|
||||
fn key<'a>(&'a self) -> CacheKeyRef<'a>;
|
||||
fn key(&self) -> CacheKeyRef;
|
||||
}
|
||||
|
||||
impl<'a> PartialEq for (dyn CacheKey + 'a) {
|
||||
|
@@ -98,7 +98,7 @@ impl<'a> Hash for (dyn CacheKey + 'a) {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq)]
|
||||
#[derive(Eq)]
|
||||
struct CacheKeyValue {
|
||||
text: String,
|
||||
font_size: OrderedFloat<f32>,
|
||||
|
@@ -106,15 +106,21 @@ struct CacheKeyValue {
|
|||
}
|
||||
|
||||
impl CacheKey for CacheKeyValue {
|
||||
fn key<'a>(&'a self) -> CacheKeyRef<'a> {
|
||||
fn key(&self) -> CacheKeyRef {
|
||||
CacheKeyRef {
|
||||
text: &self.text.as_str(),
|
||||
text: self.text.as_str(),
|
||||
font_size: self.font_size,
|
||||
runs: self.runs.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for CacheKeyValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.key().eq(&other.key())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for CacheKeyValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.key().hash(state);
|
||||
|
@@ -135,7 +141,7 @@ struct CacheKeyRef<'a> {
|
|||
}
|
||||
|
||||
impl<'a> CacheKey for CacheKeyRef<'a> {
|
||||
fn key<'b>(&'b self) -> CacheKeyRef<'b> {
|
||||
fn key(&self) -> CacheKeyRef {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
@ -242,6 +248,10 @@ impl Line {
|
|||
self.layout.len
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.layout.len == 0
|
||||
}
|
||||
|
||||
pub fn index_for_x(&self, x: f32) -> Option<usize> {
|
||||
if x >= self.layout.width {
|
||||
None
|
||||
|
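The new `is_empty` beside `len` above addresses clippy::len_without_is_empty, which expects any public `len()` to come with a matching emptiness check. A toy version of the same pattern:

struct Layout {
    len: usize,
}

impl Layout {
    fn len(&self) -> usize {
        self.len
    }

    // Callers can now write `layout.is_empty()` instead of `layout.len() == 0`.
    fn is_empty(&self) -> bool {
        self.len == 0
    }
}

fn main() {
    let layout = Layout { len: 0 };
    assert_eq!(layout.len(), 0);
    assert!(layout.is_empty());
}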
@ -300,7 +310,7 @@ impl Line {
|
|||
),
|
||||
Underline {
|
||||
color: Some(run_underline.color.unwrap_or(*run_color)),
|
||||
thickness: run_underline.thickness.into(),
|
||||
thickness: run_underline.thickness,
|
||||
squiggly: run_underline.squiggly,
|
||||
},
|
||||
));
|
||||
|
@ -484,7 +494,7 @@ impl LineWrapper {
|
|||
let mut prev_c = '\0';
|
||||
let mut char_indices = line.char_indices();
|
||||
iter::from_fn(move || {
|
||||
while let Some((ix, c)) = char_indices.next() {
|
||||
for (ix, c) in char_indices.by_ref() {
|
||||
if c == '\n' {
|
||||
continue;
|
||||
}
|
||||
|
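The wrapping loop above trades `while let Some((ix, c)) = char_indices.next()` for `for (ix, c) in char_indices.by_ref()` (clippy::while_let_on_iterator). `by_ref()` borrows the iterator rather than moving it, so it stays usable after the loop, which matters inside the surrounding `iter::from_fn` closure. A compact sketch with hypothetical input:

fn main() {
    let line = "ab\ncd";
    let mut char_indices = line.char_indices();
    // Borrowing with by_ref() keeps `char_indices` alive after the loop,
    // matching the old while-let behaviour.
    for (ix, c) in char_indices.by_ref() {
        if c == '\n' {
            continue;
        }
        println!("{ix}: {c}");
    }
    assert_eq!(char_indices.next(), None);
}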
@ -746,7 +756,7 @@ mod tests {
|
|||
let mut wrapper = LineWrapper::new(font_id, 16., font_system);
|
||||
assert_eq!(
|
||||
wrapper
|
||||
.wrap_shaped_line(&text, &line, 72.0)
|
||||
.wrap_shaped_line(text, &line, 72.0)
|
||||
.collect::<Vec<_>>(),
|
||||
&[
|
||||
ShapedBoundary {
|
||||
|
|
|
@ -25,7 +25,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
|
|||
NestedMeta::Meta(Meta::NameValue(meta)) => {
|
||||
let key_name = meta.path.get_ident().map(|i| i.to_string());
|
||||
let result = (|| {
|
||||
match key_name.as_ref().map(String::as_str) {
|
||||
match key_name.as_deref() {
|
||||
Some("retries") => max_retries = parse_int(&meta.lit)?,
|
||||
Some("iterations") => num_iterations = parse_int(&meta.lit)?,
|
||||
Some("seed") => starting_seed = parse_int(&meta.lit)?,
|
||||
|
|
|
@ -442,12 +442,11 @@ impl Buffer {
|
|||
}
|
||||
|
||||
fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>) -> Self {
|
||||
let saved_mtime;
|
||||
if let Some(file) = file.as_ref() {
|
||||
saved_mtime = file.mtime();
|
||||
let saved_mtime = if let Some(file) = file.as_ref() {
|
||||
file.mtime()
|
||||
} else {
|
||||
saved_mtime = UNIX_EPOCH;
|
||||
}
|
||||
UNIX_EPOCH
|
||||
};
|
||||
|
||||
Self {
|
||||
saved_mtime,
|
||||
|
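The `saved_mtime` hunk replaces a deferred `let` plus per-branch assignments with a single `let ... = if ... else ...` expression. A minimal stand-alone version, assuming a simplified `File` type:

use std::time::{SystemTime, UNIX_EPOCH};

struct File {
    mtime: SystemTime,
}

fn saved_mtime(file: Option<&File>) -> SystemTime {
    // Binding the if/else as one expression removes the uninitialized
    // `let saved_mtime;` and the duplicated assignments.
    if let Some(file) = file {
        file.mtime
    } else {
        UNIX_EPOCH
    }
}

fn main() {
    let file = File { mtime: SystemTime::now() };
    assert_eq!(saved_mtime(Some(&file)), file.mtime);
    assert_eq!(saved_mtime(None), UNIX_EPOCH);
}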
@ -737,9 +736,7 @@ impl Buffer {
|
|||
this.parsing_in_background = false;
|
||||
this.did_finish_parsing(new_tree, parsed_version, cx);
|
||||
|
||||
if parse_again && this.reparse(cx) {
|
||||
return;
|
||||
}
|
||||
if parse_again && this.reparse(cx) {}
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
|
@ -933,10 +930,12 @@ impl Buffer {
|
|||
indent_sizes.entry(row).or_insert_with(|| {
|
||||
let mut size = snapshot.indent_size_for_line(row);
|
||||
if size.kind == new_indent.kind {
|
||||
if delta > 0 {
|
||||
size.len = size.len + delta as u32;
|
||||
} else if delta < 0 {
|
||||
size.len = size.len.saturating_sub(-delta as u32);
|
||||
match delta.cmp(&0) {
|
||||
Ordering::Greater => size.len += delta as u32,
|
||||
Ordering::Less => {
|
||||
size.len = size.len.saturating_sub(-delta as u32)
|
||||
}
|
||||
Ordering::Equal => {}
|
||||
}
|
||||
}
|
||||
size
|
||||
|
@ -961,7 +960,7 @@ impl Buffer {
|
|||
let edits: Vec<_> = indent_sizes
|
||||
.into_iter()
|
||||
.filter_map(|(row, indent_size)| {
|
||||
let current_size = indent_size_for_line(&self, row);
|
||||
let current_size = indent_size_for_line(self, row);
|
||||
Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
|
||||
})
|
||||
.collect();
|
||||
|
@ -978,21 +977,23 @@ impl Buffer {
|
|||
return None;
|
||||
}
|
||||
|
||||
if new_size.len > current_size.len {
|
||||
let point = Point::new(row, 0);
|
||||
Some((
|
||||
point..point,
|
||||
iter::repeat(new_size.char())
|
||||
.take((new_size.len - current_size.len) as usize)
|
||||
.collect::<String>(),
|
||||
))
|
||||
} else if new_size.len < current_size.len {
|
||||
Some((
|
||||
match new_size.len.cmp(¤t_size.len) {
|
||||
Ordering::Greater => {
|
||||
let point = Point::new(row, 0);
|
||||
Some((
|
||||
point..point,
|
||||
iter::repeat(new_size.char())
|
||||
.take((new_size.len - current_size.len) as usize)
|
||||
.collect::<String>(),
|
||||
))
|
||||
}
|
||||
|
||||
Ordering::Less => Some((
|
||||
Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
|
||||
String::new(),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
)),
|
||||
|
||||
Ordering::Equal => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1599,7 +1600,7 @@ impl Deref for Buffer {
|
|||
|
||||
impl BufferSnapshot {
|
||||
pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
|
||||
indent_size_for_line(&self, row)
|
||||
indent_size_for_line(self, row)
|
||||
}
|
||||
|
||||
pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize {
|
||||
|
@ -1643,10 +1644,10 @@ impl BufferSnapshot {
|
|||
result
|
||||
}
|
||||
|
||||
fn suggest_autoindents<'a>(
|
||||
&'a self,
|
||||
fn suggest_autoindents(
|
||||
&self,
|
||||
row_range: Range<u32>,
|
||||
) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + 'a> {
|
||||
) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
|
||||
let language = self.language.as_ref()?;
|
||||
let grammar = language.grammar.as_ref()?;
|
||||
let config = &language.config;
|
||||
|
@ -1675,7 +1676,7 @@ impl BufferSnapshot {
|
|||
start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
|
||||
end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
|
||||
} else if Some(capture.index) == end_capture_ix {
|
||||
end = Some(Point::from_ts_point(capture.node.start_position().into()));
|
||||
end = Some(Point::from_ts_point(capture.node.start_position()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1733,15 +1734,17 @@ impl BufferSnapshot {
|
|||
let mut outdent_to_row = u32::MAX;
|
||||
|
||||
while let Some((indent_row, delta)) = indent_changes.peek() {
|
||||
if *indent_row == row {
|
||||
match delta {
|
||||
match indent_row.cmp(&row) {
|
||||
Ordering::Equal => match delta {
|
||||
Ordering::Less => outdent_from_prev_row = true,
|
||||
Ordering::Greater => indent_from_prev_row = true,
|
||||
_ => {}
|
||||
}
|
||||
} else if *indent_row > row {
|
||||
break;
|
||||
},
|
||||
|
||||
Ordering::Greater => break,
|
||||
Ordering::Less => {}
|
||||
}
|
||||
|
||||
indent_changes.next();
|
||||
}
|
||||
|
||||
|
@ -1805,11 +1808,7 @@ impl BufferSnapshot {
|
|||
None
|
||||
}
|
||||
|
||||
pub fn chunks<'a, T: ToOffset>(
|
||||
&'a self,
|
||||
range: Range<T>,
|
||||
language_aware: bool,
|
||||
) -> BufferChunks<'a> {
|
||||
pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
|
||||
let range = range.start.to_offset(self)..range.end.to_offset(self);
|
||||
|
||||
let mut tree = None;
|
||||
|
@ -1843,7 +1842,7 @@ impl BufferSnapshot {
|
|||
)
|
||||
}
|
||||
|
||||
pub fn for_each_line<'a>(&'a self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
|
||||
pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
|
||||
let mut line = String::new();
|
||||
let mut row = range.start.row;
|
||||
for chunk in self
|
||||
|
@ -1969,7 +1968,7 @@ impl BufferSnapshot {
|
|||
position: T,
|
||||
theme: Option<&SyntaxTheme>,
|
||||
) -> Option<Vec<OutlineItem<Anchor>>> {
|
||||
let position = position.to_offset(&self);
|
||||
let position = position.to_offset(self);
|
||||
let mut items =
|
||||
self.outline_items_containing(position.saturating_sub(1)..position + 1, theme)?;
|
||||
let mut prev_depth = None;
|
||||
|
@ -2050,7 +2049,7 @@ impl BufferSnapshot {
|
|||
|
||||
let mut offset = range.start;
|
||||
chunks.seek(offset);
|
||||
while let Some(mut chunk) = chunks.next() {
|
||||
for mut chunk in chunks.by_ref() {
|
||||
if chunk.text.len() > range.end - offset {
|
||||
chunk.text = &chunk.text[0..(range.end - offset)];
|
||||
offset = range.end;
|
||||
|
@ -2105,7 +2104,7 @@ impl BufferSnapshot {
|
|||
let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
|
||||
let mut cursor = QueryCursorHandle::new();
|
||||
let matches = cursor.set_byte_range(range).matches(
|
||||
&brackets_query,
|
||||
brackets_query,
|
||||
tree.root_node(),
|
||||
TextProvider(self.as_rope()),
|
||||
);
|
||||
|
@ -2120,17 +2119,17 @@ impl BufferSnapshot {
|
|||
.min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
|
||||
}
|
||||
|
||||
pub fn remote_selections_in_range<'a>(
|
||||
&'a self,
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn remote_selections_in_range(
|
||||
&self,
|
||||
range: Range<Anchor>,
|
||||
) -> impl 'a
|
||||
+ Iterator<
|
||||
) -> impl Iterator<
|
||||
Item = (
|
||||
ReplicaId,
|
||||
bool,
|
||||
impl 'a + Iterator<Item = &'a Selection<Anchor>>,
|
||||
impl Iterator<Item = &Selection<Anchor>> + '_,
|
||||
),
|
||||
> {
|
||||
> + '_ {
|
||||
self.remote_selections
|
||||
.iter()
|
||||
.filter(|(replica_id, set)| {
|
||||
|
@ -2165,8 +2164,7 @@ impl BufferSnapshot {
|
|||
T: 'a + Clone + ToOffset,
|
||||
O: 'a + FromAnchor,
|
||||
{
|
||||
self.diagnostics
|
||||
.range(search_range.clone(), self, true, reversed)
|
||||
self.diagnostics.range(search_range, self, true, reversed)
|
||||
}
|
||||
|
||||
pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
|
||||
|
@ -2469,10 +2467,7 @@ impl<'a> Iterator for BufferChunks<'a> {
|
|||
|
||||
impl QueryCursorHandle {
|
||||
pub(crate) fn new() -> Self {
|
||||
let mut cursor = QUERY_CURSORS
|
||||
.lock()
|
||||
.pop()
|
||||
.unwrap_or_else(|| QueryCursor::new());
|
||||
let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
|
||||
cursor.set_match_limit(64);
|
||||
QueryCursorHandle(Some(cursor))
|
||||
}
|
||||
|
@ -2614,7 +2609,7 @@ pub fn contiguous_ranges(
|
|||
values: impl Iterator<Item = u32>,
|
||||
max_len: usize,
|
||||
) -> impl Iterator<Item = Range<u32>> {
|
||||
let mut values = values.into_iter();
|
||||
let mut values = values;
|
||||
let mut current_range: Option<Range<u32>> = None;
|
||||
std::iter::from_fn(move || loop {
|
||||
if let Some(value) = values.next() {
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::{
|
|||
use sum_tree::{self, Bias, SumTree};
|
||||
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct DiagnosticSet {
|
||||
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
|
||||
}
|
||||
|
@ -167,24 +167,15 @@ impl DiagnosticSet {
|
|||
.map(|entry| entry.resolve(buffer))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DiagnosticSet {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
diagnostics: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Item for DiagnosticEntry<Anchor> {
|
||||
type Summary = Summary;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
Summary {
|
||||
start: self.range.start.clone(),
|
||||
end: self.range.end.clone(),
|
||||
min_start: self.range.start.clone(),
|
||||
max_end: self.range.end.clone(),
|
||||
start: self.range.start,
|
||||
end: self.range.end,
|
||||
min_start: self.range.start,
|
||||
max_end: self.range.end,
|
||||
count: 1,
|
||||
}
|
||||
}
|
||||
|
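Dropping the hand-written `impl Default for DiagnosticSet` in favour of adding `Default` to the derive list follows clippy::derivable_impls: a manual impl that only calls `Default::default()` on each field is equivalent to the derive. The same move on a toy type:

// Before: an explicit impl Default that just defaulted every field.
#[derive(Clone, Debug, Default)]
struct DiagnosticBag {
    entries: Vec<String>,
}

fn main() {
    let bag = DiagnosticBag::default();
    assert!(bag.entries.is_empty());
}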
@ -217,13 +208,13 @@ impl sum_tree::Summary for Summary {
|
|||
|
||||
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
||||
if other.min_start.cmp(&self.min_start, buffer).is_lt() {
|
||||
self.min_start = other.min_start.clone();
|
||||
self.min_start = other.min_start;
|
||||
}
|
||||
if other.max_end.cmp(&self.max_end, buffer).is_gt() {
|
||||
self.max_end = other.max_end.clone();
|
||||
self.max_end = other.max_end;
|
||||
}
|
||||
self.start = other.start.clone();
|
||||
self.end = other.end.clone();
|
||||
self.start = other.start;
|
||||
self.end = other.end;
|
||||
self.count += other.count;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -56,10 +56,7 @@ impl HighlightId {
|
|||
}
|
||||
|
||||
pub fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
|
||||
theme
|
||||
.highlights
|
||||
.get(self.0 as usize)
|
||||
.map(|entry| entry.1.clone())
|
||||
theme.highlights.get(self.0 as usize).map(|entry| entry.1)
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
|
|
|
@ -308,6 +308,7 @@ pub struct LanguageRegistry {
|
|||
lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
||||
lsp_binary_statuses_rx: async_broadcast::Receiver<(Arc<Language>, LanguageServerBinaryStatus)>,
|
||||
login_shell_env_loaded: Shared<Task<()>>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
lsp_binary_paths: Mutex<
|
||||
HashMap<
|
||||
LanguageServerName,
|
||||
|
@ -342,7 +343,7 @@ impl LanguageRegistry {
|
|||
if let Some(theme) = self.theme.read().clone() {
|
||||
language.set_theme(&theme.editor.syntax);
|
||||
}
|
||||
self.languages.write().push(language.clone());
|
||||
self.languages.write().push(language);
|
||||
*self.subscription.write().0.borrow_mut() = ();
|
||||
}
|
||||
|
||||
|
@ -409,7 +410,7 @@ impl LanguageRegistry {
|
|||
) -> Option<Task<Result<lsp::LanguageServer>>> {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
if language.fake_adapter.is_some() {
|
||||
let language = language.clone();
|
||||
let language = language;
|
||||
return Some(cx.spawn(|cx| async move {
|
||||
let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
|
||||
let (server, mut fake_server) = lsp::LanguageServer::fake(
|
||||
|
@ -474,7 +475,7 @@ impl LanguageRegistry {
|
|||
let server = lsp::LanguageServer::new(
|
||||
server_id,
|
||||
&server_binary_path,
|
||||
&server_args,
|
||||
server_args,
|
||||
&root_path,
|
||||
cx,
|
||||
)?;
|
||||
|
|
|
@ -408,10 +408,12 @@ pub async fn deserialize_completion(
|
|||
Ok(Completion {
|
||||
old_range: old_start..old_end,
|
||||
new_text: completion.new_text,
|
||||
label: label.unwrap_or(CodeLabel::plain(
|
||||
lsp_completion.label.clone(),
|
||||
lsp_completion.filter_text.as_deref(),
|
||||
)),
|
||||
label: label.unwrap_or_else(|| {
|
||||
CodeLabel::plain(
|
||||
lsp_completion.label.clone(),
|
||||
lsp_completion.filter_text.as_deref(),
|
||||
)
|
||||
}),
|
||||
lsp_completion,
|
||||
})
|
||||
}
|
||||
|
@ -465,7 +467,7 @@ pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transa
|
|||
.into_iter()
|
||||
.map(deserialize_local_timestamp)
|
||||
.collect(),
|
||||
start: deserialize_version(transaction.start.into()),
|
||||
start: deserialize_version(transaction.start),
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -107,7 +107,7 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) {
|
|||
let buffer_1_events = buffer_1_events.clone();
|
||||
cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
|
||||
Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
|
||||
event @ _ => buffer_1_events.borrow_mut().push(event),
|
||||
event => buffer_1_events.borrow_mut().push(event),
|
||||
})
|
||||
.detach();
|
||||
let buffer_2_events = buffer_2_events.clone();
|
||||
|
@ -190,7 +190,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
|||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.apply_diff(diff, cx).unwrap();
|
||||
assert_eq!(buffer.text(), text);
|
||||
assert_eq!(anchor.to_point(&buffer), Point::new(2, 3));
|
||||
assert_eq!(anchor.to_point(buffer), Point::new(2, 3));
|
||||
});
|
||||
|
||||
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
|
||||
|
@ -198,7 +198,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
|||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.apply_diff(diff, cx).unwrap();
|
||||
assert_eq!(buffer.text(), text);
|
||||
assert_eq!(anchor.to_point(&buffer), Point::new(4, 4));
|
||||
assert_eq!(anchor.to_point(buffer), Point::new(4, 4));
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -209,11 +209,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
|
||||
// Wait for the initial text to parse
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
"(source_file (function_item name: (identifier) ",
|
||||
"parameters: (parameters) ",
|
||||
|
@ -230,11 +228,11 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
buffer.update(cx, |buf, cx| {
|
||||
buf.start_transaction();
|
||||
|
||||
let offset = buf.text().find(")").unwrap();
|
||||
let offset = buf.text().find(')').unwrap();
|
||||
buf.edit([(offset..offset, "b: C")], None, cx);
|
||||
assert!(!buf.is_parsing());
|
||||
|
||||
let offset = buf.text().find("}").unwrap();
|
||||
let offset = buf.text().find('}').unwrap();
|
||||
buf.edit([(offset..offset, " d; ")], None, cx);
|
||||
assert!(!buf.is_parsing());
|
||||
|
||||
|
@ -242,11 +240,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d; }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
"(source_file (function_item name: (identifier) ",
|
||||
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
|
||||
|
@ -259,13 +255,13 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
// * turn field expression into a method call
|
||||
// * add a turbofish to the method call
|
||||
buffer.update(cx, |buf, cx| {
|
||||
let offset = buf.text().find(";").unwrap();
|
||||
let offset = buf.text().find(';').unwrap();
|
||||
buf.edit([(offset..offset, ".e")], None, cx);
|
||||
assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer.update(cx, |buf, cx| {
|
||||
let offset = buf.text().find(";").unwrap();
|
||||
let offset = buf.text().find(';').unwrap();
|
||||
buf.edit([(offset..offset, "(f)")], None, cx);
|
||||
assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
|
||||
assert!(buf.is_parsing());
|
||||
|
@ -276,11 +272,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
"(source_file (function_item name: (identifier) ",
|
||||
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
|
||||
|
@ -297,11 +291,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a() {}");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
"(source_file (function_item name: (identifier) ",
|
||||
"parameters: (parameters) ",
|
||||
|
@ -314,11 +306,9 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
"(source_file (function_item name: (identifier) ",
|
||||
"parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
|
||||
|
@ -340,21 +330,17 @@ async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
|||
});
|
||||
|
||||
// Wait for the initial text to parse
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, &cx),
|
||||
get_tree_sexp(&buffer, cx),
|
||||
"(source_file (expression_statement (block)))"
|
||||
);
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_language(Some(Arc::new(json_lang())), cx)
|
||||
});
|
||||
buffer
|
||||
.condition(&cx, |buffer, _| !buffer.is_parsing())
|
||||
.await;
|
||||
assert_eq!(get_tree_sexp(&buffer, &cx), "(document (object))");
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -417,7 +403,7 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
|
||||
// Without space, we only match on names
|
||||
assert_eq!(
|
||||
search(&outline, "oon", &cx).await,
|
||||
search(&outline, "oon", cx).await,
|
||||
&[
|
||||
("mod module", vec![]), // included as the parent of a match
|
||||
("enum LoginState", vec![]), // included as the parent of a match
|
||||
|
@ -427,18 +413,18 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
);
|
||||
|
||||
assert_eq!(
|
||||
search(&outline, "dp p", &cx).await,
|
||||
search(&outline, "dp p", cx).await,
|
||||
&[
|
||||
("impl Drop for Person", vec![5, 8, 9, 14]),
|
||||
("fn drop", vec![]),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
search(&outline, "dpn", &cx).await,
|
||||
search(&outline, "dpn", cx).await,
|
||||
&[("impl Drop for Person", vec![5, 14, 19])]
|
||||
);
|
||||
assert_eq!(
|
||||
search(&outline, "impl ", &cx).await,
|
||||
search(&outline, "impl ", cx).await,
|
||||
&[
|
||||
("impl Eq for Person", vec![0, 1, 2, 3, 4]),
|
||||
("impl Drop for Person", vec![0, 1, 2, 3, 4]),
|
||||
|
@ -530,9 +516,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
|||
]
|
||||
);
|
||||
|
||||
fn symbols_containing<'a>(
|
||||
fn symbols_containing(
|
||||
position: Point,
|
||||
snapshot: &'a BufferSnapshot,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> Vec<(String, Range<Point>)> {
|
||||
snapshot
|
||||
.symbols_containing(position, None)
|
||||
|
@ -799,7 +785,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
|
|||
Ok(())
|
||||
}
|
||||
"
|
||||
.replace("|", "") // included in the string to preserve trailing whites
|
||||
.replace('|', "") // included in the string to preserve trailing whites
|
||||
.unindent()
|
||||
);
|
||||
|
||||
|
@ -971,7 +957,7 @@ fn test_autoindent_block_mode(cx: &mut MutableAppContext) {
|
|||
buffer.undo(cx);
|
||||
buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
|
||||
buffer.edit(
|
||||
[(Point::new(2, 8)..Point::new(2, 8), inserted_text.clone())],
|
||||
[(Point::new(2, 8)..Point::new(2, 8), inserted_text)],
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: vec![0],
|
||||
}),
|
||||
|
@ -1098,7 +1084,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
|
|||
if let Event::Operation(op) = event {
|
||||
network
|
||||
.borrow_mut()
|
||||
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(&op)]);
|
||||
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
@ -1202,7 +1188,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
|
|||
if let Event::Operation(op) = event {
|
||||
network.borrow_mut().broadcast(
|
||||
buffer.replica_id(),
|
||||
vec![proto::serialize_operation(&op)],
|
||||
vec![proto::serialize_operation(op)],
|
||||
);
|
||||
}
|
||||
})
|
||||
|
|
|
@ -27,8 +27,8 @@ use std::{
|
|||
use std::{path::Path, process::Stdio};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
|
||||
const JSON_RPC_VERSION: &'static str = "2.0";
|
||||
const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
|
||||
const JSON_RPC_VERSION: &str = "2.0";
|
||||
const CONTENT_LEN_HEADER: &str = "Content-Length: ";
|
||||
|
||||
type NotificationHandler = Box<dyn Send + FnMut(Option<usize>, &str, AsyncAppContext)>;
|
||||
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
|
||||
|
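The `&'static str` to `&str` change on these constants is clippy::redundant_static_lifetimes: a `const` of reference type is `'static` either way, so the annotation is noise. For example:

const JSON_RPC_VERSION: &str = "2.0";
const CONTENT_LEN_HEADER: &str = "Content-Length: ";

fn main() {
    // The constant still coerces to &'static str; only the spelling changed.
    let header: &'static str = CONTENT_LEN_HEADER;
    println!("{header}{}", JSON_RPC_VERSION);
}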
@ -42,6 +42,7 @@ pub struct LanguageServer {
|
|||
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||
response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
|
||||
executor: Arc<executor::Background>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
||||
root_path: PathBuf,
|
||||
|
@ -112,7 +113,7 @@ impl LanguageServer {
|
|||
let working_dir = if root_path.is_dir() {
|
||||
root_path
|
||||
} else {
|
||||
root_path.parent().unwrap_or(Path::new("/"))
|
||||
root_path.parent().unwrap_or_else(|| Path::new("/"))
|
||||
};
|
||||
let mut server = process::Command::new(binary_path)
|
||||
.current_dir(working_dir)
|
||||
|
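`unwrap_or(Path::new("/"))` becomes `unwrap_or_else(|| Path::new("/"))` because `unwrap_or` evaluates its argument eagerly, which clippy::or_fun_call flags whenever the fallback comes from a function call. A small demonstration:

use std::path::Path;

fn working_dir(root_path: &Path) -> &Path {
    // The closure defers building the fallback until parent() is None.
    root_path.parent().unwrap_or_else(|| Path::new("/"))
}

fn main() {
    assert_eq!(working_dir(Path::new("/usr/bin")), Path::new("/usr"));
    assert_eq!(working_dir(Path::new("/")), Path::new("/"));
}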
@ -251,7 +252,7 @@ impl LanguageServer {
|
|||
capabilities: Default::default(),
|
||||
next_id: Default::default(),
|
||||
outbound_tx,
|
||||
executor: cx.background().clone(),
|
||||
executor: cx.background(),
|
||||
io_tasks: Mutex::new(Some((input_task, output_task))),
|
||||
output_done_rx: Mutex::new(Some(output_done_rx)),
|
||||
root_path: root_path.to_path_buf(),
|
||||
|
@ -641,7 +642,7 @@ impl LanguageServer {
|
|||
stdin_reader,
|
||||
None,
|
||||
Path::new("/"),
|
||||
cx.clone(),
|
||||
cx,
|
||||
move |msg| {
|
||||
notifications_tx
|
||||
.try_send((msg.method.to_string(), msg.params.get().to_string()))
|
||||
|
@ -651,7 +652,7 @@ impl LanguageServer {
|
|||
notifications_rx,
|
||||
};
|
||||
fake.handle_request::<request::Initialize, _, _>({
|
||||
let capabilities = capabilities.clone();
|
||||
let capabilities = capabilities;
|
||||
move |_, _| {
|
||||
let capabilities = capabilities.clone();
|
||||
let name = name.clone();
|
||||
|
@ -662,7 +663,6 @@ impl LanguageServer {
|
|||
name,
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -697,7 +697,7 @@ impl FakeLanguageServer {
|
|||
|
||||
loop {
|
||||
let (method, params) = self.notifications_rx.next().await?;
|
||||
if &method == T::METHOD {
|
||||
if method == T::METHOD {
|
||||
return Some(serde_json::from_str::<T::Params>(¶ms).unwrap());
|
||||
} else {
|
||||
log::info!("skipping message in fake language server {:?}", params);
|
||||
|
|
|
@ -4,8 +4,8 @@ use editor::{
|
|||
};
|
||||
use fuzzy::StringMatch;
|
||||
use gpui::{
|
||||
actions, elements::*, geometry::vector::Vector2F, AppContext, Entity, MouseState,
|
||||
MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle,
|
||||
actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, AppContext, Entity,
|
||||
MouseState, MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle,
|
||||
};
|
||||
use language::Outline;
|
||||
use ordered_float::OrderedFloat;
|
||||
|
@ -52,8 +52,10 @@ impl View for OutlineView {
|
|||
ChildView::new(self.picker.clone()).boxed()
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
cx.focus(&self.picker);
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(&self.picker);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -113,8 +115,8 @@ impl OutlineView {
|
|||
self.active_editor.update(cx, |active_editor, cx| {
|
||||
let snapshot = active_editor.snapshot(cx).display_snapshot;
|
||||
let buffer_snapshot = &snapshot.buffer_snapshot;
|
||||
let start = outline_item.range.start.to_point(&buffer_snapshot);
|
||||
let end = outline_item.range.end.to_point(&buffer_snapshot);
|
||||
let start = outline_item.range.start.to_point(buffer_snapshot);
|
||||
let end = outline_item.range.end.to_point(buffer_snapshot);
|
||||
let display_rows = start.to_display_point(&snapshot).row()
|
||||
..end.to_display_point(&snapshot).row() + 1;
|
||||
active_editor.highlight_rows(Some(display_rows));
|
||||
|
@ -181,8 +183,8 @@ impl PickerDelegate for OutlineView {
|
|||
.map(|(ix, item)| {
|
||||
let range = item.range.to_offset(&buffer);
|
||||
let distance_to_closest_endpoint = cmp::min(
|
||||
(range.start as isize - cursor_offset as isize).abs() as usize,
|
||||
(range.end as isize - cursor_offset as isize).abs() as usize,
|
||||
(range.start as isize - cursor_offset as isize).abs(),
|
||||
(range.end as isize - cursor_offset as isize).abs(),
|
||||
);
|
||||
let depth = if range.contains(&cursor_offset) {
|
||||
Some(item.depth)
|
||||
|
|
|
@ -7,8 +7,8 @@ use gpui::{
|
|||
geometry::vector::{vec2f, Vector2F},
|
||||
keymap,
|
||||
platform::CursorStyle,
|
||||
AppContext, Axis, Element, ElementBox, Entity, MouseButton, MouseState, MutableAppContext,
|
||||
RenderContext, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
AnyViewHandle, AppContext, Axis, Element, ElementBox, Entity, MouseButton, MouseState,
|
||||
MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle, WeakViewHandle,
|
||||
};
|
||||
use menu::{Cancel, Confirm, SelectFirst, SelectIndex, SelectLast, SelectNext, SelectPrev};
|
||||
use settings::Settings;
|
||||
|
@ -118,8 +118,10 @@ impl<D: PickerDelegate> View for Picker<D> {
|
|||
cx
|
||||
}
|
||||
|
||||
fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
|
||||
cx.focus(&self.query_editor);
|
||||
fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(&self.query_editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ impl __Buffer {
|
|||
pub extern "C" fn __alloc_buffer(len: u32) -> u32 {
|
||||
let vec = vec![0; len as usize];
|
||||
let buffer = unsafe { __Buffer::from_vec(vec) };
|
||||
return buffer.ptr;
|
||||
buffer.ptr
|
||||
}
|
||||
|
||||
/// Frees a given buffer, requires the size.
|
||||
|
|
|
@ -17,8 +17,7 @@ fn main() {
|
|||
|
||||
// Clear out and recreate the plugin bin directory
|
||||
let _ = std::fs::remove_dir_all(base.join("bin"));
|
||||
let _ =
|
||||
std::fs::create_dir_all(base.join("bin")).expect("Could not make plugins bin directory");
|
||||
std::fs::create_dir_all(base.join("bin")).expect("Could not make plugins bin directory");
|
||||
|
||||
// Compile the plugins using the same profile as the current Zed build
|
||||
let (profile_flags, profile_target) = match std::env::var("PROFILE").unwrap().as_str() {
|
||||
|
@ -43,7 +42,7 @@ fn main() {
|
|||
|
||||
// Get the target architecture for pre-cross-compilation of plugins
|
||||
// and create an engine with the appropriate config
|
||||
let target_triple = std::env::var("TARGET").unwrap().to_string();
|
||||
let target_triple = std::env::var("TARGET").unwrap();
|
||||
println!("cargo:rerun-if-env-changed=TARGET");
|
||||
let engine = create_default_engine(&target_triple);
|
||||
|
||||
|
@ -77,7 +76,7 @@ fn create_default_engine(target_triple: &str) -> Engine {
|
|||
let mut config = Config::default();
|
||||
config
|
||||
.target(target_triple)
|
||||
.expect(&format!("Could not set target to `{}`", target_triple));
|
||||
.unwrap_or_else(|_| panic!("Could not set target to `{}`", target_triple));
|
||||
config.async_support(true);
|
||||
config.consume_fuel(true);
|
||||
Engine::new(&config).expect("Could not create precompilation engine")
|
||||
|
|
|
@ -69,13 +69,13 @@ mod tests {
|
|||
let unsorted = vec![1, 3, 4, 2, 5];
|
||||
let sorted = vec![1, 2, 3, 4, 5];
|
||||
|
||||
assert_eq!(runtime.call(&plugin.noop, ()).await.unwrap(), ());
|
||||
runtime.call(&plugin.noop, ()).await.unwrap();
|
||||
assert_eq!(runtime.call(&plugin.constant, ()).await.unwrap(), 27);
|
||||
assert_eq!(runtime.call(&plugin.identity, 58).await.unwrap(), 58);
|
||||
assert_eq!(runtime.call(&plugin.add, (3, 4)).await.unwrap(), 7);
|
||||
assert_eq!(runtime.call(&plugin.swap, (1, 2)).await.unwrap(), (2, 1));
|
||||
assert_eq!(runtime.call(&plugin.sort, unsorted).await.unwrap(), sorted);
|
||||
assert_eq!(runtime.call(&plugin.print, "Hi!".into()).await.unwrap(), ());
|
||||
runtime.call(&plugin.print, "Hi!".into()).await.unwrap();
|
||||
assert_eq!(runtime.call(&plugin.and_back, 1).await.unwrap(), 8);
|
||||
assert_eq!(runtime.call(&plugin.imports, 1).await.unwrap(), 8);
|
||||
assert_eq!(runtime.call(&plugin.half_async, 4).await.unwrap(), 2);
|
||||
|
|
|
@ -135,7 +135,7 @@ impl PluginBuilder {
|
|||
// TODO: use try block once available
|
||||
let result: Result<(WasiBuffer, Memory, _), Trap> = (|| {
|
||||
// grab a handle to the memory
|
||||
let mut plugin_memory = match caller.get_export("memory") {
|
||||
let plugin_memory = match caller.get_export("memory") {
|
||||
Some(Extern::Memory(mem)) => mem,
|
||||
_ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
|
||||
};
|
||||
|
@ -144,9 +144,9 @@ impl PluginBuilder {
|
|||
|
||||
// get the args passed from Guest
|
||||
let args =
|
||||
Plugin::buffer_to_bytes(&mut plugin_memory, caller.as_context(), &buffer)?;
|
||||
Plugin::buffer_to_bytes(&plugin_memory, caller.as_context(), &buffer)?;
|
||||
|
||||
let args: A = Plugin::deserialize_to_type(&args)?;
|
||||
let args: A = Plugin::deserialize_to_type(args)?;
|
||||
|
||||
// Call the Host-side function
|
||||
let result = function(args);
|
||||
|
@ -214,7 +214,7 @@ impl PluginBuilder {
|
|||
// TODO: use try block once available
|
||||
let result: Result<(WasiBuffer, Memory, Vec<u8>), Trap> = (|| {
|
||||
// grab a handle to the memory
|
||||
let mut plugin_memory = match caller.get_export("memory") {
|
||||
let plugin_memory = match caller.get_export("memory") {
|
||||
Some(Extern::Memory(mem)) => mem,
|
||||
_ => return Err(Trap::new("Could not grab slice of plugin memory"))?,
|
||||
};
|
||||
|
@ -222,7 +222,7 @@ impl PluginBuilder {
|
|||
let buffer = WasiBuffer::from_u64(packed_buffer);
|
||||
|
||||
// get the args passed from Guest
|
||||
let args = Plugin::buffer_to_type(&mut plugin_memory, &mut caller, &buffer)?;
|
||||
let args = Plugin::buffer_to_type(&plugin_memory, &mut caller, &buffer)?;
|
||||
|
||||
// Call the Host-side function
|
||||
let result: R = function(args);
|
||||
|
@ -258,7 +258,7 @@ impl PluginBuilder {
|
|||
|
||||
/// Initializes a [`Plugin`] from a given compiled Wasm module.
|
||||
/// Both binary (`.wasm`) and text (`.wat`) module formats are supported.
|
||||
pub async fn init<'a>(self, binary: PluginBinary<'a>) -> Result<Plugin, Error> {
|
||||
pub async fn init(self, binary: PluginBinary<'_>) -> Result<Plugin, Error> {
|
||||
Plugin::init(binary, self).await
|
||||
}
|
||||
}
|
||||
|
@ -324,7 +324,7 @@ impl Plugin {
|
|||
println!();
|
||||
}
|
||||
|
||||
async fn init<'a>(binary: PluginBinary<'a>, plugin: PluginBuilder) -> Result<Self, Error> {
|
||||
async fn init(binary: PluginBinary<'_>, plugin: PluginBuilder) -> Result<Self, Error> {
|
||||
// initialize the WebAssembly System Interface context
|
||||
let engine = plugin.engine;
|
||||
let mut linker = plugin.linker;
|
||||
|
@ -576,7 +576,7 @@ impl Plugin {
|
|||
.await?;
|
||||
|
||||
Self::buffer_to_type(
|
||||
&mut plugin_memory,
|
||||
&plugin_memory,
|
||||
&mut self.store,
|
||||
&WasiBuffer::from_u64(result_buffer),
|
||||
)
|
||||
|
|
|
@ -460,7 +460,7 @@ impl FakeFs {
|
|||
}
|
||||
}
|
||||
Null => {
|
||||
self.create_dir(&path).await.unwrap();
|
||||
self.create_dir(path).await.unwrap();
|
||||
}
|
||||
String(contents) => {
|
||||
self.insert_file(&path, contents).await;
|
||||
|
|
|
@ -1027,7 +1027,7 @@ impl LspCommand for GetHover {
|
|||
lsp::HoverContents::Array(marked_strings) => {
|
||||
let content: Vec<HoverBlock> = marked_strings
|
||||
.into_iter()
|
||||
.filter_map(|marked_string| HoverBlock::try_new(marked_string))
|
||||
.filter_map(HoverBlock::try_new)
|
||||
.collect();
|
||||
if content.is_empty() {
|
||||
None
|
||||
|
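`.filter_map(|marked_string| HoverBlock::try_new(marked_string))` collapsing to `.filter_map(HoverBlock::try_new)` is clippy::redundant_closure: the function already has the right signature, so it can be passed directly. A stand-in example with a hypothetical fallible constructor:

struct Block {
    text: String,
}

impl Block {
    // Returns None for blank input, mirroring a fallible constructor.
    fn try_new(text: &str) -> Option<Block> {
        let text = text.trim();
        (!text.is_empty()).then(|| Block { text: text.to_string() })
    }
}

fn main() {
    let blocks: Vec<Block> = ["hello", "   ", "world"]
        .into_iter()
        .filter_map(Block::try_new) // no |s| Block::try_new(s) wrapper needed
        .collect();
    assert_eq!(blocks.len(), 2);
}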
@ -1049,9 +1049,7 @@ impl LspCommand for GetHover {
|
|||
}
|
||||
Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(new_language))) => {
|
||||
if !current_text.is_empty() {
|
||||
let text = std::mem::replace(&mut current_text, String::new())
|
||||
.trim()
|
||||
.to_string();
|
||||
let text = std::mem::take(&mut current_text).trim().to_string();
|
||||
contents.push(HoverBlock { text, language });
|
||||
}
|
||||
|
||||
|
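`std::mem::replace(&mut current_text, String::new())` shrinking to `std::mem::take(&mut current_text)` is clippy::mem_replace_with_default: `take` swaps in the default value and hands back the old one. Sketched on its own:

fn main() {
    let mut current_text = String::from("  block one  ");
    // take() leaves an empty String behind and returns the old contents,
    // exactly what the longer mem::replace(.., String::new()) spelled out.
    let text = std::mem::take(&mut current_text).trim().to_string();
    assert_eq!(text, "block one");
    assert!(current_text.is_empty());
}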
@ -1067,9 +1065,7 @@ impl LspCommand for GetHover {
|
|||
| Event::End(Tag::BlockQuote)
|
||||
| Event::HardBreak => {
|
||||
if !current_text.is_empty() {
|
||||
let text = std::mem::replace(&mut current_text, String::new())
|
||||
.trim()
|
||||
.to_string();
|
||||
let text = std::mem::take(&mut current_text).trim().to_string();
|
||||
contents.push(HoverBlock { text, language });
|
||||
}
|
||||
language = None;
|
||||
|
|
|
@ -114,10 +114,12 @@ pub struct Project {
|
|||
_subscriptions: Vec<gpui::Subscription>,
|
||||
opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
|
||||
shared_buffers: HashMap<PeerId, HashSet<u64>>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
loading_buffers: HashMap<
|
||||
ProjectPath,
|
||||
postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
|
||||
>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
loading_local_worktrees:
|
||||
HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
|
||||
opened_buffers: HashMap<u64, OpenBuffer>,
|
||||
|
@ -993,7 +995,7 @@ impl Project {
|
|||
.iter()
|
||||
.filter_map(|worktree| {
|
||||
worktree
|
||||
.upgrade(&cx)
|
||||
.upgrade(cx)
|
||||
.map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
|
||||
})
|
||||
.collect()
|
||||
|
@ -1080,7 +1082,7 @@ impl Project {
|
|||
self.worktrees
|
||||
.iter()
|
||||
.filter_map(|worktree| {
|
||||
let worktree = worktree.upgrade(&cx)?.read(cx);
|
||||
let worktree = worktree.upgrade(cx)?.read(cx);
|
||||
if worktree.is_visible() {
|
||||
Some(format!(
|
||||
"project-path-online:{}",
|
||||
|
@ -1121,7 +1123,7 @@ impl Project {
|
|||
}
|
||||
|
||||
pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
|
||||
paths.iter().all(|path| self.contains_path(&path, cx))
|
||||
paths.iter().all(|path| self.contains_path(path, cx))
|
||||
}
|
||||
|
||||
pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
|
||||
|
@ -1395,11 +1397,8 @@ impl Project {
|
|||
}
|
||||
|
||||
for open_buffer in self.opened_buffers.values_mut() {
|
||||
match open_buffer {
|
||||
OpenBuffer::Strong(buffer) => {
|
||||
*open_buffer = OpenBuffer::Weak(buffer.downgrade());
|
||||
}
|
||||
_ => {}
|
||||
if let OpenBuffer::Strong(buffer) = open_buffer {
|
||||
*open_buffer = OpenBuffer::Weak(buffer.downgrade());
|
||||
}
|
||||
}
|
||||
|
||||
|
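The `match open_buffer { OpenBuffer::Strong(..) => .., _ => {} }` to `if let` rewrite above is clippy::single_match: only one arm did anything, so the catch-all arm is dead weight. A minimal version with stand-in variants:

enum OpenBuffer {
    Strong(String),
    Weak,
}

fn downgrade(open_buffer: &mut OpenBuffer) {
    // if-let expresses the single interesting case without an empty arm.
    if let OpenBuffer::Strong(_) = open_buffer {
        *open_buffer = OpenBuffer::Weak;
    }
}

fn main() {
    let mut buffer = OpenBuffer::Strong("contents".into());
    downgrade(&mut buffer);
    assert!(matches!(buffer, OpenBuffer::Weak));
}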
@ -1493,7 +1492,7 @@ impl Project {
|
|||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(self.replica_id(), text, cx)
|
||||
.with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
|
||||
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
|
||||
});
|
||||
self.register_buffer(&buffer, cx)?;
|
||||
Ok(buffer)
|
||||
|
@ -1791,7 +1790,7 @@ impl Project {
|
|||
server
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
|
||||
text_document: lsp::TextDocumentIdentifier::new(uri),
|
||||
},
|
||||
)
|
||||
.log_err();
|
||||
|
@ -1825,7 +1824,7 @@ impl Project {
|
|||
language_server = self
|
||||
.language_server_ids
|
||||
.get(&(worktree_id, adapter.name.clone()))
|
||||
.and_then(|id| self.language_servers.get(&id))
|
||||
.and_then(|id| self.language_servers.get(id))
|
||||
.and_then(|server_state| {
|
||||
if let LanguageServerState::Running { server, .. } = server_state {
|
||||
Some(server.clone())
|
||||
|
@ -1838,7 +1837,7 @@ impl Project {
|
|||
|
||||
if let Some(local_worktree) = file.worktree.read(cx).as_local() {
|
||||
if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
|
||||
self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
|
||||
self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx)
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
@ -1853,8 +1852,7 @@ impl Project {
|
|||
0,
|
||||
initial_snapshot.text(),
|
||||
),
|
||||
}
|
||||
.clone(),
|
||||
},
|
||||
)
|
||||
.log_err();
|
||||
buffer_handle.update(cx, |buffer, cx| {
|
||||
|
@ -1864,7 +1862,7 @@ impl Project {
|
|||
.completion_provider
|
||||
.as_ref()
|
||||
.and_then(|provider| provider.trigger_characters.clone())
|
||||
.unwrap_or(Vec::new()),
|
||||
.unwrap_or_default(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
@ -1910,7 +1908,7 @@ impl Project {
|
|||
let request = self.client.request(proto::UpdateBuffer {
|
||||
project_id,
|
||||
buffer_id: buffer.read(cx).remote_id(),
|
||||
operations: vec![language::proto::serialize_operation(&operation)],
|
||||
operations: vec![language::proto::serialize_operation(operation)],
|
||||
});
|
||||
cx.background().spawn(request).detach_and_log_err(cx);
|
||||
} else if let Some(project_id) = self.remote_id() {
|
||||
|
@ -2014,7 +2012,7 @@ impl Project {
|
|||
.filter_map(move |((language_server_worktree_id, _), id)| {
|
||||
if *language_server_worktree_id == worktree_id {
|
||||
if let Some(LanguageServerState::Running { adapter, server }) =
|
||||
self.language_servers.get(&id)
|
||||
self.language_servers.get(id)
|
||||
{
|
||||
return Some((adapter, server));
|
||||
}
|
||||
|
@ -2151,7 +2149,7 @@ impl Project {
|
|||
let this = this.downgrade();
|
||||
let adapter = adapter.clone();
|
||||
move |mut params, cx| {
|
||||
let this = this.clone();
|
||||
let this = this;
|
||||
let adapter = adapter.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
adapter.process_diagnostics(&mut params).await;
|
||||
|
@ -2371,7 +2369,7 @@ impl Project {
|
|||
.and_then(|provider| {
|
||||
provider.trigger_characters.clone()
|
||||
})
|
||||
.unwrap_or(Vec::new()),
|
||||
.unwrap_or_default(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
@ -2502,10 +2500,8 @@ impl Project {
|
|||
.cloned()
|
||||
{
|
||||
for orphaned_worktree in orphaned_worktrees {
|
||||
this.language_server_ids.insert(
|
||||
(orphaned_worktree, server_name.clone()),
|
||||
new_server_id.clone(),
|
||||
);
|
||||
this.language_server_ids
|
||||
.insert((orphaned_worktree, server_name.clone()), new_server_id);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -2528,9 +2524,7 @@ impl Project {
|
|||
return;
|
||||
}
|
||||
};
|
||||
let progress = match progress.value {
|
||||
lsp::ProgressParamsValue::WorkDone(value) => value,
|
||||
};
|
||||
let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
|
||||
let language_server_status =
|
||||
if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
|
||||
status
|
||||
|
@ -2543,7 +2537,7 @@ impl Project {
|
|||
}
|
||||
|
||||
let is_disk_based_diagnostics_progress =
|
||||
Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_ref().map(|x| &**x);
|
||||
Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_deref();
|
||||
|
||||
match progress {
|
||||
lsp::WorkDoneProgress::Begin(report) => {
|
||||
|
@ -2796,7 +2790,7 @@ impl Project {
|
|||
} else {
|
||||
let group_id = post_inc(&mut self.next_diagnostic_group_id);
|
||||
let is_disk_based =
|
||||
source.map_or(false, |source| disk_based_sources.contains(&source));
|
||||
source.map_or(false, |source| disk_based_sources.contains(source));
|
||||
|
||||
sources_by_group_id.insert(group_id, source);
|
||||
primary_diagnostic_group_ids
|
||||
|
@ -3194,7 +3188,7 @@ impl Project {
|
|||
if let Some(lsp_edits) = lsp_edits {
|
||||
let edits = this
|
||||
.update(cx, |this, cx| {
|
||||
this.edits_from_lsp(&buffer, lsp_edits, None, cx)
|
||||
this.edits_from_lsp(buffer, lsp_edits, None, cx)
|
||||
})
|
||||
.await?;
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
|
@ -3366,7 +3360,7 @@ impl Project {
|
|||
if let Some((worktree, rel_path)) =
|
||||
this.find_local_worktree(&abs_path, cx)
|
||||
{
|
||||
worktree_id = (&worktree.read(cx)).id();
|
||||
worktree_id = worktree.read(cx).id();
|
||||
path = rel_path;
|
||||
} else {
|
||||
path = relativize_path(&worktree_abs_path, &abs_path);
|
||||
|
@ -3613,7 +3607,7 @@ impl Project {
|
|||
.clone();
|
||||
(
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end),
|
||||
text.clone(),
|
||||
text,
|
||||
)
|
||||
}
|
||||
Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
|
||||
|
@ -3791,7 +3785,7 @@ impl Project {
|
|||
|
||||
let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
|
||||
cx.foreground().spawn(async move {
|
||||
if !lang_server.capabilities().code_action_provider.is_some() {
|
||||
if lang_server.capabilities().code_action_provider.is_none() {
|
||||
return Ok(Default::default());
|
||||
}
|
||||
|
||||
|
@ -4120,6 +4114,7 @@ impl Project {
|
|||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn search(
|
||||
&self,
|
||||
query: SearchQuery,
|
||||
|
@ -4341,7 +4336,7 @@ impl Project {
|
|||
) {
|
||||
let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
|
||||
return cx.spawn(|this, cx| async move {
|
||||
if !request.check_capabilities(&language_server.capabilities()) {
|
||||
if !request.check_capabilities(language_server.capabilities()) {
|
||||
return Ok(Default::default());
|
||||
}
|
||||
|
||||
|
@ -4375,7 +4370,7 @@ impl Project {
|
|||
) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
|
||||
let abs_path = abs_path.as_ref();
|
||||
if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
|
||||
Task::ready(Ok((tree.clone(), relative_path.into())))
|
||||
Task::ready(Ok((tree, relative_path)))
|
||||
} else {
|
||||
let worktree = self.create_local_worktree(abs_path, visible, cx);
|
||||
cx.foreground()
|
||||
|
@ -4455,7 +4450,7 @@ impl Project {
|
|||
|
||||
Ok(worktree)
|
||||
}
|
||||
.map_err(|err| Arc::new(err))
|
||||
.map_err(Arc::new)
|
||||
})
|
||||
.shared()
|
||||
})
|
||||
|
@ -4487,9 +4482,9 @@ impl Project {
|
|||
}
|
||||
|
||||
fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
|
||||
cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
|
||||
cx.observe(worktree, |_, _, cx| cx.notify()).detach();
|
||||
if worktree.read(cx).is_local() {
|
||||
cx.subscribe(&worktree, |this, worktree, _, cx| {
|
||||
cx.subscribe(worktree, |this, worktree, _, cx| {
|
||||
this.update_local_worktree_buffers(worktree, cx);
|
||||
})
|
||||
.detach();
|
||||
|
@ -4508,7 +4503,7 @@ impl Project {
|
|||
}
|
||||
|
||||
self.metadata_changed(true, cx);
|
||||
cx.observe_release(&worktree, |this, worktree, cx| {
|
||||
cx.observe_release(worktree, |this, worktree, cx| {
|
||||
this.remove_worktree(worktree.id(), cx);
|
||||
cx.notify();
|
||||
})
|
||||
|
@ -4610,9 +4605,9 @@ impl Project {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn language_servers_running_disk_based_diagnostics<'a>(
|
||||
&'a self,
|
||||
) -> impl 'a + Iterator<Item = usize> {
|
||||
pub fn language_servers_running_disk_based_diagnostics(
|
||||
&self,
|
||||
) -> impl Iterator<Item = usize> + '_ {
|
||||
self.language_server_statuses
|
||||
.iter()
|
||||
.filter_map(|(id, status)| {
|
||||
|
@ -4762,7 +4757,7 @@ impl Project {
|
|||
.remove(&peer_id)
|
||||
.ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
|
||||
.replica_id;
|
||||
for (_, buffer) in &this.opened_buffers {
|
||||
for buffer in this.opened_buffers.values() {
|
||||
if let Some(buffer) = buffer.upgrade(cx) {
|
||||
buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
|
||||
}
|
||||
|
@ -5088,7 +5083,7 @@ impl Project {
|
|||
let ops = payload
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(|op| language::proto::deserialize_operation(op))
|
||||
.map(language::proto::deserialize_operation)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let is_remote = this.is_remote();
|
||||
match this.opened_buffers.entry(buffer_id) {
|
||||
|
@ -5125,7 +5120,7 @@ impl Project {
|
|||
let worktree = this
|
||||
.worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
|
||||
.ok_or_else(|| anyhow!("no such worktree"))?;
|
||||
let file = File::from_proto(file, worktree.clone(), cx)?;
|
||||
let file = File::from_proto(file, worktree, cx)?;
|
||||
let buffer = this
|
||||
.opened_buffers
|
||||
.get_mut(&buffer_id)
|
||||
|
@ -5791,6 +5786,7 @@ impl Project {
|
|||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn edits_from_lsp(
|
||||
&mut self,
|
||||
buffer: &ModelHandle<Buffer>,
|
||||
|
@ -5837,7 +5833,7 @@ impl Project {
|
|||
new_text.push('\n');
|
||||
}
|
||||
range.end = next_range.end;
|
||||
new_text.push_str(&next_text);
|
||||
new_text.push_str(next_text);
|
||||
lsp_edits.next();
|
||||
}
|
||||
|
||||
|
@ -5872,7 +5868,7 @@ impl Project {
|
|||
ChangeTag::Insert => {
|
||||
if moved_since_edit {
|
||||
let anchor = snapshot.anchor_after(offset);
|
||||
edits.push((anchor.clone()..anchor, value.to_string()));
|
||||
edits.push((anchor..anchor, value.to_string()));
|
||||
} else {
|
||||
edits.last_mut().unwrap().1.push_str(value);
|
||||
}
|
||||
|
@ -5882,7 +5878,7 @@ impl Project {
|
|||
}
|
||||
} else if range.end == range.start {
|
||||
let anchor = snapshot.anchor_after(range.start);
|
||||
edits.push((anchor.clone()..anchor, new_text));
|
||||
edits.push((anchor..anchor, new_text));
|
||||
} else {
|
||||
let edit_start = snapshot.anchor_after(range.start);
|
||||
let edit_end = snapshot.anchor_before(range.end);
|
||||
|
@ -5944,7 +5940,7 @@ impl Project {
|
|||
|
||||
if let Some(server_id) = self.language_server_ids.get(&key) {
|
||||
if let Some(LanguageServerState::Running { adapter, server }) =
|
||||
self.language_servers.get(&server_id)
|
||||
self.language_servers.get(server_id)
|
||||
{
|
||||
return Some((adapter, server));
|
||||
}
|
||||
|
|
|
@ -499,7 +499,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
});
|
||||
|
||||
buffer_a.read_with(cx, |buffer, _| {
|
||||
let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
|
||||
let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
|
||||
assert_eq!(
|
||||
chunks
|
||||
.iter()
|
||||
|
@ -513,7 +513,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
);
|
||||
});
|
||||
buffer_b.read_with(cx, |buffer, _| {
|
||||
let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
|
||||
let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
|
||||
assert_eq!(
|
||||
chunks
|
||||
.iter()
|
||||
|
@ -579,7 +579,7 @@ async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
.await
|
||||
.unwrap();
|
||||
buffer.read_with(cx, |buffer, _| {
|
||||
let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
|
||||
let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
|
||||
assert_eq!(
|
||||
chunks
|
||||
.iter()
|
||||
|
@ -1262,7 +1262,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
|
|||
// At the end of a line, an empty range is extended backward to include
|
||||
// the preceding character.
|
||||
buffer.read_with(cx, |buffer, _| {
|
||||
let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
|
||||
let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
|
||||
assert_eq!(
|
||||
chunks
|
||||
.iter()
|
||||
|
@ -1511,7 +1511,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestApp
|
|||
.into_iter()
|
||||
.map(|(range, text)| {
|
||||
(
|
||||
range.start.to_point(&buffer)..range.end.to_point(&buffer),
|
||||
range.start.to_point(buffer)..range.end.to_point(buffer),
|
||||
text,
|
||||
)
|
||||
})
|
||||
|
@ -1614,7 +1614,7 @@ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
|
|||
.into_iter()
|
||||
.map(|(range, text)| {
|
||||
(
|
||||
range.start.to_point(&buffer)..range.end.to_point(&buffer),
|
||||
range.start.to_point(buffer)..range.end.to_point(buffer),
|
||||
text,
|
||||
)
|
||||
})
|
||||
|
@ -2139,7 +2139,7 @@ async fn test_rescan_and_remote_updates(
|
|||
let tree = project.worktrees(cx).next().unwrap();
|
||||
tree.read(cx)
|
||||
.entry_for_path(path)
|
||||
.expect(&format!("no entry for path {}", path))
|
||||
.unwrap_or_else(|| panic!("no entry for path {}", path))
|
||||
.id
|
||||
})
|
||||
};
|
||||
|
@ -2149,9 +2149,9 @@ async fn test_rescan_and_remote_updates(
|
|||
let buffer4 = buffer_for_path("b/c/file4", cx).await;
|
||||
let buffer5 = buffer_for_path("b/c/file5", cx).await;
|
||||
|
||||
let file2_id = id_for_path("a/file2", &cx);
|
||||
let file3_id = id_for_path("a/file3", &cx);
|
||||
let file4_id = id_for_path("b/c/file4", &cx);
|
||||
let file2_id = id_for_path("a/file2", cx);
|
||||
let file3_id = id_for_path("a/file3", cx);
|
||||
let file4_id = id_for_path("b/c/file4", cx);
|
||||
|
||||
// Create a remote copy of this worktree.
|
||||
let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
|
@ -2183,12 +2183,12 @@ async fn test_rescan_and_remote_updates(
|
|||
});
|
||||
|
||||
// Rename and delete files and directories.
|
||||
tree.flush_fs_events(&cx).await;
|
||||
tree.flush_fs_events(cx).await;
|
||||
std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
|
||||
std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
|
||||
std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
|
||||
std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
|
||||
tree.flush_fs_events(&cx).await;
|
||||
tree.flush_fs_events(cx).await;
|
||||
|
||||
let expected_paths = vec![
|
||||
"a",
|
||||
|
@ -2209,9 +2209,9 @@ async fn test_rescan_and_remote_updates(
|
|||
expected_paths
|
||||
);
|
||||
|
||||
assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
|
||||
assert_eq!(id_for_path("d/file3", &cx), file3_id);
|
||||
assert_eq!(id_for_path("d/file4", &cx), file4_id);
|
||||
assert_eq!(id_for_path("a/file2.new", cx), file2_id);
|
||||
assert_eq!(id_for_path("d/file3", cx), file3_id);
|
||||
assert_eq!(id_for_path("d/file4", cx), file4_id);
|
||||
|
||||
assert_eq!(
|
||||
buffer2.read(app).file().unwrap().path().as_ref(),
|
||||
|
@ -2689,7 +2689,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
message: "error 2 hint 2".to_string(),
|
||||
related_information: Some(vec![lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location {
|
||||
uri: buffer_uri.clone(),
|
||||
uri: buffer_uri,
|
||||
range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
|
||||
},
|
||||
message: "original diagnostic".to_string(),
|
||||
|
|
|
@ -53,7 +53,7 @@ impl SearchQuery {
|
|||
query = word_query
|
||||
}
|
||||
|
||||
let multiline = query.contains("\n") || query.contains("\\n");
|
||||
let multiline = query.contains('\n') || query.contains("\\n");
|
||||
let regex = RegexBuilder::new(&query)
|
||||
.case_insensitive(!case_sensitive)
|
||||
.multi_line(multiline)
|
||||
|
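`query.contains("\n")` becoming `query.contains('\n')` is clippy::single_char_pattern: a char pattern is clearer and skips the substring-search machinery for a one-character needle. For example:

fn is_multiline(query: &str) -> bool {
    query.contains('\n') || query.contains("\\n")
}

fn main() {
    assert!(is_multiline("foo\nbar"));
    assert!(is_multiline(r"foo\nbar"));
    assert!(!is_multiline("foobar"));
}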
|
|
@@ -57,6 +57,7 @@ lazy_static! {
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);

+ #[allow(clippy::large_enum_variant)]
pub enum Worktree {
Local(LocalWorktree),
Remote(RemoteWorktree),

@@ -157,7 +158,7 @@ impl Worktree {
cx: &mut AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let (tree, scan_states_tx) =
- LocalWorktree::new(client, path, visible, fs.clone(), next_entry_id, cx).await?;
+ LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
tree.update(cx, |tree, cx| {
let tree = tree.as_local_mut().unwrap();
let abs_path = tree.abs_path().clone();

@@ -229,7 +230,7 @@ impl Worktree {
cx.spawn(|mut cx| {
let this = worktree_handle.downgrade();
async move {
- while let Some(_) = snapshot_updated_rx.recv().await {
+ while (snapshot_updated_rx.recv().await).is_some() {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.poll_snapshot(cx);

@@ -322,15 +323,15 @@ impl Worktree {
}
}

- pub fn diagnostic_summaries<'a>(
- &'a self,
- ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + 'a {
+ pub fn diagnostic_summaries(
+ &self,
+ ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
match self {
Worktree::Local(worktree) => &worktree.diagnostic_summaries,
Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
}
.iter()
- .map(|(path, summary)| (path.0.clone(), summary.clone()))
+ .map(|(path, summary)| (path.0.clone(), *summary))
}

fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {

@@ -342,7 +343,7 @@ impl Worktree {
}

impl LocalWorktree {
- async fn new(
+ async fn create(
client: Arc<Client>,
path: impl Into<Arc<Path>>,
visible: bool,

@@ -386,7 +387,7 @@ impl LocalWorktree {
};
if let Some(metadata) = metadata {
let entry = Entry::new(
- path.into(),
+ path,
&metadata,
&snapshot.next_entry_id,
snapshot.root_char_bag,

@@ -651,7 +652,7 @@ impl LocalWorktree {
let abs_path = self.absolutize(&entry.path);
let delete = cx.background().spawn({
let fs = self.fs.clone();
- let abs_path = abs_path.clone();
+ let abs_path = abs_path;
async move {
if entry.is_file() {
fs.remove_file(&abs_path, Default::default()).await

@@ -848,7 +849,7 @@ impl LocalWorktree {
let rpc = self.client.clone();
let worktree_id = cx.model_id() as u64;
let maintain_remote_snapshot = cx.background().spawn({
- let rpc = rpc.clone();
+ let rpc = rpc;
let diagnostic_summaries = self.diagnostic_summaries.clone();
async move {
let mut prev_snapshot = match snapshots_rx.recv().await {

@@ -1002,10 +1003,9 @@ impl RemoteWorktree {
warning_count: summary.warning_count as usize,
};
if summary.is_empty() {
- self.diagnostic_summaries.remove(&PathKey(path.clone()));
+ self.diagnostic_summaries.remove(&PathKey(path));
} else {
- self.diagnostic_summaries
- .insert(PathKey(path.clone()), summary);
+ self.diagnostic_summaries.insert(PathKey(path), summary);
}
}

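The `diagnostic_summaries` hunk above also swaps `summary.clone()` for `*summary`, the usual fix for clippy's `clone_on_copy` lint, and drops the explicit `'a` lifetime in favour of `'_`. A self-contained sketch under the assumption that the summary type is `Copy`, which the change implies (the `Summary` struct below is illustrative, not the crate's `DiagnosticSummary`):

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;

// Illustrative stand-in for a small Copy summary value.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Summary {
    error_count: usize,
    warning_count: usize,
}

fn summaries(map: &BTreeMap<PathBuf, Summary>) -> impl Iterator<Item = (Arc<Path>, Summary)> + '_ {
    // Dereferencing copies the value; calling `.clone()` on a Copy type trips
    // clippy::clone_on_copy. The elided `'_` lifetime replaces a needless `<'a>`.
    map.iter()
        .map(|(path, summary)| (Arc::from(path.as_path()), *summary))
}

fn main() {
    let mut map = BTreeMap::new();
    map.insert(PathBuf::from("src/lib.rs"), Summary { error_count: 1, warning_count: 2 });
    let collected: Vec<_> = summaries(&map).collect();
    assert_eq!(collected[0].1, Summary { error_count: 1, warning_count: 2 });
}
```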
@@ -1513,7 +1513,7 @@ impl LocalSnapshot {

let mut ignore_stack = IgnoreStack::none();
for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
- if ignore_stack.is_abs_path_ignored(&parent_abs_path, true) {
+ if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
ignore_stack = IgnoreStack::all();
break;
} else if let Some(ignore) = ignore {

@@ -1530,8 +1530,8 @@ impl LocalSnapshot {
}

async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
- let contents = fs.load(&abs_path).await?;
- let parent = abs_path.parent().unwrap_or(Path::new("/"));
+ let contents = fs.load(abs_path).await?;
+ let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
let mut builder = GitignoreBuilder::new(parent);
for line in contents.lines() {
builder.add_line(Some(abs_path.into()), line)?;

@@ -1769,7 +1769,7 @@ impl language::LocalFile for File {
.send(proto::BufferReloaded {
project_id,
buffer_id,
- version: serialize_version(&version),
+ version: serialize_version(version),
mtime: Some(mtime.into()),
fingerprint,
line_ending: serialize_line_ending(line_ending) as i32,

@@ -2285,7 +2285,7 @@ impl BackgroundScanner {
snapshot.scan_id += 1;
for event in &events {
if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
- snapshot.remove_path(&path);
+ snapshot.remove_path(path);
}
}

@@ -2528,13 +2528,13 @@ impl WorktreeHandle for ModelHandle<Worktree> {
fs.create_file(&root_path.join(filename), Default::default())
.await
.unwrap();
- tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_some())
+ tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
.await;

fs.remove_file(&root_path.join(filename), Default::default())
.await
.unwrap();
- tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_none())
+ tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
.await;

cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())

@@ -2726,7 +2726,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
let kind = if entry.is_dir {
EntryKind::Dir
} else {
- let mut char_bag = root_char_bag.clone();
+ let mut char_bag = *root_char_bag;
char_bag.extend(
String::from_utf8_lossy(&entry.path)
.chars()

@@ -2738,7 +2738,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
Ok(Entry {
id: ProjectEntryId::from_proto(entry.id),
kind,
- path: path.clone(),
+ path,
inode: entry.inode,
mtime: mtime.into(),
is_symlink: entry.is_symlink,
@@ -2955,7 +2955,7 @@ mod tests {
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
- tree.flush_fs_events(&cx).await;
+ tree.flush_fs_events(cx).await;
cx.read(|cx| {
let tree = tree.read(cx);
assert!(

@@ -2979,7 +2979,7 @@ mod tests {
std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
- tree.flush_fs_events(&cx).await;
+ tree.flush_fs_events(cx).await;
cx.read(|cx| {
let tree = tree.read(cx);
assert!(

@@ -3026,7 +3026,7 @@ mod tests {
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
- tree.flush_fs_events(&cx).await;
+ tree.flush_fs_events(cx).await;

tree.update(cx, |tree, cx| {
tree.as_local().unwrap().write_file(

@@ -3052,8 +3052,8 @@ mod tests {
tree.read_with(cx, |tree, _| {
let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
- assert_eq!(tracked.is_ignored, false);
- assert_eq!(ignored.is_ignored, true);
+ assert!(!tracked.is_ignored);
+ assert!(ignored.is_ignored);
});
}

@@ -3226,9 +3226,9 @@ mod tests {

let mut ignore_contents = String::new();
for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
- write!(
+ writeln!(
ignore_contents,
- "{}\n",
+ "{}",
path_to_ignore
.strip_prefix(&ignore_dir_path)?
.to_str()

@@ -3363,7 +3363,7 @@ mod tests {
.collect::<Vec<_>>();
assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);

- for (ignore_parent_abs_path, _) in &self.ignores_by_parent_abs_path {
+ for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
let ignore_parent_path =
ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
assert!(self.entry_for_path(&ignore_parent_path).is_some());

@@ -3389,7 +3389,7 @@ mod tests {
paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
}
}
- paths.sort_by(|a, b| a.0.cmp(&b.0));
+ paths.sort_by(|a, b| a.0.cmp(b.0));
paths
}
}
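One of the worktree hunks above (`build_gitignore`) replaces `unwrap_or(Path::new("/"))` with `unwrap_or_else(|| Path::new("/"))`, which is clippy's `or_fun_call` lint: when the fallback involves a function call, the lazy form avoids evaluating it unless it is needed. A hedged, standalone sketch of the same pattern:

```rust
use std::path::Path;

fn parent_or_root(abs_path: &Path) -> &Path {
    // `unwrap_or_else` defers building the fallback until `parent()` is actually None,
    // which is what clippy::or_fun_call asks for when the argument is a call.
    abs_path.parent().unwrap_or_else(|| Path::new("/"))
}

fn main() {
    assert_eq!(parent_or_root(Path::new("/tmp/file.txt")), Path::new("/tmp"));
    assert_eq!(parent_or_root(Path::new("/")), Path::new("/"));
}
```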
@@ -186,15 +186,14 @@ impl ProjectPanel {
});

cx.observe_focus(&filename_editor, |this, _, is_focused, cx| {
- if !is_focused {
- if this
+ if !is_focused
+ && this
.edit_state
.as_ref()
.map_or(false, |state| state.processing_filename.is_none())
- {
- this.edit_state = None;
- this.update_visible_entries(None, cx);
- }
- }
+ {
+ this.edit_state = None;
+ this.update_visible_entries(None, cx);
+ }
})
.detach();

@@ -209,7 +208,7 @@ impl ProjectPanel {
edit_state: None,
filename_editor,
clipboard_entry: None,
- context_menu: cx.add_view(|cx| ContextMenu::new(cx)),
+ context_menu: cx.add_view(ContextMenu::new),
};
this.update_visible_entries(None, cx);
this

@@ -380,13 +379,11 @@ impl ProjectPanel {
self.index_for_selection(selection).unwrap_or_default();
if entry_ix > 0 {
entry_ix -= 1;
+ } else if worktree_ix > 0 {
+ worktree_ix -= 1;
+ entry_ix = self.visible_entries[worktree_ix].1.len() - 1;
} else {
- if worktree_ix > 0 {
- worktree_ix -= 1;
- entry_ix = self.visible_entries[worktree_ix].1.len() - 1;
- } else {
- return;
- }
+ return;
}

let (worktree_id, worktree_entries) = &self.visible_entries[worktree_ix];
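The ProjectPanel hunks above flatten nested conditionals, the fix clippy suggests for its `collapsible_if` and `collapsible_else_if` lints: `if a { if b { ... } }` becomes `if a && b { ... }`, and `else { if c { ... } }` becomes `else if c { ... }`. A minimal standalone example of the `else if` rewrite (the function below is a simplified stand-in, not the panel's real code):

```rust
fn select_prev(entry_ix: &mut usize, worktree_ix: &mut usize, lens: &[usize]) -> bool {
    if *entry_ix > 0 {
        *entry_ix -= 1;
    } else if *worktree_ix > 0 {
        // Collapsed from `else { if *worktree_ix > 0 { ... } else { return false; } }`.
        *worktree_ix -= 1;
        *entry_ix = lens[*worktree_ix] - 1;
    } else {
        return false;
    }
    true
}

fn main() {
    let (mut entry_ix, mut worktree_ix) = (0, 1);
    assert!(select_prev(&mut entry_ix, &mut worktree_ix, &[3, 2]));
    assert_eq!((worktree_ix, entry_ix), (0, 2));
}
```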
@@ -734,17 +731,15 @@ impl ProjectPanel {

self.clipboard_entry.take();
if clipboard_entry.is_cut() {
- self.project
- .update(cx, |project, cx| {
- project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
- })
- .map(|task| task.detach_and_log_err(cx));
- } else {
- self.project
- .update(cx, |project, cx| {
- project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
- })
- .map(|task| task.detach_and_log_err(cx));
+ if let Some(task) = self.project.update(cx, |project, cx| {
+ project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
+ }) {
+ task.detach_and_log_err(cx)
+ }
+ } else if let Some(task) = self.project.update(cx, |project, cx| {
+ project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
+ }) {
+ task.detach_and_log_err(cx)
}
}
None

@@ -760,10 +755,9 @@ impl ProjectPanel {
}

fn index_for_selection(&self, selection: Selection) -> Option<(usize, usize, usize)> {
- let mut worktree_index = 0;
let mut entry_index = 0;
let mut visible_entries_index = 0;
- for (worktree_id, worktree_entries) in &self.visible_entries {
+ for (worktree_index, (worktree_id, worktree_entries)) in self.visible_entries.iter().enumerate() {
if *worktree_id == selection.worktree_id {
for entry in worktree_entries {
if entry.id == selection.entry_id {

@@ -777,7 +771,6 @@ impl ProjectPanel {
} else {
visible_entries_index += worktree_entries.len();
}
- worktree_index += 1;
}
None
}
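The paste hunk above replaces `.map(|task| task.detach_and_log_err(cx));` with an `if let`, the rewrite clippy's `option_map_unit_fn` lint suggests when the closure handed to `Option::map` returns `()`. A small illustration with placeholder types (nothing here is the crate's real API):

```rust
// Placeholder for a task handle whose detach-style method returns ().
struct Task(&'static str);

impl Task {
    fn detach(self) {
        println!("detached {}", self.0);
    }
}

fn start(should_run: bool) -> Option<Task> {
    if should_run {
        Some(Task("rename"))
    } else {
        None
    }
}

fn main() {
    // `start(true).map(|task| task.detach());` would produce an ignored Option<()> and
    // trip clippy::option_map_unit_fn; `if let` states the intent directly.
    if let Some(task) = start(true) {
        task.detach()
    }
}
```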
@@ -849,10 +842,10 @@ impl ProjectPanel {
is_ignored: false,
});
}
- if expanded_dir_ids.binary_search(&entry.id).is_err() {
- if entry_iter.advance_to_sibling() {
- continue;
- }
- }
+ if expanded_dir_ids.binary_search(&entry.id).is_err()
+ && entry_iter.advance_to_sibling()
+ {
+ continue;
+ }
entry_iter.advance();
}

@@ -982,7 +975,7 @@ impl ProjectPanel {
if let Some(processing_filename) = &edit_state.processing_filename {
details.is_processing = true;
details.filename.clear();
- details.filename.push_str(&processing_filename);
+ details.filename.push_str(processing_filename);
} else {
if edit_state.is_new_entry {
details.filename.clear();

@@ -1116,7 +1109,7 @@ impl View for ProjectPanel {
cx,
move |this, range, items, cx| {
let theme = cx.global::<Settings>().theme.clone();
- this.for_each_visible_entry(range.clone(), cx, |id, details, cx| {
+ this.for_each_visible_entry(range, cx, |id, details, cx| {
items.push(Self::render_entry(
id,
details,
@@ -3,8 +3,8 @@ use editor::{
};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
- actions, elements::*, AppContext, Entity, ModelHandle, MouseState, MutableAppContext,
- RenderContext, Task, View, ViewContext, ViewHandle,
+ actions, elements::*, AnyViewHandle, AppContext, Entity, ModelHandle, MouseState,
+ MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle,
};
use ordered_float::OrderedFloat;
use picker::{Picker, PickerDelegate};

@@ -51,8 +51,10 @@ impl View for ProjectSymbolsView {
ChildView::new(self.picker.clone()).boxed()
}

- fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
- cx.focus(&self.picker);
+ fn on_focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
+ if cx.is_self_focused() {
+ cx.focus(&self.picker);
+ }
}
}

@@ -50,6 +50,7 @@ impl Connection {
killed,
);

+ #[allow(clippy::type_complexity)]
fn channel(
killed: Arc<AtomicBool>,
executor: Arc<gpui::executor::Background>,

@@ -76,9 +77,7 @@ impl Connection {

// Writes to a half-open TCP connection will error.
if killed.load(SeqCst) {
- std::io::Result::Err(
- Error::new(ErrorKind::Other, "connection lost").into(),
- )?;
+ std::io::Result::Err(Error::new(ErrorKind::Other, "connection lost"))?;
}

Ok(msg)

@@ -87,7 +86,7 @@ impl Connection {
});

let rx = rx.then({
- let killed = killed.clone();
+ let killed = killed;
let executor = Arc::downgrade(&executor);
move |msg| {
let killed = killed.clone();
@@ -94,6 +94,7 @@ pub struct ConnectionState {
#[serde(skip)]
outgoing_tx: mpsc::UnboundedSender<proto::Message>,
next_message_id: Arc<AtomicU32>,
+ #[allow(clippy::type_complexity)]
#[serde(skip)]
response_channels:
Arc<Mutex<Option<HashMap<u32, oneshot::Sender<(proto::Envelope, oneshot::Sender<()>)>>>>>,

@@ -139,7 +140,7 @@ impl Peer {

let connection_id = ConnectionId(self.next_connection_id.fetch_add(1, SeqCst));
let connection_state = ConnectionState {
- outgoing_tx: outgoing_tx.clone(),
+ outgoing_tx,
next_message_id: Default::default(),
response_channels: Arc::new(Mutex::new(Some(Default::default()))),
};
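The rpc hunks above drop clones of values that are moved immediately afterwards and never used again (`outgoing_tx.clone()`, `let rpc = rpc.clone();`), which clippy reports as `redundant_clone`. A small sketch of the shape of the fix, with made-up types:

```rust
use std::sync::Arc;

struct ConnectionState {
    outgoing_tx: Arc<Vec<u8>>,
}

fn build_state(outgoing_tx: Arc<Vec<u8>>) -> ConnectionState {
    // Cloning `outgoing_tx` here would be redundant: the original is never touched
    // again, so the value can simply be moved into the struct.
    ConnectionState { outgoing_tx }
}

fn main() {
    let state = build_state(Arc::new(vec![1, 2, 3]));
    assert_eq!(state.outgoing_tx.len(), 3);
}
```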
@@ -265,7 +265,9 @@ entity_messages!(

entity_messages!(channel_id, ChannelMessageSent);

- const MAX_BUFFER_LEN: usize = 1 * 1024 * 1024;
+ const KIB: usize = 1024;
+ const MIB: usize = KIB * 1024;
+ const MAX_BUFFER_LEN: usize = MIB;

/// A stream of protobuf messages.
pub struct MessageStream<S> {

@@ -273,6 +275,7 @@ pub struct MessageStream<S> {
encoding_buffer: Vec<u8>,
}

+ #[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub enum Message {
Envelope(Envelope),

@@ -309,7 +312,7 @@ where
self.encoding_buffer.reserve(message.encoded_len());
message
.encode(&mut self.encoding_buffer)
- .map_err(|err| io::Error::from(err))?;
+ .map_err(io::Error::from)?;
let buffer =
zstd::stream::encode_all(self.encoding_buffer.as_slice(), COMPRESSION_LEVEL)
.unwrap();
@@ -360,10 +363,10 @@ where
}
}

- impl Into<SystemTime> for Timestamp {
- fn into(self) -> SystemTime {
+ impl From<Timestamp> for SystemTime {
+ fn from(val: Timestamp) -> Self {
UNIX_EPOCH
- .checked_add(Duration::new(self.seconds, self.nanos))
+ .checked_add(Duration::new(val.seconds, val.nanos))
.unwrap()
}
}

@@ -451,7 +454,7 @@ mod tests {
.unwrap();
assert!(sink.encoding_buffer.capacity() <= MAX_BUFFER_LEN);

- let mut stream = MessageStream::new(rx.map(|msg| anyhow::Ok(msg)));
+ let mut stream = MessageStream::new(rx.map(anyhow::Ok));
stream.read().await.unwrap();
assert!(stream.encoding_buffer.capacity() <= MAX_BUFFER_LEN);
stream.read().await.unwrap();
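The final hunk above swaps `impl Into<SystemTime> for Timestamp` for `impl From<Timestamp> for SystemTime`, following clippy's `from_over_into` lint: implementing `From` is preferred because the matching `Into` impl comes for free from the standard library's blanket implementation. A self-contained sketch with a hypothetical `Timestamp` (not the generated protobuf type):

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Hypothetical stand-in for the protobuf-generated Timestamp message.
struct Timestamp {
    seconds: u64,
    nanos: u32,
}

impl From<Timestamp> for SystemTime {
    fn from(val: Timestamp) -> Self {
        // With `From` in place, `Timestamp: Into<SystemTime>` is derived automatically.
        UNIX_EPOCH
            .checked_add(Duration::new(val.seconds, val.nanos))
            .unwrap()
    }
}

fn main() {
    let ts = Timestamp { seconds: 1, nanos: 500_000_000 };
    let time: SystemTime = ts.into();
    assert_eq!(time.duration_since(UNIX_EPOCH).unwrap(), Duration::new(1, 500_000_000));
}
```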
Some files were not shown because too many files have changed in this diff.