Mirror of https://github.com/zed-industries/zed.git, synced 2024-12-28 11:29:25 +00:00
Revert "lsp: Watch paths outside of worktrees at language servers request (#17173)" (#17206)
Some checks are pending
CI / Check formatting and spelling (push) Waiting to run
CI / (macOS) Run Clippy and tests (push) Waiting to run
CI / (Linux) Run Clippy and tests (push) Waiting to run
CI / (Windows) Run Clippy and tests (push) Waiting to run
CI / Create a macOS bundle (push) Blocked by required conditions
CI / Create a Linux bundle (push) Blocked by required conditions
CI / Create arm64 Linux bundle (push) Blocked by required conditions
Deploy Docs / Deploy Docs (push) Waiting to run
Docs / Check formatting (push) Waiting to run
This PR reverts #17173, as it introduced a segfault when opening any
Gleam project once the language server starts up.
This reverts commit a850731b0e.
Release Notes:
- N/A
parent a850731b0e
commit 03d8e54fd4
7 changed files with 102 additions and 438 deletions
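At the core of the revert is the shape of the `Fs::watch` API: #17173 changed it to yield batches of `PathEvent` values that carry a change kind, and this commit restores the previous behavior of yielding plain `Vec<PathBuf>` batches. The sketch below is a simplified stand-in reduced from the trait signatures in the diff that follows; the names `FsWithEvents` and `FsWithPaths` are hypothetical, a blocking `Iterator` stands in for the real async `Stream`, and the real trait also returns an `Arc<dyn Watcher>` handle alongside the stream.

use std::path::PathBuf;

// Event type introduced by #17173 and removed again by this revert.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum PathEventKind {
    Removed,
    Created,
    Changed,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PathEvent {
    pub path: PathBuf,
    pub kind: Option<PathEventKind>,
}

// Watch shape added by #17173: each batch says what happened to each path.
pub trait FsWithEvents {
    fn watch(&self, path: &std::path::Path) -> Box<dyn Iterator<Item = Vec<PathEvent>>>;
}

// Watch shape restored by this revert: callers only learn which paths changed.
pub trait FsWithPaths {
    fn watch(&self, path: &std::path::Path) -> Box<dyn Iterator<Item = Vec<PathBuf>>>;
}

fn main() {
    // Every call site touched by the revert reduces an event back to its path.
    let event = PathEvent {
        path: PathBuf::from("/tmp/example.hbs"),
        kind: Some(PathEventKind::Changed),
    };
    let path: PathBuf = event.path;
    println!("{}", path.display());
}

As the diff shows, dropping the event kind also removes `lsp_notify_abs_paths_changed`, so the LSP store can no longer tell a language server whether a watched absolute path was created, changed, or deleted.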
@@ -123,7 +123,7 @@ impl PromptBuilder {
if params.fs.is_dir(parent_dir).await {
let (mut changes, _watcher) = params.fs.watch(parent_dir, Duration::from_secs(1)).await;
while let Some(changed_paths) = changes.next().await {
if changed_paths.iter().any(|p| &p.path == &templates_dir) {
if changed_paths.iter().any(|p| p == &templates_dir) {
let mut log_message = format!("Prompt template overrides directory detected at {}", templates_dir.display());
if let Ok(target) = params.fs.read_link(&templates_dir).await {
log_message.push_str(" -> ");
@@ -162,18 +162,18 @@ impl PromptBuilder {
let mut combined_changes = futures::stream::select(changes, parent_changes);

while let Some(changed_paths) = combined_changes.next().await {
if changed_paths.iter().any(|p| &p.path == &templates_dir) {
if changed_paths.iter().any(|p| p == &templates_dir) {
if !params.fs.is_dir(&templates_dir).await {
log::info!("Prompt template overrides directory removed. Restoring built-in prompt templates.");
Self::register_built_in_templates(&mut handlebars.lock()).log_err();
break;
}
}
for event in changed_paths {
if event.path.starts_with(&templates_dir) && event.path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading prompt template override: {}", event.path.display());
if let Some(content) = params.fs.load(&event.path).await.log_err() {
let file_name = event.path.file_stem().unwrap().to_string_lossy();
for changed_path in changed_paths {
if changed_path.starts_with(&templates_dir) && changed_path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading prompt template override: {}", changed_path.display());
if let Some(content) = params.fs.load(&changed_path).await.log_err() {
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
handlebars.lock().register_template_string(&file_name, content).log_err();
}
}
@@ -369,9 +369,9 @@ impl ExtensionStore {
let installed_dir = this.installed_dir.clone();
async move {
let (mut paths, _) = fs.watch(&installed_dir, FS_WATCH_LATENCY).await;
while let Some(events) = paths.next().await {
for event in events {
let Ok(event_path) = event.path.strip_prefix(&installed_dir) else {
while let Some(paths) = paths.next().await {
for path in paths {
let Ok(event_path) = path.strip_prefix(&installed_dir) else {
continue;
};
@@ -45,25 +45,6 @@ pub trait Watcher: Send + Sync {
fn remove(&self, path: &Path) -> Result<()>;
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum PathEventKind {
Removed,
Created,
Changed,
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct PathEvent {
pub path: PathBuf,
pub kind: Option<PathEventKind>,
}

impl From<PathEvent> for PathBuf {
fn from(event: PathEvent) -> Self {
event.path
}
}

#[async_trait::async_trait]
pub trait Fs: Send + Sync {
async fn create_dir(&self, path: &Path) -> Result<()>;
@@ -111,7 +92,7 @@ pub trait Fs: Send + Sync {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Arc<dyn Watcher>,
);
@@ -488,38 +469,17 @@ impl Fs for RealFs {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Arc<dyn Watcher>,
) {
use fsevent::{EventStream, StreamFlags};
use fsevent::EventStream;

let (tx, rx) = smol::channel::unbounded();
let (stream, handle) = EventStream::new(&[path], latency);
std::thread::spawn(move || {
stream.run(move |events| {
smol::block_on(
tx.send(
events
.into_iter()
.map(|event| {
let kind = if event.flags.contains(StreamFlags::ITEM_REMOVED) {
Some(PathEventKind::Removed)
} else if event.flags.contains(StreamFlags::ITEM_CREATED) {
Some(PathEventKind::Created)
} else if event.flags.contains(StreamFlags::ITEM_MODIFIED) {
Some(PathEventKind::Changed)
} else {
None
};
PathEvent {
path: event.path,
kind,
}
})
.collect(),
),
)
.is_ok()
smol::block_on(tx.send(events.into_iter().map(|event| event.path).collect()))
.is_ok()
});
});
@@ -538,46 +498,32 @@ impl Fs for RealFs {
path: &Path,
latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Arc<dyn Watcher>,
) {
use notify::EventKind;
use parking_lot::Mutex;

let (tx, rx) = smol::channel::unbounded();
let pending_paths: Arc<Mutex<Vec<PathEvent>>> = Default::default();
let pending_paths: Arc<Mutex<Vec<PathBuf>>> = Default::default();
let root_path = path.to_path_buf();

watcher::global(|g| {
let tx = tx.clone();
let pending_paths = pending_paths.clone();
g.add(move |event: &notify::Event| {
let kind = match event.kind {
EventKind::Create(_) => Some(PathEventKind::Created),
EventKind::Modify(_) => Some(PathEventKind::Changed),
EventKind::Remove(_) => Some(PathEventKind::Removed),
_ => None,
};
let mut paths = event
.paths
.iter()
.filter_map(|path| {
path.starts_with(&root_path).then(|| PathEvent {
path: path.clone(),
kind,
})
})
.filter(|path| path.starts_with(&root_path))
.cloned()
.collect::<Vec<_>>();

if !paths.is_empty() {
paths.sort();
let mut pending_paths = pending_paths.lock();
if pending_paths.is_empty() {
tx.try_send(()).ok();
}
util::extend_sorted(&mut *pending_paths, paths, usize::MAX, |a, b| {
a.path.cmp(&b.path)
});
util::extend_sorted(&mut *pending_paths, paths, usize::MAX, PathBuf::cmp);
}
})
})
@@ -615,10 +561,10 @@ impl Fs for RealFs {
path: &Path,
_latency: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Arc<dyn Watcher>,
) {
use notify::{EventKind, Watcher};
use notify::Watcher;

let (tx, rx) = smol::channel::unbounded();
@@ -626,21 +572,7 @@ impl Fs for RealFs {
let tx = tx.clone();
move |event: Result<notify::Event, _>| {
if let Some(event) = event.log_err() {
let kind = match event.kind {
EventKind::Create(_) => Some(PathEventKind::Created),
EventKind::Modify(_) => Some(PathEventKind::Changed),
EventKind::Remove(_) => Some(PathEventKind::Removed),
_ => None,
};

tx.try_send(
event
.paths
.into_iter()
.map(|path| PathEvent { path, kind })
.collect::<Vec<_>>(),
)
.ok();
tx.try_send(event.paths).ok();
}
}
})
@@ -750,9 +682,9 @@ struct FakeFsState {
root: Arc<Mutex<FakeFsEntry>>,
next_inode: u64,
next_mtime: SystemTime,
event_txs: Vec<smol::channel::Sender<Vec<PathEvent>>>,
event_txs: Vec<smol::channel::Sender<Vec<PathBuf>>>,
events_paused: bool,
buffered_events: Vec<PathEvent>,
buffered_events: Vec<PathBuf>,
metadata_call_count: usize,
read_dir_call_count: usize,
}
@@ -861,14 +793,11 @@ impl FakeFsState {

fn emit_event<I, T>(&mut self, paths: I)
where
I: IntoIterator<Item = (T, Option<PathEventKind>)>,
I: IntoIterator<Item = T>,
T: Into<PathBuf>,
{
self.buffered_events
.extend(paths.into_iter().map(|(path, kind)| PathEvent {
path: path.into(),
kind,
}));
.extend(paths.into_iter().map(Into::into));

if !self.events_paused {
self.flush_events(self.buffered_events.len());
@@ -943,7 +872,7 @@ impl FakeFs {
Ok(())
})
.unwrap();
state.emit_event([(path.to_path_buf(), None)]);
state.emit_event([path.to_path_buf()]);
}

pub async fn insert_file(&self, path: impl AsRef<Path>, content: Vec<u8>) {
@@ -966,7 +895,7 @@ impl FakeFs {
}
})
.unwrap();
state.emit_event([(path, None)]);
state.emit_event([path]);
}

fn write_file_internal(&self, path: impl AsRef<Path>, content: Vec<u8>) -> Result<()> {
@@ -981,24 +910,18 @@ impl FakeFs {
mtime,
content,
}));
let mut kind = None;
state.write_path(path, {
let kind = &mut kind;
move |entry| {
match entry {
btree_map::Entry::Vacant(e) => {
*kind = Some(PathEventKind::Created);
e.insert(file);
}
btree_map::Entry::Occupied(mut e) => {
*kind = Some(PathEventKind::Changed);
*e.get_mut() = file;
}
state.write_path(path, move |entry| {
match entry {
btree_map::Entry::Vacant(e) => {
e.insert(file);
}
btree_map::Entry::Occupied(mut e) => {
*e.get_mut() = file;
}
Ok(())
}
Ok(())
})?;
state.emit_event([(path, kind)]);
state.emit_event([path]);
Ok(())
}
@@ -1107,7 +1030,7 @@ impl FakeFs {
f(&mut repo_state);

if emit_git_event {
state.emit_event([(dot_git, None)]);
state.emit_event([dot_git]);
}
} else {
panic!("not a directory");
@@ -1158,7 +1081,7 @@ impl FakeFs {
self.state.lock().emit_event(
statuses
.iter()
.map(|(path, _)| (dot_git.parent().unwrap().join(path), None)),
.map(|(path, _)| dot_git.parent().unwrap().join(path)),
);
}
@@ -1328,7 +1251,7 @@ impl Fs for FakeFs {
state.next_inode += 1;
state.write_path(&cur_path, |entry| {
entry.or_insert_with(|| {
created_dirs.push((cur_path.clone(), Some(PathEventKind::Created)));
created_dirs.push(cur_path.clone());
Arc::new(Mutex::new(FakeFsEntry::Dir {
inode,
mtime,
@@ -1340,7 +1263,7 @@ impl Fs for FakeFs {
})?
}

self.state.lock().emit_event(created_dirs);
self.state.lock().emit_event(&created_dirs);
Ok(())
}
@@ -1356,12 +1279,10 @@ impl Fs for FakeFs {
mtime,
content: Vec::new(),
}));
let mut kind = Some(PathEventKind::Created);
state.write_path(path, |entry| {
match entry {
btree_map::Entry::Occupied(mut e) => {
if options.overwrite {
kind = Some(PathEventKind::Changed);
*e.get_mut() = file;
} else if !options.ignore_if_exists {
return Err(anyhow!("path already exists: {}", path.display()));
@@ -1373,7 +1294,7 @@ impl Fs for FakeFs {
}
Ok(())
})?;
state.emit_event([(path, kind)]);
state.emit_event([path]);
Ok(())
}
@@ -1392,7 +1313,7 @@ impl Fs for FakeFs {
}
})
.unwrap();
state.emit_event([(path, None)]);
state.emit_event(&[path]);
Ok(())
}
@@ -1467,10 +1388,7 @@ impl Fs for FakeFs {
})
.unwrap();

state.emit_event([
(old_path, Some(PathEventKind::Removed)),
(new_path, Some(PathEventKind::Created)),
]);
state.emit_event(&[old_path, new_path]);
Ok(())
}
@@ -1485,11 +1403,9 @@ impl Fs for FakeFs {
state.next_mtime += Duration::from_nanos(1);
let source_entry = state.read_path(&source)?;
let content = source_entry.lock().file_content(&source)?.clone();
let mut kind = Some(PathEventKind::Created);
let entry = state.write_path(&target, |e| match e {
btree_map::Entry::Occupied(e) => {
if options.overwrite {
kind = Some(PathEventKind::Changed);
Ok(Some(e.get().clone()))
} else if !options.ignore_if_exists {
return Err(anyhow!("{target:?} already exists"));
@@ -1509,7 +1425,7 @@ impl Fs for FakeFs {
if let Some(entry) = entry {
entry.lock().set_file_content(&target, content)?;
}
state.emit_event([(target, kind)]);
state.emit_event(&[target]);
Ok(())
}
@@ -1546,7 +1462,7 @@ impl Fs for FakeFs {
e.remove();
}
}
state.emit_event([(path, Some(PathEventKind::Removed))]);
state.emit_event(&[path]);
Ok(())
}
@@ -1575,7 +1491,7 @@ impl Fs for FakeFs {
e.remove();
}
}
state.emit_event([(path, Some(PathEventKind::Removed))]);
state.emit_event(&[path]);
Ok(())
}
@@ -1716,7 +1632,7 @@ impl Fs for FakeFs {
path: &Path,
_: Duration,
) -> (
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>,
Arc<dyn Watcher>,
) {
self.simulate_random_delay().await;
@@ -1726,9 +1642,7 @@ impl Fs for FakeFs {
let executor = self.executor.clone();
(
Box::pin(futures::StreamExt::filter(rx, move |events| {
let result = events
.iter()
.any(|evt_path| evt_path.path.starts_with(&path));
let result = events.iter().any(|evt_path| evt_path.starts_with(&path));
let executor = executor.clone();
async move {
executor.simulate_random_delay().await;
@@ -22,8 +22,8 @@ use futures::{
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel,
Task, WeakModel,
AppContext, AsyncAppContext, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task,
WeakModel,
};
use http_client::HttpClient;
use itertools::Itertools;
@@ -57,7 +57,7 @@ use std::{
convert::TryInto,
ffi::OsStr,
iter, mem,
ops::{ControlFlow, Range},
ops::Range,
path::{self, Path, PathBuf},
process::Stdio,
str,
@@ -146,7 +146,7 @@ pub struct LspStore {
language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
language_server_watched_paths: HashMap<LanguageServerId, Model<LanguageServerWatchedPaths>>,
language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
language_server_watcher_registrations:
HashMap<LanguageServerId, HashMap<String, Vec<FileSystemWatcher>>>,
active_entry: Option<ProjectEntryId>,
@@ -3022,38 +3022,6 @@ impl LspStore {
.map(|(key, value)| (*key, value))
}

fn lsp_notify_abs_paths_changed(
&mut self,
server_id: LanguageServerId,
changes: Vec<PathEvent>,
) {
maybe!({
let server = self.language_server_for_id(server_id)?;
let changes = changes
.into_iter()
.filter_map(|event| {
let typ = match event.kind? {
PathEventKind::Created => lsp::FileChangeType::CREATED,
PathEventKind::Removed => lsp::FileChangeType::DELETED,
PathEventKind::Changed => lsp::FileChangeType::CHANGED,
};
Some(lsp::FileEvent {
uri: lsp::Url::from_file_path(&event.path).ok()?,
typ,
})
})
.collect::<Vec<_>>();
if !changes.is_empty() {
server
.notify::<lsp::notification::DidChangeWatchedFiles>(
lsp::DidChangeWatchedFilesParams { changes },
)
.log_err();
}
Some(())
});
}

fn rebuild_watched_paths(
&mut self,
language_server_id: LanguageServerId,
@@ -3066,169 +3034,62 @@ impl LspStore {
return;
};

let mut worktree_globs = HashMap::default();
let mut abs_globs = HashMap::default();
log::trace!(
"Processing new watcher paths for language server with id {}",
language_server_id
);
let watched_paths = self
.language_server_watched_paths
.entry(language_server_id)
.or_default();

let worktrees = self
.worktree_store
.read(cx)
.worktrees()
.filter_map(|worktree| {
self.language_servers_for_worktree(worktree.read(cx).id())
.find(|(_, _, server)| server.server_id() == language_server_id)
.map(|_| worktree)
})
.collect::<Vec<_>>();

enum PathToWatch {
Worktree {
literal_prefix: Arc<Path>,
pattern: String,
},
Absolute {
path: Arc<Path>,
pattern: String,
},
}
let mut builders = HashMap::default();
for watcher in watchers.values().flatten() {
let mut found_host = false;
for worktree in &worktrees {
for worktree in self.worktree_store.read(cx).worktrees().collect::<Vec<_>>() {
let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
if let Some(worktree_root_path) = tree.abs_path().to_str() {
let path_to_watch = match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => {
match s.strip_prefix(worktree_root_path) {
Some(relative) => {
let pattern = relative
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(relative)
.to_owned();
let literal_prefix = glob_literal_prefix(&pattern);

let literal_prefix = Arc::from(PathBuf::from(
literal_prefix
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(literal_prefix),
));
PathToWatch::Worktree {
literal_prefix,
pattern,
}
}
None => {
let path = glob_literal_prefix(s);
let glob = &s[path.len()..];
let pattern = glob
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(glob)
.to_owned();
let path = PathBuf::from(path).into();

PathToWatch::Absolute { path, pattern }
}
}
}
if let Some(abs_path) = tree.abs_path().to_str() {
let relative_glob_pattern = match &watcher.glob_pattern {
lsp::GlobPattern::String(s) => Some(
s.strip_prefix(abs_path)
.unwrap_or(s)
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(s),
),
lsp::GlobPattern::Relative(rp) => {
let Ok(mut base_uri) = match &rp.base_uri {
let base_uri = match &rp.base_uri {
lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
lsp::OneOf::Right(base_uri) => base_uri,
}
.to_file_path() else {
return false;
};

match base_uri.strip_prefix(worktree_root_path) {
Ok(relative) => {
let mut literal_prefix = relative.to_owned();
literal_prefix.push(glob_literal_prefix(&rp.pattern));

PathToWatch::Worktree {
literal_prefix: literal_prefix.into(),
pattern: rp.pattern.clone(),
}
}
Err(_) => {
let path = glob_literal_prefix(&rp.pattern);
let glob = &rp.pattern[path.len()..];
let pattern = glob
.strip_prefix(std::path::MAIN_SEPARATOR)
.unwrap_or(glob)
.to_owned();
base_uri.push(path);

PathToWatch::Absolute {
path: base_uri.into(),
pattern,
}
}
}
base_uri.to_file_path().ok().and_then(|file_path| {
(file_path.to_str() == Some(abs_path))
.then_some(rp.pattern.as_str())
})
}
};
match path_to_watch {
PathToWatch::Worktree {
literal_prefix,
pattern,
} => {
if let Some((tree, glob)) =
tree.as_local_mut().zip(Glob::new(&pattern).log_err())
{
tree.add_path_prefix_to_scan(literal_prefix);
worktree_globs
.entry(tree.id())
.or_insert_with(GlobSetBuilder::new)
.add(glob);
} else {
return false;
}
}
PathToWatch::Absolute { path, pattern } => {
if let Some(glob) = Glob::new(&pattern).log_err() {
abs_globs
.entry(path)
.or_insert_with(GlobSetBuilder::new)
.add(glob);
}
if let Some(relative_glob_pattern) = relative_glob_pattern {
let literal_prefix = glob_literal_prefix(relative_glob_pattern);
tree.as_local_mut()
.unwrap()
.add_path_prefix_to_scan(Path::new(literal_prefix).into());
if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
builders
.entry(tree.id())
.or_insert_with(|| GlobSetBuilder::new())
.add(glob);
}
return true;
}
return true;
}
false
});
if glob_is_inside_worktree {
log::trace!(
"Watcher pattern `{}` has been attached to the worktree at `{}`",
serde_json::to_string(&watcher.glob_pattern).unwrap(),
worktree.read(cx).abs_path().display()
);
found_host = true;
break;
}
}
if !found_host {
log::error!(
"Watcher pattern `{}` has not been attached to any worktree or absolute path",
serde_json::to_string(&watcher.glob_pattern).unwrap()
)
}
}

let mut watch_builder = LanguageServerWatchedPathsBuilder::default();
for (worktree_id, builder) in worktree_globs {
watched_paths.clear();
for (worktree_id, builder) in builders {
if let Ok(globset) = builder.build() {
watch_builder.watch_worktree(worktree_id, globset);
watched_paths.insert(worktree_id, globset);
}
}
for (abs_path, builder) in abs_globs {
if let Ok(globset) = builder.build() {
watch_builder.watch_abs_path(abs_path, globset);
}
}
let watcher = watch_builder.build(self.fs.clone(), language_server_id, cx);
self.language_server_watched_paths
.insert(language_server_id, watcher);

cx.notify();
}
@@ -5681,7 +5542,7 @@ impl LspStore {
}
}

pub(super) fn update_local_worktree_language_servers(
pub fn update_local_worktree_language_servers(
&mut self,
worktree_handle: &Model<Worktree>,
changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
@@ -5710,7 +5571,7 @@ impl LspStore {
if let Some(watched_paths) = self
.language_server_watched_paths
.get(&server_id)
.and_then(|paths| paths.read(cx).worktree_paths.get(&worktree_id))
.and_then(|paths| paths.get(&worktree_id))
{
let params = lsp::DidChangeWatchedFilesParams {
changes: changes
@@ -5871,102 +5732,6 @@ pub enum LanguageServerToQuery {
Other(LanguageServerId),
}

#[derive(Default)]
struct LanguageServerWatchedPaths {
worktree_paths: HashMap<WorktreeId, GlobSet>,
abs_paths: HashMap<Arc<Path>, (GlobSet, Task<()>)>,
}

#[derive(Default)]
struct LanguageServerWatchedPathsBuilder {
worktree_paths: HashMap<WorktreeId, GlobSet>,
abs_paths: HashMap<Arc<Path>, GlobSet>,
}

impl LanguageServerWatchedPathsBuilder {
fn watch_worktree(&mut self, worktree_id: WorktreeId, glob_set: GlobSet) {
self.worktree_paths.insert(worktree_id, glob_set);
}
fn watch_abs_path(&mut self, path: Arc<Path>, glob_set: GlobSet) {
self.abs_paths.insert(path, glob_set);
}
fn build(
self,
fs: Arc<dyn Fs>,
language_server_id: LanguageServerId,
cx: &mut ModelContext<LspStore>,
) -> Model<LanguageServerWatchedPaths> {
let project = cx.weak_model();

cx.new_model(|cx| {
let this_id = cx.entity_id();
const LSP_ABS_PATH_OBSERVE: Duration = Duration::from_millis(100);
let abs_paths = self
.abs_paths
.into_iter()
.map(|(abs_path, globset)| {
let task = cx.spawn({
let abs_path = abs_path.clone();
let fs = fs.clone();

let lsp_store = project.clone();
|_, mut cx| async move {
maybe!(async move {
let mut push_updates =
fs.watch(&abs_path, LSP_ABS_PATH_OBSERVE).await;
while let Some(update) = push_updates.0.next().await {
let action = lsp_store
.update(&mut cx, |this, cx| {
let Some(watcher) = this
.language_server_watched_paths
.get(&language_server_id)
else {
return ControlFlow::Break(());
};
if watcher.entity_id() != this_id {
// This watcher is no longer registered on the project, which means that we should
// cease operations.
return ControlFlow::Break(());
}
let (globs, _) = watcher
.read(cx)
.abs_paths
.get(&abs_path)
.expect(
"Watched abs path is not registered with a watcher",
);
let matching_entries = update
.into_iter()
.filter(|event| globs.is_match(&event.path))
.collect::<Vec<_>>();
this.lsp_notify_abs_paths_changed(
language_server_id,
matching_entries,
);
ControlFlow::Continue(())
})
.ok()?;

if action.is_break() {
break;
}
}
Some(())
})
.await;
}
});
(abs_path, (globset, task))
})
.collect();
LanguageServerWatchedPaths {
worktree_paths: self.worktree_paths,
abs_paths,
}
})
}
}

struct LspBufferSnapshot {
version: i32,
snapshot: TextBufferSnapshot,
@@ -6078,9 +5843,7 @@ impl DiagnosticSummary {
}

fn glob_literal_prefix(glob: &str) -> &str {
let is_absolute = glob.starts_with(path::MAIN_SEPARATOR);

let mut literal_end = is_absolute as usize;
let mut literal_end = 0;
for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
if part.contains(&['*', '?', '{', '}']) {
break;
@@ -6092,7 +5855,6 @@ fn glob_literal_prefix(glob: &str) -> &str {
literal_end += part.len();
}
}
let literal_end = literal_end.min(glob.len());
&glob[..literal_end]
}
@@ -169,12 +169,7 @@ impl SnippetProvider {

let (mut entries, _) = watcher.await;
while let Some(entries) = entries.next().await {
process_updates(
this.clone(),
entries.into_iter().map(|event| event.path).collect(),
cx.clone(),
)
.await?;
process_updates(this.clone(), entries, cx.clone()).await?;
}
Ok(())
})
@@ -7,7 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use fs::{copy_recursive, Fs, PathEvent, RemoveOptions, Watcher};
use fs::{copy_recursive, Fs, RemoveOptions, Watcher};
use futures::{
channel::{
mpsc::{self, UnboundedSender},
@@ -1051,11 +1051,7 @@ impl LocalWorktree {
watcher,
};

scanner
.run(Box::pin(
events.map(|events| events.into_iter().map(Into::into).collect()),
))
.await;
scanner.run(events).await;
}
});
let scan_state_updater = cx.spawn(|this, mut cx| async move {
@@ -3526,7 +3522,7 @@ enum BackgroundScannerPhase {
}

impl BackgroundScanner {
async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>) {
async fn run(&mut self, mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<PathBuf>>>>) {
use futures::FutureExt as _;

// If the worktree root does not contain a git repository, then find
@@ -3607,8 +3603,7 @@ impl BackgroundScanner {
while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
paths.extend(more_paths);
}
self.process_events(paths.into_iter().map(Into::into).collect())
.await;
self.process_events(paths).await;
}

// Continue processing events until the worktree is dropped.
@@ -3649,7 +3644,7 @@ impl BackgroundScanner {
while let Poll::Ready(Some(more_paths)) = futures::poll!(fs_events_rx.next()) {
paths.extend(more_paths);
}
self.process_events(paths.into_iter().map(Into::into).collect()).await;
self.process_events(paths.clone()).await;
}
}
}
@@ -1158,13 +1158,13 @@ fn watch_themes(fs: Arc<dyn fs::Fs>, cx: &mut AppContext) {
.await;

while let Some(paths) = events.next().await {
for event in paths {
if fs.metadata(&event.path).await.ok().flatten().is_some() {
for path in paths {
if fs.metadata(&path).await.ok().flatten().is_some() {
if let Some(theme_registry) =
cx.update(|cx| ThemeRegistry::global(cx).clone()).log_err()
{
if let Some(()) = theme_registry
.load_user_theme(&event.path, fs.clone())
.load_user_theme(&path, fs.clone())
.await
.log_err()
{
@@ -1194,10 +1194,8 @@ fn watch_languages(fs: Arc<dyn fs::Fs>, languages: Arc<LanguageRegistry>, cx: &m
cx.spawn(|_| async move {
let (mut events, _) = fs.watch(path.as_path(), Duration::from_millis(100)).await;
while let Some(event) = events.next().await {
let has_language_file = event.iter().any(|event| {
event
.path
.extension()
let has_language_file = event.iter().any(|path| {
path.extension()
.map(|ext| ext.to_string_lossy().as_ref() == "scm")
.unwrap_or(false)
});