Fix project search filtering on projects with multiple worktrees (#9337)
Fixes #9285

Release Notes:

- Fixed a bug that broke search filtering when searching a project with multiple worktrees ([#9285](https://github.com/zed-industries/zed/issues/9285)).

Co-authored-by: Thorsten <thorsten@zed.dev>
Parent: 7629c16162
Commit: fab55486f5
4 changed files with 195 additions and 90 deletions
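The change threads a single `include_root` flag through project search: when a project has more than one worktree, candidate paths are tested against include/exclude filters (and reported in results) with the worktree's root name prepended, matching how the paths are displayed in the UI. The hunks below start in `impl Project`, then update the search tests, the project panel, and `ProjectSearchView`. As a minimal, self-contained sketch of the path construction (the `candidate_path` helper and the standalone `main` are illustrative only, not part of Zed's API):

```rust
use std::path::{Path, PathBuf};

/// Illustrative only (`candidate_path` is not a Zed function): build the path
/// that gets tested against include/exclude filters. With multiple worktrees
/// the worktree root name is prepended, mirroring how paths are displayed.
fn candidate_path(include_root: bool, root_name: &str, entry_path: &Path) -> PathBuf {
    if include_root {
        let mut full_path = PathBuf::from(root_name);
        full_path.push(entry_path);
        full_path
    } else {
        entry_path.to_path_buf()
    }
}

fn main() {
    // Single worktree: filters keep seeing worktree-relative paths.
    assert_eq!(
        candidate_path(false, "zed", Path::new("src/main.rs")),
        PathBuf::from("src/main.rs")
    );
    // Multiple worktrees: the root name becomes part of the matched path.
    assert_eq!(
        candidate_path(true, "worktree-a", Path::new("haystack.rs")),
        PathBuf::from("worktree-a/haystack.rs")
    );
}
```

With a single worktree the behavior is unchanged, so existing filters keep working as before.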
@@ -6166,6 +6166,7 @@ impl Project {
                 Some(tree.snapshot())
             })
             .collect::<Vec<_>>();
+        let include_root = snapshots.len() > 1;

         let background = cx.background_executor().clone();
         let path_count: usize = snapshots

@@ -6199,8 +6200,18 @@ impl Project {
                     });
                     if is_ignored && !query.include_ignored() {
                         return None;
-                    } else if let Some(path) = snapshot.file().map(|file| file.path()) {
-                        Some((path.clone(), (buffer, snapshot)))
+                    } else if let Some(file) = snapshot.file() {
+                        let matched_path = if include_root {
+                            query.file_matches(Some(&file.full_path(cx)))
+                        } else {
+                            query.file_matches(Some(file.path()))
+                        };
+
+                        if matched_path {
+                            Some((file.path().clone(), (buffer, snapshot)))
+                        } else {
+                            None
+                        }
                     } else {
                         unnamed_files.push(buffer);
                         None

@@ -6215,6 +6226,7 @@ impl Project {
                 self.fs.clone(),
                 workers,
                 query.clone(),
+                include_root,
                 path_count,
                 snapshots,
                 matching_paths_tx,

@@ -6251,21 +6263,15 @@ impl Project {
                 while let Some((entry, buffer_index)) = buffers_rx.next().await {
                     let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
                     {
-                        if query.file_matches(
-                            snapshot.file().map(|file| file.path().as_ref()),
-                        ) {
-                            query
-                                .search(snapshot, None)
-                                .await
-                                .iter()
-                                .map(|range| {
-                                    snapshot.anchor_before(range.start)
-                                        ..snapshot.anchor_after(range.end)
-                                })
-                                .collect()
-                        } else {
-                            Vec::new()
-                        }
+                        query
+                            .search(snapshot, None)
+                            .await
+                            .iter()
+                            .map(|range| {
+                                snapshot.anchor_before(range.start)
+                                    ..snapshot.anchor_after(range.end)
+                            })
+                            .collect()
                     } else {
                         Vec::new()
                     };

@@ -6337,6 +6343,7 @@ impl Project {
         fs: Arc<dyn Fs>,
         workers: usize,
         query: SearchQuery,
+        include_root: bool,
         path_count: usize,
         snapshots: Vec<LocalSnapshot>,
         matching_paths_tx: Sender<SearchMatchCandidate>,

@@ -6405,7 +6412,16 @@ impl Project {
                 if unnamed_buffers.contains_key(&entry.path) {
                     continue;
                 }
-                let matches = if query.file_matches(Some(&entry.path)) {
+                let matched_path = if include_root {
+                    let mut full_path = PathBuf::from(snapshot.root_name());
+                    full_path.push(&entry.path);
+                    query.file_matches(Some(&full_path))
+                } else {
+                    query.file_matches(Some(&entry.path))
+                };
+
+                let matches = if matched_path {
                     abs_path.clear();
                     abs_path.push(&snapshot.abs_path());
                     abs_path.push(&entry.path);
@@ -3758,8 +3758,8 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("two.rs".to_string(), vec![6..9]),
-            ("three.rs".to_string(), vec![37..40])
+            ("dir/two.rs".to_string(), vec![6..9]),
+            ("dir/three.rs".to_string(), vec![37..40])
         ])
     );

@@ -3783,9 +3783,9 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("two.rs".to_string(), vec![6..9]),
-            ("three.rs".to_string(), vec![37..40]),
-            ("four.rs".to_string(), vec![25..28, 36..39])
+            ("dir/two.rs".to_string(), vec![6..9]),
+            ("dir/three.rs".to_string(), vec![37..40]),
+            ("dir/four.rs".to_string(), vec![25..28, 36..39])
         ])
     );
 }

@@ -3846,8 +3846,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.rs".to_string(), vec![8..12]),
-            ("two.rs".to_string(), vec![8..12]),
+            ("dir/one.rs".to_string(), vec![8..12]),
+            ("dir/two.rs".to_string(), vec![8..12]),
         ]),
         "Rust only search should give only Rust files"
     );

@@ -3871,8 +3871,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.ts".to_string(), vec![14..18]),
-            ("two.ts".to_string(), vec![14..18]),
+            ("dir/one.ts".to_string(), vec![14..18]),
+            ("dir/two.ts".to_string(), vec![14..18]),
         ]),
         "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
     );

@@ -3897,10 +3897,10 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.rs".to_string(), vec![8..12]),
-            ("one.ts".to_string(), vec![14..18]),
-            ("two.rs".to_string(), vec![8..12]),
-            ("two.ts".to_string(), vec![14..18]),
+            ("dir/one.rs".to_string(), vec![8..12]),
+            ("dir/one.ts".to_string(), vec![14..18]),
+            ("dir/two.rs".to_string(), vec![8..12]),
+            ("dir/two.ts".to_string(), vec![14..18]),
         ]),
         "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
     );

@@ -3942,10 +3942,10 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.rs".to_string(), vec![8..12]),
-            ("one.ts".to_string(), vec![14..18]),
-            ("two.rs".to_string(), vec![8..12]),
-            ("two.ts".to_string(), vec![14..18]),
+            ("dir/one.rs".to_string(), vec![8..12]),
+            ("dir/one.ts".to_string(), vec![14..18]),
+            ("dir/two.rs".to_string(), vec![8..12]),
+            ("dir/two.ts".to_string(), vec![14..18]),
         ]),
         "If no exclusions match, all files should be returned"
     );

@@ -3967,8 +3967,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.ts".to_string(), vec![14..18]),
-            ("two.ts".to_string(), vec![14..18]),
+            ("dir/one.ts".to_string(), vec![14..18]),
+            ("dir/two.ts".to_string(), vec![14..18]),
         ]),
         "Rust exclusion search should give only TypeScript files"
     );

@@ -3992,8 +3992,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.rs".to_string(), vec![8..12]),
-            ("two.rs".to_string(), vec![8..12]),
+            ("dir/one.rs".to_string(), vec![8..12]),
+            ("dir/two.rs".to_string(), vec![8..12]),
         ]),
         "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
     );
@@ -4128,13 +4128,105 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("one.ts".to_string(), vec![14..18]),
-            ("two.ts".to_string(), vec![14..18]),
+            ("dir/one.ts".to_string(), vec![14..18]),
+            ("dir/two.ts".to_string(), vec![14..18]),
         ]),
         "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
     );
 }

+#[gpui::test]
+async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        "/worktree-a",
+        json!({
+            "haystack.rs": r#"// NEEDLE"#,
+            "haystack.ts": r#"// NEEDLE"#,
+        }),
+    )
+    .await;
+    fs.insert_tree(
+        "/worktree-b",
+        json!({
+            "haystack.rs": r#"// NEEDLE"#,
+            "haystack.ts": r#"// NEEDLE"#,
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs.clone(),
+        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
+        cx,
+    )
+    .await;
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                "NEEDLE",
+                false,
+                true,
+                false,
+                vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
+                Vec::new()
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
+        "should only return results from included worktree"
+    );
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                "NEEDLE",
+                false,
+                true,
+                false,
+                vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
+                Vec::new()
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
+        "should only return results from included worktree"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                "NEEDLE",
+                false,
+                true,
+                false,
+                vec![PathMatcher::new("*.ts").unwrap()],
+                Vec::new()
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
+            ("worktree-b/haystack.ts".to_string(), vec![3..9])
+        ]),
+        "should return results from both worktrees"
+    );
+}
+
 #[gpui::test]
 async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
     init_test(cx);
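The new `test_search_multiple_worktrees_with_inclusions` test above exercises inclusion globs that name a worktree root, such as `worktree-a/*.rs`. The sketch below shows why the root prefix matters for such patterns; it uses the `globset` crate purely for illustration (assumed as a dependency here, and not necessarily what Zed's `PathMatcher` uses internally):

```rust
use globset::Glob;
use std::path::Path;

fn main() {
    // Illustration only: a glob that names the worktree root, as in the new test.
    let matcher = Glob::new("worktree-a/*.rs")
        .expect("valid glob")
        .compile_matcher();

    // Without the root prefix the pattern cannot match the file at all...
    assert!(!matcher.is_match(Path::new("haystack.rs")));
    // ...but it does match once the worktree root name is prepended.
    assert!(matcher.is_match(Path::new("worktree-a/haystack.rs")));
}
```

The remaining test hunks below update the gitignored-directory expectations to the same root-prefixed form.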
@@ -4173,7 +4265,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
         )
         .await
         .unwrap(),
-        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
+        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
         "Only one non-ignored file should have the query"
     );

@@ -4186,15 +4278,21 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([
-            ("package.json".to_string(), vec![8..11]),
-            ("target/index.txt".to_string(), vec![6..9]),
+            ("dir/package.json".to_string(), vec![8..11]),
+            ("dir/target/index.txt".to_string(), vec![6..9]),
             (
-                "node_modules/prettier/package.json".to_string(),
+                "dir/node_modules/prettier/package.json".to_string(),
                 vec![9..12]
             ),
-            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
-            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
-            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
+            (
+                "dir/node_modules/prettier/index.ts".to_string(),
+                vec![15..18]
+            ),
+            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
+            (
+                "dir/node_modules/eslint/package.json".to_string(),
+                vec![8..11]
+            ),
         ]),
         "Unrestricted search with ignored directories should find every file with the query"
     );

@@ -4216,7 +4314,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap(),
         HashMap::from_iter([(
-            "node_modules/prettier/package.json".to_string(),
+            "dir/node_modules/prettier/package.json".to_string(),
             vec![9..12]
         )]),
         "With search including ignored prettier directory and excluding TS files, only one file should be found"

@@ -4313,8 +4411,13 @@ async fn search(
     Ok(result
         .into_iter()
        .map(|(buffer, ranges)| {
-            buffer.update(cx, |buffer, _| {
-                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
+            buffer.update(cx, |buffer, cx| {
+                let path = buffer
+                    .file()
+                    .unwrap()
+                    .full_path(cx)
+                    .to_string_lossy()
+                    .to_string();
                 let ranges = ranges
                     .into_iter()
                     .map(|range| range.to_offset(buffer))
@@ -23,7 +23,13 @@ use project::{
 };
 use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings};
 use serde::{Deserialize, Serialize};
-use std::{cmp::Ordering, ffi::OsStr, ops::Range, path::Path, sync::Arc};
+use std::{
+    cmp::Ordering,
+    ffi::OsStr,
+    ops::Range,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 use theme::ThemeSettings;
 use ui::{prelude::*, v_flex, ContextMenu, Icon, KeyBinding, Label, ListItem};
 use unicase::UniCase;

@@ -991,12 +997,22 @@ impl ProjectPanel {
         _: &NewSearchInDirectory,
         cx: &mut ViewContext<Self>,
     ) {
-        if let Some((_, entry)) = self.selected_entry(cx) {
+        if let Some((worktree, entry)) = self.selected_entry(cx) {
             if entry.is_dir() {
-                let entry = entry.clone();
+                let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1;
+                let dir_path = if include_root {
+                    let mut full_path = PathBuf::from(worktree.root_name());
+                    full_path.push(&entry.path);
+                    Arc::from(full_path)
+                } else {
+                    entry.path.clone()
+                };
+
                 self.workspace
                     .update(cx, |workspace, cx| {
-                        search::ProjectSearchView::new_search_in_directory(workspace, &entry, cx);
+                        search::ProjectSearchView::new_search_in_directory(
+                            workspace, &dir_path, cx,
+                        );
                     })
                     .ok();
             }
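In the project panel, the directory handed to "new search in directory" is built the same way: when more than one worktree is visible, the worktree's root name is joined onto the entry's relative path, and the result is passed to `ProjectSearchView::new_search_in_directory` as a path rather than an `Entry` (the hunks below adjust that signature and drop the now-redundant file-entry test). One small detail is the `Arc::from(full_path)` conversion, sketched here with made-up values in place of `worktree.root_name()` and `entry.path`:

```rust
use std::path::{Path, PathBuf};
use std::sync::Arc;

fn main() {
    // Hypothetical values standing in for worktree.root_name() and entry.path.
    let root_name = "worktree-a";
    let entry_path = Path::new("src/lib.rs");

    let mut full_path = PathBuf::from(root_name);
    full_path.push(entry_path);

    // std provides `impl From<PathBuf> for Arc<Path>`, so the owned PathBuf can
    // be converted into a shared Arc<Path> like the panel's `dir_path`.
    let dir_path: Arc<Path> = Arc::from(full_path);
    assert_eq!(&*dir_path, Path::new("worktree-a/src/lib.rs"));
}
```

Because the panel already checks `entry.is_dir()`, the search view no longer needs its own directory check and can accept any `&Path`.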
@@ -23,7 +23,7 @@ use gpui::{
 use menu::Confirm;
 use project::{
     search::{SearchInputs, SearchQuery},
-    Entry, Project,
+    Project,
 };
 use semantic_index::{SemanticIndex, SemanticIndexStatus};

@@ -34,7 +34,7 @@ use std::{
     any::{Any, TypeId},
     mem,
     ops::{Not, Range},
-    path::PathBuf,
+    path::{Path, PathBuf},
     time::{Duration, Instant},
 };
 use theme::ThemeSettings;

@@ -990,13 +990,10 @@ impl ProjectSearchView {

     pub fn new_search_in_directory(
         workspace: &mut Workspace,
-        dir_entry: &Entry,
+        dir_path: &Path,
         cx: &mut ViewContext<Workspace>,
     ) {
-        if !dir_entry.is_dir() {
-            return;
-        }
-        let Some(filter_str) = dir_entry.path.to_str() else {
+        let Some(filter_str) = dir_path.to_str() else {
             return;
         };

@@ -2806,33 +2803,6 @@ pub mod tests {
         })
         .unwrap();

-        let one_file_entry = cx.update(|cx| {
-            workspace
-                .read(cx)
-                .project()
-                .read(cx)
-                .entry_for_path(&(worktree_id, "a/one.rs").into(), cx)
-                .expect("no entry for /a/one.rs file")
-        });
-        assert!(one_file_entry.is_file());
-        window
-            .update(cx, |workspace, cx| {
-                ProjectSearchView::new_search_in_directory(workspace, &one_file_entry, cx)
-            })
-            .unwrap();
-        let active_search_entry = cx.read(|cx| {
-            workspace
-                .read(cx)
-                .active_pane()
-                .read(cx)
-                .active_item()
-                .and_then(|item| item.downcast::<ProjectSearchView>())
-        });
-        assert!(
-            active_search_entry.is_none(),
-            "Expected no search panel to be active for file entry"
-        );
-
         let a_dir_entry = cx.update(|cx| {
             workspace
                 .read(cx)

@@ -2844,7 +2814,7 @@ pub mod tests {
         assert!(a_dir_entry.is_dir());
         window
             .update(cx, |workspace, cx| {
-                ProjectSearchView::new_search_in_directory(workspace, &a_dir_entry, cx)
+                ProjectSearchView::new_search_in_directory(workspace, &a_dir_entry.path, cx)
             })
             .unwrap();