Merge pull request #2462 from zed-industries/kb/go-to-line-column-numbers

Support `file_query:row:column` syntax in the Find File and Go To Line dialogs and in the CLI

Addresses zed-industries/community#557 (in a slightly different way)
Addresses zed-industries/community#1184

    Fixes Go To Line not respecting the column number when navigating
    Changes the row-column separator from `,` to `:` for consistency with other tools
    Adjusts the file finder dialog to accept `file_query:row:column` syntax and open the buffer at the given position
    Extends the CLI with `file_path:row:column` syntax and opens such files the same way (see the parsing sketch below)
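
For illustration, a minimal standalone sketch of the `path:row:column` rule these changes introduce (the real implementation is `util::paths::PathLikeWithPosition` further down in this diff; this sketch omits its fallback behavior for malformed suffixes):

```rust
// Simplified sketch: split a query into a path part plus optional 1-based row/column.
fn parse_position(query: &str) -> (&str, Option<u32>, Option<u32>) {
    let mut parts = query.trim().splitn(3, ':');
    let path = parts.next().unwrap_or(query);
    let row = parts.next().and_then(|s| s.trim().parse().ok());
    // A column only makes sense when a row was given.
    let column = row.and_then(|_| parts.next().and_then(|s| s.trim().parse().ok()));
    (path, row, column)
}

fn main() {
    assert_eq!(parse_position("src/main.rs"), ("src/main.rs", None, None));
    assert_eq!(parse_position("src/main.rs:12"), ("src/main.rs", Some(12), None));
    assert_eq!(parse_position("src/main.rs:12:3"), ("src/main.rs", Some(12), Some(3)));
}
```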
Kirill Bulatov 2023-05-16 21:24:57 +03:00 committed by GitHub
commit 685f3de796
15 changed files with 895 additions and 226 deletions

Cargo.lock generated
View file

@ -1097,6 +1097,7 @@ dependencies = [
"plist",
"serde",
"serde_derive",
"util",
]
[[package]]
@ -2185,6 +2186,7 @@ dependencies = [
"project",
"serde_json",
"settings",
"text",
"theme",
"util",
"workspace",
@ -2674,6 +2676,7 @@ dependencies = [
"postage",
"settings",
"text",
"util",
"workspace",
]

View file

@ -19,6 +19,7 @@ dirs = "3.0"
ipc-channel = "0.16"
serde.workspace = true
serde_derive.workspace = true
util = { path = "../util" }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"

View file

@ -1,6 +1,5 @@
pub use ipc_channel::ipc;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
#[derive(Serialize, Deserialize)]
pub struct IpcHandshake {
@ -10,7 +9,12 @@ pub struct IpcHandshake {
#[derive(Debug, Serialize, Deserialize)]
pub enum CliRequest {
Open { paths: Vec<PathBuf>, wait: bool },
// The field is named `paths` for compatibility, but the CLI can now request
// opening a path at a certain row and/or column: `some/path:123` and `some/path:123:456`.
//
// Since the Zed CLI is installed separately, an older CLI may be talking to a newer Zed editor;
// support both formats by using `String` here and parsing it on the Zed side later.
Open { paths: Vec<String>, wait: bool },
}
#[derive(Debug, Serialize, Deserialize)]
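
To make the compatibility note above concrete, here is a hedged sketch (assuming the `util::paths::PathLikeWithPosition` helper added later in this diff) of how the receiving side can treat old-style plain paths and new-style positioned paths uniformly:

```rust
use util::paths::PathLikeWithPosition;

// Both an old-style plain path and a new-style `path:row:column` string parse into
// the same type, so the editor does not need to know which CLI version sent it.
fn parse_incoming(raw: &str) -> PathLikeWithPosition<String> {
    PathLikeWithPosition::parse_str(raw, |path| {
        Ok::<_, std::convert::Infallible>(path.to_string())
    })
    .expect("infallible")
}

// parse_incoming("src/lib.rs")      -> row: None,     column: None
// parse_incoming("src/lib.rs:10:2") -> row: Some(10), column: Some(2)
```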

View file

@ -16,16 +16,20 @@ use std::{
path::{Path, PathBuf},
ptr,
};
use util::paths::PathLikeWithPosition;
#[derive(Parser)]
#[clap(name = "zed", global_setting(clap::AppSettings::NoAutoVersion))]
struct Args {
/// Wait for all of the given paths to be closed before exiting.
/// Wait for all of the given paths to be opened/closed before exiting.
#[clap(short, long)]
wait: bool,
/// A sequence of space-separated paths that you want to open.
#[clap()]
paths: Vec<PathBuf>,
///
/// Use `path:row:column` syntax to open a file at a specific location.
/// Non-existing paths and directories will have the `:row:column` suffix ignored.
#[clap(value_parser = parse_path_with_position)]
paths_with_position: Vec<PathLikeWithPosition<PathBuf>>,
/// Print Zed's version and the app path.
#[clap(short, long)]
version: bool,
@ -34,6 +38,14 @@ struct Args {
bundle_path: Option<PathBuf>,
}
fn parse_path_with_position(
argument_str: &str,
) -> Result<PathLikeWithPosition<PathBuf>, std::convert::Infallible> {
PathLikeWithPosition::parse_str(argument_str, |path_str| {
Ok(Path::new(path_str).to_path_buf())
})
}
#[derive(Debug, Deserialize)]
struct InfoPlist {
#[serde(rename = "CFBundleShortVersionString")]
@ -50,7 +62,11 @@ fn main() -> Result<()> {
return Ok(());
}
for path in args.paths.iter() {
for path in args
.paths_with_position
.iter()
.map(|path_with_position| &path_with_position.path_like)
{
if !path.exists() {
touch(path.as_path())?;
}
@ -60,10 +76,16 @@ fn main() -> Result<()> {
tx.send(CliRequest::Open {
paths: args
.paths
.paths_with_position
.into_iter()
.map(|path| fs::canonicalize(path).map_err(|error| anyhow!(error)))
.collect::<Result<Vec<PathBuf>>>()?,
.map(|path_with_position| {
let path_with_position = path_with_position.map_path_like(|path| {
fs::canonicalize(&path)
.with_context(|| format!("path {path:?} canonicalization"))
})?;
Ok(path_with_position.to_string(|path| path.display().to_string()))
})
.collect::<Result<_>>()?,
wait: args.wait,
})?;
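
A hedged sketch of the round trip the CLI performs above (assuming the `anyhow` crate and the `PathLikeWithPosition` API added in this diff): only the path-like part is canonicalized, then the value is re-serialized as `path:row:column` for the IPC request.

```rust
use anyhow::Context;
use std::{fs, path::PathBuf};
use util::paths::PathLikeWithPosition;

fn prepare_for_ipc(parsed: PathLikeWithPosition<PathBuf>) -> anyhow::Result<String> {
    // Canonicalize only the path, keeping any row/column intact.
    let canonical = parsed.map_path_like(|path| {
        fs::canonicalize(&path).with_context(|| format!("path {path:?} canonicalization"))
    })?;
    // Serializes to `/abs/path:12:3` when a position was given, plain `/abs/path` otherwise.
    Ok(canonical.to_string(|path| path.display().to_string()))
}
```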

View file

@ -1006,8 +1006,7 @@ mod tests {
.zip(expected_styles.iter().cloned())
.collect::<Vec<_>>();
assert_eq!(
rendered.text,
dbg!(expected_text),
rendered.text, expected_text,
"wrong text for input {blocks:?}"
);
assert_eq!(

View file

@ -27,7 +27,7 @@ use std::{
path::{Path, PathBuf},
};
use text::Selection;
use util::{ResultExt, TryFutureExt};
use util::{paths::FILE_ROW_COLUMN_DELIMITER, ResultExt, TryFutureExt};
use workspace::item::{BreadcrumbText, FollowableItemHandle};
use workspace::{
item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
@ -1112,7 +1112,11 @@ impl View for CursorPosition {
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
if let Some(position) = self.position {
let theme = &cx.global::<Settings>().theme.workspace.status_bar;
let mut text = format!("{},{}", position.row + 1, position.column + 1);
let mut text = format!(
"{}{FILE_ROW_COLUMN_DELIMITER}{}",
position.row + 1,
position.column + 1
);
if self.selected_count > 0 {
write!(text, " ({} selected)", self.selected_count).unwrap();
}

View file

@ -16,6 +16,7 @@ menu = { path = "../menu" }
picker = { path = "../picker" }
project = { path = "../project" }
settings = { path = "../settings" }
text = { path = "../text" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }

View file

@ -1,3 +1,4 @@
use editor::{scroll::autoscroll::Autoscroll, Bias, Editor};
use fuzzy::PathMatch;
use gpui::{
actions, elements::*, AppContext, ModelHandle, MouseState, Task, ViewContext, WeakViewHandle,
@ -12,7 +13,8 @@ use std::{
Arc,
},
};
use util::{post_inc, ResultExt};
use text::Point;
use util::{paths::PathLikeWithPosition, post_inc, ResultExt};
use workspace::Workspace;
pub type FileFinder = Picker<FileFinderDelegate>;
@ -23,7 +25,7 @@ pub struct FileFinderDelegate {
search_count: usize,
latest_search_id: usize,
latest_search_did_cancel: bool,
latest_search_query: String,
latest_search_query: Option<PathLikeWithPosition<FileSearchQuery>>,
relative_to: Option<Arc<Path>>,
matches: Vec<PathMatch>,
selected: Option<(usize, Arc<Path>)>,
@ -60,6 +62,21 @@ pub enum Event {
Dismissed,
}
#[derive(Debug, Clone)]
struct FileSearchQuery {
raw_query: String,
file_query_end: Option<usize>,
}
impl FileSearchQuery {
fn path_query(&self) -> &str {
match self.file_query_end {
Some(file_path_end) => &self.raw_query[..file_path_end],
None => &self.raw_query,
}
}
}
impl FileFinderDelegate {
fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
let path = &path_match.path;
@ -103,7 +120,7 @@ impl FileFinderDelegate {
search_count: 0,
latest_search_id: 0,
latest_search_did_cancel: false,
latest_search_query: String::new(),
latest_search_query: None,
relative_to,
matches: Vec::new(),
selected: None,
@ -111,7 +128,11 @@ impl FileFinderDelegate {
}
}
fn spawn_search(&mut self, query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
fn spawn_search(
&mut self,
query: PathLikeWithPosition<FileSearchQuery>,
cx: &mut ViewContext<FileFinder>,
) -> Task<()> {
let relative_to = self.relative_to.clone();
let worktrees = self
.project
@ -140,7 +161,7 @@ impl FileFinderDelegate {
cx.spawn(|picker, mut cx| async move {
let matches = fuzzy::match_path_sets(
candidate_sets.as_slice(),
&query,
query.path_like.path_query(),
relative_to,
false,
100,
@ -163,18 +184,24 @@ impl FileFinderDelegate {
&mut self,
search_id: usize,
did_cancel: bool,
query: String,
query: PathLikeWithPosition<FileSearchQuery>,
matches: Vec<PathMatch>,
cx: &mut ViewContext<FileFinder>,
) {
if search_id >= self.latest_search_id {
self.latest_search_id = search_id;
if self.latest_search_did_cancel && query == self.latest_search_query {
if self.latest_search_did_cancel
&& Some(query.path_like.path_query())
== self
.latest_search_query
.as_ref()
.map(|query| query.path_like.path_query())
{
util::extend_sorted(&mut self.matches, matches.into_iter(), 100, |a, b| b.cmp(a));
} else {
self.matches = matches;
}
self.latest_search_query = query;
self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
cx.notify();
}
@ -209,13 +236,25 @@ impl PickerDelegate for FileFinderDelegate {
cx.notify();
}
fn update_matches(&mut self, query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
if query.is_empty() {
fn update_matches(&mut self, raw_query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
if raw_query.is_empty() {
self.latest_search_id = post_inc(&mut self.search_count);
self.matches.clear();
cx.notify();
Task::ready(())
} else {
let raw_query = &raw_query;
let query = PathLikeWithPosition::parse_str(raw_query, |path_like_str| {
Ok::<_, std::convert::Infallible>(FileSearchQuery {
raw_query: raw_query.to_owned(),
file_query_end: if path_like_str == raw_query {
None
} else {
Some(path_like_str.len())
},
})
})
.expect("infallible");
self.spawn_search(query, cx)
}
}
@ -228,12 +267,49 @@ impl PickerDelegate for FileFinderDelegate {
path: m.path.clone(),
};
workspace.update(cx, |workspace, cx| {
let open_task = workspace.update(cx, |workspace, cx| {
workspace.open_path(project_path.clone(), None, true, cx)
});
let workspace = workspace.downgrade();
let row = self
.latest_search_query
.as_ref()
.and_then(|query| query.row)
.map(|row| row.saturating_sub(1));
let col = self
.latest_search_query
.as_ref()
.and_then(|query| query.column)
.unwrap_or(0)
.saturating_sub(1);
cx.spawn(|_, mut cx| async move {
let item = open_task.await.log_err()?;
if let Some(row) = row {
if let Some(active_editor) = item.downcast::<Editor>() {
active_editor
.downgrade()
.update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot;
let point = snapshot
.buffer_snapshot
.clip_point(Point::new(row, col), Bias::Left);
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([point..point])
});
})
.log_err();
}
}
workspace
.open_path(project_path.clone(), None, true, cx)
.detach_and_log_err(cx);
workspace.dismiss_modal(cx);
.update(&mut cx, |workspace, cx| workspace.dismiss_modal(cx))
.log_err();
Some(())
})
.detach();
}
}
}
@ -270,6 +346,7 @@ impl PickerDelegate for FileFinderDelegate {
mod tests {
use super::*;
use editor::Editor;
use gpui::executor::Deterministic;
use menu::{Confirm, SelectNext};
use serde_json::json;
use workspace::{AppState, Workspace};
@ -337,6 +414,186 @@ mod tests {
});
}
#[gpui::test]
async fn test_row_column_numbers_query_inside_file(
deterministic: Arc<Deterministic>,
cx: &mut gpui::TestAppContext,
) {
let app_state = cx.update(|cx| {
super::init(cx);
editor::init(cx);
AppState::test(cx)
});
let first_file_name = "first.rs";
let first_file_contents = "// First Rust file";
app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
"test": {
first_file_name: first_file_contents,
"second.rs": "// Second Rust file",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
cx.dispatch_action(window_id, Toggle);
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
let file_query = &first_file_name[..3];
let file_row = 1;
let file_column = 3;
assert!(file_column <= first_file_contents.len());
let query_inside_file = format!("{file_query}:{file_row}:{file_column}");
finder
.update(cx, |finder, cx| {
finder
.delegate_mut()
.update_matches(query_inside_file.to_string(), cx)
})
.await;
finder.read_with(cx, |finder, _| {
let finder = finder.delegate();
assert_eq!(finder.matches.len(), 1);
let latest_search_query = finder
.latest_search_query
.as_ref()
.expect("Finder should have a query after the update_matches call");
assert_eq!(latest_search_query.path_like.raw_query, query_inside_file);
assert_eq!(
latest_search_query.path_like.file_query_end,
Some(file_query.len())
);
assert_eq!(latest_search_query.row, Some(file_row));
assert_eq!(latest_search_query.column, Some(file_column as u32));
});
let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
cx.dispatch_action(window_id, SelectNext);
cx.dispatch_action(window_id, Confirm);
active_pane
.condition(cx, |pane, _| pane.active_item().is_some())
.await;
let editor = cx.update(|cx| {
let active_item = active_pane.read(cx).active_item().unwrap();
active_item.downcast::<Editor>().unwrap()
});
deterministic.advance_clock(std::time::Duration::from_secs(2));
deterministic.start_waiting();
deterministic.finish_waiting();
editor.update(cx, |editor, cx| {
let all_selections = editor.selections.all_adjusted(cx);
assert_eq!(
all_selections.len(),
1,
"Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}"
);
let caret_selection = all_selections.into_iter().next().unwrap();
assert_eq!(caret_selection.start, caret_selection.end,
"Caret selection should have its start and end at the same position");
assert_eq!(file_row, caret_selection.start.row + 1,
"Query inside file should get caret with the same focus row");
assert_eq!(file_column, caret_selection.start.column as usize + 1,
"Query inside file should get caret with the same focus column");
});
}
#[gpui::test]
async fn test_row_column_numbers_query_outside_file(
deterministic: Arc<Deterministic>,
cx: &mut gpui::TestAppContext,
) {
let app_state = cx.update(|cx| {
super::init(cx);
editor::init(cx);
AppState::test(cx)
});
let first_file_name = "first.rs";
let first_file_contents = "// First Rust file";
app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
"test": {
first_file_name: first_file_contents,
"second.rs": "// Second Rust file",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
cx.dispatch_action(window_id, Toggle);
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
let file_query = &first_file_name[..3];
let file_row = 200;
let file_column = 300;
assert!(file_column > first_file_contents.len());
let query_outside_file = format!("{file_query}:{file_row}:{file_column}");
finder
.update(cx, |finder, cx| {
finder
.delegate_mut()
.update_matches(query_outside_file.to_string(), cx)
})
.await;
finder.read_with(cx, |finder, _| {
let finder = finder.delegate();
assert_eq!(finder.matches.len(), 1);
let latest_search_query = finder
.latest_search_query
.as_ref()
.expect("Finder should have a query after the update_matches call");
assert_eq!(latest_search_query.path_like.raw_query, query_outside_file);
assert_eq!(
latest_search_query.path_like.file_query_end,
Some(file_query.len())
);
assert_eq!(latest_search_query.row, Some(file_row));
assert_eq!(latest_search_query.column, Some(file_column as u32));
});
let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
cx.dispatch_action(window_id, SelectNext);
cx.dispatch_action(window_id, Confirm);
active_pane
.condition(cx, |pane, _| pane.active_item().is_some())
.await;
let editor = cx.update(|cx| {
let active_item = active_pane.read(cx).active_item().unwrap();
active_item.downcast::<Editor>().unwrap()
});
deterministic.advance_clock(std::time::Duration::from_secs(2));
deterministic.start_waiting();
deterministic.finish_waiting();
editor.update(cx, |editor, cx| {
let all_selections = editor.selections.all_adjusted(cx);
assert_eq!(
all_selections.len(),
1,
"Expected to have 1 selection (caret) after file finder confirm, but got: {all_selections:?}"
);
let caret_selection = all_selections.into_iter().next().unwrap();
assert_eq!(caret_selection.start, caret_selection.end,
"Caret selection should have its start and end at the same position");
assert_eq!(0, caret_selection.start.row,
"Excessive rows (as in query outside file borders) should get trimmed to last file row");
assert_eq!(first_file_contents.len(), caret_selection.start.column as usize,
"Excessive columns (as in query outside file borders) should get trimmed to selected row's last column");
});
}
#[gpui::test]
async fn test_matching_cancellation(cx: &mut gpui::TestAppContext) {
let app_state = cx.update(AppState::test);
@ -371,7 +628,7 @@ mod tests {
)
});
let query = "hi".to_string();
let query = test_path_like("hi");
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search(query.clone(), cx))
.await;
@ -455,7 +712,9 @@ mod tests {
)
});
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search("hi".into(), cx))
.update(cx, |f, cx| {
f.delegate_mut().spawn_search(test_path_like("hi"), cx)
})
.await;
finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 7));
}
@ -491,7 +750,9 @@ mod tests {
// Even though there is only one worktree, that worktree's filename
// is included in the matching, because the worktree is a single file.
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search("thf".into(), cx))
.update(cx, |f, cx| {
f.delegate_mut().spawn_search(test_path_like("thf"), cx)
})
.await;
cx.read(|cx| {
let finder = finder.read(cx);
@ -509,7 +770,9 @@ mod tests {
// Since the worktree root is a file, searching for its name followed by a slash does
// not match anything.
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search("thf/".into(), cx))
.update(cx, |f, cx| {
f.delegate_mut().spawn_search(test_path_like("thf/"), cx)
})
.await;
finder.read_with(cx, |f, _| assert_eq!(f.delegate().matches.len(), 0));
}
@ -553,7 +816,9 @@ mod tests {
// Run a search that matches two files with the same relative path.
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search("a.t".into(), cx))
.update(cx, |f, cx| {
f.delegate_mut().spawn_search(test_path_like("a.t"), cx)
})
.await;
// Can switch between different matches with the same relative path.
@ -609,7 +874,7 @@ mod tests {
finder
.update(cx, |f, cx| {
f.delegate_mut().spawn_search("a.txt".into(), cx)
f.delegate_mut().spawn_search(test_path_like("a.txt"), cx)
})
.await;
@ -651,11 +916,27 @@ mod tests {
)
});
finder
.update(cx, |f, cx| f.delegate_mut().spawn_search("dir".into(), cx))
.update(cx, |f, cx| {
f.delegate_mut().spawn_search(test_path_like("dir"), cx)
})
.await;
cx.read(|cx| {
let finder = finder.read(cx);
assert_eq!(finder.delegate().matches.len(), 0);
});
}
fn test_path_like(test_str: &str) -> PathLikeWithPosition<FileSearchQuery> {
PathLikeWithPosition::parse_str(test_str, |path_like_str| {
Ok::<_, std::convert::Infallible>(FileSearchQuery {
raw_query: test_str.to_owned(),
file_query_end: if path_like_str == test_str {
None
} else {
Some(path_like_str.len())
},
})
})
.unwrap()
}
}
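
A hedged sketch of the caret placement performed on confirm above: the query's 1-based row/column are converted to a 0-based `text::Point` and then clipped to the buffer, which is why the out-of-range query in the second test lands on the file's last row and column.

```rust
use text::Point;

fn finder_caret(row: Option<u32>, column: Option<u32>) -> Option<Point> {
    // 1-based query coordinates become 0-based buffer coordinates.
    let row = row?.saturating_sub(1);
    let column = column.unwrap_or(0).saturating_sub(1);
    // The confirm handler then clips the point and selects it:
    //   let point = snapshot.buffer_snapshot.clip_point(Point::new(row, column), Bias::Left);
    //   editor.change_selections(Some(Autoscroll::center()), cx, |s| s.select_ranges([point..point]));
    Some(Point::new(row, column))
}
```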

View file

@ -16,3 +16,4 @@ settings = { path = "../settings" }
text = { path = "../text" }
workspace = { path = "../workspace" }
postage.workspace = true
util = { path = "../util" }

View file

@ -1,6 +1,6 @@
use std::sync::Arc;
use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor};
use gpui::{
actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, AppContext, Axis, Entity,
View, ViewContext, ViewHandle,
@ -8,6 +8,7 @@ use gpui::{
use menu::{Cancel, Confirm};
use settings::Settings;
use text::{Bias, Point};
use util::paths::FILE_ROW_COLUMN_DELIMITER;
use workspace::{Modal, Workspace};
actions!(go_to_line, [Toggle]);
@ -75,15 +76,16 @@ impl GoToLine {
fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
self.prev_scroll_position.take();
self.active_editor.update(cx, |active_editor, cx| {
if let Some(rows) = active_editor.highlighted_rows() {
if let Some(point) = self.point_from_query(cx) {
self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
let position = DisplayPoint::new(rows.start, 0).to_point(&snapshot);
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
active_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([position..position])
s.select_ranges([point..point])
});
}
});
});
}
cx.emit(Event::Dismissed);
}
@ -96,16 +98,7 @@ impl GoToLine {
match event {
editor::Event::Blurred => cx.emit(Event::Dismissed),
editor::Event::BufferEdited { .. } => {
let line_editor = self.line_editor.read(cx).text(cx);
let mut components = line_editor.trim().split(&[',', ':'][..]);
let row = components.next().and_then(|row| row.parse::<u32>().ok());
let column = components.next().and_then(|row| row.parse::<u32>().ok());
if let Some(point) = row.map(|row| {
Point::new(
row.saturating_sub(1),
column.map(|column| column.saturating_sub(1)).unwrap_or(0),
)
}) {
if let Some(point) = self.point_from_query(cx) {
self.active_editor.update(cx, |active_editor, cx| {
let snapshot = active_editor.snapshot(cx).display_snapshot;
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
@ -120,6 +113,20 @@ impl GoToLine {
_ => {}
}
}
fn point_from_query(&self, cx: &ViewContext<Self>) -> Option<Point> {
let line_editor = self.line_editor.read(cx).text(cx);
let mut components = line_editor
.splitn(2, FILE_ROW_COLUMN_DELIMITER)
.map(str::trim)
.fuse();
let row = components.next().and_then(|row| row.parse::<u32>().ok())?;
let column = components.next().and_then(|col| col.parse::<u32>().ok());
Some(Point::new(
row.saturating_sub(1),
column.unwrap_or(0).saturating_sub(1),
))
}
}
impl Entity for GoToLine {
@ -147,7 +154,7 @@ impl View for GoToLine {
let theme = &cx.global::<Settings>().theme.picker;
let label = format!(
"{},{} of {} lines",
"{}{FILE_ROW_COLUMN_DELIMITER}{} of {} lines",
self.cursor_point.row + 1,
self.cursor_point.column + 1,
self.max_point.row + 1
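
For reference, a minimal sketch of `point_from_query` above with a few example inputs (assuming the same 1-based dialog input and 0-based `text::Point` output; the editor still clips the result to the buffer):

```rust
use text::Point;
use util::paths::FILE_ROW_COLUMN_DELIMITER;

fn parse_go_to_line(input: &str) -> Option<Point> {
    let mut components = input
        .splitn(2, FILE_ROW_COLUMN_DELIMITER)
        .map(str::trim)
        .fuse();
    let row = components.next().and_then(|row| row.parse::<u32>().ok())?;
    let column = components.next().and_then(|col| col.parse::<u32>().ok());
    Some(Point::new(
        row.saturating_sub(1),
        column.unwrap_or(0).saturating_sub(1),
    ))
}

// parse_go_to_line("4:2") == Some(Point::new(3, 1))
// parse_go_to_line("12")  == Some(Point::new(11, 0))
// parse_go_to_line("abc") == None
```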

View file

@ -1,5 +1,7 @@
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
lazy_static::lazy_static! {
pub static ref HOME: PathBuf = dirs::home_dir().expect("failed to determine home directory");
pub static ref CONFIG_DIR: PathBuf = HOME.join(".config").join("zed");
@ -70,3 +72,208 @@ pub fn compact(path: &Path) -> PathBuf {
path.to_path_buf()
}
}
/// The delimiter used when parsing `path_query:row_number:column_number` strings.
pub const FILE_ROW_COLUMN_DELIMITER: char = ':';
/// A representation of a path-like string with optional row and column numbers.
/// Examples of matching values: `te`, `test.rs:22`, `te:22:5`, etc.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PathLikeWithPosition<P> {
pub path_like: P,
pub row: Option<u32>,
// Absent if row is absent.
pub column: Option<u32>,
}
impl<P> PathLikeWithPosition<P> {
/// Parses a string that may have a `:row:column` suffix.
/// Ignores trailing `:`s, so `test.rs:22:` is parsed as `test.rs:22`.
/// If parsing any row/column component fails, the whole string is treated as the path-like value.
pub fn parse_str<E>(
s: &str,
parse_path_like_str: impl Fn(&str) -> Result<P, E>,
) -> Result<Self, E> {
let fallback = |fallback_str| {
Ok(Self {
path_like: parse_path_like_str(fallback_str)?,
row: None,
column: None,
})
};
match s.trim().split_once(FILE_ROW_COLUMN_DELIMITER) {
Some((path_like_str, maybe_row_and_col_str)) => {
let path_like_str = path_like_str.trim();
let maybe_row_and_col_str = maybe_row_and_col_str.trim();
if path_like_str.is_empty() {
fallback(s)
} else if maybe_row_and_col_str.is_empty() {
fallback(path_like_str)
} else {
let (row_parse_result, maybe_col_str) =
match maybe_row_and_col_str.split_once(FILE_ROW_COLUMN_DELIMITER) {
Some((maybe_row_str, maybe_col_str)) => {
(maybe_row_str.parse::<u32>(), maybe_col_str.trim())
}
None => (maybe_row_and_col_str.parse::<u32>(), ""),
};
match row_parse_result {
Ok(row) => {
if maybe_col_str.is_empty() {
Ok(Self {
path_like: parse_path_like_str(path_like_str)?,
row: Some(row),
column: None,
})
} else {
match maybe_col_str.parse::<u32>() {
Ok(col) => Ok(Self {
path_like: parse_path_like_str(path_like_str)?,
row: Some(row),
column: Some(col),
}),
Err(_) => fallback(s),
}
}
}
Err(_) => fallback(s),
}
}
}
None => fallback(s),
}
}
pub fn map_path_like<P2, E>(
self,
mapping: impl FnOnce(P) -> Result<P2, E>,
) -> Result<PathLikeWithPosition<P2>, E> {
Ok(PathLikeWithPosition {
path_like: mapping(self.path_like)?,
row: self.row,
column: self.column,
})
}
pub fn to_string(&self, path_like_to_string: impl Fn(&P) -> String) -> String {
let path_like_string = path_like_to_string(&self.path_like);
if let Some(row) = self.row {
if let Some(column) = self.column {
format!("{path_like_string}:{row}:{column}")
} else {
format!("{path_like_string}:{row}")
}
} else {
path_like_string
}
}
}
#[cfg(test)]
mod tests {
use super::*;
type TestPath = PathLikeWithPosition<String>;
fn parse_str(s: &str) -> TestPath {
TestPath::parse_str(s, |s| Ok::<_, std::convert::Infallible>(s.to_string()))
.expect("infallible")
}
#[test]
fn path_with_position_parsing_positive() {
let input_and_expected = [
(
"test_file.rs",
PathLikeWithPosition {
path_like: "test_file.rs".to_string(),
row: None,
column: None,
},
),
(
"test_file.rs:1",
PathLikeWithPosition {
path_like: "test_file.rs".to_string(),
row: Some(1),
column: None,
},
),
(
"test_file.rs:1:2",
PathLikeWithPosition {
path_like: "test_file.rs".to_string(),
row: Some(1),
column: Some(2),
},
),
];
for (input, expected) in input_and_expected {
let actual = parse_str(input);
assert_eq!(
actual, expected,
"For positive case input str '{input}', got a parse mismatch"
);
}
}
#[test]
fn path_with_position_parsing_negative() {
for input in [
"test_file.rs:a",
"test_file.rs:a:b",
"test_file.rs::",
"test_file.rs::1",
"test_file.rs:1::",
"test_file.rs::1:2",
"test_file.rs:1::2",
"test_file.rs:1:2:",
"test_file.rs:1:2:3",
] {
let actual = parse_str(input);
assert_eq!(
actual,
PathLikeWithPosition {
path_like: input.to_string(),
row: None,
column: None,
},
"For negative case input str '{input}', got a parse mismatch"
);
}
}
// Trim off trailing `:`s for otherwise valid input.
#[test]
fn path_with_position_parsing_special() {
let input_and_expected = [
(
"test_file.rs:",
PathLikeWithPosition {
path_like: "test_file.rs".to_string(),
row: None,
column: None,
},
),
(
"test_file.rs:1:",
PathLikeWithPosition {
path_like: "test_file.rs".to_string(),
row: Some(1),
column: None,
},
),
];
for (input, expected) in input_and_expected {
let actual = parse_str(input);
assert_eq!(
actual, expected,
"For special case input str '{input}', got a parse mismatch"
);
}
}
}
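
A quick usage sketch mirroring the tests above (assuming the `util` crate path): malformed suffixes fall back to treating the whole input as the path-like part, and bare trailing delimiters are dropped.

```rust
use util::paths::PathLikeWithPosition;

fn parse(s: &str) -> PathLikeWithPosition<String> {
    PathLikeWithPosition::parse_str(s, |s| Ok::<_, std::convert::Infallible>(s.to_string()))
        .expect("infallible")
}

fn main() {
    assert_eq!(parse("a.rs:1:2").row, Some(1));
    assert_eq!(parse("a.rs:1:2").column, Some(2));
    assert_eq!(parse("a.rs:one").path_like, "a.rs:one"); // row failed to parse: fall back to the whole string
    assert_eq!(parse("a.rs:1:").row, Some(1));           // trailing `:` is ignored
    assert_eq!(parse("a.rs:1:").column, None);
}
```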

View file

@ -462,7 +462,6 @@ mod tests {
let (_, _workspace) = cx.add_window(|cx| {
Workspace::new(
Some(serialized_workspace),
0,
project.clone(),
Arc::new(AppState {
@ -480,6 +479,11 @@ mod tests {
)
});
cx.update(|cx| {
Workspace::load_workspace(_workspace.downgrade(), serialized_workspace, Vec::new(), cx)
})
.await;
cx.foreground().run_until_parked();
//Should terminate
}
@ -605,7 +609,6 @@ mod tests {
let project = Project::test(fs, [], cx).await;
let (window_id, workspace) = cx.add_window(|cx| {
Workspace::new(
None,
0,
project.clone(),
Arc::new(AppState {

View file

@ -1,5 +1,6 @@
use crate::{
dock::DockPosition, ItemDeserializers, Member, Pane, PaneAxis, Workspace, WorkspaceId,
dock::DockPosition, item::ItemHandle, ItemDeserializers, Member, Pane, PaneAxis, Workspace,
WorkspaceId,
};
use anyhow::{anyhow, Context, Result};
use async_recursion::async_recursion;
@ -97,17 +98,23 @@ impl SerializedPaneGroup {
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,
cx: &mut AsyncAppContext,
) -> Option<(Member, Option<ViewHandle<Pane>>)> {
) -> Option<(
Member,
Option<ViewHandle<Pane>>,
Vec<Option<Box<dyn ItemHandle>>>,
)> {
match self {
SerializedPaneGroup::Group { axis, children } => {
let mut current_active_pane = None;
let mut members = Vec::new();
let mut items = Vec::new();
for child in children {
if let Some((new_member, active_pane)) = child
if let Some((new_member, active_pane, new_items)) = child
.deserialize(project, workspace_id, workspace, cx)
.await
{
members.push(new_member);
items.extend(new_items);
current_active_pane = current_active_pane.or(active_pane);
}
}
@ -117,7 +124,7 @@ impl SerializedPaneGroup {
}
if members.len() == 1 {
return Some((members.remove(0), current_active_pane));
return Some((members.remove(0), current_active_pane, items));
}
Some((
@ -126,6 +133,7 @@ impl SerializedPaneGroup {
members,
}),
current_active_pane,
items,
))
}
SerializedPaneGroup::Pane(serialized_pane) => {
@ -133,7 +141,7 @@ impl SerializedPaneGroup {
.update(cx, |workspace, cx| workspace.add_pane(cx).downgrade())
.log_err()?;
let active = serialized_pane.active;
serialized_pane
let new_items = serialized_pane
.deserialize_to(project, &pane, workspace_id, workspace, cx)
.await
.log_err()?;
@ -143,7 +151,7 @@ impl SerializedPaneGroup {
.log_err()?
{
let pane = pane.upgrade(cx)?;
Some((Member::Pane(pane.clone()), active.then(|| pane)))
Some((Member::Pane(pane.clone()), active.then(|| pane), new_items))
} else {
let pane = pane.upgrade(cx)?;
workspace
@ -174,7 +182,8 @@ impl SerializedPane {
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,
cx: &mut AsyncAppContext,
) -> Result<()> {
) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
let mut items = Vec::new();
let mut active_item_index = None;
for (index, item) in self.children.iter().enumerate() {
let project = project.clone();
@ -192,6 +201,8 @@ impl SerializedPane {
.await
.log_err();
items.push(item_handle.clone());
if let Some(item_handle) = item_handle {
workspace.update(cx, |workspace, cx| {
let pane_handle = pane_handle
@ -213,7 +224,7 @@ impl SerializedPane {
})?;
}
anyhow::Ok(())
anyhow::Ok(items)
}
}

View file

@ -82,7 +82,7 @@ use status_bar::StatusBar;
pub use status_bar::StatusItemView;
use theme::{Theme, ThemeRegistry};
pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
use util::{paths, ResultExt};
use util::{async_iife, paths, ResultExt};
lazy_static! {
static ref ZED_WINDOW_SIZE: Option<Vector2F> = env::var("ZED_WINDOW_SIZE")
@ -493,7 +493,6 @@ struct FollowerState {
impl Workspace {
pub fn new(
serialized_workspace: Option<SerializedWorkspace>,
workspace_id: WorkspaceId,
project: ModelHandle<Project>,
app_state: Arc<AppState>,
@ -659,16 +658,6 @@ impl Workspace {
this.project_remote_id_changed(project.read(cx).remote_id(), cx);
cx.defer(|this, cx| this.update_window_title(cx));
if let Some(serialized_workspace) = serialized_workspace {
cx.defer(move |_, cx| {
Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx)
});
} else if project.read(cx).is_local() {
if cx.global::<Settings>().default_dock_anchor != DockAnchor::Expanded {
Dock::show(&mut this, false, cx);
}
}
this
}
@ -690,18 +679,15 @@ impl Workspace {
);
cx.spawn(|mut cx| async move {
let mut serialized_workspace =
persistence::DB.workspace_for_roots(&abs_paths.as_slice());
let serialized_workspace = persistence::DB.workspace_for_roots(&abs_paths.as_slice());
let paths_to_open = serialized_workspace
.as_ref()
.map(|workspace| workspace.location.paths())
.unwrap_or(Arc::new(abs_paths));
let paths_to_open = Arc::new(abs_paths);
// Get project paths for all of the abs_paths
let mut worktree_roots: HashSet<Arc<Path>> = Default::default();
let mut project_paths = Vec::new();
for path in paths_to_open.iter() {
let mut project_paths: Vec<(PathBuf, Option<ProjectPath>)> =
Vec::with_capacity(paths_to_open.len());
for path in paths_to_open.iter().cloned() {
if let Some((worktree, project_entry)) = cx
.update(|cx| {
Workspace::project_path_for_path(project_handle.clone(), &path, true, cx)
@ -710,9 +696,9 @@ impl Workspace {
.log_err()
{
worktree_roots.insert(worktree.read_with(&mut cx, |tree, _| tree.abs_path()));
project_paths.push(Some(project_entry));
project_paths.push((path, Some(project_entry)));
} else {
project_paths.push(None);
project_paths.push((path, None));
}
}
@ -732,27 +718,17 @@ impl Workspace {
))
});
let build_workspace =
|cx: &mut ViewContext<Workspace>,
serialized_workspace: Option<SerializedWorkspace>| {
let mut workspace = Workspace::new(
serialized_workspace,
workspace_id,
project_handle.clone(),
app_state.clone(),
cx,
);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
};
let build_workspace = |cx: &mut ViewContext<Workspace>| {
let mut workspace =
Workspace::new(workspace_id, project_handle.clone(), app_state.clone(), cx);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
};
let workspace = requesting_window_id
.and_then(|window_id| {
cx.update(|cx| {
cx.replace_root_view(window_id, |cx| {
build_workspace(cx, serialized_workspace.take())
})
})
cx.update(|cx| cx.replace_root_view(window_id, |cx| build_workspace(cx)))
})
.unwrap_or_else(|| {
let (bounds, display) = if let Some(bounds) = window_bounds_override {
@ -790,44 +766,21 @@ impl Workspace {
// Use the serialized workspace to construct the new window
cx.add_window(
(app_state.build_window_options)(bounds, display, cx.platform().as_ref()),
|cx| build_workspace(cx, serialized_workspace),
|cx| build_workspace(cx),
)
.1
});
let workspace = workspace.downgrade();
notify_if_database_failed(&workspace, &mut cx);
// Call open path for each of the project paths
// (this will bring them to the front if they were in the serialized workspace)
debug_assert!(paths_to_open.len() == project_paths.len());
let tasks = paths_to_open
.iter()
.cloned()
.zip(project_paths.into_iter())
.map(|(abs_path, project_path)| {
let workspace = workspace.clone();
cx.spawn(|mut cx| {
let fs = app_state.fs.clone();
async move {
let project_path = project_path?;
if fs.is_file(&abs_path).await {
Some(
workspace
.update(&mut cx, |workspace, cx| {
workspace.open_path(project_path, None, true, cx)
})
.log_err()?
.await,
)
} else {
None
}
}
})
});
let opened_items = futures::future::join_all(tasks.into_iter()).await;
let opened_items = open_items(
serialized_workspace,
&workspace,
project_paths,
app_state,
cx,
)
.await;
(workspace, opened_items)
})
@ -1118,6 +1071,8 @@ impl Workspace {
visible: bool,
cx: &mut ViewContext<Self>,
) -> Task<Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>> {
log::info!("open paths {:?}", abs_paths);
let fs = self.app_state.fs.clone();
// Sort the paths to ensure we add worktrees for parents before their children.
@ -2536,13 +2491,15 @@ impl Workspace {
}
}
fn load_from_serialized_workspace(
pub(crate) fn load_workspace(
workspace: WeakViewHandle<Workspace>,
serialized_workspace: SerializedWorkspace,
paths_to_open: Vec<Option<ProjectPath>>,
cx: &mut AppContext,
) {
) -> Task<Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>> {
cx.spawn(|mut cx| async move {
let (project, dock_pane_handle, old_center_pane) =
let result = async_iife! {{
let (project, dock_pane_handle, old_center_pane) =
workspace.read_with(&cx, |workspace, _| {
(
workspace.project().clone(),
@ -2551,74 +2508,107 @@ impl Workspace {
)
})?;
serialized_workspace
.dock_pane
.deserialize_to(
&project,
&dock_pane_handle,
serialized_workspace.id,
&workspace,
&mut cx,
)
.await?;
let dock_items = serialized_workspace
.dock_pane
.deserialize_to(
&project,
&dock_pane_handle,
serialized_workspace.id,
&workspace,
&mut cx,
)
.await?;
// Traverse the splits tree and add to things
let center_group = serialized_workspace
.center_group
.deserialize(&project, serialized_workspace.id, &workspace, &mut cx)
.await;
// Remove old panes from workspace panes list
workspace.update(&mut cx, |workspace, cx| {
if let Some((center_group, active_pane)) = center_group {
workspace.remove_panes(workspace.center.root.clone(), cx);
// Swap workspace center group
workspace.center = PaneGroup::with_root(center_group);
// Change the focus to the workspace first so that we retrigger focus in on the pane.
cx.focus_self();
if let Some(active_pane) = active_pane {
cx.focus(&active_pane);
} else {
cx.focus(workspace.panes.last().unwrap());
}
} else {
let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx));
if let Some(old_center_handle) = old_center_handle {
cx.focus(&old_center_handle)
} else {
cx.focus_self()
}
let mut center_items = None;
let mut center_group = None;
// Traverse the splits tree and add to things
if let Some((group, active_pane, items)) = serialized_workspace
.center_group
.deserialize(&project, serialized_workspace.id, &workspace, &mut cx)
.await {
center_items = Some(items);
center_group = Some((group, active_pane))
}
if workspace.left_sidebar().read(cx).is_open()
!= serialized_workspace.left_sidebar_open
{
workspace.toggle_sidebar(SidebarSide::Left, cx);
}
let resulting_list = cx.read(|cx| {
let mut opened_items = center_items
.unwrap_or_default()
.into_iter()
.chain(dock_items.into_iter())
.filter_map(|item| {
let item = item?;
let project_path = item.project_path(cx)?;
Some((project_path, item))
})
.collect::<HashMap<_, _>>();
// Note that without after_window, the focus_self() and
// the focus the dock generates start generating alternating
// focus due to the deferred execution each triggering each other
cx.after_window_update(move |workspace, cx| {
Dock::set_dock_position(
workspace,
serialized_workspace.dock_position,
false,
cx,
);
paths_to_open
.into_iter()
.map(|path_to_open| {
path_to_open.map(|path_to_open| {
Ok(opened_items.remove(&path_to_open))
})
.transpose()
.map(|item| item.flatten())
.transpose()
})
.collect::<Vec<_>>()
});
cx.notify();
})?;
// Remove old panes from workspace panes list
workspace.update(&mut cx, |workspace, cx| {
if let Some((center_group, active_pane)) = center_group {
workspace.remove_panes(workspace.center.root.clone(), cx);
// Serialize ourself to make sure our timestamps and any pane / item changes are replicated
workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?;
anyhow::Ok(())
// Swap workspace center group
workspace.center = PaneGroup::with_root(center_group);
// Change the focus to the workspace first so that we retrigger focus in on the pane.
cx.focus_self();
if let Some(active_pane) = active_pane {
cx.focus(&active_pane);
} else {
cx.focus(workspace.panes.last().unwrap());
}
} else {
let old_center_handle = old_center_pane.and_then(|weak| weak.upgrade(cx));
if let Some(old_center_handle) = old_center_handle {
cx.focus(&old_center_handle)
} else {
cx.focus_self()
}
}
if workspace.left_sidebar().read(cx).is_open()
!= serialized_workspace.left_sidebar_open
{
workspace.toggle_sidebar(SidebarSide::Left, cx);
}
// Note that without after_window, the focus_self() and
// the focus the dock generates start generating alternating
// focus due to the deferred execution each triggering each other
cx.after_window_update(move |workspace, cx| {
Dock::set_dock_position(
workspace,
serialized_workspace.dock_position,
false,
cx,
);
});
cx.notify();
})?;
// Serialize ourself to make sure our timestamps and any pane / item changes are replicated
workspace.read_with(&cx, |workspace, cx| workspace.serialize_workspace(cx))?;
Ok::<_, anyhow::Error>(resulting_list)
}};
result.await.unwrap_or_default()
})
.detach_and_log_err(cx);
}
#[cfg(any(test, feature = "test-support"))]
@ -2634,10 +2624,99 @@ impl Workspace {
dock_default_item_factory: |_, _| None,
background_actions: || &[],
});
Self::new(None, 0, project, app_state, cx)
Self::new(0, project, app_state, cx)
}
}
async fn open_items(
serialized_workspace: Option<SerializedWorkspace>,
workspace: &WeakViewHandle<Workspace>,
mut project_paths_to_open: Vec<(PathBuf, Option<ProjectPath>)>,
app_state: Arc<AppState>,
mut cx: AsyncAppContext,
) -> Vec<Option<anyhow::Result<Box<dyn ItemHandle>>>> {
let mut opened_items = Vec::with_capacity(project_paths_to_open.len());
if let Some(serialized_workspace) = serialized_workspace {
let workspace = workspace.clone();
let restored_items = cx
.update(|cx| {
Workspace::load_workspace(
workspace,
serialized_workspace,
project_paths_to_open
.iter()
.map(|(_, project_path)| project_path)
.cloned()
.collect(),
cx,
)
})
.await;
let restored_project_paths = cx.read(|cx| {
restored_items
.iter()
.filter_map(|item| item.as_ref()?.as_ref().ok()?.project_path(cx))
.collect::<HashSet<_>>()
});
opened_items = restored_items;
project_paths_to_open
.iter_mut()
.for_each(|(_, project_path)| {
if let Some(project_path_to_open) = project_path {
if restored_project_paths.contains(project_path_to_open) {
*project_path = None;
}
}
});
} else {
for _ in 0..project_paths_to_open.len() {
opened_items.push(None);
}
}
assert!(opened_items.len() == project_paths_to_open.len());
let tasks =
project_paths_to_open
.into_iter()
.enumerate()
.map(|(i, (abs_path, project_path))| {
let workspace = workspace.clone();
cx.spawn(|mut cx| {
let fs = app_state.fs.clone();
async move {
let file_project_path = project_path?;
if fs.is_file(&abs_path).await {
Some((
i,
workspace
.update(&mut cx, |workspace, cx| {
workspace.open_path(file_project_path, None, true, cx)
})
.log_err()?
.await,
))
} else {
None
}
}
})
});
for maybe_opened_path in futures::future::join_all(tasks.into_iter())
.await
.into_iter()
{
if let Some((i, path_open_result)) = maybe_opened_path {
opened_items[i] = Some(path_open_result);
}
}
opened_items
}
fn notify_if_database_failed(workspace: &WeakViewHandle<Workspace>, cx: &mut AsyncAppContext) {
const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
@ -2877,8 +2956,6 @@ pub fn open_paths(
Vec<Option<Result<Box<dyn ItemHandle>, anyhow::Error>>>,
)>,
> {
log::info!("open paths {:?}", abs_paths);
let app_state = app_state.clone();
let abs_paths = abs_paths.to_vec();
cx.spawn(|mut cx| async move {
@ -3008,8 +3085,7 @@ pub fn join_remote_project(
let (_, workspace) = cx.add_window(
(app_state.build_window_options)(None, None, cx.platform().as_ref()),
|cx| {
let mut workspace =
Workspace::new(Default::default(), 0, project, app_state.clone(), cx);
let mut workspace = Workspace::new(0, project, app_state.clone(), cx);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
},
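
A simplified sketch (with a stand-in generic `P` for the real `ProjectPath` type) of the dedup step in `open_items` above: project paths that were already restored from the serialized workspace are cleared from the open list so they are not opened a second time.

```rust
use std::collections::HashSet;
use std::path::PathBuf;

fn skip_restored_paths<P: std::hash::Hash + Eq>(
    restored: &HashSet<P>,
    paths_to_open: &mut [(PathBuf, Option<P>)],
) {
    for (_, project_path) in paths_to_open.iter_mut() {
        if let Some(path) = project_path {
            if restored.contains(path) {
                // Already materialized from the serialized workspace; don't open it again.
                *project_path = None;
            }
        }
    }
}
```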

View file

@ -10,7 +10,7 @@ use cli::{
};
use client::{self, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use editor::Editor;
use editor::{scroll::autoscroll::Autoscroll, Editor};
use futures::{
channel::{mpsc, oneshot},
FutureExt, SinkExt, StreamExt,
@ -30,13 +30,14 @@ use settings::{
use simplelog::ConfigBuilder;
use smol::process::Command;
use std::{
collections::HashMap,
env,
ffi::OsStr,
fs::OpenOptions,
io::Write as _,
os::unix::prelude::OsStrExt,
panic,
path::PathBuf,
path::{Path, PathBuf},
sync::{
atomic::{AtomicBool, Ordering},
Arc, Weak,
@ -44,8 +45,13 @@ use std::{
thread,
time::Duration,
};
use sum_tree::Bias;
use terminal_view::{get_working_directory, TerminalView};
use util::http::{self, HttpClient};
use text::Point;
use util::{
http::{self, HttpClient},
paths::PathLikeWithPosition,
};
use welcome::{show_welcome_experience, FIRST_OPEN};
use fs::RealFs;
@ -678,13 +684,38 @@ async fn handle_cli_connection(
if let Some(request) = requests.next().await {
match request {
CliRequest::Open { paths, wait } => {
let mut caret_positions = HashMap::new();
let paths = if paths.is_empty() {
workspace::last_opened_workspace_paths()
.await
.map(|location| location.paths().to_vec())
.unwrap_or(paths)
.unwrap_or_default()
} else {
paths
.into_iter()
.filter_map(|path_with_position_string| {
let path_with_position = PathLikeWithPosition::parse_str(
&path_with_position_string,
|path_str| {
Ok::<_, std::convert::Infallible>(
Path::new(path_str).to_path_buf(),
)
},
)
.expect("Infallible");
let path = path_with_position.path_like;
if let Some(row) = path_with_position.row {
if path.is_file() {
let row = row.saturating_sub(1);
let col =
path_with_position.column.unwrap_or(0).saturating_sub(1);
caret_positions.insert(path.clone(), Point::new(row, col));
}
}
Some(path)
})
.collect()
};
let mut errored = false;
@ -694,11 +725,32 @@ async fn handle_cli_connection(
{
Ok((workspace, items)) => {
let mut item_release_futures = Vec::new();
cx.update(|cx| {
for (item, path) in items.into_iter().zip(&paths) {
match item {
Some(Ok(item)) => {
let released = oneshot::channel();
for (item, path) in items.into_iter().zip(&paths) {
match item {
Some(Ok(item)) => {
if let Some(point) = caret_positions.remove(path) {
if let Some(active_editor) = item.downcast::<Editor>() {
active_editor
.downgrade()
.update(&mut cx, |editor, cx| {
let snapshot =
editor.snapshot(cx).display_snapshot;
let point = snapshot
.buffer_snapshot
.clip_point(point, Bias::Left);
editor.change_selections(
Some(Autoscroll::center()),
cx,
|s| s.select_ranges([point..point]),
);
})
.log_err();
}
}
let released = oneshot::channel();
cx.update(|cx| {
item.on_release(
cx,
Box::new(move |_| {
@ -706,23 +758,20 @@ async fn handle_cli_connection(
}),
)
.detach();
item_release_futures.push(released.1);
}
Some(Err(err)) => {
responses
.send(CliResponse::Stderr {
message: format!(
"error opening {:?}: {}",
path, err
),
})
.log_err();
errored = true;
}
None => {}
});
item_release_futures.push(released.1);
}
Some(Err(err)) => {
responses
.send(CliResponse::Stderr {
message: format!("error opening {:?}: {}", path, err),
})
.log_err();
errored = true;
}
None => {}
}
});
}
if wait {
let background = cx.background();