mirror of https://github.com/zed-industries/zed.git
synced 2024-12-26 10:40:54 +00:00

Normalize line endings when parsing completions

Co-Authored-By: Max Brunsfeld <max@zed.dev>

commit 2c1906d710
parent d3db700db4

5 changed files with 65 additions and 9 deletions
@@ -964,7 +964,7 @@ impl Buffer {
         cx.background().spawn(async move {
             let old_text = old_text.to_string();
             let line_ending = LineEnding::detect(&new_text);
-            LineEnding::strip_carriage_returns(&mut new_text);
+            LineEnding::normalize(&mut new_text);
             let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_str())
                 .iter_all_changes()
                 .map(|c| (c.tag(), c.value().len()))
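Normalization happens before the diff because line-based diffing compares whole lines, terminators included: freshly loaded CRLF text would otherwise register as a change on every line of the LF-only buffer. A minimal sketch of that effect, assuming TextDiff here is the similar crate's line differ (its iter_all_changes / tag / value API matches); the standalone normalize helper is a hypothetical stand-in for LineEnding::normalize.

    use similar::{ChangeTag, TextDiff};

    // Hypothetical stand-in for LineEnding::normalize: fold "\r\n" and bare "\r" into "\n".
    fn normalize(text: &str) -> String {
        text.replace("\r\n", "\n").replace('\r', "\n")
    }

    fn main() {
        let old_text = "let a = 1;\nlet b = 2;\n";
        let new_text = "let a = 1;\r\nlet b = 3;\r\n"; // the same file saved with CRLF endings

        // Without normalization, every line differs because the terminators differ.
        let raw = TextDiff::from_lines(old_text, new_text);
        let unchanged = raw
            .iter_all_changes()
            .filter(|change| change.tag() == ChangeTag::Equal)
            .count();
        assert_eq!(unchanged, 0);

        // After normalization, only the genuinely edited line is reported.
        let normalized = normalize(new_text);
        let diff = TextDiff::from_lines(old_text, normalized.as_str());
        let inserted: Vec<_> = diff
            .iter_all_changes()
            .filter(|change| change.tag() == ChangeTag::Insert)
            .map(|change| change.value().to_string())
            .collect();
        assert_eq!(inserted, vec!["let b = 3;\n".to_string()]);
    }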
@@ -26,8 +26,9 @@ use language::{
     },
     range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
     Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
-    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
-    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
+    Language, LanguageRegistry, LanguageServerName, LineEnding, LocalFile, LspAdapter,
+    OffsetRangeExt, Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16,
+    Transaction,
 };
 use lsp::{
     DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
@@ -3381,7 +3382,8 @@ impl Project {
                     return None;
                 }

-                let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
+                let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref()
+                {
                     // If the language server provides a range to overwrite, then
                     // check that the range is valid.
                     Some(lsp::CompletionTextEdit::Edit(edit)) => {
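The binding changes to mut new_text so the next hunk can normalize it in place before the Completion is built. The match arms themselves sit outside this hunk; the sketch below is a rough, hypothetical simplification of that kind of fallback, written against the lsp-types definitions of CompletionItem and CompletionTextEdit and omitting the range validation the real code performs.

    use lsp_types as lsp;

    // Hypothetical simplification: prefer the server-provided text edit, otherwise fall
    // back to insert_text, then to the label. The real code also validates the edit range
    // against the buffer before accepting it.
    fn completion_new_text(item: &lsp::CompletionItem) -> (Option<lsp::Range>, String) {
        match item.text_edit.as_ref() {
            Some(lsp::CompletionTextEdit::Edit(edit)) => (Some(edit.range), edit.new_text.clone()),
            _ => (
                None,
                item.insert_text
                    .clone()
                    .unwrap_or_else(|| item.label.clone()),
            ),
        }
    }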
@@ -3431,6 +3433,7 @@ impl Project {
                     }
                 };

+                LineEnding::normalize(&mut new_text);
                 Some(Completion {
                     old_range,
                     new_text,
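This is the completion-side fix: whatever text the language server returns is reduced to LF-only line endings before it is stored on the Completion. A minimal sketch of the transformation, with a hypothetical normalize standing in for LineEnding::normalize (assumed, consistent with the implementation further down and the new test, to collapse both "\r\n" and bare "\r" into "\n").

    // Hypothetical stand-in for LineEnding::normalize.
    fn normalize(text: &mut String) {
        if text.contains('\r') {
            let replaced = text.replace("\r\n", "\n").replace('\r', "\n");
            *text = replaced;
        }
    }

    fn main() {
        // The same shape of input the new test below receives from its fake language server.
        let mut new_text = String::from("fully\rQualified\r\nName");
        normalize(&mut new_text);
        assert_eq!(new_text, "fully\nQualified\nName");
    }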
@@ -1850,6 +1850,59 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
     );
 }

+#[gpui::test]
+async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "TypeScript".into(),
+            path_suffixes: vec!["ts".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_typescript::language_typescript()),
+    );
+    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
+
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.ts": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+        .await
+        .unwrap();
+
+    let fake_server = fake_language_servers.next().await.unwrap();
+
+    let text = "let a = b.fqn";
+    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+    let completions = project.update(cx, |project, cx| {
+        project.completions(&buffer, text.len(), cx)
+    });
+
+    fake_server
+        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
+            Ok(Some(lsp::CompletionResponse::Array(vec![
+                lsp::CompletionItem {
+                    label: "fullyQualifiedName?".into(),
+                    insert_text: Some("fully\rQualified\r\nName".into()),
+                    ..Default::default()
+                },
+            ])))
+        })
+        .next()
+        .await;
+    let completions = completions.await.unwrap();
+    assert_eq!(completions.len(), 1);
+    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
+}
+
 #[gpui::test(iterations = 10)]
 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
     let mut language = Language::new(
@@ -43,7 +43,7 @@ fn test_random_edits(mut rng: StdRng) {
         .take(reference_string_len)
         .collect::<String>();
     let mut buffer = Buffer::new(0, 0, reference_string.clone().into());
-    LineEnding::strip_carriage_returns(&mut reference_string);
+    LineEnding::normalize(&mut reference_string);

     buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
     let mut buffer_versions = Vec::new();
@@ -555,7 +555,7 @@ pub struct UndoOperation {
 impl Buffer {
     pub fn new(replica_id: u16, remote_id: u64, mut base_text: String) -> Buffer {
         let line_ending = LineEnding::detect(&base_text);
-        LineEnding::strip_carriage_returns(&mut base_text);
+        LineEnding::normalize(&mut base_text);

         let history = History::new(base_text.into());
         let mut fragments = SumTree::new();
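Buffer::new pairs the two calls: it records the detected line ending and then stores LF-only text, presumably so offsets and diffs always operate on normalized content while the original ending can be reproduced later. A rough sketch of that pairing; only detect and normalize appear in this diff, so the enum variants and the detect heuristic below are illustrative assumptions.

    // Illustrative two-variant line-ending type; the real enum's shape is not shown in this diff.
    #[derive(Debug, PartialEq)]
    enum LineEnding {
        Unix,
        Windows,
    }

    impl LineEnding {
        // Assumed heuristic: treat the text as CRLF if it contains any "\r\n".
        fn detect(text: &str) -> Self {
            if text.contains("\r\n") {
                LineEnding::Windows
            } else {
                LineEnding::Unix
            }
        }

        fn normalize(text: &mut String) {
            if text.contains('\r') {
                let replaced = text.replace("\r\n", "\n").replace('\r', "\n");
                *text = replaced;
            }
        }
    }

    fn main() {
        let mut base_text = String::from("fn main() {\r\n}\r\n");
        let line_ending = LineEnding::detect(&base_text);
        LineEnding::normalize(&mut base_text);

        // The buffer stores LF-only text but keeps the detected ending around for later use.
        assert_eq!(line_ending, LineEnding::Windows);
        assert_eq!(base_text, "fn main() {\n}\n");
    }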
@@ -691,7 +691,7 @@ impl Buffer {

         let mut fragment_start = old_fragments.start().visible;
         for (range, new_text) in edits {
-            let new_text = LineEnding::strip_carriage_returns_from_arc(new_text.into());
+            let new_text = LineEnding::normalize_arc(new_text.into());
             let fragment_end = old_fragments.end(&None).visible;

             // If the current fragment ends before this range, then jump ahead to the first fragment
@@ -2385,13 +2385,13 @@ impl LineEnding {
         }
     }

-    pub fn strip_carriage_returns(text: &mut String) {
+    pub fn normalize(text: &mut String) {
         if let Cow::Owned(replaced) = CARRIAGE_RETURNS_REGEX.replace_all(text, "\n") {
             *text = replaced;
         }
     }

-    fn strip_carriage_returns_from_arc(text: Arc<str>) -> Arc<str> {
+    fn normalize_arc(text: Arc<str>) -> Arc<str> {
         if let Cow::Owned(replaced) = CARRIAGE_RETURNS_REGEX.replace_all(&text, "\n") {
             replaced.into()
         } else {
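A note on the Cow pattern both methods keep: Regex::replace_all returns Cow::Borrowed when nothing matched, so text that is already LF-only is never reallocated, and normalize_arc can hand back the original Arc<str> untouched. A small sketch of the same idea using the regex crate directly; the "\r\n?" pattern is an assumption, since CARRIAGE_RETURNS_REGEX itself is defined outside this hunk.

    use regex::Regex;
    use std::borrow::Cow;
    use std::sync::Arc;

    fn main() {
        // Assumed pattern for CARRIAGE_RETURNS_REGEX: match "\r\n" or a lone "\r".
        let carriage_returns = Regex::new("\r\n?").unwrap();

        // An owned replacement is produced only when a carriage return is present.
        let dirty = "fully\rQualified\r\nName";
        match carriage_returns.replace_all(dirty, "\n") {
            Cow::Owned(replaced) => assert_eq!(replaced, "fully\nQualified\nName"),
            Cow::Borrowed(_) => unreachable!("input contains carriage returns"),
        }

        // Already-normalized text comes back as Cow::Borrowed, so the original
        // Arc<str> can be returned without allocating a new string.
        let clean: Arc<str> = "already\nnormalized".into();
        let result: Arc<str> = match carriage_returns.replace_all(&clean, "\n") {
            Cow::Owned(replaced) => replaced.into(),
            Cow::Borrowed(_) => clean.clone(),
        };
        assert!(Arc::ptr_eq(&result, &clean));
    }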