Mirror of https://github.com/zed-industries/zed.git (synced 2025-02-05 18:25:57 +00:00)

Merge pull request #1197 from zed-industries/fix-typescript-completions

Use word boundaries instead of syntax to infer completion edit ranges

Commit 04c9f849da
2 changed files with 59 additions and 29 deletions
@@ -1667,32 +1667,33 @@ impl BufferSnapshot {
             .and_then(|language| language.grammar.as_ref())
     }
 
-    pub fn range_for_word_token_at<T: ToOffset + ToPoint>(
-        &self,
-        position: T,
-    ) -> Option<Range<usize>> {
-        let offset = position.to_offset(self);
-
-        // Find the first leaf node that touches the position.
-        let tree = self.tree.as_ref()?;
-        let mut cursor = tree.root_node().walk();
-        while cursor.goto_first_child_for_byte(offset).is_some() {}
-        let node = cursor.node();
-        if node.child_count() > 0 {
-            return None;
-        }
-
-        // Check that the leaf node contains word characters.
-        let range = node.byte_range();
-        if self
-            .text_for_range(range.clone())
-            .flat_map(str::chars)
-            .any(|c| c.is_alphanumeric())
-        {
-            return Some(range);
-        } else {
-            None
-        }
-    }
+    pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
+        let mut start = start.to_offset(self);
+        let mut end = start;
+        let mut next_chars = self.chars_at(start).peekable();
+        let mut prev_chars = self.reversed_chars_at(start).peekable();
+        let word_kind = cmp::max(
+            prev_chars.peek().copied().map(char_kind),
+            next_chars.peek().copied().map(char_kind),
+        );
+
+        for ch in prev_chars {
+            if Some(char_kind(ch)) == word_kind && ch != '\n' {
+                start -= ch.len_utf8();
+            } else {
+                break;
+            }
+        }
+
+        for ch in next_chars {
+            if Some(char_kind(ch)) == word_kind && ch != '\n' {
+                end += ch.len_utf8();
+            } else {
+                break;
+            }
+        }
+
+        (start..end, word_kind)
+    }
 
     pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
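
The new surrounding_word walks characters outward from the cursor instead of consulting the syntax tree. Below is a minimal standalone sketch of that scan over a plain &str. The CharKind enum, its ordering, and the char_kind classifier here are illustrative assumptions, not Zed's actual definitions (Zed's real CharKind is imported from the language crate, as a later hunk shows); the scan logic itself mirrors the added code above.

// Standalone sketch: a simplified model of the word-boundary scan,
// operating on a &str instead of a buffer snapshot.
use std::cmp;
use std::ops::Range;

// Assumed ordering: Word > Punctuation > Whitespace > Newline, so cmp::max
// over the two neighboring characters prefers a word over punctuation.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum CharKind {
    Newline,
    Whitespace,
    Punctuation,
    Word,
}

fn char_kind(ch: char) -> CharKind {
    if ch == '\n' {
        CharKind::Newline
    } else if ch.is_whitespace() {
        CharKind::Whitespace
    } else if ch.is_alphanumeric() || ch == '_' {
        CharKind::Word
    } else {
        CharKind::Punctuation
    }
}

fn surrounding_word(text: &str, offset: usize) -> (Range<usize>, Option<CharKind>) {
    let mut start = offset;
    let mut end = offset;
    let mut next_chars = text[offset..].chars().peekable();
    let mut prev_chars = text[..offset].chars().rev().peekable();

    // Pick the "strongest" kind among the two characters adjacent to the cursor.
    let word_kind = cmp::max(
        prev_chars.peek().copied().map(char_kind),
        next_chars.peek().copied().map(char_kind),
    );

    // Extend left while characters keep that kind.
    for ch in prev_chars {
        if Some(char_kind(ch)) == word_kind && ch != '\n' {
            start -= ch.len_utf8();
        } else {
            break;
        }
    }

    // Extend right the same way.
    for ch in next_chars {
        if Some(char_kind(ch)) == word_kind && ch != '\n' {
            end += ch.len_utf8();
        } else {
            break;
        }
    }

    (start..end, word_kind)
}

fn main() {
    let text = "let a = \"atoms/cmp\"";
    // Cursor just before the closing quote: the surrounding word is `cmp`,
    // because the scan stops at the `/` and the `"`.
    let (range, kind) = surrounding_word(text, text.len() - 1);
    assert_eq!(&text[range], "cmp");
    assert_eq!(kind, Some(CharKind::Word));
}

Because the assumed ordering puts Word above Punctuation and Whitespace, cmp::max over the two neighbors lets the scan latch onto a word even when the cursor sits at its edge.
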
@@ -18,10 +18,10 @@ use gpui::{
 use language::{
     point_to_lsp,
     proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
-    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
-    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
-    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
-    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
+    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
+    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
+    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
+    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
 };
 use lsp::{
     DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
@@ -3182,9 +3182,12 @@ impl Project {
                     let Range { start, end } = range_for_token
                         .get_or_insert_with(|| {
                             let offset = position.to_offset(&snapshot);
-                            snapshot
-                                .range_for_word_token_at(offset)
-                                .unwrap_or_else(|| offset..offset)
+                            let (range, kind) = snapshot.surrounding_word(offset);
+                            if kind == Some(CharKind::Word) {
+                                range
+                            } else {
+                                offset..offset
+                            }
                         })
                         .clone();
                     let text = lsp_completion
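
With the new helper, the completion's edit range covers the text under the cursor only when the surrounding characters are actually a word; otherwise it collapses to an empty range at the cursor, so the completion inserts rather than replaces. Continuing the standalone sketch above (same hypothetical surrounding_word and CharKind, not Zed's API), the selection logic is roughly:

fn completion_edit_range(text: &str, offset: usize) -> Range<usize> {
    let (range, kind) = surrounding_word(text, offset);
    if kind == Some(CharKind::Word) {
        range
    } else {
        // Not touching a word: insert at the cursor rather than replace.
        offset..offset
    }
}

fn demo() {
    // Cursor at the end of "sel": the trailing word gets replaced.
    let text = "editor.sel";
    assert_eq!(completion_edit_range(text, text.len()), 7..10);

    // Cursor in trailing whitespace: nothing to replace, so the range is empty.
    let text = "let a =  ";
    assert_eq!(completion_edit_range(text, text.len()), text.len()..text.len());
}
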
@@ -7633,6 +7636,32 @@ mod tests {
             completions[0].old_range.to_offset(&snapshot),
             text.len() - 3..text.len()
         );
+
+        let text = "let a = \"atoms/cmp\"";
+        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+        let completions = project.update(cx, |project, cx| {
+            project.completions(&buffer, text.len() - 1, cx)
+        });
+
+        fake_server
+            .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
+                Ok(Some(lsp::CompletionResponse::Array(vec![
+                    lsp::CompletionItem {
+                        label: "component".into(),
+                        ..Default::default()
+                    },
+                ])))
+            })
+            .next()
+            .await;
+
+        let completions = completions.await.unwrap();
+        let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+        assert_eq!(completions.len(), 1);
+        assert_eq!(completions[0].new_text, "component");
+        assert_eq!(
+            completions[0].old_range.to_offset(&snapshot),
+            text.len() - 4..text.len() - 1
+        );
     }
 
     #[gpui::test(iterations = 10)]
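
The added test requests completions one byte before the end of `let a = "atoms/cmp"`, i.e. just inside the closing quote, and expects the replaced range to cover only `cmp` rather than the larger string token the old syntax-based lookup could return. A quick standalone check of that arithmetic (not part of the diff):

fn check_expected_range() {
    let text = "let a = \"atoms/cmp\"";
    // text.len() - 4 .. text.len() - 1 spans the three bytes before the
    // closing quote, which are exactly "cmp".
    assert_eq!(&text[text.len() - 4..text.len() - 1], "cmp");
}
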