Opt into language-aware features when getting buffer chunks

We use chunks a lot to transform points and sync the various display maps,
and always querying tree-sitter or the LSP diagnostics in those cases is
unnecessarily expensive.
This commit is contained in:
Antonio Scandurra 2022-02-03 11:21:30 +01:00
parent bd441723a0
commit ab26a175a4
10 changed files with 91 additions and 53 deletions

View file

@@ -250,12 +250,16 @@ impl DisplaySnapshot {
pub fn text_chunks(&self, display_row: u32) -> impl Iterator<Item = &str> {
self.blocks_snapshot
.chunks(display_row..self.max_point().row() + 1)
.chunks(display_row..self.max_point().row() + 1, false)
.map(|h| h.text)
}
pub fn chunks<'a>(&'a self, display_rows: Range<u32>) -> DisplayChunks<'a> {
self.blocks_snapshot.chunks(display_rows)
pub fn chunks<'a>(
&'a self,
display_rows: Range<u32>,
language_aware: bool,
) -> DisplayChunks<'a> {
self.blocks_snapshot.chunks(display_rows, language_aware)
}
pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator<Item = char> + 'a {
@@ -1117,7 +1121,7 @@ mod tests {
) -> Vec<(String, Option<Color>)> {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
let mut chunks: Vec<(String, Option<Color>)> = Vec::new();
for chunk in snapshot.chunks(rows) {
for chunk in snapshot.chunks(rows, true) {
let color = chunk
.highlight_id
.and_then(|id| id.style(theme).map(|s| s.color));

View file

@@ -460,12 +460,12 @@ impl<'a> BlockMapWriter<'a> {
impl BlockSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(0..self.transforms.summary().output_rows)
self.chunks(0..self.transforms.summary().output_rows, false)
.map(|chunk| chunk.text)
.collect()
}
pub fn chunks<'a>(&'a self, rows: Range<u32>) -> BlockChunks<'a> {
pub fn chunks<'a>(&'a self, rows: Range<u32>, language_aware: bool) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
let input_end = {
@@ -493,7 +493,9 @@ impl BlockSnapshot {
cursor.start().1 .0 + overshoot
};
BlockChunks {
input_chunks: self.wrap_snapshot.chunks(input_start..input_end),
input_chunks: self
.wrap_snapshot
.chunks(input_start..input_end, language_aware),
input_chunk: Default::default(),
transforms: cursor,
output_row: rows.start,
@@ -1335,7 +1337,7 @@ mod tests {
for start_row in 0..expected_row_count {
let expected_text = expected_lines[start_row..].join("\n");
let actual_text = blocks_snapshot
.chunks(start_row as u32..expected_row_count as u32)
.chunks(start_row as u32..expected_row_count as u32, false)
.map(|chunk| chunk.text)
.collect::<String>();
assert_eq!(

View file

@@ -489,7 +489,7 @@ impl FoldSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(FoldOffset(0)..self.len())
self.chunks(FoldOffset(0)..self.len(), false)
.map(|c| c.text)
.collect()
}
@@ -629,11 +629,11 @@ impl FoldSnapshot {
pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator<Item = char> {
let start = start.to_offset(self);
self.chunks(start..self.len())
self.chunks(start..self.len(), false)
.flat_map(|chunk| chunk.text.chars())
}
pub fn chunks<'a>(&'a self, range: Range<FoldOffset>) -> FoldChunks<'a> {
pub fn chunks<'a>(&'a self, range: Range<FoldOffset>, language_aware: bool) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
transform_cursor.seek(&range.end, Bias::Right, &());
@@ -646,7 +646,9 @@ impl FoldSnapshot {
FoldChunks {
transform_cursor,
buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end),
buffer_chunks: self
.buffer_snapshot
.chunks(buffer_start..buffer_end, language_aware),
buffer_chunk: None,
buffer_offset: buffer_start,
output_offset: range.start.0,
@@ -1393,7 +1395,7 @@ mod tests {
let text = &expected_text[start.0..end.0];
assert_eq!(
snapshot
.chunks(start..end)
.chunks(start..end, false)
.map(|c| c.text)
.collect::<String>(),
text,

View file

@@ -34,7 +34,7 @@ impl TabMap {
let mut delta = 0;
for chunk in old_snapshot
.fold_snapshot
.chunks(fold_edit.old.end..max_offset)
.chunks(fold_edit.old.end..max_offset, false)
{
let patterns: &[_] = &['\t', '\n'];
if let Some(ix) = chunk.text.find(patterns) {
@@ -109,7 +109,7 @@ impl TabSnapshot {
self.max_point()
};
for c in self
.chunks(range.start..line_end)
.chunks(range.start..line_end, false)
.flat_map(|chunk| chunk.text.chars())
{
if c == '\n' {
@@ -123,7 +123,7 @@ impl TabSnapshot {
last_line_chars = first_line_chars;
} else {
for _ in self
.chunks(TabPoint::new(range.end.row(), 0)..range.end)
.chunks(TabPoint::new(range.end.row(), 0)..range.end, false)
.flat_map(|chunk| chunk.text.chars())
{
last_line_chars += 1;
@@ -143,7 +143,7 @@ impl TabSnapshot {
self.fold_snapshot.version
}
pub fn chunks<'a>(&'a self, range: Range<TabPoint>) -> TabChunks<'a> {
pub fn chunks<'a>(&'a self, range: Range<TabPoint>, language_aware: bool) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left);
let input_start = input_start.to_offset(&self.fold_snapshot);
@@ -158,7 +158,9 @@ impl TabSnapshot {
};
TabChunks {
fold_chunks: self.fold_snapshot.chunks(input_start..input_end),
fold_chunks: self
.fold_snapshot
.chunks(input_start..input_end, language_aware),
column: expanded_char_column,
output_position: range.start.0,
max_output_position: range.end.0,
@@ -177,7 +179,7 @@ impl TabSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.chunks(TabPoint::zero()..self.max_point())
self.chunks(TabPoint::zero()..self.max_point(), false)
.map(|chunk| chunk.text)
.collect()
}
@@ -490,7 +492,7 @@ mod tests {
assert_eq!(
expected_text,
tabs_snapshot
.chunks(start..end)
.chunks(start..end, false)
.map(|c| c.text)
.collect::<String>(),
"chunks({:?}..{:?})",

View file

@@ -433,8 +433,10 @@ impl WrapSnapshot {
let mut line = String::new();
let mut remaining = None;
let mut chunks = new_tab_snapshot
.chunks(TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point());
let mut chunks = new_tab_snapshot.chunks(
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
false,
);
let mut edit_transforms = Vec::<Transform>::new();
for _ in edit.new_rows.start..edit.new_rows.end {
while let Some(chunk) =
@@ -559,11 +561,11 @@ impl WrapSnapshot {
}
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
self.chunks(wrap_row..self.max_point().row() + 1)
self.chunks(wrap_row..self.max_point().row() + 1, false)
.map(|h| h.text)
}
pub fn chunks<'a>(&'a self, rows: Range<u32>) -> WrapChunks<'a> {
pub fn chunks<'a>(&'a self, rows: Range<u32>, language_aware: bool) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@@ -576,7 +578,9 @@ impl WrapSnapshot {
.to_tab_point(output_end)
.min(self.tab_snapshot.max_point());
WrapChunks {
input_chunks: self.tab_snapshot.chunks(input_start..input_end),
input_chunks: self
.tab_snapshot
.chunks(input_start..input_end, language_aware),
input_chunk: Default::default(),
output_position: output_start,
max_output_row: rows.end,
@@ -1288,7 +1292,7 @@ mod tests {
}
let actual_text = self
.chunks(start_row..end_row)
.chunks(start_row..end_row, true)
.map(|c| c.text)
.collect::<String>();
assert_eq!(

View file

@@ -598,7 +598,7 @@ impl EditorElement {
.collect();
} else {
let style = &self.settings.style;
let chunks = snapshot.chunks(rows.clone()).map(|chunk| {
let chunks = snapshot.chunks(rows.clone(), true).map(|chunk| {
let highlight_style = chunk
.highlight_id
.and_then(|highlight_id| highlight_id.style(&style.syntax));

View file

@@ -125,6 +125,7 @@ pub struct MultiBufferChunks<'a> {
range: Range<usize>,
excerpts: Cursor<'a, Excerpt, usize>,
excerpt_chunks: Option<ExcerptChunks<'a>>,
language_aware: bool,
}
pub struct MultiBufferBytes<'a> {
@@ -1112,7 +1113,9 @@ impl Entity for MultiBuffer {
impl MultiBufferSnapshot {
pub fn text(&self) -> String {
self.chunks(0..self.len()).map(|chunk| chunk.text).collect()
self.chunks(0..self.len(), false)
.map(|chunk| chunk.text)
.collect()
}
pub fn reversed_chars_at<'a, T: ToOffset>(
@@ -1162,7 +1165,7 @@ impl MultiBufferSnapshot {
&'a self,
range: Range<T>,
) -> impl Iterator<Item = &'a str> {
self.chunks(range).map(|chunk| chunk.text)
self.chunks(range, false).map(|chunk| chunk.text)
}
pub fn is_line_blank(&self, row: u32) -> bool {
@@ -1320,12 +1323,17 @@ impl MultiBufferSnapshot {
result
}
pub fn chunks<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferChunks<'a> {
pub fn chunks<'a, T: ToOffset>(
&'a self,
range: Range<T>,
language_aware: bool,
) -> MultiBufferChunks<'a> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut chunks = MultiBufferChunks {
range: range.clone(),
excerpts: self.excerpts.cursor(),
excerpt_chunks: None,
language_aware: language_aware,
};
chunks.seek(range.start);
chunks
@@ -2108,7 +2116,11 @@ impl Excerpt {
}
}
fn chunks_in_range<'a>(&'a self, range: Range<usize>) -> ExcerptChunks<'a> {
fn chunks_in_range<'a>(
&'a self,
range: Range<usize>,
language_aware: bool,
) -> ExcerptChunks<'a> {
let content_start = self.range.start.to_offset(&self.buffer);
let chunks_start = content_start + range.start;
let chunks_end = content_start + cmp::min(range.end, self.text_summary.bytes);
@@ -2122,7 +2134,7 @@ impl Excerpt {
0
};
let content_chunks = self.buffer.chunks(chunks_start..chunks_end);
let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware);
ExcerptChunks {
content_chunks,
@@ -2321,6 +2333,7 @@ impl<'a> MultiBufferChunks<'a> {
if let Some(excerpt) = self.excerpts.item() {
self.excerpt_chunks = Some(excerpt.chunks_in_range(
self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(),
self.language_aware,
));
} else {
self.excerpt_chunks = None;
@@ -2340,8 +2353,10 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
} else {
self.excerpts.next(&());
let excerpt = self.excerpts.item()?;
self.excerpt_chunks =
Some(excerpt.chunks_in_range(0..self.range.end - self.excerpts.start()));
self.excerpt_chunks = Some(excerpt.chunks_in_range(
0..self.range.end - self.excerpts.start(),
self.language_aware,
));
self.next()
}
}
@@ -3096,7 +3111,7 @@ mod tests {
let mut buffer_point_utf16 = buffer_start_point_utf16;
for ch in buffer
.snapshot()
.chunks(buffer_range.clone())
.chunks(buffer_range.clone(), false)
.flat_map(|c| c.text.chars())
{
for _ in 0..ch.len_utf8() {

View file

@@ -607,7 +607,7 @@ async fn regex_search(
let mut line = String::new();
let mut line_offset = 0;
for (chunk_ix, chunk) in buffer
.chunks(0..buffer.len())
.chunks(0..buffer.len(), false)
.map(|c| c.text)
.chain(["\n"])
.enumerate()

View file

@@ -2092,28 +2092,37 @@ impl BufferSnapshot {
None
}
pub fn chunks<'a, T: ToOffset>(&'a self, range: Range<T>) -> BufferChunks<'a> {
pub fn chunks<'a, T: ToOffset>(
&'a self,
range: Range<T>,
language_aware: bool,
) -> BufferChunks<'a> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start,
is_start: true,
severity: entry.diagnostic.severity,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.end,
is_start: false,
severity: entry.diagnostic.severity,
});
let mut tree = None;
let mut diagnostic_endpoints = Vec::new();
if language_aware {
tree = self.tree.as_ref();
for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.start,
is_start: true,
severity: entry.diagnostic.severity,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: entry.range.end,
is_start: false,
severity: entry.diagnostic.severity,
});
}
diagnostic_endpoints
.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
}
diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
BufferChunks::new(
self.text.as_rope(),
range,
self.tree.as_ref(),
tree,
self.grammar(),
diagnostic_endpoints,
)
@@ -2157,7 +2166,7 @@ impl BufferSnapshot {
TextProvider(self.as_rope()),
);
let mut chunks = self.chunks(0..self.len());
let mut chunks = self.chunks(0..self.len(), true);
let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;

View file

@@ -1090,7 +1090,7 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
for chunk in buffer.snapshot().chunks(range) {
for chunk in buffer.snapshot().chunks(range, true) {
if chunks
.last()
.map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)