broken: implement bracket coloring

This commit is contained in:
parent 3a33deb467
commit 50ab0e1e32

5 changed files with 200 additions and 62 deletions
@@ -45,7 +45,7 @@ use inlay_map::{InlayMap, InlaySnapshot};
 pub use inlay_map::{InlayOffset, InlayPoint};
 use invisibles::{is_invisible, replacement};
 use language::{
-    language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point,
+    language_settings::language_settings, ChunkKind, ChunkRenderer, OffsetUtf16, Point,
     Subscription as BufferSubscription,
 };
 use lsp::DiagnosticSeverity;

@@ -547,10 +547,11 @@ pub enum ChunkReplacement {
     Str(SharedString),
 }

+#[derive(Default)]
 pub struct HighlightedChunk<'a> {
     pub text: &'a str,
     pub style: Option<HighlightStyle>,
-    pub is_tab: bool,
+    pub kind: ChunkKind,
     pub replacement: Option<ChunkReplacement>,
 }

@@ -562,8 +563,9 @@ impl<'a> HighlightedChunk<'a> {
         let mut chars = self.text.chars().peekable();
         let mut text = self.text;
         let style = self.style;
-        let is_tab = self.is_tab;
         let renderer = self.replacement;
+        let kind = self.kind;

         iter::from_fn(move || {
             let mut prefix_len = 0;
             while let Some(&ch) = chars.peek() {

@@ -578,7 +580,7 @@ impl<'a> HighlightedChunk<'a> {
                     return Some(HighlightedChunk {
                         text: prefix,
                         style,
-                        is_tab,
+                        kind,
                         replacement: renderer.clone(),
                     });
                 }

@@ -604,7 +606,7 @@ impl<'a> HighlightedChunk<'a> {
                     return Some(HighlightedChunk {
                         text: prefix,
                         style: Some(invisible_style),
-                        is_tab: false,
+                        kind: ChunkKind::Other,
                         replacement: Some(ChunkReplacement::Str(replacement.into())),
                     });
                 } else {

@@ -627,7 +629,7 @@ impl<'a> HighlightedChunk<'a> {
                     return Some(HighlightedChunk {
                         text: prefix,
                         style: Some(invisible_style),
-                        is_tab: false,
+                        kind: ChunkKind::Other,
                         replacement: renderer.clone(),
                     });
                 }

@@ -639,7 +641,7 @@ impl<'a> HighlightedChunk<'a> {
                 Some(HighlightedChunk {
                     text: remainder,
                     style,
-                    is_tab,
+                    kind,
                     replacement: renderer.clone(),
                 })
             } else {

@@ -902,7 +904,7 @@ impl DisplaySnapshot {
                 HighlightedChunk {
                     text: chunk.text,
                     style: highlight_style,
-                    is_tab: chunk.is_tab,
+                    kind: chunk.kind,
                     replacement: chunk.renderer.map(ChunkReplacement::Renderer),
                 }
                 .highlight_invisibles(editor_style)
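The `#[derive(Default)]` added to `HighlightedChunk` above is what lets a later hunk in this diff build the trailing newline chunk with `..HighlightedChunk::default()`. A minimal sketch of that pattern with stand-in field types (these are placeholders, not the editor's real types):

// Stand-in struct: deriving Default requires every field to implement
// Default, which &str and Option<_> do (the real ChunkKind does too,
// via its #[default] variant).
#[derive(Default, Debug)]
struct HighlightedChunk<'a> {
    text: &'a str,
    style: Option<u32>,       // stand-in for Option<HighlightStyle>
    kind: u8,                 // stand-in for ChunkKind
    replacement: Option<u32>, // stand-in for Option<ChunkReplacement>
}

fn main() {
    // Struct-update syntax: "a newline chunk, default everything else".
    let last_chunk = HighlightedChunk {
        text: "\n",
        ..HighlightedChunk::default()
    };
    assert_eq!(last_chunk.text, "\n");
    assert!(last_chunk.style.is_none());
    println!("{last_chunk:?}");
}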
@@ -2,7 +2,7 @@ use super::{
     fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
     Highlights,
 };
-use language::{Chunk, Point};
+use language::{Chunk, ChunkKind, Point};
 use multi_buffer::MultiBufferSnapshot;
 use std::{cmp, mem, num::NonZeroU32, ops::Range};
 use sum_tree::Bias;

@@ -265,7 +265,7 @@ impl TabSnapshot {
                 tab_size: self.tab_size,
                 chunk: Chunk {
                     text: &SPACES[0..(to_next_stop as usize)],
-                    is_tab: true,
+                    kind: ChunkKind::Tab,
                     ..Default::default()
                 },
                 inside_leading_tab: to_next_stop > 0,

@@ -522,7 +522,7 @@ impl<'a> TabChunks<'a> {
             self.max_output_position = range.end.0;
             self.chunk = Chunk {
                 text: &SPACES[0..(to_next_stop as usize)],
-                is_tab: true,
+                kind: ChunkKind::Tab,
                 ..Default::default()
             };
             self.inside_leading_tab = to_next_stop > 0;

@@ -574,7 +574,7 @@ impl<'a> Iterator for TabChunks<'a> {
                         self.output_position = next_output_position;
                         return Some(Chunk {
                             text: &SPACES[..len as usize],
-                            is_tab: true,
+                            kind: ChunkKind::Tab,
                             ..self.chunk.clone()
                         });
                     }

@@ -718,11 +718,11 @@ mod tests {
         let mut text = String::new();
         for chunk in snapshot.chunks(start..snapshot.max_point(), false, Highlights::default())
         {
-            if chunk.is_tab != was_tab {
+            if chunk.kind.is_tab() != was_tab {
                 if !text.is_empty() {
                     chunks.push((mem::take(&mut text), was_tab));
                 }
-                was_tab = chunk.is_tab;
+                was_tab = chunk.kind.is_tab();
             }
             text.push_str(chunk.text);
         }
@@ -44,7 +44,7 @@ use language::{
         IndentGuideBackgroundColoring, IndentGuideColoring, IndentGuideSettings,
         ShowWhitespaceSetting,
     },
-    ChunkRendererContext,
+    ChunkKind, ChunkRendererContext,
 };
 use lsp::DiagnosticSeverity;
 use multi_buffer::{

@@ -4606,12 +4606,11 @@ impl LineWithInvisibles {

         let ellipsis = SharedString::from("⋯");

-        for highlighted_chunk in chunks.chain([HighlightedChunk {
-            text: "\n",
-            style: None,
-            is_tab: false,
-            replacement: None,
-        }]) {
+        let last_chunk = HighlightedChunk {
+            text: "\n",
+            ..HighlightedChunk::default()
+        };
+        for highlighted_chunk in chunks.chain([last_chunk]) {
             if let Some(replacement) = highlighted_chunk.replacement {
                 if !line.is_empty() {
                     let shaped_line = cx

@@ -4734,10 +4733,22 @@ impl LineWithInvisibles {
                         line_exceeded_max_len = true;
                     }

+                    let mut color = text_style.color;
+                    let accents = cx.theme().accents();
+                    // update the text color if the chunk is a bracket, and bracket coloring is enabled
+                    if let ChunkKind::Bracket { depth } = highlighted_chunk.kind {
+                        // TODO 1: we can't rule out negative depth because we can't parse
+                        // files all the way from the beginning, find another approach
+                        // TODO 2: only apply if the bracket coloring setting is enabled
+                        if depth > 0 {
+                            color = accents.color_for_index(depth as u32);
+                        }
+                    }
+
                     styles.push(TextRun {
                         len: line_chunk.len(),
                         font: text_style.font(),
-                        color: text_style.color,
+                        color,
                         background_color: text_style.background_color,
                         underline: text_style.underline,
                         strikethrough: text_style.strikethrough,

@@ -4747,7 +4758,7 @@ impl LineWithInvisibles {
                 // Line wrap pads its contents with fake whitespaces,
                 // avoid printing them
                 let is_soft_wrapped = is_row_soft_wrapped(row);
-                if highlighted_chunk.is_tab {
+                if highlighted_chunk.kind.is_tab() {
                     if non_whitespace_added || !is_soft_wrapped {
                         invisibles.push(Invisible::Tab {
                             line_start_offset: line.len(),
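The hunk above picks a text color from the theme's accents based on bracket nesting depth, and its TODOs note that the depth can be non-positive when parsing starts mid-file. A hedged, standalone sketch of that idea; the palette, the helper, and the modulo cycling are assumptions made for illustration, not the editor's actual accents API:

// Cycle a small accent palette by bracket depth; depth 0 or below keeps
// the default text color, mirroring the `if depth > 0` guard in the diff.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Color(&'static str);

const ACCENTS: [Color; 3] = [Color("blue"), Color("yellow"), Color("magenta")];

fn color_for_depth(default: Color, depth: i32) -> Color {
    if depth > 0 {
        // Depth 1 gets the first accent, and deeper levels wrap around
        // once the palette is exhausted.
        ACCENTS[((depth - 1) as usize) % ACCENTS.len()]
    } else {
        default
    }
}

fn main() {
    let default = Color("text");
    assert_eq!(color_for_depth(default, 0), default); // untracked or negative depth
    assert_eq!(color_for_depth(default, 1), Color("blue"));
    assert_eq!(color_for_depth(default, 4), Color("blue")); // wrapped around
}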
@@ -66,6 +66,7 @@ pub use text::{
     Transaction, TransactionId, Unclipped,
 };
 use theme::SyntaxTheme;
+use tree_sitter::Query;
 #[cfg(any(test, feature = "test-support"))]
 use util::RandomCharIter;
 use util::{debug_panic, RangeExt};

@@ -479,11 +480,92 @@ struct IndentSuggestion {
     within_error: bool,
 }

-struct BufferChunkHighlights<'a> {
+pub struct BufferChunkHighlights<'a> {
     captures: SyntaxMapCaptures<'a>,
     next_capture: Option<SyntaxMapCapture<'a>>,
-    stack: Vec<(usize, HighlightId)>,
+    /// A stack of captures, holds `(end_offset, highlight_id, capture_index)`.
+    ///
+    /// - `end_offset`: where the capture ends
+    /// - `highlight_id`: corresponding highlight id for the captured syntax node
+    /// - `capture_index`: capture id for node in highlights query
+    stack: Vec<(usize, HighlightId, u32)>,
     highlight_maps: Vec<HighlightMap>,
+    bracket_tracker: Option<BracketTracker>,
+    language: &'a Language,
 }
+
+impl BufferChunkHighlights<'_> {
+    fn is_capture_a_bracket(&self, capture_index: u32) -> bool {
+        self.bracket_tracker.as_ref().is_some_and(|tracker| {
+            [tracker.open_bracket_ix, tracker.close_bracket_ix].contains(&capture_index)
+        })
+    }
+
+    fn update_bracket_depth(&mut self, capture_index: u32) {
+        if let Some(tracker) = self.bracket_tracker.as_mut() {
+            if capture_index == tracker.open_bracket_ix {
+                tracker.depth += 1;
+            }
+            if capture_index == tracker.close_bracket_ix {
+                tracker.depth -= 1;
+            }
+        }
+    }
+
+    fn bracket_depth(&self) -> i32 {
+        self.bracket_tracker
+            .as_ref()
+            .map_or(0, |tracker| tracker.depth)
+    }
+}
+
+impl<'a> BufferChunkHighlights<'a> {
+    pub fn new(
+        captures: SyntaxMapCaptures<'a>,
+        highlight_maps: Vec<HighlightMap>,
+        language: &'a Language,
+    ) -> Self {
+        // NOTE: only tracks brackets on the top-level grammar, ignores nested grammars
+        let bracket_tracker = language
+            .grammar
+            .as_ref()
+            .and_then(|grammar| grammar.highlights_query.as_ref())
+            .and_then(BracketTracker::try_new);
+
+        Self {
+            captures,
+            next_capture: None,
+            stack: Default::default(),
+            highlight_maps,
+            bracket_tracker,
+            language,
+        }
+    }
+}
+
+#[derive(Clone)]
+struct BracketTracker {
+    // Current depth.
+    // TODO: this shouldn't be negative, but as Zed parses buffers from the
+    // middle, this bracket tracking approach can't keep track of the depth
+    depth: i32,
+    /// The tree-sitter capture index for an opening bracket.
+    open_bracket_ix: u32,
+    /// The tree-sitter capture index for a closing bracket.
+    close_bracket_ix: u32,
+}
+
+impl BracketTracker {
+    /// Create a BracketTracker if the required captures are provided.
+    pub fn try_new(query: &Query) -> Option<Self> {
+        Some(Self {
+            depth: 0,
+            // TODO: cache this linear search when creating the highlights_query,
+            // BufferChunks is created a ton of times, and so is BracketTracker
+            open_bracket_ix: query.capture_index_for_name("punctuation.bracket.open")?,
+            close_bracket_ix: query.capture_index_for_name("punctuation.bracket.close")?,
+        })
+    }
+}

 /// An iterator that yields chunks of a buffer's text, along with their
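`BracketTracker` is essentially an open/close counter driven by highlight captures, which is why its comment warns about negative depth: when iteration starts in the middle of a file, closing brackets can be seen before their openers. A standalone sketch of the same counting idea over plain characters; the open/close handling here is an illustrative assumption, not a copy of the tracker's exact bookkeeping:

// Walk a token stream and report a nesting depth per character, the way
// the tracker counts open/close bracket captures.
fn depths(tokens: &str) -> Vec<i32> {
    let mut depth = 0;
    tokens
        .chars()
        .map(|ch| match ch {
            // An opening bracket enters a new nesting level.
            '(' => {
                depth += 1;
                depth
            }
            // A closing bracket reports the level it closes, then the
            // counter drops back down.
            ')' => {
                let level = depth;
                depth -= 1;
                level
            }
            _ => 0,
        })
        .collect()
}

fn main() {
    // A complete expression: depths stay positive.
    assert_eq!(depths("(a(b))"), vec![1, 0, 2, 0, 2, 1]);
    // Starting mid-file, after the openers were skipped: the counter goes negative.
    assert_eq!(depths("b))"), vec![0, 0, -1]);
}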
@@ -516,12 +598,30 @@ pub struct Chunk<'a> {
     pub diagnostic_severity: Option<DiagnosticSeverity>,
     /// Whether this chunk of text is marked as unnecessary.
     pub is_unnecessary: bool,
-    /// Whether this chunk of text was originally a tab character.
-    pub is_tab: bool,
+    /// If this chunk is a particular kind, store additional info about it.
+    pub kind: ChunkKind,
     /// An optional recipe for how the chunk should be presented.
     pub renderer: Option<ChunkRenderer>,
 }

+/// Store some info about this chunk for special treatment down the road.
+#[derive(Clone, Copy, Debug, Default)]
+pub enum ChunkKind {
+    /// Not a special chunk type.
+    #[default]
+    Other,
+    /// The chunk of text was originally a tab character.
+    Tab,
+    /// Brackets can be colored by depth.
+    Bracket { depth: i32 },
+}
+
+impl ChunkKind {
+    pub fn is_tab(&self) -> bool {
+        matches!(self, Self::Tab)
+    }
+}
+
 /// A recipe for how the chunk should be presented.
 #[derive(Clone)]
 pub struct ChunkRenderer {
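With the old boolean folded into `ChunkKind`, consumers match on the enum and bracket chunks carry their nesting depth along. A small usage sketch with a stand-in copy of the enum; the `describe` helper is hypothetical, not part of this commit:

// Stand-in for the enum introduced above.
#[derive(Clone, Copy, Debug, Default)]
enum ChunkKind {
    #[default]
    Other,
    Tab,
    Bracket { depth: i32 },
}

// Hypothetical consumer: branch on the kind the way the editor picks an
// invisible glyph for tabs or an accent color for brackets.
fn describe(kind: ChunkKind) -> String {
    match kind {
        ChunkKind::Tab => "tab (may draw a whitespace marker)".to_string(),
        ChunkKind::Bracket { depth } => format!("bracket at depth {depth}"),
        ChunkKind::Other => "plain text".to_string(),
    }
}

fn main() {
    println!("{}", describe(ChunkKind::default()));
    println!("{}", describe(ChunkKind::Tab));
    println!("{}", describe(ChunkKind::Bracket { depth: 2 }));
}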
@@ -2774,13 +2874,28 @@ impl BufferSnapshot {
     pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
         let range = range.start.to_offset(self)..range.end.to_offset(self);

-        let mut syntax = None;
+        let mut chunk_highlights = None;
         if language_aware {
-            syntax = Some(self.get_highlights(range.clone()));
+            if let Some(language) = self.language.as_ref() {
+                let (captures, highlight_maps) = self.get_highlights(range.clone());
+
+                chunk_highlights = Some(BufferChunkHighlights::new(
+                    captures,
+                    highlight_maps,
+                    &language,
+                ));
+            }
         }

         // We want to look at diagnostic spans only when iterating over language-annotated chunks.
         let diagnostics = language_aware;
-        BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
+        BufferChunks::new(
+            self.text.as_rope(),
+            range,
+            chunk_highlights,
+            diagnostics,
+            Some(self),
+        )
     }

     /// Invokes the given callback for each line of text in the given range of the buffer.
@@ -4058,20 +4173,10 @@ impl<'a> BufferChunks<'a> {
     pub(crate) fn new(
         text: &'a Rope,
         range: Range<usize>,
-        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
+        highlights: Option<BufferChunkHighlights<'a>>,
         diagnostics: bool,
         buffer_snapshot: Option<&'a BufferSnapshot>,
     ) -> Self {
-        let mut highlights = None;
-        if let Some((captures, highlight_maps)) = syntax {
-            highlights = Some(BufferChunkHighlights {
-                captures,
-                next_capture: None,
-                stack: Default::default(),
-                highlight_maps,
-            })
-        }
-
         let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
         let chunks = text.chunks_in_range(range.clone());

@@ -4097,10 +4202,15 @@ impl<'a> BufferChunks<'a> {
             self.chunks.set_range(self.range.clone());
             if let Some(highlights) = self.highlights.as_mut() {
                 if old_range.start <= self.range.start && old_range.end >= self.range.end {
                     // Reuse existing highlights stack, as the new range is a subrange of the old one.
-                    highlights
-                        .stack
-                        .retain(|(end_offset, _)| *end_offset > range.start);
+                    while let Some(&(end_offset, _, capture_index)) = highlights.stack.last() {
+                        if end_offset > range.start {
+                            break;
+                        } else {
+                            highlights.stack.pop();
+                            highlights.update_bracket_depth(capture_index);
+                        }
+                    }
                     if let Some(capture) = &highlights.next_capture {
                         if range.start >= capture.node.start_byte() {
                             let next_capture_end = capture.node.end_byte();

@@ -4108,19 +4218,19 @@ impl<'a> BufferChunks<'a> {
                             highlights.stack.push((
                                 next_capture_end,
                                 highlights.highlight_maps[capture.grammar_index].get(capture.index),
+                                capture.index,
                             ));
+                            highlights.update_bracket_depth(capture.index);
                         }
                         highlights.next_capture.take();
                     }
                 }
             } else if let Some(snapshot) = self.buffer_snapshot {
+                // Can't reuse existing highlights stack, reset it
                 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
-                *highlights = BufferChunkHighlights {
-                    captures,
-                    next_capture: None,
-                    stack: Default::default(),
-                    highlight_maps,
-                };
+                *highlights =
+                    BufferChunkHighlights::new(captures, highlight_maps, &highlights.language);
             } else {
                 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                 // Seeking such BufferChunks is not supported.
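In the seek path above, the old `retain` call becomes an explicit pop loop because every entry that leaves the stack now needs a side effect (the bracket-depth update). A minimal, editor-independent sketch of that refactor; the offsets and capture indices are made up, and what the callback should do to the depth is left to the tracker:

// Pop stale entries one by one so each removal can run a callback,
// instead of dropping them wholesale with Vec::retain. Innermost
// captures (smallest end offset) sit on top of the stack.
fn pop_stale(stack: &mut Vec<(usize, u32)>, cursor: usize, mut on_pop: impl FnMut(u32)) {
    while let Some(&(end_offset, capture_ix)) = stack.last() {
        if end_offset > cursor {
            break;
        }
        stack.pop();
        on_pop(capture_ix);
    }
}

fn main() {
    let mut stack = vec![(30, 1), (20, 2), (8, 3)];
    let mut popped = Vec::new();
    pop_stale(&mut stack, 10, |ix| popped.push(ix));
    assert_eq!(stack, vec![(30, 1), (20, 2)]);
    assert_eq!(popped, vec![3]); // only the capture that ended before the cursor
}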
@@ -4216,9 +4326,10 @@ impl<'a> Iterator for BufferChunks<'a> {
         let mut next_diagnostic_endpoint = usize::MAX;

         if let Some(highlights) = self.highlights.as_mut() {
-            while let Some((parent_capture_end, _)) = highlights.stack.last() {
-                if *parent_capture_end <= self.range.start {
+            while let Some(&(parent_capture_end, _, capture_index)) = highlights.stack.last() {
+                if parent_capture_end <= self.range.start {
                     highlights.stack.pop();
+                    highlights.update_bracket_depth(capture_index);
                 } else {
                     break;
                 }

@@ -4237,7 +4348,8 @@ impl<'a> Iterator for BufferChunks<'a> {
                     highlights.highlight_maps[capture.grammar_index].get(capture.index);
                 highlights
                     .stack
-                    .push((capture.node.end_byte(), highlight_id));
+                    .push((capture.node.end_byte(), highlight_id, capture.index));
+                highlights.update_bracket_depth(capture.index);
                 highlights.next_capture = highlights.captures.next();
             }
         }

@@ -4258,28 +4370,38 @@ impl<'a> Iterator for BufferChunks<'a> {
         self.diagnostic_endpoints = diagnostic_endpoints;

         let chunk = self.chunks.peek()?;

         let chunk_start = self.range.start;

         let mut chunk_end = (self.chunks.offset() + chunk.len())
             .min(next_capture_start)
             .min(next_diagnostic_endpoint);
         let mut highlight_id = None;
+        let mut chunk_kind = ChunkKind::Other;
+
         if let Some(highlights) = self.highlights.as_ref() {
-            if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
-                chunk_end = chunk_end.min(*parent_capture_end);
-                highlight_id = Some(*parent_highlight_id);
+            if let Some(&(parent_capture_end, parent_highlight_id, parent_index)) =
+                highlights.stack.last()
+            {
+                chunk_end = chunk_end.min(parent_capture_end);
+                highlight_id = Some(parent_highlight_id);
+                if highlights.is_capture_a_bracket(parent_index) {
+                    chunk_kind = ChunkKind::Bracket {
+                        depth: highlights.bracket_depth(),
+                    };
+                }
             }
         }

-        let slice = &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
+        let text = &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
         self.range.start = chunk_end;
         if self.range.start == self.chunks.offset() + chunk.len() {
             self.chunks.next().unwrap();
         }

         Some(Chunk {
-            text: slice,
+            text,
             syntax_highlight_id: highlight_id,
+            kind: chunk_kind,
             diagnostic_severity: self.current_diagnostic_severity(),
             is_unnecessary: self.current_code_is_unnecessary(),
             ..Default::default()
@@ -1437,9 +1437,12 @@ impl Language {
         });
         let highlight_maps = vec![grammar.highlight_map()];
         let mut offset = 0;
-        for chunk in
-            BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None)
-        {
+        let chunk_highlights =
+            Some(BufferChunkHighlights::new(captures, highlight_maps, &self));
+
+        let chunks = BufferChunks::new(text, range, chunk_highlights, false, None);
+        for chunk in chunks {
             let end_offset = offset + chunk.text.len();
             if let Some(highlight_id) = chunk.syntax_highlight_id {
                 if !highlight_id.is_default() {