Remove theme parameters from buffer/display map's chunks methods

Change Chunk to contain a highlight id instead of a resolved highlight
style. Retrieve the actual highlight style from the theme in the
editor element layer.

This sets us up to perform syntax highlighting in other code paths
where the theme is not available.
Max Brunsfeld 2022-02-02 16:33:04 -08:00
parent 101add8da3
commit 88adddb324
11 changed files with 141 additions and 168 deletions
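
The core of the change, visible throughout the diffs below, is that chunk producers now emit a HighlightId and only the rendering layer resolves it against a theme. A minimal, self-contained sketch of that shape follows; the types here are simplified stand-ins for illustration, not the real definitions from the language, theme, and gpui crates:

// Sketch only: simplified stand-ins for Chunk, HighlightId, HighlightStyle,
// and SyntaxTheme, to illustrate the id-based indirection this commit adds.

#[derive(Clone, Copy, Debug, PartialEq)]
struct Color(u32);

#[derive(Clone, Copy, Debug)]
struct HighlightStyle {
    color: Color,
}

#[derive(Clone, Copy, Debug)]
struct HighlightId(usize);

struct SyntaxTheme {
    // The index corresponds to a HighlightId produced by the highlight map.
    highlights: Vec<HighlightStyle>,
}

impl HighlightId {
    // Mirrors the `highlight_id.style(&theme)` calls in the diff:
    // the style is resolved lazily, only where a theme exists.
    fn style(&self, theme: &SyntaxTheme) -> Option<HighlightStyle> {
        theme.highlights.get(self.0).copied()
    }
}

// After this commit, chunks carry only the id; producing them needs no theme.
struct Chunk<'a> {
    text: &'a str,
    highlight_id: Option<HighlightId>,
}

fn main() {
    let theme = SyntaxTheme {
        highlights: vec![HighlightStyle { color: Color(0xff0000) }],
    };
    let chunk = Chunk {
        text: "fn",
        highlight_id: Some(HighlightId(0)),
    };

    // Only the rendering layer (e.g. the editor element) touches the theme.
    let style = chunk.highlight_id.and_then(|id| id.style(&theme));
    assert_eq!(style.map(|s| s.color), Some(Color(0xff0000)));
    println!("chunk {:?} resolves to {:?}", chunk.text, style);
}

With this split, code paths that only need text or highlight ids (wrapping, tab expansion, search) can call chunks() without threading a theme through every display-map layer.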


@@ -12,7 +12,6 @@ use language::{Point, Subscription as BufferSubscription};
 use std::ops::Range;
 use sum_tree::Bias;
 use tab_map::TabMap;
-use theme::SyntaxTheme;
 use wrap_map::WrapMap;

 pub use block_map::{
@@ -251,16 +250,12 @@ impl DisplaySnapshot {
     pub fn text_chunks(&self, display_row: u32) -> impl Iterator<Item = &str> {
         self.blocks_snapshot
-            .chunks(display_row..self.max_point().row() + 1, None)
+            .chunks(display_row..self.max_point().row() + 1)
             .map(|h| h.text)
     }

-    pub fn chunks<'a>(
-        &'a self,
-        display_rows: Range<u32>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> DisplayChunks<'a> {
-        self.blocks_snapshot.chunks(display_rows, theme)
+    pub fn chunks<'a>(&'a self, display_rows: Range<u32>) -> DisplayChunks<'a> {
+        self.blocks_snapshot.chunks(display_rows)
     }

     pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator<Item = char> + 'a {
@@ -1122,8 +1117,10 @@ mod tests {
    ) -> Vec<(String, Option<Color>)> {
        let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
        let mut chunks: Vec<(String, Option<Color>)> = Vec::new();
-        for chunk in snapshot.chunks(rows, Some(theme)) {
-            let color = chunk.highlight_style.map(|s| s.color);
+        for chunk in snapshot.chunks(rows) {
+            let color = chunk
+                .highlight_id
+                .and_then(|id| id.style(theme).map(|s| s.color));
            if let Some((last_chunk, last_color)) = chunks.last_mut() {
                if color == *last_color {
                    last_chunk.push_str(chunk.text);


@@ -15,7 +15,6 @@ use std::{
 };
 use sum_tree::{Bias, SumTree};
 use text::{Edit, Point};
-use theme::SyntaxTheme;

 const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
@@ -461,16 +460,12 @@ impl<'a> BlockMapWriter<'a> {
 impl BlockSnapshot {
     #[cfg(test)]
     pub fn text(&self) -> String {
-        self.chunks(0..self.transforms.summary().output_rows, None)
+        self.chunks(0..self.transforms.summary().output_rows)
             .map(|chunk| chunk.text)
             .collect()
     }

-    pub fn chunks<'a>(
-        &'a self,
-        rows: Range<u32>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> BlockChunks<'a> {
+    pub fn chunks<'a>(&'a self, rows: Range<u32>) -> BlockChunks<'a> {
         let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
         let input_end = {
@@ -498,7 +493,7 @@ impl BlockSnapshot {
             cursor.start().1 .0 + overshoot
         };
         BlockChunks {
-            input_chunks: self.wrap_snapshot.chunks(input_start..input_end, theme),
+            input_chunks: self.wrap_snapshot.chunks(input_start..input_end),
             input_chunk: Default::default(),
             transforms: cursor,
             output_row: rows.start,
@@ -715,7 +710,7 @@ impl<'a> Iterator for BlockChunks<'a> {
            return Some(Chunk {
                text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..line_count as usize]) },
-                highlight_style: None,
+                highlight_id: None,
                diagnostic: None,
            });
        }
@@ -1340,7 +1335,7 @@ mod tests {
        for start_row in 0..expected_row_count {
            let expected_text = expected_lines[start_row..].join("\n");
            let actual_text = blocks_snapshot
-                .chunks(start_row as u32..expected_row_count as u32, None)
+                .chunks(start_row as u32..expected_row_count as u32)
                .map(|chunk| chunk.text)
                .collect::<String>();
            assert_eq!(


@@ -11,7 +11,6 @@ use std::{
     sync::atomic::{AtomicUsize, Ordering::SeqCst},
 };
 use sum_tree::{Bias, Cursor, FilterCursor, SumTree};
-use theme::SyntaxTheme;

 pub trait ToFoldPoint {
     fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint;
@@ -490,7 +489,7 @@ impl FoldSnapshot {
     #[cfg(test)]
     pub fn text(&self) -> String {
-        self.chunks(FoldOffset(0)..self.len(), None)
+        self.chunks(FoldOffset(0)..self.len())
             .map(|c| c.text)
             .collect()
     }
@@ -630,15 +629,11 @@ impl FoldSnapshot {
     pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator<Item = char> {
         let start = start.to_offset(self);
-        self.chunks(start..self.len(), None)
+        self.chunks(start..self.len())
             .flat_map(|chunk| chunk.text.chars())
     }

-    pub fn chunks<'a>(
-        &'a self,
-        range: Range<FoldOffset>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> FoldChunks<'a> {
+    pub fn chunks<'a>(&'a self, range: Range<FoldOffset>) -> FoldChunks<'a> {
         let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();

         transform_cursor.seek(&range.end, Bias::Right, &());
@@ -651,7 +646,7 @@ impl FoldSnapshot {
         FoldChunks {
             transform_cursor,
-            buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end, theme),
+            buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end),
             buffer_chunk: None,
             buffer_offset: buffer_start,
             output_offset: range.start.0,
@@ -976,7 +971,7 @@ impl<'a> Iterator for FoldChunks<'a> {
            self.output_offset += output_text.len();
            return Some(Chunk {
                text: output_text,
-                highlight_style: None,
+                highlight_id: None,
                diagnostic: None,
            });
        }
@@ -1398,7 +1393,7 @@ mod tests {
            let text = &expected_text[start.0..end.0];
            assert_eq!(
                snapshot
-                    .chunks(start..end, None)
+                    .chunks(start..end)
                    .map(|c| c.text)
                    .collect::<String>(),
                text,


@@ -5,7 +5,6 @@ use parking_lot::Mutex;
 use std::{cmp, mem, ops::Range};
 use sum_tree::Bias;
 use text::Point;
-use theme::SyntaxTheme;

 pub struct TabMap(Mutex<TabSnapshot>);
@@ -35,7 +34,7 @@ impl TabMap {
            let mut delta = 0;
            for chunk in old_snapshot
                .fold_snapshot
-                .chunks(fold_edit.old.end..max_offset, None)
+                .chunks(fold_edit.old.end..max_offset)
            {
                let patterns: &[_] = &['\t', '\n'];
                if let Some(ix) = chunk.text.find(patterns) {
@@ -110,7 +109,7 @@ impl TabSnapshot {
            self.max_point()
        };
        for c in self
-            .chunks(range.start..line_end, None)
+            .chunks(range.start..line_end)
            .flat_map(|chunk| chunk.text.chars())
        {
            if c == '\n' {
@@ -124,7 +123,7 @@ impl TabSnapshot {
            last_line_chars = first_line_chars;
        } else {
            for _ in self
-                .chunks(TabPoint::new(range.end.row(), 0)..range.end, None)
+                .chunks(TabPoint::new(range.end.row(), 0)..range.end)
                .flat_map(|chunk| chunk.text.chars())
            {
                last_line_chars += 1;
@@ -144,11 +143,7 @@ impl TabSnapshot {
         self.fold_snapshot.version
     }

-    pub fn chunks<'a>(
-        &'a self,
-        range: Range<TabPoint>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> TabChunks<'a> {
+    pub fn chunks<'a>(&'a self, range: Range<TabPoint>) -> TabChunks<'a> {
         let (input_start, expanded_char_column, to_next_stop) =
             self.to_fold_point(range.start, Bias::Left);
         let input_start = input_start.to_offset(&self.fold_snapshot);
@@ -163,7 +158,7 @@ impl TabSnapshot {
         };
         TabChunks {
-            fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme),
+            fold_chunks: self.fold_snapshot.chunks(input_start..input_end),
             column: expanded_char_column,
             output_position: range.start.0,
             max_output_position: range.end.0,
@@ -182,7 +177,7 @@ impl TabSnapshot {
     #[cfg(test)]
     pub fn text(&self) -> String {
-        self.chunks(TabPoint::zero()..self.max_point(), None)
+        self.chunks(TabPoint::zero()..self.max_point())
             .map(|chunk| chunk.text)
             .collect()
     }
@@ -495,7 +490,7 @@ mod tests {
            assert_eq!(
                expected_text,
                tabs_snapshot
-                    .chunks(start..end, None)
+                    .chunks(start..end)
                    .map(|c| c.text)
                    .collect::<String>(),
                "chunks({:?}..{:?})",


@@ -13,7 +13,6 @@ use smol::future::yield_now;
 use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
 use sum_tree::{Bias, Cursor, SumTree};
 use text::Patch;
-use theme::SyntaxTheme;

 pub use super::tab_map::TextSummary;
 pub type WrapEdit = text::Edit<u32>;
@@ -434,10 +433,8 @@ impl WrapSnapshot {
        let mut line = String::new();
        let mut remaining = None;
-        let mut chunks = new_tab_snapshot.chunks(
-            TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
-            None,
-        );
+        let mut chunks = new_tab_snapshot
+            .chunks(TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point());
        let mut edit_transforms = Vec::<Transform>::new();
        for _ in edit.new_rows.start..edit.new_rows.end {
            while let Some(chunk) =
@@ -562,15 +559,11 @@ impl WrapSnapshot {
     }

     pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
-        self.chunks(wrap_row..self.max_point().row() + 1, None)
+        self.chunks(wrap_row..self.max_point().row() + 1)
             .map(|h| h.text)
     }

-    pub fn chunks<'a>(
-        &'a self,
-        rows: Range<u32>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> WrapChunks<'a> {
+    pub fn chunks<'a>(&'a self, rows: Range<u32>) -> WrapChunks<'a> {
         let output_start = WrapPoint::new(rows.start, 0);
         let output_end = WrapPoint::new(rows.end, 0);
         let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@@ -583,7 +576,7 @@ impl WrapSnapshot {
            .to_tab_point(output_end)
            .min(self.tab_snapshot.max_point());
        WrapChunks {
-            input_chunks: self.tab_snapshot.chunks(input_start..input_end, theme),
+            input_chunks: self.tab_snapshot.chunks(input_start..input_end),
            input_chunk: Default::default(),
            output_position: output_start,
            max_output_row: rows.end,
@@ -1295,7 +1288,7 @@ mod tests {
            }

            let actual_text = self
-                .chunks(start_row..end_row, None)
+                .chunks(start_row..end_row)
                .map(|c| c.text)
                .collect::<String>();
            assert_eq!(


@@ -1628,7 +1628,7 @@ impl Editor {
                .map(|(id, completion)| {
                    StringMatchCandidate::new(
                        id,
-                        completion.label()[completion.filter_range()].into(),
+                        completion.lsp_completion.label[completion.filter_range()].into(),
                    )
                })
                .collect(),


@@ -598,31 +598,32 @@ impl EditorElement {
                .collect();
        } else {
            let style = &self.settings.style;
-            let chunks = snapshot
-                .chunks(rows.clone(), Some(&style.syntax))
-                .map(|chunk| {
-                    let highlight = if let Some(severity) = chunk.diagnostic {
-                        let diagnostic_style = super::diagnostic_style(severity, true, style);
-                        let underline = Some(Underline {
-                            color: diagnostic_style.message.text.color,
-                            thickness: 1.0.into(),
-                            squiggly: true,
-                        });
-                        if let Some(mut highlight) = chunk.highlight_style {
-                            highlight.underline = underline;
-                            Some(highlight)
-                        } else {
-                            Some(HighlightStyle {
-                                underline,
-                                color: style.text.color,
-                                font_properties: style.text.font_properties,
-                            })
-                        }
-                    } else {
-                        chunk.highlight_style
-                    };
-                    (chunk.text, highlight)
-                });
+            let chunks = snapshot.chunks(rows.clone()).map(|chunk| {
+                let highlight_style = chunk
+                    .highlight_id
+                    .and_then(|highlight_id| highlight_id.style(&style.syntax));
+                let highlight = if let Some(severity) = chunk.diagnostic {
+                    let diagnostic_style = super::diagnostic_style(severity, true, style);
+                    let underline = Some(Underline {
+                        color: diagnostic_style.message.text.color,
+                        thickness: 1.0.into(),
+                        squiggly: true,
+                    });
+                    if let Some(mut highlight) = highlight_style {
+                        highlight.underline = underline;
+                        Some(highlight)
+                    } else {
+                        Some(HighlightStyle {
+                            underline,
+                            color: style.text.color,
+                            font_properties: style.text.font_properties,
+                        })
+                    }
+                } else {
+                    highlight_style
+                };
+                (chunk.text, highlight)
+            });
            layout_highlighted_chunks(
                chunks,
                &style.text,


@@ -125,7 +125,6 @@ pub struct MultiBufferChunks<'a> {
     range: Range<usize>,
     excerpts: Cursor<'a, Excerpt, usize>,
     excerpt_chunks: Option<ExcerptChunks<'a>>,
-    theme: Option<&'a SyntaxTheme>,
 }

 pub struct MultiBufferBytes<'a> {
@@ -1113,9 +1112,7 @@ impl Entity for MultiBuffer {
 impl MultiBufferSnapshot {
     pub fn text(&self) -> String {
-        self.chunks(0..self.len(), None)
-            .map(|chunk| chunk.text)
-            .collect()
+        self.chunks(0..self.len()).map(|chunk| chunk.text).collect()
     }

     pub fn reversed_chars_at<'a, T: ToOffset>(
@@ -1165,7 +1162,7 @@ impl MultiBufferSnapshot {
        &'a self,
        range: Range<T>,
    ) -> impl Iterator<Item = &'a str> {
-        self.chunks(range, None).map(|chunk| chunk.text)
+        self.chunks(range).map(|chunk| chunk.text)
    }

    pub fn is_line_blank(&self, row: u32) -> bool {
@@ -1323,17 +1320,12 @@ impl MultiBufferSnapshot {
         result
     }

-    pub fn chunks<'a, T: ToOffset>(
-        &'a self,
-        range: Range<T>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> MultiBufferChunks<'a> {
+    pub fn chunks<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferChunks<'a> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut chunks = MultiBufferChunks {
             range: range.clone(),
             excerpts: self.excerpts.cursor(),
             excerpt_chunks: None,
-            theme,
         };
         chunks.seek(range.start);
         chunks
@@ -2116,11 +2108,7 @@ impl Excerpt {
         }
     }

-    fn chunks_in_range<'a>(
-        &'a self,
-        range: Range<usize>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> ExcerptChunks<'a> {
+    fn chunks_in_range<'a>(&'a self, range: Range<usize>) -> ExcerptChunks<'a> {
         let content_start = self.range.start.to_offset(&self.buffer);
         let chunks_start = content_start + range.start;
         let chunks_end = content_start + cmp::min(range.end, self.text_summary.bytes);
@@ -2134,7 +2122,7 @@ impl Excerpt {
            0
        };

-        let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme);
+        let content_chunks = self.buffer.chunks(chunks_start..chunks_end);

        ExcerptChunks {
            content_chunks,
@@ -2333,7 +2321,6 @@ impl<'a> MultiBufferChunks<'a> {
        if let Some(excerpt) = self.excerpts.item() {
            self.excerpt_chunks = Some(excerpt.chunks_in_range(
                self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(),
-                self.theme,
            ));
        } else {
            self.excerpt_chunks = None;
@@ -2353,9 +2340,8 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
        } else {
            self.excerpts.next(&());
            let excerpt = self.excerpts.item()?;
-            self.excerpt_chunks = Some(
-                excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme),
-            );
+            self.excerpt_chunks =
+                Some(excerpt.chunks_in_range(0..self.range.end - self.excerpts.start()));
            self.next()
        }
    }
@@ -3110,7 +3096,7 @@ mod tests {
            let mut buffer_point_utf16 = buffer_start_point_utf16;
            for ch in buffer
                .snapshot()
-                .chunks(buffer_range.clone(), None)
+                .chunks(buffer_range.clone())
                .flat_map(|c| c.text.chars())
            {
                for _ in 0..ch.len_utf8() {


@@ -607,7 +607,7 @@ async fn regex_search(
    let mut line = String::new();
    let mut line_offset = 0;
    for (chunk_ix, chunk) in buffer
-        .chunks(0..buffer.len(), None)
+        .chunks(0..buffer.len())
        .map(|c| c.text)
        .chain(["\n"])
        .enumerate()


@@ -12,7 +12,7 @@ use crate::{
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
 use futures::FutureExt as _;
-use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
+use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
 use lazy_static::lazy_static;
 use lsp::LanguageServer;
 use parking_lot::Mutex;
@@ -358,7 +358,6 @@ struct BufferChunkHighlights<'a> {
     next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
     stack: Vec<(usize, HighlightId)>,
     highlight_map: HighlightMap,
-    theme: &'a SyntaxTheme,
     _query_cursor: QueryCursorHandle,
 }

@@ -376,7 +375,7 @@ pub struct BufferChunks<'a> {
 #[derive(Clone, Copy, Debug, Default)]
 pub struct Chunk<'a> {
     pub text: &'a str,
-    pub highlight_style: Option<HighlightStyle>,
+    pub highlight_id: Option<HighlightId>,
     pub diagnostic: Option<DiagnosticSeverity>,
 }

@@ -387,7 +386,7 @@ pub(crate) struct Diff {
 }

 #[derive(Clone, Copy)]
-struct DiagnosticEndpoint {
+pub(crate) struct DiagnosticEndpoint {
     offset: usize,
     is_start: bool,
     severity: DiagnosticSeverity,
@@ -2117,67 +2116,31 @@ impl BufferSnapshot {
         None
     }

-    pub fn chunks<'a, T: ToOffset>(
-        &'a self,
-        range: Range<T>,
-        theme: Option<&'a SyntaxTheme>,
-    ) -> BufferChunks<'a> {
+    pub fn chunks<'a, T: ToOffset>(&'a self, range: Range<T>) -> BufferChunks<'a> {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
-        let mut highlights = None;
         let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
-        if let Some(theme) = theme {
-            for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
-                diagnostic_endpoints.push(DiagnosticEndpoint {
-                    offset: entry.range.start,
-                    is_start: true,
-                    severity: entry.diagnostic.severity,
-                });
-                diagnostic_endpoints.push(DiagnosticEndpoint {
-                    offset: entry.range.end,
-                    is_start: false,
-                    severity: entry.diagnostic.severity,
-                });
-            }
-            diagnostic_endpoints
-                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
-            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
-                let mut query_cursor = QueryCursorHandle::new();
-                // TODO - add a Tree-sitter API to remove the need for this.
-                let cursor = unsafe {
-                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
-                };
-                let captures = cursor.set_byte_range(range.clone()).captures(
-                    &grammar.highlights_query,
-                    tree.root_node(),
-                    TextProvider(self.text.as_rope()),
-                );
-                highlights = Some(BufferChunkHighlights {
-                    captures,
-                    next_capture: None,
-                    stack: Default::default(),
-                    highlight_map: grammar.highlight_map(),
-                    _query_cursor: query_cursor,
-                    theme,
-                })
-            }
+        for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
+            diagnostic_endpoints.push(DiagnosticEndpoint {
+                offset: entry.range.start,
+                is_start: true,
+                severity: entry.diagnostic.severity,
+            });
+            diagnostic_endpoints.push(DiagnosticEndpoint {
+                offset: entry.range.end,
+                is_start: false,
+                severity: entry.diagnostic.severity,
+            });
         }
+        diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

-        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
-        let chunks = self.text.as_rope().chunks_in_range(range.clone());
-        BufferChunks {
+        BufferChunks::new(
+            self.text.as_rope(),
             range,
-            chunks,
+            self.tree.as_ref(),
+            self.grammar(),
             diagnostic_endpoints,
-            error_depth: 0,
-            warning_depth: 0,
-            information_depth: 0,
-            hint_depth: 0,
-            highlights,
-        }
+        )
     }

     pub fn language(&self) -> Option<&Arc<Language>> {
@@ -2218,7 +2181,7 @@ impl BufferSnapshot {
            TextProvider(self.as_rope()),
        );

-        let mut chunks = self.chunks(0..self.len(), theme);
+        let mut chunks = self.chunks(0..self.len());

        let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
        let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
@@ -2272,7 +2235,11 @@ impl BufferSnapshot {
            } else {
                offset += chunk.text.len();
            }

-            if let Some(style) = chunk.highlight_style {
+            let style = chunk
+                .highlight_id
+                .zip(theme)
+                .and_then(|(highlight, theme)| highlight.style(theme));
+            if let Some(style) = style {
                let start = text.len();
                let end = start + chunk.text.len();
                highlight_ranges.push((start..end, style));
@@ -2460,6 +2427,50 @@ impl<'a> Iterator for ByteChunks<'a> {
 unsafe impl<'a> Send for BufferChunks<'a> {}

 impl<'a> BufferChunks<'a> {
+    pub(crate) fn new(
+        text: &'a Rope,
+        range: Range<usize>,
+        tree: Option<&'a Tree>,
+        grammar: Option<&'a Arc<Grammar>>,
+        diagnostic_endpoints: Vec<DiagnosticEndpoint>,
+    ) -> Self {
+        let mut highlights = None;
+        if let Some((grammar, tree)) = grammar.zip(tree) {
+            let mut query_cursor = QueryCursorHandle::new();
+            // TODO - add a Tree-sitter API to remove the need for this.
+            let cursor = unsafe {
+                std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
+            };
+            let captures = cursor.set_byte_range(range.clone()).captures(
+                &grammar.highlights_query,
+                tree.root_node(),
+                TextProvider(text),
+            );
+            highlights = Some(BufferChunkHighlights {
+                captures,
+                next_capture: None,
+                stack: Default::default(),
+                highlight_map: grammar.highlight_map(),
+                _query_cursor: query_cursor,
+            })
+        }
+
+        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
+        let chunks = text.chunks_in_range(range.clone());
+
+        BufferChunks {
+            range,
+            chunks,
+            diagnostic_endpoints,
+            error_depth: 0,
+            warning_depth: 0,
+            information_depth: 0,
+            hint_depth: 0,
+            highlights,
+        }
+    }
+
     pub fn seek(&mut self, offset: usize) {
         self.range.start = offset;
         self.chunks.seek(self.range.start);
@@ -2568,11 +2579,11 @@ impl<'a> Iterator for BufferChunks<'a> {
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
-            let mut highlight_style = None;
+            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
-                    highlight_style = parent_highlight_id.style(highlights.theme);
+                    highlight_id = Some(*parent_highlight_id);
                }
            }
@@ -2585,7 +2596,7 @@ impl<'a> Iterator for BufferChunks<'a> {
            Some(Chunk {
                text: slice,
-                highlight_style,
+                highlight_id,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {


@@ -1090,7 +1090,7 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
-    for chunk in buffer.snapshot().chunks(range, Some(&Default::default())) {
+    for chunk in buffer.snapshot().chunks(range) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)