Eliminate non-highlighted chunks APIs
Now we only have a single code path for chunks across all layers, but highlighting is optional and controlled by a flag.

Co-Authored-By: Max Brunsfeld <maxbrunsfeld@gmail.com>
This commit is contained in: parent 7dd9b9539e, commit 52a4c15c14
9 changed files with 206 additions and 379 deletions
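As a rough sketch of the resulting call pattern (illustration only, not code from this commit; `BlockSnapshot`, `chunks`, `Chunk`, and `HighlightId` are names taken from the diff below), a caller now goes through one entry point and chooses plain text or highlighted output with the boolean flag:

    use std::ops::Range;

    // Hypothetical helpers assuming the unified `chunks(rows, highlights)` API shown below.
    fn plain_text(snapshot: &BlockSnapshot, rows: Range<u32>) -> String {
        // highlights = false: only `chunk.text` is meaningful on this path.
        snapshot.chunks(rows, false).map(|chunk| chunk.text).collect()
    }

    fn highlighted(snapshot: &BlockSnapshot, rows: Range<u32>) -> Vec<(String, HighlightId)> {
        // highlights = true: the same iterator also carries highlight ids.
        snapshot
            .chunks(rows, true)
            .map(|chunk| (chunk.text.to_string(), chunk.highlight_id))
            .collect()
    }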
@@ -4,7 +4,7 @@ mod patch;
 mod tab_map;
 mod wrap_map;
 
-use block_map::{BlockId, BlockMap, BlockPoint, BlockProperties};
+use block_map::{BlockId, BlockMap, BlockPoint};
 use buffer::Rope;
 use fold_map::{FoldMap, ToFoldPoint as _};
 use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
@@ -14,7 +14,7 @@ use sum_tree::Bias;
 use tab_map::TabMap;
 use wrap_map::WrapMap;
 
-pub use block_map::{BufferRows, HighlightedChunks};
+pub use block_map::{BlockDisposition, BlockProperties, BufferRows, Chunks};
 
 pub trait ToDisplayPoint {
     fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint;
@@ -213,6 +213,7 @@ impl DisplayMapSnapshot {
                 self.tabs_snapshot
                     .to_tab_point(point.to_fold_point(&self.folds_snapshot, bias)),
             ),
+            bias,
         ),
     )
 }
@@ -228,21 +229,19 @@ impl DisplayMapSnapshot {
         DisplayPoint(self.blocks_snapshot.max_point())
     }
 
-    pub fn chunks_at(&self, display_row: u32) -> wrap_map::Chunks {
-        self.wraps_snapshot.chunks_at(display_row)
+    pub fn text_chunks(&self, display_row: u32) -> impl Iterator<Item = &str> {
+        self.blocks_snapshot
+            .chunks(display_row..self.max_point().row() + 1, false)
+            .map(|h| h.text)
     }
 
-    pub fn highlighted_chunks_for_rows(
-        &mut self,
-        display_rows: Range<u32>,
-    ) -> block_map::HighlightedChunks {
-        self.blocks_snapshot
-            .highlighted_chunks_for_rows(display_rows)
+    pub fn chunks(&mut self, display_rows: Range<u32>) -> block_map::Chunks {
+        self.blocks_snapshot.chunks(display_rows, true)
     }
 
     pub fn chars_at<'a>(&'a self, point: DisplayPoint) -> impl Iterator<Item = char> + 'a {
         let mut column = 0;
-        let mut chars = self.chunks_at(point.row()).flat_map(str::chars);
+        let mut chars = self.text_chunks(point.row()).flat_map(str::chars);
         while column < point.column() {
             if let Some(c) = chars.next() {
                 column += c.len_utf8() as u32;
@@ -309,12 +308,12 @@ impl DisplayMapSnapshot {
     }
 
     pub fn text(&self) -> String {
-        self.chunks_at(0).collect()
+        self.text_chunks(0).collect()
     }
 
     pub fn line(&self, display_row: u32) -> String {
         let mut result = String::new();
-        for chunk in self.chunks_at(display_row) {
+        for chunk in self.text_chunks(display_row) {
             if let Some(ix) = chunk.find('\n') {
                 result.push_str(&chunk[0..ix]);
                 break;
@@ -610,7 +609,7 @@ mod tests {
 
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         assert_eq!(
-            snapshot.chunks_at(0).collect::<String>(),
+            snapshot.text_chunks(0).collect::<String>(),
            "one two \nthree four \nfive\nsix seven \neight"
        );
        assert_eq!(
@@ -659,7 +658,7 @@ mod tests {
 
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         assert_eq!(
-            snapshot.chunks_at(1).collect::<String>(),
+            snapshot.text_chunks(1).collect::<String>(),
             "three four \nfive\nsix and \nseven eight"
         );
 
@@ -668,13 +667,13 @@ mod tests {
 
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         assert_eq!(
-            snapshot.chunks_at(1).collect::<String>(),
+            snapshot.text_chunks(1).collect::<String>(),
             "three \nfour five\nsix and \nseven \neight"
         )
     }
 
     #[gpui::test]
-    fn test_chunks_at(cx: &mut gpui::MutableAppContext) {
+    fn test_text_chunks(cx: &mut gpui::MutableAppContext) {
         let text = sample_text(6, 6);
         let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
         let tab_size = 4;
@@ -701,7 +700,7 @@ mod tests {
 
         assert_eq!(
             map.update(cx, |map, cx| map.snapshot(cx))
-                .chunks_at(1)
+                .text_chunks(1)
                 .collect::<String>()
                 .lines()
                 .next(),
@@ -709,7 +708,7 @@ mod tests {
         );
         assert_eq!(
             map.update(cx, |map, cx| map.snapshot(cx))
-                .chunks_at(2)
+                .text_chunks(2)
                 .collect::<String>()
                 .lines()
                 .next(),
@@ -718,7 +717,7 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_highlighted_chunks_at(mut cx: gpui::TestAppContext) {
+    async fn test_chunks(mut cx: gpui::TestAppContext) {
        use unindent::Unindent as _;
 
        let text = r#"
@@ -767,7 +766,7 @@ mod tests {
        let map =
            cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(0..5, &map, &theme, cx)),
+            cx.update(|cx| chunks(0..5, &map, &theme, cx)),
            vec![
                ("fn ".to_string(), None),
                ("outer".to_string(), Some("fn.name")),
@@ -778,7 +777,7 @@ mod tests {
            ]
        );
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(3..5, &map, &theme, cx)),
+            cx.update(|cx| chunks(3..5, &map, &theme, cx)),
            vec![
                (" fn ".to_string(), Some("mod.body")),
                ("inner".to_string(), Some("fn.name")),
@@ -790,7 +789,7 @@ mod tests {
            map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
        });
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(0..2, &map, &theme, cx)),
+            cx.update(|cx| chunks(0..2, &map, &theme, cx)),
            vec![
                ("fn ".to_string(), None),
                ("out".to_string(), Some("fn.name")),
@@ -803,7 +802,7 @@ mod tests {
    }
 
    #[gpui::test]
-    async fn test_highlighted_chunks_with_soft_wrapping(mut cx: gpui::TestAppContext) {
+    async fn test_chunks_with_soft_wrapping(mut cx: gpui::TestAppContext) {
        use unindent::Unindent as _;
 
        cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX);
@@ -855,7 +854,7 @@ mod tests {
        let map = cx
            .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, Some(40.0), cx));
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(0..5, &map, &theme, cx)),
+            cx.update(|cx| chunks(0..5, &map, &theme, cx)),
            [
                ("fn \n".to_string(), None),
                ("oute\nr".to_string(), Some("fn.name")),
@@ -863,7 +862,7 @@ mod tests {
            ]
        );
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(3..5, &map, &theme, cx)),
+            cx.update(|cx| chunks(3..5, &map, &theme, cx)),
            [("{}\n\n".to_string(), None)]
        );
 
@@ -871,7 +870,7 @@ mod tests {
            map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
        });
        assert_eq!(
-            cx.update(|cx| highlighted_chunks(1..4, &map, &theme, cx)),
+            cx.update(|cx| chunks(1..4, &map, &theme, cx)),
            [
                ("out".to_string(), Some("fn.name")),
                ("…\n".to_string(), None),
@@ -946,11 +945,11 @@ mod tests {
        let map = map.update(cx, |map, cx| map.snapshot(cx));
        assert_eq!(map.text(), "✅ α\nβ \n🏀β γ");
        assert_eq!(
-            map.chunks_at(0).collect::<String>(),
+            map.text_chunks(0).collect::<String>(),
            "✅ α\nβ \n🏀β γ"
        );
-        assert_eq!(map.chunks_at(1).collect::<String>(), "β \n🏀β γ");
-        assert_eq!(map.chunks_at(2).collect::<String>(), "🏀β γ");
+        assert_eq!(map.text_chunks(1).collect::<String>(), "β \n🏀β γ");
+        assert_eq!(map.text_chunks(2).collect::<String>(), "🏀β γ");
 
        let point = Point::new(0, "✅\t\t".len() as u32);
        let display_point = DisplayPoint::new(0, "✅ ".len() as u32);
@@ -1007,7 +1006,7 @@ mod tests {
        )
    }
 
-    fn highlighted_chunks<'a>(
+    fn chunks<'a>(
        rows: Range<u32>,
        map: &ModelHandle<DisplayMap>,
        theme: &'a SyntaxTheme,
@@ -1015,7 +1014,7 @@ mod tests {
    ) -> Vec<(String, Option<&'a str>)> {
        let mut snapshot = map.update(cx, |map, cx| map.snapshot(cx));
        let mut chunks: Vec<(String, Option<&str>)> = Vec::new();
-        for chunk in snapshot.highlighted_chunks_for_rows(rows) {
+        for chunk in snapshot.chunks(rows) {
            let style_name = chunk.highlight_id.name(theme);
            if let Some((last_chunk, last_style_name)) = chunks.last_mut() {
                if style_name == *last_style_name {
@@ -1,7 +1,7 @@
 use super::wrap_map::{self, Edit as WrapEdit, Snapshot as WrapSnapshot, WrapPoint};
 use buffer::{rope, Anchor, Bias, Edit, Point, Rope, ToOffset, ToPoint as _};
 use gpui::{fonts::HighlightStyle, AppContext, ModelHandle};
-use language::{Buffer, HighlightedChunk};
+use language::{Buffer, Chunk};
 use parking_lot::Mutex;
 use std::{
     cmp::{self, Ordering},
@@ -52,14 +52,14 @@ where
     P: Clone,
     T: Clone,
 {
-    position: P,
-    text: T,
-    runs: Vec<(usize, HighlightStyle)>,
-    disposition: BlockDisposition,
+    pub position: P,
+    pub text: T,
+    pub runs: Vec<(usize, HighlightStyle)>,
+    pub disposition: BlockDisposition,
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
-enum BlockDisposition {
+pub enum BlockDisposition {
     Above,
     Below,
 }
@@ -76,10 +76,10 @@ struct TransformSummary {
     output: Point,
 }
 
-pub struct HighlightedChunks<'a> {
+pub struct Chunks<'a> {
     transforms: sum_tree::Cursor<'a, Transform, (BlockPoint, WrapPoint)>,
-    input_chunks: wrap_map::HighlightedChunks<'a>,
-    input_chunk: HighlightedChunk<'a>,
+    input_chunks: wrap_map::Chunks<'a>,
+    input_chunk: Chunk<'a>,
     block_chunks: Option<BlockChunks<'a>>,
     output_position: BlockPoint,
     max_output_position: BlockPoint,
@@ -433,12 +433,12 @@ impl<'a> BlockMapWriter<'a> {
 impl BlockSnapshot {
     #[cfg(test)]
     fn text(&mut self) -> String {
-        self.highlighted_chunks_for_rows(0..self.max_point().0.row + 1)
+        self.chunks(0..self.max_point().0.row + 1, false)
             .map(|chunk| chunk.text)
             .collect()
     }
 
-    pub fn highlighted_chunks_for_rows(&mut self, rows: Range<u32>) -> HighlightedChunks {
+    pub fn chunks(&self, rows: Range<u32>, highlights: bool) -> Chunks {
         let max_output_position = self.max_point().min(BlockPoint::new(rows.end, 0));
         let mut cursor = self.transforms.cursor::<(BlockPoint, WrapPoint)>();
         let output_position = BlockPoint::new(rows.start, 0);
@@ -449,8 +449,8 @@ impl BlockSnapshot {
         let input_end_row = self.to_wrap_point(BlockPoint::new(rows.end, 0)).row();
         let input_chunks = self
             .wrap_snapshot
-            .highlighted_chunks_for_rows(input_start_row..input_end_row);
-        HighlightedChunks {
+            .chunks(input_start_row..input_end_row, highlights);
+        Chunks {
             input_chunks,
             input_chunk: Default::default(),
             block_chunks: None,
@@ -532,9 +532,9 @@ impl BlockSnapshot {
         }
     }
 
-    pub fn to_block_point(&self, wrap_point: WrapPoint) -> BlockPoint {
+    pub fn to_block_point(&self, wrap_point: WrapPoint, bias: Bias) -> BlockPoint {
         let mut cursor = self.transforms.cursor::<(WrapPoint, BlockPoint)>();
-        cursor.seek(&wrap_point, Bias::Right, &());
+        cursor.seek(&wrap_point, bias, &());
         while let Some(item) = cursor.item() {
             if item.is_isomorphic() {
                 break;
@@ -581,8 +581,8 @@ impl Transform {
     }
 }
 
-impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = HighlightedChunk<'a>;
+impl<'a> Iterator for Chunks<'a> {
+    type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         if self.output_position >= self.max_output_position {
@@ -630,7 +630,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             self.transforms.next(&());
         }
 
-        Some(HighlightedChunk {
+        Some(Chunk {
             text: prefix,
             ..self.input_chunk
         })
@@ -665,7 +665,7 @@ impl<'a> BlockChunks<'a> {
 }
 
 impl<'a> Iterator for BlockChunks<'a> {
-    type Item = HighlightedChunk<'a>;
+    type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         if self.chunk.is_none() {
@@ -693,7 +693,7 @@ impl<'a> Iterator for BlockChunks<'a> {
             Some(suffix)
         };
 
-        Some(HighlightedChunk {
+        Some(Chunk {
             text: chunk,
             highlight_id: Default::default(),
             diagnostic: None,
@@ -712,33 +712,36 @@ impl<'a> Iterator for BufferRows<'a> {
         let (buffer_row, is_wrapped) = self.input_buffer_row.unwrap();
         let in_block = self.in_block;
 
-        log::info!(
-            "============== Iterator next. Output row: {}, Input row: {}, Buffer row: {}, In block {} ===============",
-            self.output_row,
-            self.input_row,
-            buffer_row,
-            in_block
-        );
+        // log::info!(
+        //     "============== next - (output_row: {}, input_row: {}, buffer_row: {}, in_block: {}) ===============",
+        //     self.output_row,
+        //     self.input_row,
+        //     buffer_row,
+        //     in_block
+        // );
 
         self.output_row += 1;
         let output_point = BlockPoint::new(self.output_row, 0);
         let transform_end = self.transforms.end(&()).0;
         // if output_point > transform_end || output_point == transform_end && in_block {
         if output_point >= transform_end {
-            log::info!(" Calling next once");
+            // log::info!(" Calling next once");
             self.transforms.next(&());
             if self.transforms.end(&()).0 < output_point {
-                log::info!(" Calling next twice");
+                // log::info!(" Calling next twice");
                 self.transforms.next(&());
             }
-            self.in_block = self.transforms.item().map_or(false, |t| !t.is_isomorphic());
 
-            log::info!(
-                " Advanced to the next transform (block text: {:?}). Output row: {}, Transform starts at: {:?}",
-                self.transforms.item().and_then(|t| t.block.as_ref()).map(|b| b.text.to_string()),
-                self.output_row,
-                self.transforms.start().1
-            );
+            if let Some(transform) = self.transforms.item() {
+                self.in_block = !transform.is_isomorphic();
+            }
+
+            // log::info!(
+            //     " Advanced to the next transform (block text: {:?}). Output row: {}, Transform starts at: {:?}",
+            //     self.transforms.item().and_then(|t| t.block.as_ref()).map(|b| b.text.to_string()),
+            //     self.output_row,
+            //     self.transforms.start().1
+            // );
 
             let mut new_input_position = self.transforms.start().1 .0;
             if self.transforms.item().map_or(false, |t| t.is_isomorphic()) {
@@ -749,24 +752,24 @@ impl<'a> Iterator for BufferRows<'a> {
             if new_input_position.row > self.input_row {
                 self.input_row = new_input_position.row;
                 self.input_buffer_row = self.input_buffer_rows.next();
-                log::info!(
-                    " Advanced the input buffer row. Input row: {}, Input buffer row {:?}",
-                    self.input_row,
-                    self.input_buffer_row
-                )
+                // log::info!(
+                //     " Advanced the input buffer row. Input row: {}, Input buffer row {:?}",
+                //     self.input_row,
+                //     self.input_buffer_row
+                // )
             }
         } else if self.transforms.item().map_or(true, |t| t.is_isomorphic()) {
             self.input_row += 1;
             self.input_buffer_row = self.input_buffer_rows.next();
-            log::info!(
-                " Advancing in isomorphic transform (off the end: {}). Input row: {}, Input buffer row {:?}",
-                self.transforms.item().is_none(),
-                self.input_row,
-                self.input_buffer_row
-            )
+            // log::info!(
+            //     " Advancing in isomorphic transform (off the end: {}). Input row: {}, Input buffer row {:?}",
+            //     self.transforms.item().is_none(),
+            //     self.input_row,
+            //     self.input_buffer_row
+            // )
         }
 
-        Some((buffer_row, !is_wrapped && !in_block))
+        Some((buffer_row, false))
     }
 }
 
@@ -886,7 +889,7 @@ mod tests {
            "aaa\nBLOCK 1\nBLOCK 2\nbbb\nccc\nddd\nBLOCK 3"
        );
        assert_eq!(
-            snapshot.to_block_point(WrapPoint::new(1, 0)),
+            snapshot.to_block_point(WrapPoint::new(1, 0), Bias::Right),
            BlockPoint::new(3, 0)
        );
        assert_eq!(
@@ -1194,7 +1197,7 @@ mod tests {
            }
 
            let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
-            expected_buffer_rows.push((buffer_row, !soft_wrapped));
+            expected_buffer_rows.push((buffer_row, false));
            expected_text.push_str(input_line);
 
            while let Some((_, block)) = sorted_blocks.peek() {
@@ -1215,7 +1218,7 @@ mod tests {
        assert_eq!(blocks_snapshot.text(), expected_text);
        for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() {
            let wrap_point = WrapPoint::new(row, 0);
-            let block_point = blocks_snapshot.to_block_point(wrap_point);
+            let block_point = blocks_snapshot.to_block_point(wrap_point, Bias::Right);
            assert_eq!(blocks_snapshot.to_wrap_point(block_point), wrap_point);
        }
 
@@ -1,7 +1,6 @@
 use gpui::{AppContext, ModelHandle};
 use language::{
-    Anchor, AnchorRangeExt, Buffer, HighlightId, HighlightedChunk, Point, PointUtf16, TextSummary,
-    ToOffset,
+    Anchor, AnchorRangeExt, Buffer, Chunk, HighlightId, Point, PointUtf16, TextSummary, ToOffset,
 };
 use parking_lot::Mutex;
 use std::{
@@ -499,7 +498,9 @@ pub struct Snapshot {
 impl Snapshot {
     #[cfg(test)]
     pub fn text(&self) -> String {
-        self.chunks_at(FoldOffset(0)).collect()
+        self.chunks(FoldOffset(0)..self.len(), false)
+            .map(|c| c.text)
+            .collect()
     }
 
     #[cfg(test)]
@@ -551,7 +552,6 @@ impl Snapshot {
         summary
     }
 
-    #[cfg(test)]
     pub fn len(&self) -> FoldOffset {
         FoldOffset(self.transforms.summary().output.bytes)
     }
@@ -628,21 +628,13 @@ impl Snapshot {
         false
     }
 
-    pub fn chunks_at(&self, offset: FoldOffset) -> Chunks {
-        let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
-        transform_cursor.seek(&offset, Bias::Right, &());
-        let overshoot = offset.0 - transform_cursor.start().0 .0;
-        let buffer_offset = transform_cursor.start().1 + overshoot;
-        Chunks {
-            transform_cursor,
-            buffer_offset,
-            buffer_chunks: self
-                .buffer_snapshot
-                .text_for_range(buffer_offset..self.buffer_snapshot.len()),
-        }
+    pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator<Item = char> {
+        let start = start.to_offset(self);
+        self.chunks(start..self.len(), false)
+            .flat_map(|chunk| chunk.text.chars())
     }
 
-    pub fn highlighted_chunks(&mut self, range: Range<FoldOffset>) -> HighlightedChunks {
+    pub fn chunks(&self, range: Range<FoldOffset>, enable_highlights: bool) -> Chunks {
         let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
 
         transform_cursor.seek(&range.end, Bias::Right, &());
@@ -653,21 +645,16 @@ impl Snapshot {
         let overshoot = range.start.0 - transform_cursor.start().0 .0;
         let buffer_start = transform_cursor.start().1 + overshoot;
 
-        HighlightedChunks {
+        Chunks {
             transform_cursor,
             buffer_offset: buffer_start,
             buffer_chunks: self
                 .buffer_snapshot
-                .highlighted_text_for_range(buffer_start..buffer_end),
+                .chunks(buffer_start..buffer_end, enable_highlights),
             buffer_chunk: None,
         }
     }
 
-    pub fn chars_at<'a>(&'a self, point: FoldPoint) -> impl Iterator<Item = char> + 'a {
-        let offset = point.to_offset(self);
-        self.chunks_at(offset).flat_map(str::chars)
-    }
-
     #[cfg(test)]
     pub fn clip_offset(&self, offset: FoldOffset, bias: Bias) -> FoldOffset {
         let mut cursor = self.transforms.cursor::<(FoldOffset, usize)>();
@@ -948,66 +935,13 @@ impl<'a> Iterator for BufferRows<'a> {
 
 pub struct Chunks<'a> {
     transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
-    buffer_chunks: buffer::Chunks<'a>,
+    buffer_chunks: language::Chunks<'a>,
+    buffer_chunk: Option<(usize, Chunk<'a>)>,
     buffer_offset: usize,
 }
 
 impl<'a> Iterator for Chunks<'a> {
-    type Item = &'a str;
+    type Item = Chunk<'a>;
 
-    fn next(&mut self) -> Option<Self::Item> {
-        let transform = if let Some(item) = self.transform_cursor.item() {
-            item
-        } else {
-            return None;
-        };
-
-        // If we're in a fold, then return the fold's display text and
-        // advance the transform and buffer cursors to the end of the fold.
-        if let Some(output_text) = transform.output_text {
-            self.buffer_offset += transform.summary.input.bytes;
-            self.buffer_chunks.seek(self.buffer_offset);
-
-            while self.buffer_offset >= self.transform_cursor.end(&()).1
-                && self.transform_cursor.item().is_some()
-            {
-                self.transform_cursor.next(&());
-            }
-
-            return Some(output_text);
-        }
-
-        // Otherwise, take a chunk from the buffer's text.
-        if let Some(mut chunk) = self.buffer_chunks.peek() {
-            let offset_in_chunk = self.buffer_offset - self.buffer_chunks.offset();
-            chunk = &chunk[offset_in_chunk..];
-
-            // Truncate the chunk so that it ends at the next fold.
-            let region_end = self.transform_cursor.end(&()).1 - self.buffer_offset;
-            if chunk.len() >= region_end {
-                chunk = &chunk[0..region_end];
-                self.transform_cursor.next(&());
-            } else {
-                self.buffer_chunks.next();
-            }
-
-            self.buffer_offset += chunk.len();
-            return Some(chunk);
-        }
-
-        None
-    }
-}
-
-pub struct HighlightedChunks<'a> {
-    transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
-    buffer_chunks: language::HighlightedChunks<'a>,
-    buffer_chunk: Option<(usize, HighlightedChunk<'a>)>,
-    buffer_offset: usize,
-}
-
-impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = HighlightedChunk<'a>;
-
     fn next(&mut self) -> Option<Self::Item> {
         let transform = if let Some(item) = self.transform_cursor.item() {
@@ -1029,7 +963,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             self.transform_cursor.next(&());
         }
 
-        return Some(HighlightedChunk {
+        return Some(Chunk {
             text: output_text,
             highlight_id: HighlightId::default(),
             diagnostic: None,
@@ -1428,11 +1362,14 @@ mod tests {
        }
 
        for _ in 0..5 {
-            let offset = snapshot
+            let start = snapshot
                .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right);
            assert_eq!(
-                snapshot.chunks_at(offset).collect::<String>(),
-                &expected_text[offset.0..],
+                snapshot
+                    .chunks(start..snapshot.len(), false)
+                    .map(|c| c.text)
+                    .collect::<String>(),
+                &expected_text[start.0..],
            );
        }
 
@@ -1,6 +1,6 @@
 use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot, ToFoldPoint};
 use buffer::Point;
-use language::{rope, HighlightedChunk};
+use language::{rope, Chunk};
 use parking_lot::Mutex;
 use std::{mem, ops::Range};
 use sum_tree::Bias;
@@ -22,6 +22,7 @@ impl TabMap {
         mut fold_edits: Vec<FoldEdit>,
     ) -> (Snapshot, Vec<Edit>) {
         let mut old_snapshot = self.0.lock();
+        let max_offset = old_snapshot.fold_snapshot.len();
         let new_snapshot = Snapshot {
             fold_snapshot,
             tab_size: old_snapshot.tab_size,
@@ -32,11 +33,11 @@ impl TabMap {
             let mut delta = 0;
             for chunk in old_snapshot
                 .fold_snapshot
-                .chunks_at(fold_edit.old_bytes.end)
+                .chunks(fold_edit.old_bytes.end..max_offset, false)
             {
                 let patterns: &[_] = &['\t', '\n'];
-                if let Some(ix) = chunk.find(patterns) {
-                    if &chunk[ix..ix + 1] == "\t" {
+                if let Some(ix) = chunk.text.find(patterns) {
+                    if &chunk.text[ix..ix + 1] == "\t" {
                         fold_edit.old_bytes.end.0 += delta + ix + 1;
                         fold_edit.new_bytes.end.0 += delta + ix + 1;
                     }
@@ -44,7 +45,7 @@ impl TabMap {
                     break;
                 }
 
-                delta += chunk.len();
+                delta += chunk.text.len();
             }
         }
 
@@ -110,7 +111,10 @@ impl Snapshot {
 
         let mut first_line_chars = 0;
         let mut first_line_bytes = 0;
-        for c in self.chunks_at(range.start).flat_map(|chunk| chunk.chars()) {
+        for c in self
+            .chunks(range.start..self.max_point(), false)
+            .flat_map(|chunk| chunk.text.chars())
+        {
             if c == '\n'
                 || (range.start.row() == range.end.row() && first_line_bytes == range.end.column())
             {
@@ -123,8 +127,11 @@ impl Snapshot {
         let mut last_line_chars = 0;
         let mut last_line_bytes = 0;
         for c in self
-            .chunks_at(TabPoint::new(range.end.row(), 0).max(range.start))
-            .flat_map(|chunk| chunk.chars())
+            .chunks(
+                TabPoint::new(range.end.row(), 0).max(range.start)..self.max_point(),
+                false,
+            )
+            .flat_map(|chunk| chunk.text.chars())
         {
             if last_line_bytes == range.end.column() {
                 break;
@@ -146,21 +153,7 @@ impl Snapshot {
         self.fold_snapshot.version
     }
 
-    pub fn chunks_at(&self, point: TabPoint) -> Chunks {
-        let (point, expanded_char_column, to_next_stop) = self.to_fold_point(point, Bias::Left);
-        let fold_chunks = self
-            .fold_snapshot
-            .chunks_at(point.to_offset(&self.fold_snapshot));
-        Chunks {
-            fold_chunks,
-            column: expanded_char_column,
-            tab_size: self.tab_size,
-            chunk: &SPACES[0..to_next_stop],
-            skip_leading_tab: to_next_stop > 0,
-        }
-    }
-
-    pub fn highlighted_chunks(&mut self, range: Range<TabPoint>) -> HighlightedChunks {
+    pub fn chunks(&self, range: Range<TabPoint>, highlights: bool) -> Chunks {
         let (input_start, expanded_char_column, to_next_stop) =
             self.to_fold_point(range.start, Bias::Left);
         let input_start = input_start.to_offset(&self.fold_snapshot);
@@ -168,13 +161,13 @@ impl Snapshot {
             .to_fold_point(range.end, Bias::Right)
             .0
             .to_offset(&self.fold_snapshot);
-        HighlightedChunks {
+        Chunks {
             fold_chunks: self
                 .fold_snapshot
-                .highlighted_chunks(input_start..input_end),
+                .chunks(input_start..input_end, highlights),
             column: expanded_char_column,
             tab_size: self.tab_size,
-            chunk: HighlightedChunk {
+            chunk: Chunk {
                 text: &SPACES[0..to_next_stop],
                 ..Default::default()
             },
@@ -188,7 +181,9 @@ impl Snapshot {
 
     #[cfg(test)]
     pub fn text(&self) -> String {
-        self.chunks_at(Default::default()).collect()
+        self.chunks(TabPoint::zero()..self.max_point(), false)
+            .map(|chunk| chunk.text)
+            .collect()
     }
 
     pub fn max_point(&self) -> TabPoint {
@@ -379,63 +374,14 @@ const SPACES: &'static str = " ";
 
 pub struct Chunks<'a> {
     fold_chunks: fold_map::Chunks<'a>,
-    chunk: &'a str,
+    chunk: Chunk<'a>,
     column: usize,
     tab_size: usize,
     skip_leading_tab: bool,
 }
 
 impl<'a> Iterator for Chunks<'a> {
-    type Item = &'a str;
+    type Item = Chunk<'a>;
 
-    fn next(&mut self) -> Option<Self::Item> {
-        if self.chunk.is_empty() {
-            if let Some(chunk) = self.fold_chunks.next() {
-                self.chunk = chunk;
-                if self.skip_leading_tab {
-                    self.chunk = &self.chunk[1..];
-                    self.skip_leading_tab = false;
-                }
-            } else {
-                return None;
-            }
-        }
-
-        for (ix, c) in self.chunk.char_indices() {
-            match c {
-                '\t' => {
-                    if ix > 0 {
-                        let (prefix, suffix) = self.chunk.split_at(ix);
-                        self.chunk = suffix;
-                        return Some(prefix);
-                    } else {
-                        self.chunk = &self.chunk[1..];
-                        let len = self.tab_size - self.column % self.tab_size;
-                        self.column += len;
-                        return Some(&SPACES[0..len]);
-                    }
-                }
-                '\n' => self.column = 0,
-                _ => self.column += 1,
-            }
-        }
-
-        let result = Some(self.chunk);
-        self.chunk = "";
-        result
-    }
-}
-
-pub struct HighlightedChunks<'a> {
-    fold_chunks: fold_map::HighlightedChunks<'a>,
-    chunk: HighlightedChunk<'a>,
-    column: usize,
-    tab_size: usize,
-    skip_leading_tab: bool,
-}
-
-impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = HighlightedChunk<'a>;
-
     fn next(&mut self) -> Option<Self::Item> {
         if self.chunk.text.is_empty() {
@@ -456,7 +402,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             if ix > 0 {
                 let (prefix, suffix) = self.chunk.text.split_at(ix);
                 self.chunk.text = suffix;
-                return Some(HighlightedChunk {
+                return Some(Chunk {
                     text: prefix,
                     ..self.chunk
                 });
@@ -464,7 +410,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
                 self.chunk.text = &self.chunk.text[1..];
                 let len = self.tab_size - self.column % self.tab_size;
                 self.column += len;
-                return Some(HighlightedChunk {
+                return Some(Chunk {
                     text: &SPACES[0..len],
                     ..self.chunk
                 });
@@ -7,7 +7,7 @@ use gpui::{
     fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task,
 };
-use language::{HighlightedChunk, Point};
+use language::{Chunk, Point};
 use lazy_static::lazy_static;
 use smol::future::yield_now;
 use std::{collections::VecDeque, mem, ops::Range, time::Duration};
@@ -54,14 +54,7 @@ pub struct WrapPoint(pub super::Point);
 
 pub struct Chunks<'a> {
     input_chunks: tab_map::Chunks<'a>,
-    input_chunk: &'a str,
-    output_position: WrapPoint,
-    transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
-}
-
-pub struct HighlightedChunks<'a> {
-    input_chunks: tab_map::HighlightedChunks<'a>,
-    input_chunk: HighlightedChunk<'a>,
+    input_chunk: Chunk<'a>,
     output_position: WrapPoint,
     max_output_row: u32,
     transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
@@ -430,10 +423,15 @@ impl Snapshot {
 
         let mut line = String::new();
         let mut remaining = None;
-        let mut chunks = new_tab_snapshot.chunks_at(TabPoint::new(edit.new_rows.start, 0));
+        let mut chunks = new_tab_snapshot.chunks(
+            TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
+            false,
+        );
         let mut edit_transforms = Vec::<Transform>::new();
         for _ in edit.new_rows.start..edit.new_rows.end {
-            while let Some(chunk) = remaining.take().or_else(|| chunks.next()) {
+            while let Some(chunk) =
+                remaining.take().or_else(|| chunks.next().map(|c| c.text))
+            {
                 if let Some(ix) = chunk.find('\n') {
                     line.push_str(&chunk[..ix + 1]);
                     remaining = Some(&chunk[ix + 1..]);
@@ -552,24 +550,12 @@ impl Snapshot {
         unsafe { Patch::new_unchecked(wrap_edits) }
     }
 
-    pub fn chunks_at(&self, wrap_row: u32) -> Chunks {
-        let point = WrapPoint::new(wrap_row, 0);
-        let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
-        transforms.seek(&point, Bias::Right, &());
-        let mut input_position = TabPoint(transforms.start().1 .0);
-        if transforms.item().map_or(false, |t| t.is_isomorphic()) {
-            input_position.0 += point.0 - transforms.start().0 .0;
-        }
-        let input_chunks = self.tab_snapshot.chunks_at(input_position);
-        Chunks {
-            input_chunks,
-            transforms,
-            output_position: point,
-            input_chunk: "",
-        }
+    pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
+        self.chunks(wrap_row..self.max_point().row() + 1, false)
+            .map(|h| h.text)
     }
 
-    pub fn highlighted_chunks_for_rows(&mut self, rows: Range<u32>) -> HighlightedChunks {
+    pub fn chunks(&self, rows: Range<u32>, highlights: bool) -> Chunks {
         let output_start = WrapPoint::new(rows.start, 0);
         let output_end = WrapPoint::new(rows.end, 0);
         let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@@ -581,8 +567,8 @@ impl Snapshot {
         let input_end = self
             .to_tab_point(output_end)
             .min(self.tab_snapshot.max_point());
-        HighlightedChunks {
-            input_chunks: self.tab_snapshot.highlighted_chunks(input_start..input_end),
+        Chunks {
+            input_chunks: self.tab_snapshot.chunks(input_start..input_end, highlights),
             input_chunk: Default::default(),
             output_position: output_start,
             max_output_row: rows.end,
@@ -600,7 +586,7 @@ impl Snapshot {
 
     pub fn line_len(&self, row: u32) -> u32 {
         let mut len = 0;
-        for chunk in self.chunks_at(row) {
+        for chunk in self.text_chunks(row) {
             if let Some(newline_ix) = chunk.find('\n') {
                 len += newline_ix;
                 break;
@@ -733,52 +719,7 @@ impl Snapshot {
     }
 }
 
 impl<'a> Iterator for Chunks<'a> {
-    type Item = &'a str;
+    type Item = Chunk<'a>;
 
-    fn next(&mut self) -> Option<Self::Item> {
-        let transform = self.transforms.item()?;
-        if let Some(display_text) = transform.display_text {
-            if self.output_position > self.transforms.start().0 {
-                self.output_position.0.column += transform.summary.output.lines.column;
-                self.transforms.next(&());
-                return Some(&display_text[1..]);
-            } else {
-                self.output_position.0 += transform.summary.output.lines;
-                self.transforms.next(&());
-                return Some(display_text);
-            }
-        }
-
-        if self.input_chunk.is_empty() {
-            self.input_chunk = self.input_chunks.next().unwrap();
-        }
-
-        let mut input_len = 0;
-        let transform_end = self.transforms.end(&()).0;
-        for c in self.input_chunk.chars() {
-            let char_len = c.len_utf8();
-            input_len += char_len;
-            if c == '\n' {
-                *self.output_position.row_mut() += 1;
-                *self.output_position.column_mut() = 0;
-            } else {
-                *self.output_position.column_mut() += char_len as u32;
-            }
-
-            if self.output_position >= transform_end {
-                self.transforms.next(&());
-                break;
-            }
-        }
-
-        let (prefix, suffix) = self.input_chunk.split_at(input_len);
-        self.input_chunk = suffix;
-        Some(prefix)
-    }
-}
-
-impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = HighlightedChunk<'a>;
-
     fn next(&mut self) -> Option<Self::Item> {
         if self.output_position.row() >= self.max_output_row {
@@ -803,7 +744,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
 
         self.output_position.0 += summary;
         self.transforms.next(&());
-        return Some(HighlightedChunk {
+        return Some(Chunk {
             text: &display_text[start_ix..end_ix],
             ..self.input_chunk
         });
@@ -833,7 +774,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
 
         let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
         self.input_chunk.text = suffix;
-        Some(HighlightedChunk {
+        Some(Chunk {
             text: prefix,
             ..self.input_chunk
         })
@@ -1216,7 +1157,7 @@ mod tests {
 
    impl Snapshot {
        pub fn text(&self) -> String {
-            self.chunks_at(0).collect()
+            self.text_chunks(0).collect()
        }
 
        fn verify_chunks(&mut self, rng: &mut impl Rng) {
@@ -1225,7 +1166,7 @@ mod tests {
            let start_row = rng.gen_range(0..=end_row);
            end_row += 1;
 
-            let mut expected_text = self.chunks_at(start_row).collect::<String>();
+            let mut expected_text = self.text_chunks(start_row).collect::<String>();
            if expected_text.ends_with("\n") {
                expected_text.push('\n');
            }
@@ -1239,7 +1180,7 @@ mod tests {
            }
 
            let actual_text = self
-                .highlighted_chunks_for_rows(start_row..end_row)
+                .chunks(start_row..end_row, false)
                .map(|c| c.text)
                .collect::<String>();
            assert_eq!(
@@ -17,7 +17,7 @@ use gpui::{
     MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
 };
 use json::json;
-use language::{DiagnosticSeverity, HighlightedChunk};
+use language::{Chunk, DiagnosticSeverity};
 use smallvec::SmallVec;
 use std::{
     cmp::{self, Ordering},
@@ -493,9 +493,9 @@ impl EditorElement {
         let mut styles = Vec::new();
         let mut row = rows.start;
         let mut line_exceeded_max_len = false;
-        let chunks = snapshot.highlighted_chunks_for_rows(rows.clone());
+        let chunks = snapshot.chunks(rows.clone());
 
-        let newline_chunk = HighlightedChunk {
+        let newline_chunk = Chunk {
             text: "\n",
             ..Default::default()
         };
@@ -2663,12 +2663,8 @@
     self.display_snapshot.buffer_rows(start_row)
 }
 
-pub fn highlighted_chunks_for_rows(
-    &mut self,
-    display_rows: Range<u32>,
-) -> display_map::HighlightedChunks {
-    self.display_snapshot
-        .highlighted_chunks_for_rows(display_rows)
+pub fn chunks(&mut self, display_rows: Range<u32>) -> display_map::Chunks {
+    self.display_snapshot.chunks(display_rows)
 }
 
 pub fn scroll_position(&self) -> Vector2F {
@@ -78,7 +78,6 @@ pub struct Snapshot {
     diagnostics: AnchorRangeMultimap<Diagnostic>,
     is_parsing: bool,
     language: Option<Arc<Language>>,
-    query_cursor: QueryCursorHandle,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -191,11 +190,12 @@ struct Highlights<'a> {
     next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
     stack: Vec<(usize, HighlightId)>,
     highlight_map: HighlightMap,
+    _query_cursor: QueryCursorHandle,
 }
 
-pub struct HighlightedChunks<'a> {
+pub struct Chunks<'a> {
     range: Range<usize>,
-    chunks: Chunks<'a>,
+    chunks: rope::Chunks<'a>,
     diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
     error_depth: usize,
     warning_depth: usize,
@@ -205,7 +205,7 @@ pub struct HighlightedChunks<'a> {
 }
 
 #[derive(Clone, Copy, Debug, Default)]
-pub struct HighlightedChunk<'a> {
+pub struct Chunk<'a> {
     pub text: &'a str,
     pub highlight_id: HighlightId,
     pub diagnostic: Option<DiagnosticSeverity>,
@@ -342,7 +342,6 @@ impl Buffer {
             diagnostics: self.diagnostics.clone(),
             is_parsing: self.parsing_in_background,
             language: self.language.clone(),
-            query_cursor: QueryCursorHandle::new(),
         }
     }
 }
@@ -1635,51 +1634,56 @@ impl Snapshot {
             .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
     }
 
-    pub fn highlighted_text_for_range<T: ToOffset>(
-        &mut self,
-        range: Range<T>,
-    ) -> HighlightedChunks {
+    pub fn chunks<T: ToOffset>(&self, range: Range<T>, highlight: bool) -> Chunks {
         let range = range.start.to_offset(&*self)..range.end.to_offset(&*self);
 
+        let mut highlights = None;
         let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
-        for (_, range, diagnostic) in
-            self.diagnostics
-                .intersecting_ranges(range.clone(), self.content(), true)
-        {
-            diagnostic_endpoints.push(DiagnosticEndpoint {
-                offset: range.start,
-                is_start: true,
-                severity: diagnostic.severity,
-            });
-            diagnostic_endpoints.push(DiagnosticEndpoint {
-                offset: range.end,
-                is_start: false,
-                severity: diagnostic.severity,
-            });
-        }
-        diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
-        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
+        if highlight {
+            for (_, range, diagnostic) in
+                self.diagnostics
+                    .intersecting_ranges(range.clone(), self.content(), true)
+            {
+                diagnostic_endpoints.push(DiagnosticEndpoint {
+                    offset: range.start,
+                    is_start: true,
+                    severity: diagnostic.severity,
+                });
+                diagnostic_endpoints.push(DiagnosticEndpoint {
+                    offset: range.end,
+                    is_start: false,
+                    severity: diagnostic.severity,
+                });
+            }
+            diagnostic_endpoints
+                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
 
-        let chunks = self.text.as_rope().chunks_in_range(range.clone());
-        let highlights =
             if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) {
-                let captures = self.query_cursor.set_byte_range(range.clone()).captures(
+                let mut query_cursor = QueryCursorHandle::new();
+
+                // TODO - add a Tree-sitter API to remove the need for this.
+                let cursor = unsafe {
+                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
+                };
+                let captures = cursor.set_byte_range(range.clone()).captures(
                     &language.highlights_query,
                     tree.root_node(),
                     TextProvider(self.text.as_rope()),
                 );
-                Some(Highlights {
+                highlights = Some(Highlights {
                     captures,
                     next_capture: None,
                     stack: Default::default(),
                     highlight_map: language.highlight_map(),
+                    _query_cursor: query_cursor,
                })
-            } else {
-                None
-            };
+            }
+        }
 
-        HighlightedChunks {
+        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
+        let chunks = self.text.as_rope().chunks_in_range(range.clone());
+
+        Chunks {
             range,
             chunks,
             diagnostic_endpoints,
@@ -1700,7 +1704,6 @@ impl Clone for Snapshot {
             diagnostics: self.diagnostics.clone(),
             is_parsing: self.is_parsing,
             language: self.language.clone(),
-            query_cursor: QueryCursorHandle::new(),
         }
     }
 }
@@ -1731,7 +1734,9 @@ impl<'a> Iterator for ByteChunks<'a> {
     }
 }
 
-impl<'a> HighlightedChunks<'a> {
+unsafe impl<'a> Send for Chunks<'a> {}
+
+impl<'a> Chunks<'a> {
     pub fn seek(&mut self, offset: usize) {
         self.range.start = offset;
         self.chunks.seek(self.range.start);
@@ -1790,8 +1795,8 @@ impl<'a> HighlightedChunks<'a> {
     }
 }
 
-impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = HighlightedChunk<'a>;
+impl<'a> Iterator for Chunks<'a> {
+    type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         let mut next_capture_start = usize::MAX;
@@ -1855,7 +1860,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             self.chunks.next().unwrap();
         }
 
-        Some(HighlightedChunk {
+        Some(Chunk {
             text: slice,
             highlight_id,
             diagnostic: self.current_diagnostic_severity(),
@@ -901,12 +901,12 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
     });
 }
 
-fn chunks_with_diagnostics<T: ToOffset>(
+fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
     buffer: &Buffer,
     range: Range<T>,
 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
     let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
-    for chunk in buffer.snapshot().highlighted_text_for_range(range) {
+    for chunk in buffer.snapshot().chunks(range, true) {
         if chunks
             .last()
             .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)