Fill in some missing methods on MultiBuffer, MultiBufferSnapshot
commit a758bd4f8d
parent 5b31c1ba4e

10 changed files with 287 additions and 189 deletions
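Most of the filled-in methods follow one pattern: a MultiBuffer currently wraps a single excerpt, so each previously stubbed `todo!()` is replaced by resolving any multi-buffer coordinates to plain offsets and then delegating to the one underlying buffer via `as_singleton()`. The sketch below is a minimal, self-contained model of that delegation shape using simplified stand-in types; it is not the actual Zed API, just an illustration of the pattern used throughout this commit.

// Hypothetical, simplified stand-ins for BufferSnapshot / MultiBufferSnapshot.
struct BufferSnapshot {
    text: String,
}

impl BufferSnapshot {
    fn contains_str_at(&self, offset: usize, needle: &str) -> bool {
        self.text[offset..].starts_with(needle)
    }
}

struct Excerpt {
    buffer: BufferSnapshot,
}

struct MultiBufferSnapshot {
    excerpts: Vec<Excerpt>,
}

impl MultiBufferSnapshot {
    // Mirrors the new private helper: Some(&buffer) only when there is
    // exactly one excerpt, otherwise None.
    fn as_singleton(&self) -> Option<&BufferSnapshot> {
        let mut excerpts = self.excerpts.iter();
        let buffer = excerpts.next().map(|excerpt| &excerpt.buffer);
        if excerpts.next().is_none() {
            buffer
        } else {
            None
        }
    }

    // The delegation shape used by the filled-in methods: convert the
    // position, then forward to the single underlying buffer snapshot.
    fn contains_str_at(&self, offset: usize, needle: &str) -> bool {
        self.as_singleton()
            .expect("only the single-excerpt case is implemented so far")
            .contains_str_at(offset, needle)
    }
}

fn main() {
    let snapshot = MultiBufferSnapshot {
        excerpts: vec![Excerpt {
            buffer: BufferSnapshot {
                text: "fn main() {}".into(),
            },
        }],
    };
    assert!(snapshot.contains_str_at(3, "main"));
}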
@@ -54,7 +54,7 @@ impl DisplayMap {
         let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
         let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
-        let block_map = BlockMap::new(buffer.clone(), snapshot);
+        let block_map = BlockMap::new(snapshot);
         cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
         DisplayMap {
             buffer,
@@ -1,7 +1,7 @@
 use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot};
-use gpui::{AppContext, ElementBox, ModelHandle};
+use gpui::{AppContext, ElementBox};
 use language::{
-    multi_buffer::{Anchor, MultiBuffer, ToOffset, ToPoint as _},
+    multi_buffer::{Anchor, ToOffset, ToPoint as _},
     Chunk,
 };
 use parking_lot::Mutex;
@@ -22,7 +22,6 @@ use theme::SyntaxTheme;
 const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
 
 pub struct BlockMap {
-    buffer: ModelHandle<MultiBuffer>,
     next_block_id: AtomicUsize,
     wrap_snapshot: Mutex<WrapSnapshot>,
     blocks: Vec<Arc<Block>>,
@@ -112,9 +111,8 @@ pub struct BlockBufferRows<'a> {
 }
 
 impl BlockMap {
-    pub fn new(buffer: ModelHandle<MultiBuffer>, wrap_snapshot: WrapSnapshot) -> Self {
+    pub fn new(wrap_snapshot: WrapSnapshot) -> Self {
         Self {
-            buffer,
             next_block_id: AtomicUsize::new(0),
             blocks: Vec::new(),
             transforms: Mutex::new(SumTree::from_item(
@@ -869,6 +867,7 @@ mod tests {
     use super::*;
    use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
     use gpui::{elements::Empty, Element};
+    use language::multi_buffer::MultiBuffer;
     use rand::prelude::*;
     use std::env;
     use text::RandomCharIter;
@@ -902,7 +901,7 @@ mod tests {
         let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());
 
         let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
         writer.insert(vec![
@@ -1069,7 +1068,7 @@ mod tests {
         let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());
 
         let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
         writer.insert(vec![
@@ -1127,7 +1126,7 @@ mod tests {
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
         let (wrap_map, wraps_snapshot) =
             WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot);
+        let mut block_map = BlockMap::new(wraps_snapshot);
         let mut expected_blocks = Vec::new();
 
         for _ in 0..operations {
@@ -1224,7 +1224,6 @@ mod tests {
         let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
         let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
-        let buffer = buffer.read(cx);
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
         writer.fold(vec![
@@ -559,7 +559,7 @@ impl Editor {
     }
 
     pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
-        self.buffer.read(cx).read(cx).language()
+        self.buffer.read(cx).language(cx)
     }
 
     pub fn set_placeholder_text(
@@ -2996,7 +2996,7 @@ impl Editor {
         let buffer = self.buffer.read(cx);
         let replica_id = buffer.replica_id();
         buffer
-            .selection_sets()
+            .selection_sets(cx)
             .filter(move |(set_id, set)| {
                 set.active && (set_id.replica_id != replica_id || **set_id == self.selection_set_id)
             })
@@ -76,9 +76,7 @@ impl ItemHandle for BufferItemHandle {
             font_properties,
             underline: None,
         };
-        let language = buffer
-            .upgrade(cx)
-            .and_then(|buf| buf.read(cx).read(cx).language());
+        let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language(cx));
         let soft_wrap = match settings.soft_wrap(language) {
             settings::SoftWrap::None => crate::SoftWrap::None,
             settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth,
@@ -222,11 +220,11 @@ impl ItemView for Editor {
     }
 
     fn is_dirty(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).is_dirty()
+        self.buffer().read(cx).is_dirty(cx)
     }
 
     fn has_conflict(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).has_conflict()
+        self.buffer().read(cx).has_conflict(cx)
     }
 }
 
@@ -829,31 +829,6 @@ impl Buffer {
         })
     }
 
-    pub fn diagnostics_in_range<'a, T, O>(
-        &'a self,
-        search_range: Range<T>,
-    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
-    where
-        T: 'a + ToOffset,
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics.range(search_range, self, true)
-    }
-
-    pub fn diagnostic_group<'a, O>(
-        &'a self,
-        group_id: usize,
-    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
-    where
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics.group(group_id, self)
-    }
-
-    pub fn diagnostics_update_count(&self) -> usize {
-        self.diagnostics_update_count
-    }
-
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
         if let Some(indent_columns) = self.compute_autoindents() {
             let indent_columns = cx.background().spawn(indent_columns);
@@ -1057,47 +1032,6 @@ impl Buffer {
         }
     }
 
-    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
-        if let Some(tree) = self.syntax_tree() {
-            let root = tree.root_node();
-            let range = range.start.to_offset(self)..range.end.to_offset(self);
-            let mut node = root.descendant_for_byte_range(range.start, range.end);
-            while node.map_or(false, |n| n.byte_range() == range) {
-                node = node.unwrap().parent();
-            }
-            node.map(|n| n.byte_range())
-        } else {
-            None
-        }
-    }
-
-    pub fn enclosing_bracket_ranges<T: ToOffset>(
-        &self,
-        range: Range<T>,
-    ) -> Option<(Range<usize>, Range<usize>)> {
-        let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
-        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
-        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
-
-        // Find bracket pairs that *inclusively* contain the given range.
-        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
-        let mut cursor = QueryCursorHandle::new();
-        let matches = cursor.set_byte_range(range).matches(
-            &grammar.brackets_query,
-            tree.root_node(),
-            TextProvider(self.as_rope()),
-        );
-
-        // Get the ranges of the innermost pair of brackets.
-        matches
-            .filter_map(|mat| {
-                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
-                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
-                Some((open.byte_range(), close.byte_range()))
-            })
-            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
-    }
-
     pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
         // TODO: it would be nice to not allocate here.
         let old_text = self.text();
@@ -1745,12 +1679,78 @@ impl BufferSnapshot {
         }
     }
 
+    pub fn language(&self) -> Option<&Arc<Language>> {
+        self.language.as_ref()
+    }
+
     fn grammar(&self) -> Option<&Arc<Grammar>> {
         self.language
             .as_ref()
             .and_then(|language| language.grammar.as_ref())
     }
 
+    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
+        if let Some(tree) = self.tree.as_ref() {
+            let root = tree.root_node();
+            let range = range.start.to_offset(self)..range.end.to_offset(self);
+            let mut node = root.descendant_for_byte_range(range.start, range.end);
+            while node.map_or(false, |n| n.byte_range() == range) {
+                node = node.unwrap().parent();
+            }
+            node.map(|n| n.byte_range())
+        } else {
+            None
+        }
+    }
+
+    pub fn enclosing_bracket_ranges<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> Option<(Range<usize>, Range<usize>)> {
+        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
+        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
+        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
+
+        // Find bracket pairs that *inclusively* contain the given range.
+        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
+        let mut cursor = QueryCursorHandle::new();
+        let matches = cursor.set_byte_range(range).matches(
+            &grammar.brackets_query,
+            tree.root_node(),
+            TextProvider(self.as_rope()),
+        );
+
+        // Get the ranges of the innermost pair of brackets.
+        matches
+            .filter_map(|mat| {
+                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
+                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
+                Some((open.byte_range(), close.byte_range()))
+            })
+            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
+    }
+
+    pub fn diagnostics_in_range<'a, T, O>(
+        &'a self,
+        search_range: Range<T>,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+    where
+        T: 'a + ToOffset,
+        O: 'a + FromAnchor,
+    {
+        self.diagnostics.range(search_range, self, true)
+    }
+
+    pub fn diagnostic_group<'a, O>(
+        &'a self,
+        group_id: usize,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+    where
+        O: 'a + FromAnchor,
+    {
+        self.diagnostics.group(group_id, self)
+    }
+
     pub fn diagnostics_update_count(&self) -> usize {
         self.diagnostics_update_count
     }
@@ -6,7 +6,6 @@ use crate::{
     BufferSnapshot, DiagnosticEntry, File, Language,
 };
 pub use anchor::{Anchor, AnchorRangeExt};
-use anyhow::anyhow;
 use anyhow::Result;
 use clock::ReplicaId;
 use collections::HashMap;
@@ -15,6 +14,7 @@ pub use selection::SelectionSet;
 use std::{
     cell::{Ref, RefCell},
     cmp, io,
+    iter::Peekable,
     ops::{Range, Sub},
     sync::Arc,
     time::SystemTime,
@@ -58,7 +58,6 @@ struct BufferState {
 #[derive(Clone, Default)]
 pub struct MultiBufferSnapshot {
     excerpts: SumTree<Excerpt>,
-    replica_id: ReplicaId,
 }
 
 pub struct ExcerptProperties<'a, T> {
@@ -91,7 +90,7 @@ pub struct MultiBufferChunks<'a> {
 }
 
 pub struct MultiBufferBytes<'a> {
-    chunks: MultiBufferChunks<'a>,
+    chunks: Peekable<MultiBufferChunks<'a>>,
 }
 
 impl MultiBuffer {
@@ -336,12 +335,46 @@ impl MultiBuffer {
         set_id: Option<SelectionSetId>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.set_active_selection_set(set_id, cx))
     }
 
-    pub fn selection_sets(&self) -> impl Iterator<Item = (&SelectionSetId, &SelectionSet)> {
-        todo!();
-        None.into_iter()
+    pub fn selection_sets(
+        &self,
+        cx: &AppContext,
+    ) -> impl Iterator<Item = (&SelectionSetId, &SelectionSet)> {
+        let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone();
+        let selection_sets: &mut HashMap<SelectionSetId, SelectionSet> =
+            unsafe { &mut *(&self.selection_sets as *const _ as *mut _) };
+        selection_sets.clear();
+        for (selection_set_id, set) in self.as_singleton().unwrap().read(cx).selection_sets() {
+            selection_sets.insert(
+                *selection_set_id,
+                SelectionSet {
+                    id: set.id,
+                    active: set.active,
+                    selections: set
+                        .selections
+                        .iter()
+                        .map(|selection| Selection {
+                            id: selection.id,
+                            start: Anchor {
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: selection.start.clone(),
+                            },
+                            end: Anchor {
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: selection.end.clone(),
+                            },
+                            reversed: selection.reversed,
+                            goal: selection.goal,
+                        })
+                        .collect(),
+                },
+            );
+        }
+        self.selection_sets.iter()
     }
 
     pub fn push<O>(&mut self, props: ExcerptProperties<O>, cx: &mut ModelContext<Self>) -> ExcerptId
@@ -382,7 +415,13 @@ impl MultiBuffer {
         &mut self,
         cx: &mut ModelContext<Self>,
     ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.save(cx))
+    }
+
+    pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
+        self.as_singleton().unwrap().read(cx).language()
     }
 
     pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> {
@@ -390,16 +429,16 @@ impl MultiBuffer {
             .and_then(|buffer| buffer.read(cx).file())
     }
 
-    pub fn is_dirty(&self) -> bool {
-        todo!()
+    pub fn is_dirty(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).is_dirty()
     }
 
-    pub fn has_conflict(&self) -> bool {
-        todo!()
+    pub fn has_conflict(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).has_conflict()
     }
 
-    pub fn is_parsing(&self, _: &AppContext) -> bool {
-        todo!()
+    pub fn is_parsing(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).is_parsing()
     }
 
     fn sync(&self, cx: &AppContext) {
@@ -473,12 +512,21 @@ impl MultiBuffer {
 
 #[cfg(any(test, feature = "test-support"))]
 impl MultiBuffer {
-    pub fn randomly_edit<R: rand::Rng>(&mut self, _: &mut R, _: usize, _: &mut ModelContext<Self>) {
-        todo!()
+    pub fn randomly_edit<R: rand::Rng>(
+        &mut self,
+        rng: &mut R,
+        count: usize,
+        cx: &mut ModelContext<Self>,
+    ) {
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx))
     }
 
     pub fn randomly_mutate<R: rand::Rng>(&mut self, rng: &mut R, cx: &mut ModelContext<Self>) {
-        todo!()
+        self.as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx))
     }
 }
 
@@ -487,10 +535,6 @@ impl Entity for MultiBuffer {
 }
 
 impl MultiBufferSnapshot {
-    pub fn replica_id(&self) -> ReplicaId {
-        todo!()
-    }
-
     pub fn text(&self) -> String {
         self.chunks(0..self.len(), None)
             .map(|chunk| chunk.text)
@@ -501,8 +545,9 @@ impl MultiBufferSnapshot {
         &'a self,
         position: T,
     ) -> impl Iterator<Item = char> + 'a {
-        todo!();
-        None.into_iter()
+        // TODO
+        let offset = position.to_offset(self);
+        self.as_singleton().unwrap().reversed_chars_at(offset)
     }
 
     pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator<Item = char> + 'a {
@@ -523,11 +568,22 @@ impl MultiBufferSnapshot {
             .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
     }
 
-    pub fn contains_str_at<T>(&self, _: T, _: &str) -> bool
+    pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
     where
         T: ToOffset,
     {
-        todo!()
+        let offset = position.to_offset(self);
+        self.as_singleton().unwrap().contains_str_at(offset, needle)
+    }
+
+    fn as_singleton(&self) -> Option<&BufferSnapshot> {
+        let mut excerpts = self.excerpts.iter();
+        let buffer = excerpts.next().map(|excerpt| &excerpt.buffer);
+        if excerpts.next().is_none() {
+            buffer
+        } else {
+            None
+        }
     }
 
     pub fn len(&self) -> usize {
@@ -610,7 +666,9 @@ impl MultiBufferSnapshot {
     }
 
     pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferBytes<'a> {
-        todo!()
+        MultiBufferBytes {
+            chunks: self.chunks(range, None).peekable(),
+        }
     }
 
     pub fn chunks<'a, T: ToOffset>(
@@ -618,48 +676,15 @@ impl MultiBufferSnapshot {
         range: Range<T>,
         theme: Option<&'a SyntaxTheme>,
     ) -> MultiBufferChunks<'a> {
-        let range = range.start.to_offset(self)..range.end.to_offset(self);
-        let mut cursor = self.excerpts.cursor::<usize>();
-        cursor.seek(&range.start, Bias::Right, &());
-
-        let mut header_height: u8 = 0;
-        let excerpt_chunks = cursor.item().map(|excerpt| {
-            let buffer_range = excerpt.range.to_offset(&excerpt.buffer);
-            header_height = excerpt.header_height;
-
-            let buffer_start;
-            let start_overshoot = range.start - cursor.start();
-            if start_overshoot < excerpt.header_height as usize {
-                header_height -= start_overshoot as u8;
-                buffer_start = buffer_range.start;
-            } else {
-                buffer_start =
-                    buffer_range.start + start_overshoot - excerpt.header_height as usize;
-                header_height = 0;
-            }
-
-            let buffer_end;
-            let end_overshoot = range.end - cursor.start();
-            if end_overshoot < excerpt.header_height as usize {
-                header_height -= excerpt.header_height - end_overshoot as u8;
-                buffer_end = buffer_start;
-            } else {
-                buffer_end = cmp::min(
-                    buffer_range.end,
-                    buffer_range.start + end_overshoot - excerpt.header_height as usize,
-                );
-            }
-
-            excerpt.buffer.chunks(buffer_start..buffer_end, theme)
-        });
-
-        MultiBufferChunks {
-            range,
-            cursor,
-            header_height,
-            excerpt_chunks,
+        let mut result = MultiBufferChunks {
+            range: 0..range.end.to_offset(self),
+            cursor: self.excerpts.cursor::<usize>(),
+            header_height: 0,
+            excerpt_chunks: None,
             theme,
-        }
+        };
+        result.seek(range.start.to_offset(self));
+        result
     }
 
     pub fn offset_to_point(&self, offset: usize) -> Point {
@@ -736,33 +761,43 @@ impl MultiBufferSnapshot {
     }
 
     pub fn indent_column_for_line(&self, row: u32) -> u32 {
-        todo!()
+        if let Some((buffer, range)) = self.buffer_line_for_row(row) {
+            buffer
+                .indent_column_for_line(range.start.row)
+                .min(range.end.column)
+                .saturating_sub(range.start.column)
+        } else {
+            0
+        }
     }
 
     pub fn line_len(&self, row: u32) -> u32 {
+        if let Some((_, range)) = self.buffer_line_for_row(row) {
+            range.end.column - range.start.column
+        } else {
+            0
+        }
+    }
+
+    fn buffer_line_for_row(&self, row: u32) -> Option<(&BufferSnapshot, Range<Point>)> {
         let mut cursor = self.excerpts.cursor::<Point>();
         cursor.seek(&Point::new(row, 0), Bias::Right, &());
         if let Some(excerpt) = cursor.item() {
             let overshoot = row - cursor.start().row;
             let header_height = excerpt.header_height as u32;
-            if overshoot < header_height {
-                0
-            } else {
+            if overshoot >= header_height {
                 let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer);
                 let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer);
                 let buffer_row = excerpt_start.row + overshoot - header_height;
-                let mut len = excerpt.buffer.line_len(buffer_row);
-                if buffer_row == excerpt_end.row {
-                    len = excerpt_end.column;
-                }
-                if buffer_row == excerpt_start.row {
-                    len -= excerpt_start.column
-                }
-                len
+                let line_start = Point::new(buffer_row, 0);
+                let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row));
+                return Some((
+                    &excerpt.buffer,
+                    line_start.max(excerpt_start)..line_end.min(excerpt_end),
+                ));
             }
-        } else {
-            0
         }
+        None
     }
 
     pub fn max_point(&self) -> Point {
@@ -940,26 +975,42 @@ impl MultiBufferSnapshot {
     }
 
     pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
-        todo!()
+        let offset = position.to_offset(self);
+        let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>();
+        cursor.seek(&offset, bias, &());
+        if let Some(excerpt) = cursor.item() {
+            let overshoot =
+                (offset - cursor.start().0).saturating_sub(excerpt.header_height as usize);
+            let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer);
+            Anchor {
+                excerpt_id: excerpt.id.clone(),
+                text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias),
+            }
+        } else if offset == 0 && bias == Bias::Left {
+            Anchor::min()
+        } else {
+            Anchor::max()
+        }
     }
 
     pub fn parse_count(&self) -> usize {
-        todo!()
+        self.as_singleton().unwrap().parse_count()
     }
 
     pub fn enclosing_bracket_ranges<T: ToOffset>(
         &self,
         range: Range<T>,
     ) -> Option<(Range<usize>, Range<usize>)> {
-        todo!()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton().unwrap().enclosing_bracket_ranges(range)
     }
 
     pub fn diagnostics_update_count(&self) -> usize {
-        todo!()
+        self.as_singleton().unwrap().diagnostics_update_count()
     }
 
-    pub fn language<'a>(&self) -> Option<&'a Arc<Language>> {
-        todo!()
+    pub fn language(&self) -> Option<&Arc<Language>> {
+        self.as_singleton().unwrap().language()
     }
 
     pub fn diagnostic_group<'a, O>(
@@ -967,26 +1018,28 @@ impl MultiBufferSnapshot {
         group_id: usize,
     ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
     where
-        O: 'a,
+        O: text::FromAnchor + 'a,
     {
-        todo!();
-        None.into_iter()
+        self.as_singleton().unwrap().diagnostic_group(group_id)
     }
 
     pub fn diagnostics_in_range<'a, T, O>(
         &'a self,
-        search_range: Range<T>,
+        range: Range<T>,
     ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
     where
         T: 'a + ToOffset,
-        O: 'a,
+        O: 'a + text::FromAnchor,
     {
-        todo!();
-        None.into_iter()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton().unwrap().diagnostics_in_range(range)
     }
 
     pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
-        todo!()
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.as_singleton()
+            .unwrap()
+            .range_for_syntax_ancestor(range)
     }
 
     fn buffer_snapshot_for_excerpt<'a>(
@@ -996,7 +1049,7 @@ impl MultiBufferSnapshot {
         let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
         cursor.seek(&Some(excerpt_id), Bias::Left, &());
         if let Some(excerpt) = cursor.item() {
-            if *cursor.start() == Some(excerpt_id) {
+            if excerpt.id == *excerpt_id {
                 return Some(&excerpt.buffer);
             }
         }
@@ -1114,11 +1167,43 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> {
 
 impl<'a> MultiBufferChunks<'a> {
     pub fn offset(&self) -> usize {
-        todo!()
+        self.range.start
     }
 
     pub fn seek(&mut self, offset: usize) {
-        todo!()
+        self.range.start = offset;
+        self.cursor.seek_forward(&offset, Bias::Right, &());
+        self.header_height = 0;
+        self.excerpt_chunks = None;
+        if let Some(excerpt) = self.cursor.item() {
+            let buffer_range = excerpt.range.to_offset(&excerpt.buffer);
+            self.header_height = excerpt.header_height;
+
+            let buffer_start;
+            let start_overshoot = self.range.start - self.cursor.start();
+            if start_overshoot < excerpt.header_height as usize {
+                self.header_height -= start_overshoot as u8;
+                buffer_start = buffer_range.start;
+            } else {
+                buffer_start =
+                    buffer_range.start + start_overshoot - excerpt.header_height as usize;
+                self.header_height = 0;
+            }
+
+            let buffer_end;
+            let end_overshoot = self.range.end - self.cursor.start();
+            if end_overshoot < excerpt.header_height as usize {
+                self.header_height -= excerpt.header_height - end_overshoot as u8;
+                buffer_end = buffer_start;
+            } else {
+                buffer_end = cmp::min(
+                    buffer_range.end,
+                    buffer_range.start + end_overshoot - excerpt.header_height as usize,
+                );
+            }
+
+            self.excerpt_chunks = Some(excerpt.buffer.chunks(buffer_start..buffer_end, self.theme));
+        }
     }
 }
 
@@ -1134,16 +1219,19 @@ impl<'a> Iterator for MultiBufferChunks<'a> {
                 },
                 ..Default::default()
             };
+            self.range.start += self.header_height as usize;
             self.header_height = 0;
             return Some(chunk);
         }
 
         if let Some(excerpt_chunks) = self.excerpt_chunks.as_mut() {
             if let Some(chunk) = excerpt_chunks.next() {
+                self.range.start += chunk.text.len();
                 return Some(chunk);
             }
             self.excerpt_chunks.take();
             if self.cursor.end(&()) <= self.range.end {
+                self.range.start += 1;
                 return Some(Chunk {
                     text: "\n",
                     ..Default::default()
@@ -1180,7 +1268,7 @@ impl<'a> Iterator for MultiBufferBytes<'a> {
     type Item = &'a [u8];
 
     fn next(&mut self) -> Option<Self::Item> {
-        todo!()
+        self.chunks.next().map(|chunk| chunk.text.as_bytes())
     }
 }
 
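The MultiBufferChunks changes above move the per-excerpt setup out of chunks() into a reusable seek() on the iterator itself, and the iterator now advances range.start every time it emits a header newline, a buffer chunk, or an excerpt separator, so offset() is just a field read. Below is a rough, self-contained sketch of that offset-tracking idea over plain strings; the types are hypothetical stand-ins, not the real cursor/SumTree machinery.

// Hypothetical stand-in for MultiBufferChunks: iterates string "excerpts",
// yielding each excerpt's remaining text followed by a "\n" separator, and
// keeps `offset` equal to the position of the next chunk it will yield.
struct Chunks<'a> {
    excerpts: &'a [String],
    excerpt_ix: usize,
    within_excerpt: usize,
    offset: usize,
}

impl<'a> Chunks<'a> {
    fn new(excerpts: &'a [String]) -> Self {
        Self { excerpts, excerpt_ix: 0, within_excerpt: 0, offset: 0 }
    }

    // Analogous to MultiBufferChunks::seek: reposition the existing iterator
    // instead of rebuilding it.
    fn seek(&mut self, offset: usize) {
        self.offset = offset;
        self.excerpt_ix = self.excerpts.len();
        self.within_excerpt = 0;
        let mut remaining = offset;
        for (ix, excerpt) in self.excerpts.iter().enumerate() {
            let len_with_separator = excerpt.len() + 1;
            if remaining < len_with_separator {
                self.excerpt_ix = ix;
                self.within_excerpt = remaining;
                return;
            }
            remaining -= len_with_separator;
        }
    }

    // Analogous to MultiBufferChunks::offset: just report the tracked position.
    fn offset(&self) -> usize {
        self.offset
    }
}

impl<'a> Iterator for Chunks<'a> {
    type Item = &'a str;

    fn next(&mut self) -> Option<&'a str> {
        let excerpts: &'a [String] = self.excerpts;
        let excerpt = excerpts.get(self.excerpt_ix)?;
        let chunk: &'a str = if self.within_excerpt < excerpt.len() {
            &excerpt[self.within_excerpt..]
        } else {
            "\n"
        };
        // Keep the offset in sync as chunks are emitted, mirroring how the
        // real iterator bumps `range.start` in its `next` implementation.
        self.offset += chunk.len();
        self.within_excerpt += chunk.len();
        if self.within_excerpt > excerpt.len() {
            // Consumed the trailing separator; move to the next excerpt.
            self.excerpt_ix += 1;
            self.within_excerpt = 0;
        }
        Some(chunk)
    }
}

fn main() {
    let excerpts = vec!["one".to_string(), "two".to_string()];
    let mut chunks = Chunks::new(&excerpts);
    chunks.seek(2);
    assert_eq!(chunks.offset(), 2);
    assert_eq!(chunks.next(), Some("e"));
    assert_eq!(chunks.next(), Some("\n"));
    assert_eq!(chunks.offset(), 4);
    assert_eq!(chunks.next(), Some("two"));
}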
@@ -539,6 +539,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     // The diagnostics have moved down since they were created.
     assert_eq!(
         buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
            .collect::<Vec<_>>(),
         &[
@@ -606,6 +607,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         .unwrap();
     assert_eq!(
         buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
            .collect::<Vec<_>>(),
         &[
@@ -685,6 +687,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         .unwrap();
     assert_eq!(
         buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(0..buffer.len())
            .collect::<Vec<_>>(),
         &[
@@ -870,6 +873,7 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
     buffer.update_diagnostics(None, diagnostics, cx).unwrap();
     assert_eq!(
         buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(0..buffer.len())
            .collect::<Vec<_>>(),
         &[
@@ -922,7 +926,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
     );
 
     assert_eq!(
-        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
+        buffer
+            .snapshot()
+            .diagnostic_group::<Point>(0)
+            .collect::<Vec<_>>(),
         &[
             DiagnosticEntry {
                 range: Point::new(1, 8)..Point::new(1, 9),
@@ -945,7 +952,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
         ]
     );
     assert_eq!(
-        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
+        buffer
+            .snapshot()
+            .diagnostic_group::<Point>(1)
+            .collect::<Vec<_>>(),
         &[
             DiagnosticEntry {
                 range: Point::new(1, 13)..Point::new(1, 15),
@@ -1022,11 +1032,13 @@ impl Buffer {
         &self,
         range: Range<T>,
     ) -> Option<(Range<Point>, Range<Point>)> {
-        self.enclosing_bracket_ranges(range).map(|(start, end)| {
-            let point_start = start.start.to_point(self)..start.end.to_point(self);
-            let point_end = end.start.to_point(self)..end.end.to_point(self);
-            (point_start, point_end)
-        })
+        self.snapshot()
+            .enclosing_bracket_ranges(range)
+            .map(|(start, end)| {
+                let point_start = start.start.to_point(self)..start.end.to_point(self);
+                let point_end = end.start.to_point(self)..end.end.to_point(self);
+                (point_start, point_end)
+            })
     }
 }
 
@@ -3721,6 +3721,7 @@ mod tests {
 
     buffer.read_with(&cx, |buffer, _| {
         let diagnostics = buffer
+            .snapshot()
            .diagnostics_in_range::<_, Point>(0..buffer.len())
            .collect::<Vec<_>>();
        assert_eq!(
@@ -1707,6 +1707,7 @@ mod tests {
    buffer_b.read_with(&cx_b, |buffer, _| {
        assert_eq!(
            buffer
+                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[