Merge branch 'fragment-locators' into HEAD
This commit is contained in:
commit
5e516f59c0
22 changed files with 1317 additions and 1280 deletions
|
@ -21,6 +21,15 @@ pub struct Lamport {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Local {
|
impl Local {
|
||||||
|
pub const MIN: Self = Self {
|
||||||
|
replica_id: ReplicaId::MIN,
|
||||||
|
value: Seq::MIN,
|
||||||
|
};
|
||||||
|
pub const MAX: Self = Self {
|
||||||
|
replica_id: ReplicaId::MAX,
|
||||||
|
value: Seq::MAX,
|
||||||
|
};
|
||||||
|
|
||||||
pub fn new(replica_id: ReplicaId) -> Self {
|
pub fn new(replica_id: ReplicaId) -> Self {
|
||||||
Self {
|
Self {
|
||||||
replica_id,
|
replica_id,
|
||||||
|
|
|
@ -407,7 +407,7 @@ struct SelectNextState {
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
struct BracketPairState {
|
struct BracketPairState {
|
||||||
ranges: AnchorRangeSet,
|
ranges: Vec<Range<Anchor>>,
|
||||||
pair: BracketPair,
|
pair: BracketPair,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1292,10 +1292,9 @@ impl Editor {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
|
fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
|
||||||
let selections = self.selections::<usize>(cx);
|
let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
|
||||||
let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
|
let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| {
|
||||||
let buffer_snapshot = buffer.snapshot(cx);
|
let autoclose_pair = buffer.language().and_then(|language| {
|
||||||
let autoclose_pair = buffer_snapshot.language().and_then(|language| {
|
|
||||||
let first_selection_start = selections.first().unwrap().start;
|
let first_selection_start = selections.first().unwrap().start;
|
||||||
let pair = language.brackets().iter().find(|pair| {
|
let pair = language.brackets().iter().find(|pair| {
|
||||||
buffer_snapshot.contains_str_at(
|
buffer_snapshot.contains_str_at(
|
||||||
|
@ -1333,15 +1332,14 @@ impl Editor {
|
||||||
if pair.end.len() == 1 {
|
if pair.end.len() == 1 {
|
||||||
let mut delta = 0;
|
let mut delta = 0;
|
||||||
Some(BracketPairState {
|
Some(BracketPairState {
|
||||||
ranges: buffer.anchor_range_set(
|
ranges: selections
|
||||||
Bias::Left,
|
.iter()
|
||||||
Bias::Right,
|
.map(move |selection| {
|
||||||
selections.iter().map(move |selection| {
|
|
||||||
let offset = selection.start + delta;
|
let offset = selection.start + delta;
|
||||||
delta += 1;
|
delta += 1;
|
||||||
offset..offset
|
buffer.anchor_before(offset)..buffer.anchor_after(offset)
|
||||||
}),
|
})
|
||||||
),
|
.collect(),
|
||||||
pair,
|
pair,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
|
@ -1349,26 +1347,26 @@ impl Editor {
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
self.autoclose_stack.extend(new_autoclose_pair_state);
|
self.autoclose_stack.extend(new_autoclose_pair);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
|
fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
|
||||||
let old_selections = self.selections::<usize>(cx);
|
let old_selections = self.selections::<usize>(cx).collect::<Vec<_>>();
|
||||||
let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() {
|
let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
|
||||||
autoclose_pair_state
|
autoclose_pair
|
||||||
} else {
|
} else {
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
if text != autoclose_pair_state.pair.end {
|
if text != autoclose_pair.pair.end {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len());
|
debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
|
||||||
|
|
||||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||||
if old_selections
|
if old_selections
|
||||||
.iter()
|
.iter()
|
||||||
.zip(autoclose_pair_state.ranges.ranges::<usize>(&buffer))
|
.zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
|
||||||
.all(|(selection, autoclose_range)| {
|
.all(|(selection, autoclose_range)| {
|
||||||
let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
|
let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
|
||||||
selection.is_empty() && selection.start == autoclose_range_end
|
selection.is_empty() && selection.start == autoclose_range_end
|
||||||
|
@ -2832,12 +2830,12 @@ impl Editor {
|
||||||
loop {
|
loop {
|
||||||
let next_group = buffer
|
let next_group = buffer
|
||||||
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
|
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
|
||||||
.find_map(|(range, diagnostic)| {
|
.find_map(|entry| {
|
||||||
if diagnostic.is_primary
|
if entry.diagnostic.is_primary
|
||||||
&& !range.is_empty()
|
&& !entry.range.is_empty()
|
||||||
&& Some(range.end) != active_primary_range.as_ref().map(|r| *r.end())
|
&& Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
|
||||||
{
|
{
|
||||||
Some((range, diagnostic.group_id))
|
Some((entry.range, entry.diagnostic.group_id))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
@ -2872,11 +2870,11 @@ impl Editor {
|
||||||
let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
|
let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
|
||||||
let is_valid = buffer
|
let is_valid = buffer
|
||||||
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
|
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
|
||||||
.any(|(range, diagnostic)| {
|
.any(|entry| {
|
||||||
diagnostic.is_primary
|
entry.diagnostic.is_primary
|
||||||
&& !range.is_empty()
|
&& !entry.range.is_empty()
|
||||||
&& range.start == primary_range_start
|
&& entry.range.start == primary_range_start
|
||||||
&& diagnostic.message == active_diagnostics.primary_message
|
&& entry.diagnostic.message == active_diagnostics.primary_message
|
||||||
});
|
});
|
||||||
|
|
||||||
if is_valid != active_diagnostics.is_valid {
|
if is_valid != active_diagnostics.is_valid {
|
||||||
|
@ -2907,15 +2905,15 @@ impl Editor {
|
||||||
let mut group_end = Point::zero();
|
let mut group_end = Point::zero();
|
||||||
let diagnostic_group = buffer
|
let diagnostic_group = buffer
|
||||||
.diagnostic_group::<Point>(group_id)
|
.diagnostic_group::<Point>(group_id)
|
||||||
.map(|(range, diagnostic)| {
|
.map(|entry| {
|
||||||
if range.end > group_end {
|
if entry.range.end > group_end {
|
||||||
group_end = range.end;
|
group_end = entry.range.end;
|
||||||
}
|
}
|
||||||
if diagnostic.is_primary {
|
if entry.diagnostic.is_primary {
|
||||||
primary_range = Some(range.clone());
|
primary_range = Some(entry.range.clone());
|
||||||
primary_message = Some(diagnostic.message.clone());
|
primary_message = Some(entry.diagnostic.message.clone());
|
||||||
}
|
}
|
||||||
(range, diagnostic.clone())
|
entry
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
let primary_range = primary_range.unwrap();
|
let primary_range = primary_range.unwrap();
|
||||||
|
@ -2925,13 +2923,13 @@ impl Editor {
|
||||||
|
|
||||||
let blocks = display_map
|
let blocks = display_map
|
||||||
.insert_blocks(
|
.insert_blocks(
|
||||||
diagnostic_group.iter().map(|(range, diagnostic)| {
|
diagnostic_group.iter().map(|entry| {
|
||||||
let build_settings = self.build_settings.clone();
|
let build_settings = self.build_settings.clone();
|
||||||
let diagnostic = diagnostic.clone();
|
let diagnostic = entry.diagnostic.clone();
|
||||||
let message_height = diagnostic.message.lines().count() as u8;
|
let message_height = diagnostic.message.lines().count() as u8;
|
||||||
|
|
||||||
BlockProperties {
|
BlockProperties {
|
||||||
position: range.start,
|
position: entry.range.start,
|
||||||
height: message_height,
|
height: message_height,
|
||||||
render: Arc::new(move |cx| {
|
render: Arc::new(move |cx| {
|
||||||
let settings = build_settings.borrow()(cx.cx);
|
let settings = build_settings.borrow()(cx.cx);
|
||||||
|
@ -2944,11 +2942,7 @@ impl Editor {
|
||||||
cx,
|
cx,
|
||||||
)
|
)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.zip(
|
.zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic))
|
||||||
diagnostic_group
|
|
||||||
.into_iter()
|
|
||||||
.map(|(_, diagnostic)| diagnostic),
|
|
||||||
)
|
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Some(ActiveDiagnosticGroup {
|
Some(ActiveDiagnosticGroup {
|
||||||
|
@ -3171,12 +3165,12 @@ impl Editor {
|
||||||
self.add_selections_state = None;
|
self.add_selections_state = None;
|
||||||
self.select_next_state = None;
|
self.select_next_state = None;
|
||||||
self.select_larger_syntax_node_stack.clear();
|
self.select_larger_syntax_node_stack.clear();
|
||||||
while let Some(autoclose_pair_state) = self.autoclose_stack.last() {
|
while let Some(autoclose_pair) = self.autoclose_stack.last() {
|
||||||
let all_selections_inside_autoclose_ranges =
|
let all_selections_inside_autoclose_ranges =
|
||||||
if selections.len() == autoclose_pair_state.ranges.len() {
|
if selections.len() == autoclose_pair.ranges.len() {
|
||||||
selections
|
selections
|
||||||
.iter()
|
.iter()
|
||||||
.zip(autoclose_pair_state.ranges.ranges::<Point>(&buffer))
|
.zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer)))
|
||||||
.all(|(selection, autoclose_range)| {
|
.all(|(selection, autoclose_range)| {
|
||||||
let head = selection.head().to_point(&buffer);
|
let head = selection.head().to_point(&buffer);
|
||||||
autoclose_range.start <= head && autoclose_range.end >= head
|
autoclose_range.start <= head && autoclose_range.end >= head
|
||||||
|
|
|
@ -324,14 +324,13 @@ impl DiagnosticMessage {
|
||||||
|
|
||||||
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
|
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
|
||||||
let editor = editor.read(cx);
|
let editor = editor.read(cx);
|
||||||
let cursor_position = editor.newest_selection(cx).head();
|
let cursor_position = editor.newest_selection::<usize>(cx).head();
|
||||||
let new_diagnostic = editor
|
let buffer = editor.buffer().read(cx);
|
||||||
.buffer()
|
let new_diagnostic = buffer
|
||||||
.read(cx)
|
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
|
||||||
.diagnostics_in_range::<usize, usize>(cursor_position..cursor_position)
|
.filter(|entry| !entry.range.is_empty())
|
||||||
.filter(|(range, _)| !range.is_empty())
|
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
|
||||||
.min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len()))
|
.map(|entry| entry.diagnostic);
|
||||||
.map(|(_, diagnostic)| diagnostic.clone());
|
|
||||||
if new_diagnostic != self.diagnostic {
|
if new_diagnostic != self.diagnostic {
|
||||||
self.diagnostic = new_diagnostic;
|
self.diagnostic = new_diagnostic;
|
||||||
cx.notify();
|
cx.notify();
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
|
use crate::diagnostic_set::DiagnosticEntry;
|
||||||
pub use crate::{
|
pub use crate::{
|
||||||
|
diagnostic_set::DiagnosticSet,
|
||||||
highlight_map::{HighlightId, HighlightMap},
|
highlight_map::{HighlightId, HighlightMap},
|
||||||
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
|
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
|
||||||
PLAIN_TEXT,
|
PLAIN_TEXT,
|
||||||
|
@ -21,6 +23,7 @@ use std::{
|
||||||
ffi::OsString,
|
ffi::OsString,
|
||||||
future::Future,
|
future::Future,
|
||||||
iter::{Iterator, Peekable},
|
iter::{Iterator, Peekable},
|
||||||
|
mem,
|
||||||
ops::{Deref, DerefMut, Range},
|
ops::{Deref, DerefMut, Range},
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
str,
|
str,
|
||||||
|
@ -28,6 +31,7 @@ use std::{
|
||||||
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
|
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
|
||||||
vec,
|
vec,
|
||||||
};
|
};
|
||||||
|
use text::operation_queue::OperationQueue;
|
||||||
pub use text::{Buffer as TextBuffer, Operation as _, *};
|
pub use text::{Buffer as TextBuffer, Operation as _, *};
|
||||||
use theme::SyntaxTheme;
|
use theme::SyntaxTheme;
|
||||||
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
|
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
|
||||||
|
@ -61,9 +65,10 @@ pub struct Buffer {
|
||||||
syntax_tree: Mutex<Option<SyntaxTree>>,
|
syntax_tree: Mutex<Option<SyntaxTree>>,
|
||||||
parsing_in_background: bool,
|
parsing_in_background: bool,
|
||||||
parse_count: usize,
|
parse_count: usize,
|
||||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
diagnostics: DiagnosticSet,
|
||||||
diagnostics_update_count: usize,
|
diagnostics_update_count: usize,
|
||||||
language_server: Option<LanguageServerState>,
|
language_server: Option<LanguageServerState>,
|
||||||
|
deferred_ops: OperationQueue<Operation>,
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub(crate) operations: Vec<Operation>,
|
pub(crate) operations: Vec<Operation>,
|
||||||
}
|
}
|
||||||
|
@ -71,7 +76,7 @@ pub struct Buffer {
|
||||||
pub struct BufferSnapshot {
|
pub struct BufferSnapshot {
|
||||||
text: text::BufferSnapshot,
|
text: text::BufferSnapshot,
|
||||||
tree: Option<Tree>,
|
tree: Option<Tree>,
|
||||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
diagnostics: DiagnosticSet,
|
||||||
diagnostics_update_count: usize,
|
diagnostics_update_count: usize,
|
||||||
is_parsing: bool,
|
is_parsing: bool,
|
||||||
language: Option<Arc<Language>>,
|
language: Option<Arc<Language>>,
|
||||||
|
@ -101,10 +106,13 @@ struct LanguageServerSnapshot {
|
||||||
path: Arc<Path>,
|
path: Arc<Path>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone, Debug)]
|
||||||
pub enum Operation {
|
pub enum Operation {
|
||||||
Buffer(text::Operation),
|
Buffer(text::Operation),
|
||||||
UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
|
UpdateDiagnostics {
|
||||||
|
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
|
||||||
|
lamport_timestamp: clock::Lamport,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
@ -173,8 +181,8 @@ struct SyntaxTree {
|
||||||
struct AutoindentRequest {
|
struct AutoindentRequest {
|
||||||
selection_set_ids: HashSet<SelectionSetId>,
|
selection_set_ids: HashSet<SelectionSetId>,
|
||||||
before_edit: BufferSnapshot,
|
before_edit: BufferSnapshot,
|
||||||
edited: AnchorSet,
|
edited: Vec<Anchor>,
|
||||||
inserted: Option<AnchorRangeSet>,
|
inserted: Option<Vec<Range<Anchor>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
@ -275,9 +283,11 @@ impl Buffer {
|
||||||
buffer.add_raw_selection_set(set.id, set);
|
buffer.add_raw_selection_set(set.id, set);
|
||||||
}
|
}
|
||||||
let mut this = Self::build(buffer, file);
|
let mut this = Self::build(buffer, file);
|
||||||
if let Some(diagnostics) = message.diagnostics {
|
this.apply_diagnostic_update(
|
||||||
this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
|
Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
|
||||||
}
|
cx,
|
||||||
|
);
|
||||||
|
|
||||||
Ok(this)
|
Ok(this)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -294,7 +304,7 @@ impl Buffer {
|
||||||
.selection_sets()
|
.selection_sets()
|
||||||
.map(|(_, set)| proto::serialize_selection_set(set))
|
.map(|(_, set)| proto::serialize_selection_set(set))
|
||||||
.collect(),
|
.collect(),
|
||||||
diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
|
diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -331,6 +341,7 @@ impl Buffer {
|
||||||
diagnostics: Default::default(),
|
diagnostics: Default::default(),
|
||||||
diagnostics_update_count: 0,
|
diagnostics_update_count: 0,
|
||||||
language_server: None,
|
language_server: None,
|
||||||
|
deferred_ops: OperationQueue::new(),
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
operations: Default::default(),
|
operations: Default::default(),
|
||||||
}
|
}
|
||||||
|
@ -690,6 +701,8 @@ impl Buffer {
|
||||||
mut diagnostics: Vec<lsp::Diagnostic>,
|
mut diagnostics: Vec<lsp::Diagnostic>,
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) -> Result<Operation> {
|
) -> Result<Operation> {
|
||||||
|
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
|
||||||
|
|
||||||
let version = version.map(|version| version as usize);
|
let version = version.map(|version| version as usize);
|
||||||
let content = if let Some(version) = version {
|
let content = if let Some(version) = version {
|
||||||
let language_server = self.language_server.as_mut().unwrap();
|
let language_server = self.language_server.as_mut().unwrap();
|
||||||
|
@ -710,8 +723,6 @@ impl Buffer {
|
||||||
.and_then(|language| language.disk_based_diagnostic_sources())
|
.and_then(|language| language.disk_based_diagnostic_sources())
|
||||||
.unwrap_or(&empty_set);
|
.unwrap_or(&empty_set);
|
||||||
|
|
||||||
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
|
|
||||||
self.diagnostics = {
|
|
||||||
let mut edits_since_save = content
|
let mut edits_since_save = content
|
||||||
.edits_since::<PointUtf16>(&self.saved_version)
|
.edits_since::<PointUtf16>(&self.saved_version)
|
||||||
.peekable();
|
.peekable();
|
||||||
|
@ -771,30 +782,33 @@ impl Buffer {
|
||||||
diagnostics_by_group_id
|
diagnostics_by_group_id
|
||||||
.entry(group_id)
|
.entry(group_id)
|
||||||
.or_insert(Vec::new())
|
.or_insert(Vec::new())
|
||||||
.push((
|
.push(DiagnosticEntry {
|
||||||
range,
|
range,
|
||||||
Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
|
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
|
||||||
message: diagnostic.message.clone(),
|
message: diagnostic.message.clone(),
|
||||||
group_id,
|
group_id,
|
||||||
is_primary: false,
|
is_primary: false,
|
||||||
},
|
},
|
||||||
));
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
content.anchor_range_multimap(
|
drop(edits_since_save);
|
||||||
Bias::Left,
|
let mut diagnostics = mem::take(&mut self.diagnostics);
|
||||||
Bias::Right,
|
diagnostics.reset(
|
||||||
diagnostics_by_group_id
|
diagnostics_by_group_id
|
||||||
.into_values()
|
.into_values()
|
||||||
.flat_map(|mut diagnostics| {
|
.flat_map(|mut diagnostics| {
|
||||||
let primary_diagnostic =
|
let primary = diagnostics
|
||||||
diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap();
|
.iter_mut()
|
||||||
primary_diagnostic.1.is_primary = true;
|
.min_by_key(|entry| entry.diagnostic.severity)
|
||||||
|
.unwrap();
|
||||||
|
primary.diagnostic.is_primary = true;
|
||||||
diagnostics
|
diagnostics
|
||||||
}),
|
}),
|
||||||
)
|
self,
|
||||||
};
|
);
|
||||||
|
self.diagnostics = diagnostics;
|
||||||
|
|
||||||
if let Some(version) = version {
|
if let Some(version) = version {
|
||||||
let language_server = self.language_server.as_mut().unwrap();
|
let language_server = self.language_server.as_mut().unwrap();
|
||||||
|
@ -811,32 +825,31 @@ impl Buffer {
|
||||||
self.diagnostics_update_count += 1;
|
self.diagnostics_update_count += 1;
|
||||||
cx.notify();
|
cx.notify();
|
||||||
cx.emit(Event::DiagnosticsUpdated);
|
cx.emit(Event::DiagnosticsUpdated);
|
||||||
Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
|
Ok(Operation::UpdateDiagnostics {
|
||||||
|
diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
|
||||||
|
lamport_timestamp: self.lamport_timestamp(),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn diagnostics_in_range<'a, T, O>(
|
pub fn diagnostics_in_range<'a, T, O>(
|
||||||
&'a self,
|
&'a self,
|
||||||
search_range: Range<T>,
|
search_range: Range<T>,
|
||||||
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
|
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
||||||
where
|
where
|
||||||
T: 'a + ToOffset,
|
T: 'a + ToOffset,
|
||||||
O: 'a + FromAnchor,
|
O: 'a + FromAnchor,
|
||||||
{
|
{
|
||||||
self.diagnostics
|
self.diagnostics.range(search_range, self, true)
|
||||||
.intersecting_ranges(search_range, self, true)
|
|
||||||
.map(move |(_, range, diagnostic)| (range, diagnostic))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn diagnostic_group<'a, O>(
|
pub fn diagnostic_group<'a, O>(
|
||||||
&'a self,
|
&'a self,
|
||||||
group_id: usize,
|
group_id: usize,
|
||||||
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
|
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
||||||
where
|
where
|
||||||
O: 'a + FromAnchor,
|
O: 'a + FromAnchor,
|
||||||
{
|
{
|
||||||
self.diagnostics
|
self.diagnostics.group(group_id, self)
|
||||||
.filter(self, move |diagnostic| diagnostic.group_id == group_id)
|
|
||||||
.map(move |(_, range, diagnostic)| (range, diagnostic))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn diagnostics_update_count(&self) -> usize {
|
pub fn diagnostics_update_count(&self) -> usize {
|
||||||
|
@ -879,13 +892,13 @@ impl Buffer {
|
||||||
for request in autoindent_requests {
|
for request in autoindent_requests {
|
||||||
let old_to_new_rows = request
|
let old_to_new_rows = request
|
||||||
.edited
|
.edited
|
||||||
.iter::<Point>(&request.before_edit)
|
.iter()
|
||||||
.map(|point| point.row)
|
.map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
|
||||||
.zip(
|
.zip(
|
||||||
request
|
request
|
||||||
.edited
|
.edited
|
||||||
.iter::<Point>(&snapshot)
|
.iter()
|
||||||
.map(|point| point.row),
|
.map(|anchor| anchor.summary::<Point>(&snapshot).row),
|
||||||
)
|
)
|
||||||
.collect::<BTreeMap<u32, u32>>();
|
.collect::<BTreeMap<u32, u32>>();
|
||||||
|
|
||||||
|
@ -947,7 +960,8 @@ impl Buffer {
|
||||||
if let Some(inserted) = request.inserted.as_ref() {
|
if let Some(inserted) = request.inserted.as_ref() {
|
||||||
let inserted_row_ranges = contiguous_ranges(
|
let inserted_row_ranges = contiguous_ranges(
|
||||||
inserted
|
inserted
|
||||||
.ranges::<Point>(&snapshot)
|
.iter()
|
||||||
|
.map(|range| range.to_point(&snapshot))
|
||||||
.flat_map(|range| range.start.row..range.end.row + 1),
|
.flat_map(|range| range.start.row..range.end.row + 1),
|
||||||
max_rows_between_yields,
|
max_rows_between_yields,
|
||||||
);
|
);
|
||||||
|
@ -1264,17 +1278,17 @@ impl Buffer {
|
||||||
self.pending_autoindent.take();
|
self.pending_autoindent.take();
|
||||||
let autoindent_request = if autoindent && self.language.is_some() {
|
let autoindent_request = if autoindent && self.language.is_some() {
|
||||||
let before_edit = self.snapshot();
|
let before_edit = self.snapshot();
|
||||||
let edited = self.anchor_set(
|
let edited = ranges
|
||||||
Bias::Left,
|
.iter()
|
||||||
ranges.iter().filter_map(|range| {
|
.filter_map(|range| {
|
||||||
let start = range.start.to_point(self);
|
let start = range.start.to_point(self);
|
||||||
if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
|
if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
Some(range.start)
|
Some(self.anchor_before(range.start))
|
||||||
}
|
}
|
||||||
}),
|
})
|
||||||
);
|
.collect();
|
||||||
Some((before_edit, edited))
|
Some((before_edit, edited))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
|
@ -1289,17 +1303,19 @@ impl Buffer {
|
||||||
let mut inserted = None;
|
let mut inserted = None;
|
||||||
if let Some(first_newline_ix) = first_newline_ix {
|
if let Some(first_newline_ix) = first_newline_ix {
|
||||||
let mut delta = 0isize;
|
let mut delta = 0isize;
|
||||||
inserted = Some(self.anchor_range_set(
|
inserted = Some(
|
||||||
Bias::Left,
|
ranges
|
||||||
Bias::Right,
|
.iter()
|
||||||
ranges.iter().map(|range| {
|
.map(|range| {
|
||||||
let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
|
let start =
|
||||||
|
(delta + range.start as isize) as usize + first_newline_ix + 1;
|
||||||
let end = (delta + range.start as isize) as usize + new_text_len;
|
let end = (delta + range.start as isize) as usize + new_text_len;
|
||||||
delta +=
|
delta +=
|
||||||
(range.end as isize - range.start as isize) + new_text_len as isize;
|
(range.end as isize - range.start as isize) + new_text_len as isize;
|
||||||
start..end
|
self.anchor_before(start)..self.anchor_after(end)
|
||||||
}),
|
})
|
||||||
));
|
.collect(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let selection_set_ids = self
|
let selection_set_ids = self
|
||||||
|
@ -1401,17 +1417,23 @@ impl Buffer {
|
||||||
self.pending_autoindent.take();
|
self.pending_autoindent.take();
|
||||||
let was_dirty = self.is_dirty();
|
let was_dirty = self.is_dirty();
|
||||||
let old_version = self.version.clone();
|
let old_version = self.version.clone();
|
||||||
|
let mut deferred_ops = Vec::new();
|
||||||
let buffer_ops = ops
|
let buffer_ops = ops
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|op| match op {
|
.filter_map(|op| match op {
|
||||||
Operation::Buffer(op) => Some(op),
|
Operation::Buffer(op) => Some(op),
|
||||||
Operation::UpdateDiagnostics(diagnostics) => {
|
_ => {
|
||||||
self.apply_diagnostic_update(diagnostics, cx);
|
if self.can_apply_op(&op) {
|
||||||
|
self.apply_op(op, cx);
|
||||||
|
} else {
|
||||||
|
deferred_ops.push(op);
|
||||||
|
}
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
self.text.apply_ops(buffer_ops)?;
|
self.text.apply_ops(buffer_ops)?;
|
||||||
|
self.flush_deferred_ops(cx);
|
||||||
self.did_edit(&old_version, was_dirty, cx);
|
self.did_edit(&old_version, was_dirty, cx);
|
||||||
// Notify independently of whether the buffer was edited as the operations could include a
|
// Notify independently of whether the buffer was edited as the operations could include a
|
||||||
// selection update.
|
// selection update.
|
||||||
|
@ -1419,12 +1441,49 @@ impl Buffer {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
|
||||||
|
let mut deferred_ops = Vec::new();
|
||||||
|
for op in self.deferred_ops.drain().iter().cloned() {
|
||||||
|
if self.can_apply_op(&op) {
|
||||||
|
self.apply_op(op, cx);
|
||||||
|
} else {
|
||||||
|
deferred_ops.push(op);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.deferred_ops.insert(deferred_ops);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn can_apply_op(&self, operation: &Operation) -> bool {
|
||||||
|
match operation {
|
||||||
|
Operation::Buffer(_) => {
|
||||||
|
unreachable!("buffer operations should never be applied at this layer")
|
||||||
|
}
|
||||||
|
Operation::UpdateDiagnostics { diagnostics, .. } => {
|
||||||
|
diagnostics.iter().all(|diagnostic| {
|
||||||
|
self.text.can_resolve(&diagnostic.range.start)
|
||||||
|
&& self.text.can_resolve(&diagnostic.range.end)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
|
||||||
|
match operation {
|
||||||
|
Operation::Buffer(_) => {
|
||||||
|
unreachable!("buffer operations should never be applied at this layer")
|
||||||
|
}
|
||||||
|
Operation::UpdateDiagnostics { diagnostics, .. } => {
|
||||||
|
self.apply_diagnostic_update(diagnostics, cx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn apply_diagnostic_update(
|
fn apply_diagnostic_update(
|
||||||
&mut self,
|
&mut self,
|
||||||
diagnostics: AnchorRangeMultimap<Diagnostic>,
|
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) {
|
) {
|
||||||
self.diagnostics = diagnostics;
|
self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
|
||||||
self.diagnostics_update_count += 1;
|
self.diagnostics_update_count += 1;
|
||||||
cx.notify();
|
cx.notify();
|
||||||
}
|
}
|
||||||
|
@ -1632,19 +1691,19 @@ impl BufferSnapshot {
|
||||||
let mut highlights = None;
|
let mut highlights = None;
|
||||||
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
|
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
|
||||||
if let Some(theme) = theme {
|
if let Some(theme) = theme {
|
||||||
for (_, range, diagnostic) in
|
for entry in self
|
||||||
self.diagnostics
|
.diagnostics
|
||||||
.intersecting_ranges(range.clone(), self, true)
|
.range::<_, usize>(range.clone(), self, true)
|
||||||
{
|
{
|
||||||
diagnostic_endpoints.push(DiagnosticEndpoint {
|
diagnostic_endpoints.push(DiagnosticEndpoint {
|
||||||
offset: range.start,
|
offset: entry.range.start,
|
||||||
is_start: true,
|
is_start: true,
|
||||||
severity: diagnostic.severity,
|
severity: entry.diagnostic.severity,
|
||||||
});
|
});
|
||||||
diagnostic_endpoints.push(DiagnosticEndpoint {
|
diagnostic_endpoints.push(DiagnosticEndpoint {
|
||||||
offset: range.end,
|
offset: entry.range.end,
|
||||||
is_start: false,
|
is_start: false,
|
||||||
severity: diagnostic.severity,
|
severity: entry.diagnostic.severity,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
diagnostic_endpoints
|
diagnostic_endpoints
|
||||||
|
@ -1939,6 +1998,19 @@ impl ToPointUtf16 for lsp::Position {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl operation_queue::Operation for Operation {
|
||||||
|
fn lamport_timestamp(&self) -> clock::Lamport {
|
||||||
|
match self {
|
||||||
|
Operation::Buffer(_) => {
|
||||||
|
unreachable!("buffer operations should never be deferred at this layer")
|
||||||
|
}
|
||||||
|
Operation::UpdateDiagnostics {
|
||||||
|
lamport_timestamp, ..
|
||||||
|
} => *lamport_timestamp,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn diagnostic_ranges<'a>(
|
fn diagnostic_ranges<'a>(
|
||||||
diagnostic: &'a lsp::Diagnostic,
|
diagnostic: &'a lsp::Diagnostic,
|
||||||
abs_path: Option<&'a Path>,
|
abs_path: Option<&'a Path>,
|
||||||
|
@ -1968,7 +2040,7 @@ fn diagnostic_ranges<'a>(
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn contiguous_ranges(
|
pub fn contiguous_ranges(
|
||||||
values: impl IntoIterator<Item = u32>,
|
values: impl Iterator<Item = u32>,
|
||||||
max_len: usize,
|
max_len: usize,
|
||||||
) -> impl Iterator<Item = Range<u32>> {
|
) -> impl Iterator<Item = Range<u32>> {
|
||||||
let mut values = values.into_iter();
|
let mut values = values.into_iter();
|
||||||
|
|
165
crates/language/src/diagnostic_set.rs
Normal file
165
crates/language/src/diagnostic_set.rs
Normal file
|
@ -0,0 +1,165 @@
|
||||||
|
use crate::Diagnostic;
|
||||||
|
use std::{
|
||||||
|
cmp::{Ordering, Reverse},
|
||||||
|
iter,
|
||||||
|
ops::Range,
|
||||||
|
};
|
||||||
|
use sum_tree::{self, Bias, SumTree};
|
||||||
|
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
|
||||||
|
|
||||||
|
#[derive(Clone, Default)]
|
||||||
|
pub struct DiagnosticSet {
|
||||||
|
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub struct DiagnosticEntry<T> {
|
||||||
|
pub range: Range<T>,
|
||||||
|
pub diagnostic: Diagnostic,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Summary {
|
||||||
|
start: Anchor,
|
||||||
|
end: Anchor,
|
||||||
|
min_start: Anchor,
|
||||||
|
max_end: Anchor,
|
||||||
|
count: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DiagnosticSet {
|
||||||
|
pub fn from_sorted_entries<I>(iter: I, buffer: &text::Snapshot) -> Self
|
||||||
|
where
|
||||||
|
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
|
||||||
|
{
|
||||||
|
Self {
|
||||||
|
diagnostics: SumTree::from_iter(iter, buffer),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reset<I>(&mut self, iter: I, buffer: &text::Snapshot)
|
||||||
|
where
|
||||||
|
I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
|
||||||
|
{
|
||||||
|
let mut entries = iter.into_iter().collect::<Vec<_>>();
|
||||||
|
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
|
||||||
|
self.diagnostics = SumTree::from_iter(
|
||||||
|
entries.into_iter().map(|entry| DiagnosticEntry {
|
||||||
|
range: buffer.anchor_before(entry.range.start)
|
||||||
|
..buffer.anchor_after(entry.range.end),
|
||||||
|
diagnostic: entry.diagnostic,
|
||||||
|
}),
|
||||||
|
buffer,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
|
||||||
|
self.diagnostics.iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn range<'a, T, O>(
|
||||||
|
&'a self,
|
||||||
|
range: Range<T>,
|
||||||
|
buffer: &'a text::Snapshot,
|
||||||
|
inclusive: bool,
|
||||||
|
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
|
||||||
|
where
|
||||||
|
T: 'a + ToOffset,
|
||||||
|
O: FromAnchor,
|
||||||
|
{
|
||||||
|
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
|
||||||
|
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
|
||||||
|
let mut cursor = self.diagnostics.filter::<_, ()>(
|
||||||
|
{
|
||||||
|
move |summary: &Summary| {
|
||||||
|
let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
|
||||||
|
let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
|
||||||
|
if inclusive {
|
||||||
|
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
|
||||||
|
} else {
|
||||||
|
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
buffer,
|
||||||
|
);
|
||||||
|
|
||||||
|
iter::from_fn({
|
||||||
|
move || {
|
||||||
|
if let Some(diagnostic) = cursor.item() {
|
||||||
|
cursor.next(buffer);
|
||||||
|
Some(diagnostic.resolve(buffer))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn group<'a, O: FromAnchor>(
|
||||||
|
&'a self,
|
||||||
|
group_id: usize,
|
||||||
|
buffer: &'a text::Snapshot,
|
||||||
|
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
|
||||||
|
self.iter()
|
||||||
|
.filter(move |entry| entry.diagnostic.group_id == group_id)
|
||||||
|
.map(|entry| entry.resolve(buffer))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl sum_tree::Item for DiagnosticEntry<Anchor> {
|
||||||
|
type Summary = Summary;
|
||||||
|
|
||||||
|
fn summary(&self) -> Self::Summary {
|
||||||
|
Summary {
|
||||||
|
start: self.range.start.clone(),
|
||||||
|
end: self.range.end.clone(),
|
||||||
|
min_start: self.range.start.clone(),
|
||||||
|
max_end: self.range.end.clone(),
|
||||||
|
count: 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DiagnosticEntry<Anchor> {
|
||||||
|
pub fn resolve<O: FromAnchor>(&self, buffer: &text::Snapshot) -> DiagnosticEntry<O> {
|
||||||
|
DiagnosticEntry {
|
||||||
|
range: O::from_anchor(&self.range.start, buffer)
|
||||||
|
..O::from_anchor(&self.range.end, buffer),
|
||||||
|
diagnostic: self.diagnostic.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Summary {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
start: Anchor::min(),
|
||||||
|
end: Anchor::max(),
|
||||||
|
min_start: Anchor::max(),
|
||||||
|
max_end: Anchor::min(),
|
||||||
|
count: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl sum_tree::Summary for Summary {
|
||||||
|
type Context = text::Snapshot;
|
||||||
|
|
||||||
|
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
||||||
|
if other
|
||||||
|
.min_start
|
||||||
|
.cmp(&self.min_start, buffer)
|
||||||
|
.unwrap()
|
||||||
|
.is_lt()
|
||||||
|
{
|
||||||
|
self.min_start = other.min_start.clone();
|
||||||
|
}
|
||||||
|
if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
|
||||||
|
self.max_end = other.max_end.clone();
|
||||||
|
}
|
||||||
|
self.start = other.start.clone();
|
||||||
|
self.end = other.end.clone();
|
||||||
|
self.count += other.count;
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,4 +1,5 @@
|
||||||
mod buffer;
|
mod buffer;
|
||||||
|
mod diagnostic_set;
|
||||||
mod highlight_map;
|
mod highlight_map;
|
||||||
pub mod multi_buffer;
|
pub mod multi_buffer;
|
||||||
pub mod proto;
|
pub mod proto;
|
||||||
|
@ -8,6 +9,7 @@ mod tests;
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
pub use buffer::Operation;
|
pub use buffer::Operation;
|
||||||
pub use buffer::*;
|
pub use buffer::*;
|
||||||
|
pub use diagnostic_set::DiagnosticEntry;
|
||||||
use gpui::{executor::Background, AppContext};
|
use gpui::{executor::Background, AppContext};
|
||||||
use highlight_map::HighlightMap;
|
use highlight_map::HighlightMap;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use crate::{Diagnostic, Operation};
|
use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use clock::ReplicaId;
|
use clock::ReplicaId;
|
||||||
use lsp::DiagnosticSeverity;
|
use lsp::DiagnosticSeverity;
|
||||||
|
@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
|
||||||
replica_id: set_id.replica_id as u32,
|
replica_id: set_id.replica_id as u32,
|
||||||
local_timestamp: set_id.value,
|
local_timestamp: set_id.value,
|
||||||
lamport_timestamp: lamport_timestamp.value,
|
lamport_timestamp: lamport_timestamp.value,
|
||||||
version: selections.version().into(),
|
|
||||||
selections: selections
|
selections: selections
|
||||||
.full_offset_ranges()
|
.iter()
|
||||||
.map(|(range, state)| proto::Selection {
|
.map(|selection| proto::Selection {
|
||||||
id: state.id as u64,
|
id: selection.id as u64,
|
||||||
start: range.start.0 as u64,
|
start: Some(serialize_anchor(&selection.start)),
|
||||||
end: range.end.0 as u64,
|
end: Some(serialize_anchor(&selection.end)),
|
||||||
reversed: state.reversed,
|
reversed: selection.reversed,
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
}),
|
}),
|
||||||
|
@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
|
||||||
lamport_timestamp: lamport_timestamp.value,
|
lamport_timestamp: lamport_timestamp.value,
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
Operation::UpdateDiagnostics(diagnostic_set) => {
|
Operation::UpdateDiagnostics {
|
||||||
proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
|
diagnostics,
|
||||||
}
|
lamport_timestamp,
|
||||||
|
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
|
||||||
|
replica_id: lamport_timestamp.replica_id as u32,
|
||||||
|
lamport_timestamp: lamport_timestamp.value,
|
||||||
|
diagnostics: serialize_diagnostics(diagnostics.iter()),
|
||||||
|
}),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
|
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
|
||||||
let version = set.selections.version();
|
|
||||||
let entries = set.selections.full_offset_ranges();
|
|
||||||
proto::SelectionSet {
|
proto::SelectionSet {
|
||||||
replica_id: set.id.replica_id as u32,
|
replica_id: set.id.replica_id as u32,
|
||||||
lamport_timestamp: set.id.value as u32,
|
lamport_timestamp: set.id.value as u32,
|
||||||
is_active: set.active,
|
is_active: set.active,
|
||||||
version: version.into(),
|
selections: set
|
||||||
selections: entries
|
.selections
|
||||||
.map(|(range, state)| proto::Selection {
|
.iter()
|
||||||
id: state.id as u64,
|
.map(|selection| proto::Selection {
|
||||||
start: range.start.0 as u64,
|
id: selection.id as u64,
|
||||||
end: range.end.0 as u64,
|
start: Some(serialize_anchor(&selection.start)),
|
||||||
reversed: state.reversed,
|
end: Some(serialize_anchor(&selection.end)),
|
||||||
|
reversed: selection.reversed,
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
|
pub fn serialize_diagnostics<'a>(
|
||||||
proto::DiagnosticSet {
|
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
|
||||||
version: map.version().into(),
|
) -> Vec<proto::Diagnostic> {
|
||||||
diagnostics: map
|
diagnostics
|
||||||
.full_offset_ranges()
|
.into_iter()
|
||||||
.map(|(range, diagnostic)| proto::Diagnostic {
|
.map(|entry| proto::Diagnostic {
|
||||||
start: range.start.0 as u64,
|
start: Some(serialize_anchor(&entry.range.start)),
|
||||||
end: range.end.0 as u64,
|
end: Some(serialize_anchor(&entry.range.end)),
|
||||||
message: diagnostic.message.clone(),
|
message: entry.diagnostic.message.clone(),
|
||||||
severity: match diagnostic.severity {
|
severity: match entry.diagnostic.severity {
|
||||||
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
|
||||||
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
|
||||||
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
|
||||||
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
|
||||||
_ => proto::diagnostic::Severity::None,
|
_ => proto::diagnostic::Severity::None,
|
||||||
} as i32,
|
} as i32,
|
||||||
group_id: diagnostic.group_id as u64,
|
group_id: entry.diagnostic.group_id as u64,
|
||||||
is_primary: diagnostic.is_primary,
|
is_primary: entry.diagnostic.is_primary,
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
|
||||||
|
proto::Anchor {
|
||||||
|
replica_id: anchor.timestamp.replica_id as u32,
|
||||||
|
local_timestamp: anchor.timestamp.value,
|
||||||
|
offset: anchor.offset as u64,
|
||||||
|
bias: match anchor.bias {
|
||||||
|
Bias::Left => proto::Bias::Left as i32,
|
||||||
|
Bias::Right => proto::Bias::Right as i32,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
proto::operation::Variant::UpdateSelections(message) => {
|
proto::operation::Variant::UpdateSelections(message) => {
|
||||||
let version = message.version.into();
|
let selections = message
|
||||||
let entries = message
|
|
||||||
.selections
|
.selections
|
||||||
.iter()
|
.into_iter()
|
||||||
.map(|selection| {
|
.filter_map(|selection| {
|
||||||
let range = FullOffset(selection.start as usize)
|
Some(Selection {
|
||||||
..FullOffset(selection.end as usize);
|
|
||||||
let state = SelectionState {
|
|
||||||
id: selection.id as usize,
|
id: selection.id as usize,
|
||||||
|
start: deserialize_anchor(selection.start?)?,
|
||||||
|
end: deserialize_anchor(selection.end?)?,
|
||||||
reversed: selection.reversed,
|
reversed: selection.reversed,
|
||||||
goal: SelectionGoal::None,
|
goal: SelectionGoal::None,
|
||||||
};
|
|
||||||
(range, state)
|
|
||||||
})
|
})
|
||||||
.collect();
|
})
|
||||||
let selections = AnchorRangeMap::from_full_offset_ranges(
|
.collect::<Vec<_>>();
|
||||||
version,
|
|
||||||
Bias::Left,
|
|
||||||
Bias::Left,
|
|
||||||
entries,
|
|
||||||
);
|
|
||||||
|
|
||||||
Operation::Buffer(text::Operation::UpdateSelections {
|
Operation::Buffer(text::Operation::UpdateSelections {
|
||||||
set_id: clock::Lamport {
|
set_id: clock::Lamport {
|
||||||
|
@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
proto::operation::Variant::UpdateDiagnostics(message) => {
|
proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
|
||||||
Operation::UpdateDiagnostics(deserialize_diagnostics(message))
|
diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)),
|
||||||
}
|
lamport_timestamp: clock::Lamport {
|
||||||
|
replica_id: message.replica_id as ReplicaId,
|
||||||
|
value: message.lamport_timestamp,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
@ -277,36 +287,32 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
|
||||||
value: set.lamport_timestamp,
|
value: set.lamport_timestamp,
|
||||||
},
|
},
|
||||||
active: set.is_active,
|
active: set.is_active,
|
||||||
selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
|
selections: Arc::from(
|
||||||
set.version.into(),
|
|
||||||
Bias::Left,
|
|
||||||
Bias::Left,
|
|
||||||
set.selections
|
set.selections
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|selection| {
|
.filter_map(|selection| {
|
||||||
let range =
|
Some(Selection {
|
||||||
FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
|
|
||||||
let state = SelectionState {
|
|
||||||
id: selection.id as usize,
|
id: selection.id as usize,
|
||||||
|
start: deserialize_anchor(selection.start?)?,
|
||||||
|
end: deserialize_anchor(selection.end?)?,
|
||||||
reversed: selection.reversed,
|
reversed: selection.reversed,
|
||||||
goal: SelectionGoal::None,
|
goal: SelectionGoal::None,
|
||||||
};
|
|
||||||
(range, state)
|
|
||||||
})
|
})
|
||||||
.collect(),
|
})
|
||||||
)),
|
.collect::<Vec<_>>(),
|
||||||
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
|
pub fn deserialize_diagnostics(
|
||||||
AnchorRangeMultimap::from_full_offset_ranges(
|
diagnostics: Vec<proto::Diagnostic>,
|
||||||
message.version.into(),
|
) -> Vec<DiagnosticEntry<Anchor>> {
|
||||||
Bias::Left,
|
diagnostics
|
||||||
Bias::Right,
|
.into_iter()
|
||||||
message.diagnostics.into_iter().filter_map(|diagnostic| {
|
.filter_map(|diagnostic| {
|
||||||
Some((
|
Some(DiagnosticEntry {
|
||||||
FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
|
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
|
||||||
Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
|
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
|
||||||
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
|
||||||
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
|
||||||
|
@ -318,7 +324,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult
|
||||||
group_id: diagnostic.group_id as usize,
|
group_id: diagnostic.group_id as usize,
|
||||||
is_primary: diagnostic.is_primary,
|
is_primary: diagnostic.is_primary,
|
||||||
},
|
},
|
||||||
))
|
})
|
||||||
}),
|
})
|
||||||
)
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
|
||||||
|
Some(Anchor {
|
||||||
|
timestamp: clock::Local {
|
||||||
|
replica_id: anchor.replica_id as ReplicaId,
|
||||||
|
value: anchor.local_timestamp,
|
||||||
|
},
|
||||||
|
offset: anchor.offset as usize,
|
||||||
|
bias: match proto::Bias::from_i32(anchor.bias)? {
|
||||||
|
proto::Bias::Left => Bias::Left,
|
||||||
|
proto::Bias::Right => Bias::Right,
|
||||||
|
},
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -539,27 +539,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||||
// The diagnostics have moved down since they were created.
|
// The diagnostics have moved down since they were created.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
buffer
|
buffer
|
||||||
.diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
|
.diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
&[
|
&[
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(3, 9)..Point::new(3, 11),
|
range: Point::new(3, 9)..Point::new(3, 11),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "undefined variable 'BB'".to_string(),
|
message: "undefined variable 'BB'".to_string(),
|
||||||
group_id: 1,
|
group_id: 1,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
},
|
},
|
||||||
),
|
},
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(4, 9)..Point::new(4, 12),
|
range: Point::new(4, 9)..Point::new(4, 12),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "undefined variable 'CCC'".to_string(),
|
message: "undefined variable 'CCC'".to_string(),
|
||||||
group_id: 2,
|
group_id: 2,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
}
|
}
|
||||||
)
|
}
|
||||||
]
|
]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -606,27 +606,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
buffer
|
buffer
|
||||||
.diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
|
.diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
&[
|
&[
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(2, 9)..Point::new(2, 12),
|
range: Point::new(2, 9)..Point::new(2, 12),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::WARNING,
|
severity: DiagnosticSeverity::WARNING,
|
||||||
message: "unreachable statement".to_string(),
|
message: "unreachable statement".to_string(),
|
||||||
group_id: 1,
|
group_id: 1,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
}
|
}
|
||||||
),
|
},
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(2, 9)..Point::new(2, 10),
|
range: Point::new(2, 9)..Point::new(2, 10),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "undefined variable 'A'".to_string(),
|
message: "undefined variable 'A'".to_string(),
|
||||||
group_id: 0,
|
group_id: 0,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
},
|
},
|
||||||
)
|
}
|
||||||
]
|
]
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -685,27 +685,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
buffer
|
buffer
|
||||||
.diagnostics_in_range(0..buffer.len())
|
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
&[
|
&[
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(2, 21)..Point::new(2, 22),
|
range: Point::new(2, 21)..Point::new(2, 22),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "undefined variable 'A'".to_string(),
|
message: "undefined variable 'A'".to_string(),
|
||||||
group_id: 0,
|
group_id: 0,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
}
|
}
|
||||||
),
|
},
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(3, 9)..Point::new(3, 11),
|
range: Point::new(3, 9)..Point::new(3, 11),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::ERROR,
|
severity: DiagnosticSeverity::ERROR,
|
||||||
message: "undefined variable 'BB'".to_string(),
|
message: "undefined variable 'BB'".to_string(),
|
||||||
group_id: 1,
|
group_id: 1,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
},
|
},
|
||||||
)
|
}
|
||||||
]
|
]
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
@ -873,107 +873,107 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
|
||||||
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
.diagnostics_in_range::<_, Point>(0..buffer.len())
|
||||||
.collect::<Vec<_>>(),
|
.collect::<Vec<_>>(),
|
||||||
&[
|
&[
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(1, 8)..Point::new(1, 9),
|
range: Point::new(1, 8)..Point::new(1, 9),
|
||||||
&Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
severity: DiagnosticSeverity::WARNING,
|
severity: DiagnosticSeverity::WARNING,
|
||||||
message: "error 1".to_string(),
|
message: "error 1".to_string(),
|
||||||
group_id: 0,
|
group_id: 0,
|
||||||
is_primary: true,
|
is_primary: true,
|
||||||
}
|
}
|
||||||
),
|
},
|
||||||
(
|
DiagnosticEntry {
|
||||||
Point::new(1, 8)..Point::new(1, 9),
|
range: Point::new(1, 8)..Point::new(1, 9),
|
||||||
&Diagnostic {
|
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 1 hint 1".to_string(),
                     group_id: 0,
                     is_primary: false,
                 }
-            ),
-            (
-                Point::new(1, 13)..Point::new(1, 15),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 2 hint 1".to_string(),
                     group_id: 1,
                     is_primary: false,
                 }
-            ),
-            (
-                Point::new(1, 13)..Point::new(1, 15),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 2 hint 2".to_string(),
                     group_id: 1,
                     is_primary: false,
                 }
-            ),
-            (
-                Point::new(2, 8)..Point::new(2, 17),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(2, 8)..Point::new(2, 17),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::ERROR,
                     message: "error 2".to_string(),
                     group_id: 1,
                     is_primary: true,
                 }
-            )
+            }
         ]
     );

     assert_eq!(
-        buffer.diagnostic_group(0).collect::<Vec<_>>(),
+        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
         &[
-            (
-                Point::new(1, 8)..Point::new(1, 9),
-                &Diagnostic {
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::WARNING,
                     message: "error 1".to_string(),
                     group_id: 0,
                     is_primary: true,
                 }
-            ),
-            (
-                Point::new(1, 8)..Point::new(1, 9),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 1 hint 1".to_string(),
                     group_id: 0,
                     is_primary: false,
                 }
-            ),
+            },
         ]
     );
     assert_eq!(
-        buffer.diagnostic_group(1).collect::<Vec<_>>(),
+        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
         &[
-            (
-                Point::new(1, 13)..Point::new(1, 15),
-                &Diagnostic {
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 2 hint 1".to_string(),
                     group_id: 1,
                     is_primary: false,
                 }
-            ),
-            (
-                Point::new(1, 13)..Point::new(1, 15),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::HINT,
                     message: "error 2 hint 2".to_string(),
                     group_id: 1,
                     is_primary: false,
                 }
-            ),
-            (
-                Point::new(2, 8)..Point::new(2, 17),
-                &Diagnostic {
+            },
+            DiagnosticEntry {
+                range: Point::new(2, 8)..Point::new(2, 17),
+                diagnostic: Diagnostic {
                     severity: DiagnosticSeverity::ERROR,
                     message: "error 2".to_string(),
                     group_id: 1,
                     is_primary: true,
                 }
-            )
+            }
         ]
     );

@@ -1002,13 +1002,17 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(

 #[test]
 fn test_contiguous_ranges() {
     assert_eq!(
-        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
+        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
         &[1..4, 5..7, 9..13]
     );

     // Respects the `max_len` parameter
     assert_eq!(
-        contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
+        contiguous_ranges(
+            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
+            3
+        )
+        .collect::<Vec<_>>(),
         &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
     );
 }
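The call sites above now hand `contiguous_ranges` an explicit iterator. The helper's own definition is not part of this hunk; purely as a sketch of the behavior the assertions rely on (grouping an ascending sequence into half-open ranges of at most `max_len` items), it could look roughly like the following, where the parameter names are assumptions rather than the crate's actual code:

use std::ops::Range;

// Hypothetical sketch: group an ascending sequence of u32s into contiguous
// half-open ranges, starting a new range whenever the run breaks or reaches
// `max_len` items. Mirrors the assertions in the test above.
fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                // Extend the current run while it stays contiguous and short enough.
                if value == range.end && (range.end - range.start) as usize < max_len {
                    range.end += 1;
                    continue;
                }
            }
            // The run broke (or hit max_len): emit it and start a new one.
            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}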
@@ -3005,7 +3005,7 @@ mod tests {
     use anyhow::Result;
     use client::test::{FakeHttpClient, FakeServer};
     use fs::RealFs;
-    use language::{tree_sitter_rust, LanguageServerConfig};
+    use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig};
     use language::{Diagnostic, LanguageConfig};
     use lsp::Url;
     use rand::prelude::*;
@@ -3721,19 +3721,19 @@ mod tests {

     buffer.read_with(&cx, |buffer, _| {
         let diagnostics = buffer
-            .diagnostics_in_range(0..buffer.len())
+            .diagnostics_in_range::<_, Point>(0..buffer.len())
             .collect::<Vec<_>>();
         assert_eq!(
             diagnostics,
-            &[(
-                Point::new(0, 9)..Point::new(0, 10),
-                &Diagnostic {
+            &[DiagnosticEntry {
+                range: Point::new(0, 9)..Point::new(0, 10),
+                diagnostic: Diagnostic {
                     severity: lsp::DiagnosticSeverity::ERROR,
                     message: "undefined variable 'A'".to_string(),
                     group_id: 0,
                     is_primary: true
                 }
-            )]
+            }]
         )
     });
 }
@@ -229,32 +229,44 @@ message Buffer {
     string content = 2;
     repeated Operation.Edit history = 3;
     repeated SelectionSet selections = 4;
-    DiagnosticSet diagnostics = 5;
+    repeated Diagnostic diagnostics = 5;
 }

 message SelectionSet {
     uint32 replica_id = 1;
     uint32 lamport_timestamp = 2;
     bool is_active = 3;
-    repeated VectorClockEntry version = 4;
-    repeated Selection selections = 5;
+    repeated Selection selections = 4;
 }

 message Selection {
     uint64 id = 1;
-    uint64 start = 2;
-    uint64 end = 3;
+    Anchor start = 2;
+    Anchor end = 3;
     bool reversed = 4;
 }

-message DiagnosticSet {
-    repeated VectorClockEntry version = 1;
-    repeated Diagnostic diagnostics = 2;
+message Anchor {
+    uint32 replica_id = 1;
+    uint32 local_timestamp = 2;
+    uint64 offset = 3;
+    Bias bias = 4;
+}
+
+enum Bias {
+    Left = 0;
+    Right = 1;
+}
+
+message UpdateDiagnostics {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    repeated Diagnostic diagnostics = 3;
 }

 message Diagnostic {
-    uint64 start = 1;
-    uint64 end = 2;
+    Anchor start = 1;
+    Anchor end = 2;
     Severity severity = 3;
     string message = 4;
     uint64 group_id = 5;
@@ -268,8 +280,6 @@ message Diagnostic {
     }
 }

-
-
 message Operation {
     oneof variant {
         Edit edit = 1;
@@ -277,7 +287,7 @@ message Operation {
         UpdateSelections update_selections = 3;
         RemoveSelections remove_selections = 4;
         SetActiveSelections set_active_selections = 5;
-        DiagnosticSet update_diagnostics = 6;
+        UpdateDiagnostics update_diagnostics = 6;
     }

     message Edit {
@@ -308,8 +318,7 @@ message Operation {
         uint32 replica_id = 1;
         uint32 local_timestamp = 2;
         uint32 lamport_timestamp = 3;
-        repeated VectorClockEntry version = 4;
-        repeated Selection selections = 5;
+        repeated Selection selections = 4;
     }

     message RemoveSelections {
@@ -400,7 +400,7 @@ mod tests {
                     content: "path/one content".to_string(),
                     history: vec![],
                     selections: vec![],
-                    diagnostics: None,
+                    diagnostics: vec![],
                 }),
             }
         );
@@ -422,7 +422,7 @@ mod tests {
                     content: "path/two content".to_string(),
                     history: vec![],
                     selections: vec![],
-                    diagnostics: None,
+                    diagnostics: vec![],
                 }),
             }
         );
@@ -453,7 +453,7 @@ mod tests {
                     content: "path/one content".to_string(),
                     history: vec![],
                     selections: vec![],
-                    diagnostics: None,
+                    diagnostics: vec![],
                 }),
             }
         }
@@ -465,7 +465,7 @@ mod tests {
                     content: "path/two content".to_string(),
                     history: vec![],
                     selections: vec![],
-                    diagnostics: None,
+                    diagnostics: vec![],
                 }),
             }
         }
@@ -208,9 +208,25 @@ impl RepoClient {
             "Authorization",
             self.installation_token_header(false).await?,
         );
-        let client = surf::client().with(surf::middleware::Redirect::new(5));
+
+        let client = surf::client();
         let mut response = client.send(request).await?;

+        // Avoid using `surf::middleware::Redirect` because that type forwards
+        // the original request headers to the redirect URI. In this case, the
+        // redirect will be to S3, which forbids us from supplying an
+        // `Authorization` header.
+        if response.status().is_redirection() {
+            if let Some(url) = response.header("location") {
+                let request = surf::get(url.as_str()).header("Accept", "application/octet-stream");
+                response = client.send(request).await?;
+            }
+        }
+
+        if !response.status().is_success() {
+            Err(anyhow!("failed to fetch release asset {} {}", tag, name))?;
+        }
+
         Ok(response.take_body())
     }
@@ -1705,27 +1705,27 @@ mod tests {
         buffer_b.read_with(&cx_b, |buffer, _| {
             assert_eq!(
                 buffer
-                    .diagnostics_in_range(0..buffer.len())
+                    .diagnostics_in_range::<_, Point>(0..buffer.len())
                     .collect::<Vec<_>>(),
                 &[
-                    (
-                        Point::new(0, 4)..Point::new(0, 7),
-                        &Diagnostic {
+                    DiagnosticEntry {
+                        range: Point::new(0, 4)..Point::new(0, 7),
+                        diagnostic: Diagnostic {
                             group_id: 0,
                             message: "message 1".to_string(),
                             severity: lsp::DiagnosticSeverity::ERROR,
                             is_primary: true
                         }
-                    ),
-                    (
-                        Point::new(0, 10)..Point::new(0, 13),
-                        &Diagnostic {
+                    },
+                    DiagnosticEntry {
+                        range: Point::new(0, 10)..Point::new(0, 13),
+                        diagnostic: Diagnostic {
                             group_id: 1,
                             severity: lsp::DiagnosticSeverity::WARNING,
                             message: "message 2".to_string(),
                             is_primary: true
                         }
-                    )
+                    }
                 ]
             );
         });
@@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> {
     at_end: bool,
 }

+pub struct Iter<'a, T: Item> {
+    tree: &'a SumTree<T>,
+    stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
+}
+
 impl<'a, T, D> Cursor<'a, T, D>
 where
     T: Item,
@@ -487,6 +492,71 @@
     }
 }

+impl<'a, T: Item> Iter<'a, T> {
+    pub(crate) fn new(tree: &'a SumTree<T>) -> Self {
+        Self {
+            tree,
+            stack: Default::default(),
+        }
+    }
+}
+
+impl<'a, T: Item> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let mut descend = false;
+
+        if self.stack.is_empty() {
+            self.stack.push(StackEntry {
+                tree: self.tree,
+                index: 0,
+                position: (),
+            });
+            descend = true;
+        }
+
+        while self.stack.len() > 0 {
+            let new_subtree = {
+                let entry = self.stack.last_mut().unwrap();
+                match entry.tree.0.as_ref() {
+                    Node::Internal { child_trees, .. } => {
+                        if !descend {
+                            entry.index += 1;
+                        }
+                        child_trees.get(entry.index)
+                    }
+                    Node::Leaf { items, .. } => {
+                        if !descend {
+                            entry.index += 1;
+                        }
+
+                        if let Some(next_item) = items.get(entry.index) {
+                            return Some(next_item);
+                        } else {
+                            None
+                        }
+                    }
+                }
+            };
+
+            if let Some(subtree) = new_subtree {
+                descend = true;
+                self.stack.push(StackEntry {
+                    tree: subtree,
+                    index: 0,
+                    position: (),
+                });
+            } else {
+                descend = false;
+                self.stack.pop();
+            }
+        }
+
+        None
+    }
+}
+
 impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
 where
     T: Item<Summary = S>,
@@ -1,8 +1,7 @@
 mod cursor;

 use arrayvec::ArrayVec;
-pub use cursor::Cursor;
-pub use cursor::FilterCursor;
+pub use cursor::{Cursor, FilterCursor, Iter};
 use std::marker::PhantomData;
 use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};

@@ -156,6 +155,10 @@ impl<T: Item> SumTree<T> {
         items
     }

+    pub fn iter(&self) -> Iter<T> {
+        Iter::new(self)
+    }
+
     pub fn cursor<'a, S>(&'a self) -> Cursor<T, S>
     where
         S: Dimension<'a, T::Summary>,
@@ -722,6 +725,10 @@ mod tests {
         };

         assert_eq!(tree.items(&()), reference_items);
+        assert_eq!(
+            tree.iter().collect::<Vec<_>>(),
+            tree.cursor::<()>().collect::<Vec<_>>()
+        );

         let mut filter_cursor =
             tree.filter::<_, Count>(|summary| summary.contains_even, &());
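The two hunks above give `SumTree` a plain `iter()` built on an explicit stack of (subtree, index) entries rather than on a dimension-tracking cursor. As a standalone illustration of that traversal pattern, with toy `Node` and `Iter` types invented here and not taken from the crate, the same depth-first walk over a nested tree looks like this:

// Standalone illustration (not the crate's API): an explicit-stack,
// depth-first in-order traversal over a toy tree whose internal nodes hold
// child subtrees and whose leaves hold items.
enum Node<T> {
    Internal { children: Vec<Node<T>> },
    Leaf { items: Vec<T> },
}

struct Iter<'a, T> {
    // Each entry remembers which child or item to visit next in that node.
    stack: Vec<(&'a Node<T>, usize)>,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(entry) = self.stack.last_mut() {
            let node: &'a Node<T> = entry.0;
            let index = entry.1;
            entry.1 += 1;
            match node {
                Node::Internal { children } => {
                    if let Some(child) = children.get(index) {
                        self.stack.push((child, 0));
                    } else {
                        self.stack.pop();
                    }
                }
                Node::Leaf { items } => {
                    if let Some(item) = items.get(index) {
                        return Some(item);
                    } else {
                        self.stack.pop();
                    }
                }
            }
        }
        None
    }
}

fn main() {
    let tree = Node::Internal {
        children: vec![
            Node::Leaf { items: vec![1, 2] },
            Node::Internal {
                children: vec![Node::Leaf { items: vec![3] }],
            },
            Node::Leaf { items: vec![4, 5] },
        ],
    };
    let iter = Iter { stack: vec![(&tree, 0)] };
    assert_eq!(iter.copied().collect::<Vec<_>>(), vec![1, 2, 3, 4, 5]);
}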
@@ -1,94 +1,36 @@
-use super::{FromAnchor, FullOffset, Point, ToOffset};
+use super::{Point, ToOffset};
 use crate::{rope::TextDimension, BufferSnapshot};
 use anyhow::Result;
-use std::{
-    cmp::Ordering,
-    fmt::{Debug, Formatter},
-    ops::Range,
-};
-use sum_tree::{Bias, SumTree};
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;

 #[derive(Clone, Eq, PartialEq, Debug, Hash)]
 pub struct Anchor {
-    pub full_offset: FullOffset,
+    pub timestamp: clock::Local,
+    pub offset: usize,
     pub bias: Bias,
-    pub version: clock::Global,
-}
-
-#[derive(Clone)]
-pub struct AnchorMap<T> {
-    pub(crate) version: clock::Global,
-    pub(crate) bias: Bias,
-    pub(crate) entries: Vec<(FullOffset, T)>,
-}
-
-#[derive(Clone)]
-pub struct AnchorSet(pub(crate) AnchorMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMap<T> {
-    pub(crate) version: clock::Global,
-    pub(crate) entries: Vec<(Range<FullOffset>, T)>,
-    pub(crate) start_bias: Bias,
-    pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMultimap<T: Clone> {
-    pub(crate) entries: SumTree<AnchorRangeMultimapEntry<T>>,
-    pub(crate) version: clock::Global,
-    pub(crate) start_bias: Bias,
-    pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub(crate) struct AnchorRangeMultimapEntry<T> {
-    pub(crate) range: FullOffsetRange,
-    pub(crate) value: T,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct FullOffsetRange {
-    pub(crate) start: FullOffset,
-    pub(crate) end: FullOffset,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct AnchorRangeMultimapSummary {
-    start: FullOffset,
-    end: FullOffset,
-    min_start: FullOffset,
-    max_end: FullOffset,
-    count: usize,
 }

 impl Anchor {
     pub fn min() -> Self {
         Self {
-            full_offset: FullOffset(0),
+            timestamp: clock::Local::MIN,
+            offset: usize::MIN,
             bias: Bias::Left,
-            version: Default::default(),
         }
     }

     pub fn max() -> Self {
         Self {
-            full_offset: FullOffset::MAX,
+            timestamp: clock::Local::MAX,
+            offset: usize::MAX,
             bias: Bias::Right,
-            version: Default::default(),
         }
     }

     pub fn cmp<'a>(&self, other: &Anchor, buffer: &BufferSnapshot) -> Result<Ordering> {
-        if self == other {
-            return Ok(Ordering::Equal);
-        }
-
-        let offset_comparison = if self.version == other.version {
-            self.full_offset.cmp(&other.full_offset)
+        let offset_comparison = if self.timestamp == other.timestamp {
+            self.offset.cmp(&other.offset)
         } else {
             buffer
                 .full_offset_for_anchor(self)
@@ -122,455 +64,10 @@ impl Anchor {
         }
     }
 }

-impl<T> AnchorMap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.entries.len()
-    }
-
-    pub fn iter<'a, D>(
-        &'a self,
-        snapshot: &'a BufferSnapshot,
-    ) -> impl Iterator<Item = (D, &'a T)> + 'a
-    where
-        D: TextDimension,
-    {
-        snapshot
-            .summaries_for_anchors(
-                self.version.clone(),
-                self.bias,
-                self.entries.iter().map(|e| &e.0),
-            )
-            .zip(self.entries.iter().map(|e| &e.1))
-    }
-}
-
-impl AnchorSet {
-    pub fn version(&self) -> &clock::Global {
-        &self.0.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.0.len()
-    }
-
-    pub fn iter<'a, D>(&'a self, content: &'a BufferSnapshot) -> impl Iterator<Item = D> + 'a
-    where
-        D: TextDimension,
-    {
-        self.0.iter(content).map(|(position, _)| position)
-    }
-}
-
-impl<T> AnchorRangeMap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.entries.len()
-    }
-
-    pub fn from_full_offset_ranges(
-        version: clock::Global,
-        start_bias: Bias,
-        end_bias: Bias,
-        entries: Vec<(Range<FullOffset>, T)>,
-    ) -> Self {
-        Self {
-            version,
-            start_bias,
-            end_bias,
-            entries,
-        }
-    }
-
-    pub fn ranges<'a, D>(
-        &'a self,
-        content: &'a BufferSnapshot,
-    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
-    where
-        D: TextDimension,
-    {
-        content
-            .summaries_for_anchor_ranges(
-                self.version.clone(),
-                self.start_bias,
-                self.end_bias,
-                self.entries.iter().map(|e| &e.0),
-            )
-            .zip(self.entries.iter().map(|e| &e.1))
-    }
-
-    pub fn intersecting_ranges<'a, D, I>(
-        &'a self,
-        range: Range<(I, Bias)>,
-        content: &'a BufferSnapshot,
-    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
-    where
-        D: TextDimension,
-        I: ToOffset,
-    {
-        let range = content.anchor_at(range.start.0, range.start.1)
-            ..content.anchor_at(range.end.0, range.end.1);
-
-        let mut probe_anchor = Anchor {
-            full_offset: Default::default(),
-            bias: self.start_bias,
-            version: self.version.clone(),
-        };
-        let start_ix = self.entries.binary_search_by(|probe| {
-            probe_anchor.full_offset = probe.0.end;
-            probe_anchor.cmp(&range.start, &content).unwrap()
-        });
-
-        match start_ix {
-            Ok(start_ix) | Err(start_ix) => content
-                .summaries_for_anchor_ranges(
-                    self.version.clone(),
-                    self.start_bias,
-                    self.end_bias,
-                    self.entries[start_ix..].iter().map(|e| &e.0),
-                )
-                .zip(self.entries.iter().map(|e| &e.1)),
-        }
-    }
-
-    pub fn full_offset_ranges(&self) -> impl Iterator<Item = &(Range<FullOffset>, T)> {
-        self.entries.iter()
-    }
-
-    pub fn min_by_key<'a, D, F, K>(
-        &self,
-        content: &'a BufferSnapshot,
-        mut extract_key: F,
-    ) -> Option<(Range<D>, &T)>
-    where
-        D: TextDimension,
-        F: FnMut(&T) -> K,
-        K: Ord,
-    {
-        self.entries
-            .iter()
-            .min_by_key(|(_, value)| extract_key(value))
-            .map(|(range, value)| (self.resolve_range(range, &content), value))
-    }
-
-    pub fn max_by_key<'a, D, F, K>(
-        &self,
-        content: &'a BufferSnapshot,
-        mut extract_key: F,
-    ) -> Option<(Range<D>, &T)>
-    where
-        D: TextDimension,
-        F: FnMut(&T) -> K,
-        K: Ord,
-    {
-        self.entries
-            .iter()
-            .max_by_key(|(_, value)| extract_key(value))
-            .map(|(range, value)| (self.resolve_range(range, &content), value))
-    }
-
-    fn resolve_range<'a, D>(
-        &self,
-        range: &Range<FullOffset>,
-        content: &'a BufferSnapshot,
-    ) -> Range<D>
-    where
-        D: TextDimension,
-    {
-        let mut anchor = Anchor {
-            full_offset: range.start,
-            bias: self.start_bias,
-            version: self.version.clone(),
-        };
-        let start = content.summary_for_anchor(&anchor);
-
-        anchor.full_offset = range.end;
-        anchor.bias = self.end_bias;
-        let end = content.summary_for_anchor(&anchor);
-
-        start..end
-    }
-}
-
-impl<T: PartialEq> PartialEq for AnchorRangeMap<T> {
-    fn eq(&self, other: &Self) -> bool {
-        self.version == other.version && self.entries == other.entries
-    }
-}
-
-impl<T: Eq> Eq for AnchorRangeMap<T> {}
-
-impl<T: Debug> Debug for AnchorRangeMap<T> {
-    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
-        let mut f = f.debug_map();
-        for (range, value) in &self.entries {
-            f.key(range);
-            f.value(value);
-        }
-        f.finish()
-    }
-}
-
-impl Debug for AnchorRangeSet {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        let mut f = f.debug_set();
-        for (range, _) in &self.0.entries {
-            f.entry(range);
-        }
-        f.finish()
-    }
-}
-
-impl AnchorRangeSet {
-    pub fn len(&self) -> usize {
-        self.0.len()
-    }
-
-    pub fn version(&self) -> &clock::Global {
-        self.0.version()
-    }
-
-    pub fn ranges<'a, D>(
-        &'a self,
-        content: &'a BufferSnapshot,
-    ) -> impl 'a + Iterator<Item = Range<Point>>
-    where
-        D: TextDimension,
-    {
-        self.0.ranges(content).map(|(range, _)| range)
-    }
-}
-
-impl<T: Clone> Default for AnchorRangeMultimap<T> {
-    fn default() -> Self {
-        Self {
-            entries: Default::default(),
-            version: Default::default(),
-            start_bias: Bias::Left,
-            end_bias: Bias::Left,
-        }
-    }
-}
-
-impl<T: Clone> AnchorRangeMultimap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn intersecting_ranges<'a, I, O>(
-        &'a self,
-        range: Range<I>,
-        content: &'a BufferSnapshot,
-        inclusive: bool,
-    ) -> impl Iterator<Item = (usize, Range<O>, &T)> + 'a
-    where
-        I: ToOffset,
-        O: FromAnchor,
-    {
-        let end_bias = if inclusive { Bias::Right } else { Bias::Left };
-        let range = range.start.to_full_offset(&content, Bias::Left)
-            ..range.end.to_full_offset(&content, end_bias);
-        let mut cursor = self.entries.filter::<_, usize>(
-            {
-                let mut endpoint = Anchor {
-                    full_offset: FullOffset(0),
-                    bias: Bias::Right,
-                    version: self.version.clone(),
-                };
-                move |summary: &AnchorRangeMultimapSummary| {
-                    endpoint.full_offset = summary.max_end;
-                    endpoint.bias = self.end_bias;
-                    let max_end = endpoint.to_full_offset(&content, self.end_bias);
-                    let start_cmp = range.start.cmp(&max_end);
-
-                    endpoint.full_offset = summary.min_start;
-                    endpoint.bias = self.start_bias;
-                    let min_start = endpoint.to_full_offset(&content, self.start_bias);
-                    let end_cmp = range.end.cmp(&min_start);
-
-                    if inclusive {
-                        start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
-                    } else {
-                        start_cmp == Ordering::Less && end_cmp == Ordering::Greater
-                    }
-                }
-            },
-            &(),
-        );
-
-        std::iter::from_fn({
-            let mut endpoint = Anchor {
-                full_offset: FullOffset(0),
-                bias: Bias::Left,
-                version: self.version.clone(),
-            };
-            move || {
-                if let Some(item) = cursor.item() {
-                    let ix = *cursor.start();
-                    endpoint.full_offset = item.range.start;
-                    endpoint.bias = self.start_bias;
-                    let start = O::from_anchor(&endpoint, &content);
-                    endpoint.full_offset = item.range.end;
-                    endpoint.bias = self.end_bias;
-                    let end = O::from_anchor(&endpoint, &content);
-                    let value = &item.value;
-                    cursor.next(&());
-                    Some((ix, start..end, value))
-                } else {
-                    None
-                }
-            }
-        })
-    }
-
-    pub fn from_full_offset_ranges(
-        version: clock::Global,
-        start_bias: Bias,
-        end_bias: Bias,
-        entries: impl Iterator<Item = (Range<FullOffset>, T)>,
-    ) -> Self {
-        Self {
-            version,
-            start_bias,
-            end_bias,
-            entries: SumTree::from_iter(
-                entries.map(|(range, value)| AnchorRangeMultimapEntry {
-                    range: FullOffsetRange {
-                        start: range.start,
-                        end: range.end,
-                    },
-                    value,
-                }),
-                &(),
-            ),
-        }
-    }
-
-    pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
-        self.entries
-            .cursor::<()>()
-            .map(|entry| (entry.range.start..entry.range.end, &entry.value))
-    }
-
-    pub fn filter<'a, O, F>(
-        &'a self,
-        content: &'a BufferSnapshot,
-        mut f: F,
-    ) -> impl 'a + Iterator<Item = (usize, Range<O>, &T)>
-    where
-        O: FromAnchor,
-        F: 'a + FnMut(&'a T) -> bool,
-    {
-        let mut endpoint = Anchor {
-            full_offset: FullOffset(0),
-            bias: Bias::Left,
-            version: self.version.clone(),
-        };
-        self.entries
-            .cursor::<()>()
-            .enumerate()
-            .filter_map(move |(ix, entry)| {
-                if f(&entry.value) {
-                    endpoint.full_offset = entry.range.start;
-                    endpoint.bias = self.start_bias;
-                    let start = O::from_anchor(&endpoint, &content);
-                    endpoint.full_offset = entry.range.end;
-                    endpoint.bias = self.end_bias;
-                    let end = O::from_anchor(&endpoint, &content);
-                    Some((ix, start..end, &entry.value))
-                } else {
-                    None
-                }
-            })
-    }
-}
-
-impl<T: Clone> sum_tree::Item for AnchorRangeMultimapEntry<T> {
-    type Summary = AnchorRangeMultimapSummary;
-
-    fn summary(&self) -> Self::Summary {
-        AnchorRangeMultimapSummary {
-            start: self.range.start,
-            end: self.range.end,
-            min_start: self.range.start,
-            max_end: self.range.end,
-            count: 1,
-        }
-    }
-}
-
-impl Default for AnchorRangeMultimapSummary {
-    fn default() -> Self {
-        Self {
-            start: FullOffset(0),
-            end: FullOffset::MAX,
-            min_start: FullOffset::MAX,
-            max_end: FullOffset(0),
-            count: 0,
-        }
-    }
-}
-
-impl sum_tree::Summary for AnchorRangeMultimapSummary {
-    type Context = ();
-
-    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
-        self.min_start = self.min_start.min(other.min_start);
-        self.max_end = self.max_end.max(other.max_end);
-
-        #[cfg(debug_assertions)]
-        {
-            let start_comparison = self.start.cmp(&other.start);
-            assert!(start_comparison <= Ordering::Equal);
-            if start_comparison == Ordering::Equal {
-                assert!(self.end.cmp(&other.end) >= Ordering::Equal);
-            }
-        }
-
-        self.start = other.start;
-        self.end = other.end;
-        self.count += other.count;
-    }
-}
-
-impl Default for FullOffsetRange {
-    fn default() -> Self {
-        Self {
-            start: FullOffset(0),
-            end: FullOffset::MAX,
-        }
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize {
-    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
-        *self += summary.count;
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange {
-    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
-        self.start = summary.start;
-        self.end = summary.end;
-    }
-}
-
-impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange {
-    fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering {
-        Ord::cmp(&self.start, &cursor_location.start)
-            .then_with(|| Ord::cmp(&cursor_location.end, &self.end))
-    }
-}
-
 pub trait AnchorRangeExt {
     fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
     fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
+    fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
 }

 impl AnchorRangeExt for Range<Anchor> {
@@ -584,4 +81,8 @@ impl AnchorRangeExt for Range<Anchor> {
     fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
         self.start.to_offset(&content)..self.end.to_offset(&content)
     }
+
+    fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
+        self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
+    }
 }
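With this change an anchor is identified by the insertion it lives in (its `timestamp`), an offset within that insertion, and a bias, instead of by a full offset plus a version vector. Below is a much-simplified sketch of the comparison strategy in the new `Anchor::cmp`; every type name in it is a stand-in, and the real code resolves positions through a `BufferSnapshot` rather than a closure:

use std::cmp::Ordering;

// Simplified stand-ins for the real types; the actual Anchor lives in
// crates/text/src/anchor.rs.
#[derive(Clone, Copy, PartialEq, Eq)]
struct InsertionId(u32);

#[derive(Clone, Copy)]
struct Anchor {
    insertion: InsertionId, // which insertion the anchor lives inside
    offset: usize,          // offset within that insertion
}

// When two anchors point into the same insertion, their intra-insertion
// offsets can be compared directly; otherwise each anchor has to be resolved
// to a position in the current buffer contents first.
fn cmp(a: Anchor, b: Anchor, resolve: impl Fn(Anchor) -> usize) -> Ordering {
    if a.insertion == b.insertion {
        a.offset.cmp(&b.offset)
    } else {
        resolve(a).cmp(&resolve(b))
    }
}

fn main() {
    let a = Anchor { insertion: InsertionId(1), offset: 2 };
    let b = Anchor { insertion: InsertionId(1), offset: 5 };
    assert_eq!(cmp(a, b, |_| 0), Ordering::Less);
}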
crates/text/src/locator.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+    pub fn min() -> Self {
+        Self(smallvec![u64::MIN])
+    }
+
+    pub fn max() -> Self {
+        Self(smallvec![u64::MAX])
+    }
+
+    pub fn assign(&mut self, other: &Self) {
+        self.0.resize(other.0.len(), 0);
+        self.0.copy_from_slice(&other.0);
+    }
+
+    pub fn between(lhs: &Self, rhs: &Self) -> Self {
+        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+        let mut location = SmallVec::new();
+        for (lhs, rhs) in lhs.zip(rhs) {
+            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+            location.push(mid);
+            if mid > lhs {
+                break;
+            }
+        }
+        Self(location)
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+}
+
+impl Default for Locator {
+    fn default() -> Self {
+        Self::min()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::mem;
+
+    #[gpui::test(iterations = 100)]
+    fn test_locators(mut rng: StdRng) {
+        let mut lhs = Default::default();
+        let mut rhs = Default::default();
+        while lhs == rhs {
+            lhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+            rhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+        }
+
+        if lhs > rhs {
+            mem::swap(&mut lhs, &mut rhs);
+        }
+
+        let middle = Locator::between(&lhs, &rhs);
+        assert!(middle > lhs);
+        assert!(middle < rhs);
+        for ix in 0..middle.0.len() - 1 {
+            assert!(
+                middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+                    || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+            );
+        }
+    }
+}
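`Locator` gives each fragment a dense, totally ordered key: a sequence of `u64` digits compared lexicographically, where `between` finds a key strictly between two distinct keys by pushing a deeper digit once the gap closes. A small usage sketch, assuming the module above is in scope:

// Usage sketch for the new `Locator`: every call to `between` yields a fresh
// key that still sorts between the two endpoints, so fragments can be
// inserted between neighbors without renumbering existing ones.
fn main() {
    let min = Locator::min();
    let max = Locator::max();

    let mid = Locator::between(&min, &max);
    assert!(min < mid && mid < max);

    // Splitting the same interval again keeps producing keys in order.
    let lower = Locator::between(&min, &mid);
    assert!(min < lower && lower < mid);
}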
@@ -1,9 +1,15 @@
-use super::Operation;
 use std::{fmt::Debug, ops::Add};
-use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+    fn lamport_timestamp(&self) -> clock::Lamport;
+}

 #[derive(Clone, Debug)]
-pub struct OperationQueue(SumTree<Operation>);
+struct OperationItem<T>(T);
+
+#[derive(Clone, Debug)]
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);

 #[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
 pub struct OperationKey(clock::Lamport);
@@ -20,7 +26,7 @@ impl OperationKey {
     }
 }

-impl OperationQueue {
+impl<T: Operation> OperationQueue<T> {
     pub fn new() -> Self {
         OperationQueue(SumTree::new())
     }
@@ -29,11 +35,15 @@
         self.0.summary().len
     }

-    pub fn insert(&mut self, mut ops: Vec<Operation>) {
+    pub fn insert(&mut self, mut ops: Vec<T>) {
         ops.sort_by_key(|op| op.lamport_timestamp());
         ops.dedup_by_key(|op| op.lamport_timestamp());
-        self.0
-            .edit(ops.into_iter().map(Edit::Insert).collect(), &());
+        self.0.edit(
+            ops.into_iter()
+                .map(|op| Edit::Insert(OperationItem(op)))
+                .collect(),
+            &(),
+        );
     }

     pub fn drain(&mut self) -> Self {
@@ -42,8 +52,8 @@ impl OperationQueue {
         clone
     }

-    pub fn cursor(&self) -> Cursor<Operation, ()> {
-        self.0.cursor()
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
+        self.0.cursor::<()>().map(|i| &i.0)
     }
 }

@@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey {
     }
 }

-impl Item for Operation {
+impl<T: Operation> Item for OperationItem<T> {
     type Summary = OperationSummary;

     fn summary(&self) -> Self::Summary {
         OperationSummary {
-            key: OperationKey::new(self.lamport_timestamp()),
+            key: OperationKey::new(self.0.lamport_timestamp()),
             len: 1,
         }
     }
 }

-impl KeyedItem for Operation {
+impl<T: Operation> KeyedItem for OperationItem<T> {
     type Key = OperationKey;

     fn key(&self) -> Self::Key {
-        OperationKey::new(self.lamport_timestamp())
+        OperationKey::new(self.0.lamport_timestamp())
     }
 }

@@ -107,21 +117,27 @@ mod tests {
         assert_eq!(queue.len(), 0);

         queue.insert(vec![
-            Operation::Test(clock.tick()),
-            Operation::Test(clock.tick()),
+            TestOperation(clock.tick()),
+            TestOperation(clock.tick()),
         ]);
         assert_eq!(queue.len(), 2);

-        queue.insert(vec![Operation::Test(clock.tick())]);
+        queue.insert(vec![TestOperation(clock.tick())]);
         assert_eq!(queue.len(), 3);

         drop(queue.drain());
         assert_eq!(queue.len(), 0);

-        queue.insert(vec![Operation::Test(clock.tick())]);
+        queue.insert(vec![TestOperation(clock.tick())]);
         assert_eq!(queue.len(), 1);
     }

     #[derive(Clone, Debug, Eq, PartialEq)]
     struct TestOperation(clock::Lamport);
+
+    impl Operation for TestOperation {
+        fn lamport_timestamp(&self) -> clock::Lamport {
+            self.0
+        }
+    }
 }
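`OperationQueue` is now generic over anything implementing the new `Operation` trait, as `TestOperation` in the tests above shows. Here is a sketch of how a caller might plug in its own operation type; `EditOp` and `flush` are hypothetical names, not part of this commit:

// Hypothetical caller-side operation type: anything Clone + Debug that can
// report a Lamport timestamp satisfies the new trait.
#[derive(Clone, Debug)]
struct EditOp {
    timestamp: clock::Lamport,
    text: String,
}

impl Operation for EditOp {
    fn lamport_timestamp(&self) -> clock::Lamport {
        self.timestamp
    }
}

fn flush(queue: &mut OperationQueue<EditOp>) -> Vec<EditOp> {
    // `drain` returns the queued operations (ordered and deduplicated by
    // Lamport timestamp) and leaves the queue empty.
    queue.drain().iter().cloned().collect()
}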
@@ -22,13 +22,13 @@ impl<T: Rng> Iterator for RandomCharIter<T> {

         match self.0.gen_range(0..100) {
             // whitespace
-            0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
+            0..=5 => ['\n'].choose(&mut self.0).copied(),
             // two-byte greek letters
-            20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
-            // three-byte characters
-            33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
-            // four-byte characters
-            46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
+            // 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
+            // // three-byte characters
+            // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
+            // // four-byte characters
+            // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
             // ascii letters
             _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
         }
@@ -1,4 +1,5 @@
-use crate::{rope::TextDimension, AnchorRangeMap, BufferSnapshot, ToOffset, ToPoint};
+use crate::Anchor;
+use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint};
 use std::{cmp::Ordering, ops::Range, sync::Arc};
 use sum_tree::Bias;

@@ -25,7 +26,7 @@ pub struct Selection<T> {
 pub struct SelectionSet {
     pub id: SelectionSetId,
     pub active: bool,
-    pub selections: Arc<AnchorRangeMap<SelectionState>>,
+    pub selections: Arc<[Selection<Anchor>]>,
 }

 #[derive(Debug, Eq, PartialEq)]
@@ -75,6 +76,21 @@ impl<T: ToOffset + ToPoint + Copy + Ord> Selection<T> {
     }
 }

+impl Selection<Anchor> {
+    pub fn resolve<'a, D: 'a + TextDimension>(
+        &'a self,
+        snapshot: &'a BufferSnapshot,
+    ) -> Selection<D> {
+        Selection {
+            id: self.id,
+            start: snapshot.summary_for_anchor(&self.start),
+            end: snapshot.summary_for_anchor(&self.end),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
+    }
+}
+
 impl SelectionSet {
     pub fn len(&self) -> usize {
         self.selections.len()
@@ -82,69 +98,70 @@ impl SelectionSet {

     pub fn selections<'a, D>(
         &'a self,
-        content: &'a BufferSnapshot,
+        snapshot: &'a BufferSnapshot,
     ) -> impl 'a + Iterator<Item = Selection<D>>
     where
         D: TextDimension,
     {
-        self.selections
-            .ranges(content)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
+        let anchors = self
+            .selections
+            .iter()
+            .flat_map(|selection| [&selection.start, &selection.end].into_iter());
+        let mut positions = snapshot.summaries_for_anchors::<D, _>(anchors);
+        self.selections.iter().map(move |selection| Selection {
+            start: positions.next().unwrap(),
+            end: positions.next().unwrap(),
+            goal: selection.goal,
+            reversed: selection.reversed,
+            id: selection.id,
+        })
     }

     pub fn intersecting_selections<'a, D, I>(
         &'a self,
         range: Range<(I, Bias)>,
-        content: &'a BufferSnapshot,
+        snapshot: &'a BufferSnapshot,
     ) -> impl 'a + Iterator<Item = Selection<D>>
     where
         D: TextDimension,
         I: 'a + ToOffset,
     {
-        self.selections
-            .intersecting_ranges(range, content)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
+        let start = snapshot.anchor_at(range.start.0, range.start.1);
+        let end = snapshot.anchor_at(range.end.0, range.end.1);
+        let start_ix = match self
+            .selections
+            .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap())
+        {
+            Ok(ix) | Err(ix) => ix,
+        };
+        let end_ix = match self
+            .selections
+            .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap())
+        {
+            Ok(ix) | Err(ix) => ix,
+        };
+        self.selections[start_ix..end_ix]
+            .iter()
+            .map(|s| s.resolve(snapshot))
     }

-    pub fn oldest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option<Selection<D>>
+    pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option<Selection<D>>
     where
         D: TextDimension,
     {
         self.selections
-            .min_by_key(content, |selection| selection.id)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
+            .iter()
+            .min_by_key(|s| s.id)
+            .map(|s| s.resolve(snapshot))
     }

-    pub fn newest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option<Selection<D>>
+    pub fn newest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option<Selection<D>>
     where
         D: TextDimension,
     {
         self.selections
-            .max_by_key(content, |selection| selection.id)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
+            .iter()
+            .max_by_key(|s| s.id)
+            .map(|s| s.resolve(snapshot))
     }
 }
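The rewritten `intersecting_selections` no longer delegates to an `AnchorRangeMap`; it binary-searches the sorted selections for the first one ending at or after the query start and the first one starting at or after the query end, treating `Ok` and `Err` alike since only the insertion point matters. The same idea on plain offset ranges, as a self-contained sketch:

use std::ops::Range;

// Standalone illustration of the lookup strategy: given disjoint ranges sorted
// by position, binary-search for the boundaries of the overlapping run and
// take the slice between them. `Ok` and `Err` are handled identically because
// only the insertion point is needed, not an exact match.
fn intersecting<'a>(sorted: &'a [Range<usize>], query: Range<usize>) -> &'a [Range<usize>] {
    let start_ix = match sorted.binary_search_by(|probe| probe.end.cmp(&query.start)) {
        Ok(ix) | Err(ix) => ix,
    };
    let end_ix = match sorted.binary_search_by(|probe| probe.start.cmp(&query.end)) {
        Ok(ix) | Err(ix) => ix,
    };
    &sorted[start_ix..end_ix]
}

fn main() {
    let selections = vec![0..2, 4..6, 8..10, 12..14];
    assert_eq!(intersecting(&selections, 5..9).to_vec(), vec![4..6, 8..10]);
}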
@@ -78,6 +78,8 @@ fn test_random_edits(mut rng: StdRng) {
             TextSummary::from(&reference_string[range])
         );

+        buffer.check_invariants();
+
         if rng.gen_bool(0.3) {
             buffer_versions.push((buffer.clone(), buffer.subscribe()));
         }
@@ -603,6 +605,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
             }
             _ => {}
         }
+        buffer.check_invariants();

         if mutation_count == 0 && network.is_idle() {
             break;
@@ -629,6 +632,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
                 .all_selection_ranges::<usize>()
                 .collect::<HashMap<_, _>>()
         );
+        buffer.check_invariants();
     }
 }

@@ -644,6 +648,39 @@ struct Network<T: Clone, R: rand::Rng> {
     rng: R,
 }

+impl Buffer {
+    fn check_invariants(&self) {
+        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+        // to an insertion fragment in the insertions tree.
+        let mut prev_fragment_id = Locator::min();
+        for fragment in self.snapshot.fragments.items(&None) {
+            assert!(fragment.id > prev_fragment_id);
+            prev_fragment_id = fragment.id.clone();
+
+            let insertion_fragment = self
+                .snapshot
+                .insertions
+                .get(
+                    &InsertionFragmentKey {
+                        timestamp: fragment.insertion_timestamp.local(),
+                        split_offset: fragment.insertion_offset,
+                    },
+                    &(),
+                )
+                .unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+        }
+
+        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+        for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+            let fragment = cursor.item().unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+        }
+    }
+}
+
 impl<T: Clone, R: rand::Rng> Network<T, R> {
     fn new(rng: R) -> Self {
         Network {
File diff suppressed because it is too large