Merge branch 'fragment-locators' into HEAD

Max Brunsfeld 2021-12-09 14:49:04 -08:00
commit 5e516f59c0
22 changed files with 1317 additions and 1280 deletions

@@ -1,4 +1,6 @@
use crate::diagnostic_set::DiagnosticEntry;
pub use crate::{
diagnostic_set::DiagnosticSet,
highlight_map::{HighlightId, HighlightMap},
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
PLAIN_TEXT,
@@ -21,6 +23,7 @@ use std::{
ffi::OsString,
future::Future,
iter::{Iterator, Peekable},
mem,
ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
str,
@@ -28,6 +31,7 @@ use std::{
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
vec,
};
use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
@@ -61,9 +65,10 @@ pub struct Buffer {
syntax_tree: Mutex<Option<SyntaxTree>>,
parsing_in_background: bool,
parse_count: usize,
diagnostics: AnchorRangeMultimap<Diagnostic>,
diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
language_server: Option<LanguageServerState>,
deferred_ops: OperationQueue<Operation>,
#[cfg(test)]
pub(crate) operations: Vec<Operation>,
}
@@ -71,7 +76,7 @@ pub struct Buffer {
pub struct BufferSnapshot {
text: text::BufferSnapshot,
tree: Option<Tree>,
diagnostics: AnchorRangeMultimap<Diagnostic>,
diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
is_parsing: bool,
language: Option<Arc<Language>>,
@@ -101,10 +106,13 @@ struct LanguageServerSnapshot {
path: Arc<Path>,
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub enum Operation {
Buffer(text::Operation),
UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
UpdateDiagnostics {
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
lamport_timestamp: clock::Lamport,
},
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -173,8 +181,8 @@ struct SyntaxTree {
struct AutoindentRequest {
selection_set_ids: HashSet<SelectionSetId>,
before_edit: BufferSnapshot,
edited: AnchorSet,
inserted: Option<AnchorRangeSet>,
edited: Vec<Anchor>,
inserted: Option<Vec<Range<Anchor>>>,
}
#[derive(Debug)]
@@ -275,9 +283,11 @@ impl Buffer {
buffer.add_raw_selection_set(set.id, set);
}
let mut this = Self::build(buffer, file);
if let Some(diagnostics) = message.diagnostics {
this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
}
this.apply_diagnostic_update(
Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
cx,
);
Ok(this)
}
@@ -294,7 +304,7 @@ impl Buffer {
.selection_sets()
.map(|(_, set)| proto::serialize_selection_set(set))
.collect(),
diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
}
}
@@ -331,6 +341,7 @@ impl Buffer {
diagnostics: Default::default(),
diagnostics_update_count: 0,
language_server: None,
deferred_ops: OperationQueue::new(),
#[cfg(test)]
operations: Default::default(),
}
@@ -690,6 +701,8 @@ impl Buffer {
mut diagnostics: Vec<lsp::Diagnostic>,
cx: &mut ModelContext<Self>,
) -> Result<Operation> {
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
let version = version.map(|version| version as usize);
let content = if let Some(version) = version {
let language_server = self.language_server.as_mut().unwrap();
@@ -710,91 +723,92 @@ impl Buffer {
.and_then(|language| language.disk_based_diagnostic_sources())
.unwrap_or(&empty_set);
diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
self.diagnostics = {
let mut edits_since_save = content
.edits_since::<PointUtf16>(&self.saved_version)
.peekable();
let mut last_edit_old_end = PointUtf16::zero();
let mut last_edit_new_end = PointUtf16::zero();
let mut group_ids_by_diagnostic_range = HashMap::new();
let mut diagnostics_by_group_id = HashMap::new();
let mut next_group_id = 0;
'outer: for diagnostic in &diagnostics {
let mut start = diagnostic.range.start.to_point_utf16();
let mut end = diagnostic.range.end.to_point_utf16();
let source = diagnostic.source.as_ref();
let code = diagnostic.code.as_ref();
let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
.find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
.copied()
.unwrap_or_else(|| {
let group_id = post_inc(&mut next_group_id);
for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
group_ids_by_diagnostic_range.insert((source, code, range), group_id);
}
group_id
});
if diagnostic
.source
.as_ref()
.map_or(false, |source| disk_based_sources.contains(source))
{
while let Some(edit) = edits_since_save.peek() {
if edit.old.end <= start {
last_edit_old_end = edit.old.end;
last_edit_new_end = edit.new.end;
edits_since_save.next();
} else if edit.old.start <= end && edit.old.end >= start {
continue 'outer;
} else {
break;
}
let mut edits_since_save = content
.edits_since::<PointUtf16>(&self.saved_version)
.peekable();
let mut last_edit_old_end = PointUtf16::zero();
let mut last_edit_new_end = PointUtf16::zero();
let mut group_ids_by_diagnostic_range = HashMap::new();
let mut diagnostics_by_group_id = HashMap::new();
let mut next_group_id = 0;
'outer: for diagnostic in &diagnostics {
let mut start = diagnostic.range.start.to_point_utf16();
let mut end = diagnostic.range.end.to_point_utf16();
let source = diagnostic.source.as_ref();
let code = diagnostic.code.as_ref();
let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
.find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
.copied()
.unwrap_or_else(|| {
let group_id = post_inc(&mut next_group_id);
for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
group_ids_by_diagnostic_range.insert((source, code, range), group_id);
}
group_id
});
start = last_edit_new_end + (start - last_edit_old_end);
end = last_edit_new_end + (end - last_edit_old_end);
}
let mut range = content.clip_point_utf16(start, Bias::Left)
..content.clip_point_utf16(end, Bias::Right);
if range.start == range.end {
range.end.column += 1;
range.end = content.clip_point_utf16(range.end, Bias::Right);
if range.start == range.end && range.end.column > 0 {
range.start.column -= 1;
range.start = content.clip_point_utf16(range.start, Bias::Left);
if diagnostic
.source
.as_ref()
.map_or(false, |source| disk_based_sources.contains(source))
{
while let Some(edit) = edits_since_save.peek() {
if edit.old.end <= start {
last_edit_old_end = edit.old.end;
last_edit_new_end = edit.new.end;
edits_since_save.next();
} else if edit.old.start <= end && edit.old.end >= start {
continue 'outer;
} else {
break;
}
}
diagnostics_by_group_id
.entry(group_id)
.or_insert(Vec::new())
.push((
range,
Diagnostic {
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
message: diagnostic.message.clone(),
group_id,
is_primary: false,
},
));
start = last_edit_new_end + (start - last_edit_old_end);
end = last_edit_new_end + (end - last_edit_old_end);
}
content.anchor_range_multimap(
Bias::Left,
Bias::Right,
diagnostics_by_group_id
.into_values()
.flat_map(|mut diagnostics| {
let primary_diagnostic =
diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap();
primary_diagnostic.1.is_primary = true;
diagnostics
}),
)
};
let mut range = content.clip_point_utf16(start, Bias::Left)
..content.clip_point_utf16(end, Bias::Right);
if range.start == range.end {
range.end.column += 1;
range.end = content.clip_point_utf16(range.end, Bias::Right);
if range.start == range.end && range.end.column > 0 {
range.start.column -= 1;
range.start = content.clip_point_utf16(range.start, Bias::Left);
}
}
diagnostics_by_group_id
.entry(group_id)
.or_insert(Vec::new())
.push(DiagnosticEntry {
range,
diagnostic: Diagnostic {
severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
message: diagnostic.message.clone(),
group_id,
is_primary: false,
},
});
}
drop(edits_since_save);
let mut diagnostics = mem::take(&mut self.diagnostics);
diagnostics.reset(
diagnostics_by_group_id
.into_values()
.flat_map(|mut diagnostics| {
let primary = diagnostics
.iter_mut()
.min_by_key(|entry| entry.diagnostic.severity)
.unwrap();
primary.diagnostic.is_primary = true;
diagnostics
}),
self,
);
self.diagnostics = diagnostics;
if let Some(version) = version {
let language_server = self.language_server.as_mut().unwrap();
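The hunk above walks `edits_since::<PointUtf16>(&self.saved_version)` in order and, for diagnostics coming from disk-based sources, shifts each range by the accumulated delta between old and new coordinates, skipping any diagnostic whose range overlaps an edit. A minimal standalone sketch of that accumulation using plain `usize` offsets (the `Edit` struct and `translate` helper below are illustrative stand-ins, not the crate's types):

```rust
use std::ops::Range;

/// Simplified stand-in for an edit recorded since the last save:
/// `old` is the replaced range in the saved text, `new` is its range now.
struct Edit {
    old: Range<usize>,
    new: Range<usize>,
}

/// Map a range expressed against the saved text into current coordinates.
/// Edits must be sorted and non-overlapping. Returns `None` when the range
/// intersects an edit (the diff skips such diagnostics with `continue 'outer`).
fn translate(range: Range<usize>, edits: &[Edit]) -> Option<Range<usize>> {
    let mut last_old_end = 0;
    let mut last_new_end = 0;
    for edit in edits {
        if edit.old.end <= range.start {
            // Edit lies entirely before the range: remember the shift it causes.
            last_old_end = edit.old.end;
            last_new_end = edit.new.end;
        } else if edit.old.start <= range.end && edit.old.end >= range.start {
            // Edit overlaps the range: the stale diagnostic is discarded.
            return None;
        } else {
            break;
        }
    }
    let start = last_new_end + (range.start - last_old_end);
    let end = last_new_end + (range.end - last_old_end);
    Some(start..end)
}

fn main() {
    // Three characters were inserted at offset 2, so a diagnostic at 5..8 now sits at 8..11.
    let edits = [Edit { old: 2..2, new: 2..5 }];
    assert_eq!(translate(5..8, &edits), Some(8..11));
    // A diagnostic overlapping the edited region is dropped.
    assert_eq!(translate(1..3, &edits), None);
}
```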
@@ -811,32 +825,31 @@ impl Buffer {
self.diagnostics_update_count += 1;
cx.notify();
cx.emit(Event::DiagnosticsUpdated);
Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
Ok(Operation::UpdateDiagnostics {
diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
lamport_timestamp: self.lamport_timestamp(),
})
}
pub fn diagnostics_in_range<'a, T, O>(
&'a self,
search_range: Range<T>,
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + ToOffset,
O: 'a + FromAnchor,
{
self.diagnostics
.intersecting_ranges(search_range, self, true)
.map(move |(_, range, diagnostic)| (range, diagnostic))
self.diagnostics.range(search_range, self, true)
}
pub fn diagnostic_group<'a, O>(
&'a self,
group_id: usize,
) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
O: 'a + FromAnchor,
{
self.diagnostics
.filter(self, move |diagnostic| diagnostic.group_id == group_id)
.map(move |(_, range, diagnostic)| (range, diagnostic))
self.diagnostics.group(group_id, self)
}
pub fn diagnostics_update_count(&self) -> usize {
@@ -879,13 +892,13 @@ impl Buffer {
for request in autoindent_requests {
let old_to_new_rows = request
.edited
.iter::<Point>(&request.before_edit)
.map(|point| point.row)
.iter()
.map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
.zip(
request
.edited
.iter::<Point>(&snapshot)
.map(|point| point.row),
.iter()
.map(|anchor| anchor.summary::<Point>(&snapshot).row),
)
.collect::<BTreeMap<u32, u32>>();
@@ -947,7 +960,8 @@ impl Buffer {
if let Some(inserted) = request.inserted.as_ref() {
let inserted_row_ranges = contiguous_ranges(
inserted
.ranges::<Point>(&snapshot)
.iter()
.map(|range| range.to_point(&snapshot))
.flat_map(|range| range.start.row..range.end.row + 1),
max_rows_between_yields,
);
@@ -1264,17 +1278,17 @@ impl Buffer {
self.pending_autoindent.take();
let autoindent_request = if autoindent && self.language.is_some() {
let before_edit = self.snapshot();
let edited = self.anchor_set(
Bias::Left,
ranges.iter().filter_map(|range| {
let edited = ranges
.iter()
.filter_map(|range| {
let start = range.start.to_point(self);
if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
None
} else {
Some(range.start)
Some(self.anchor_before(range.start))
}
}),
);
})
.collect();
Some((before_edit, edited))
} else {
None
@@ -1289,17 +1303,19 @@ impl Buffer {
let mut inserted = None;
if let Some(first_newline_ix) = first_newline_ix {
let mut delta = 0isize;
inserted = Some(self.anchor_range_set(
Bias::Left,
Bias::Right,
ranges.iter().map(|range| {
let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
let end = (delta + range.start as isize) as usize + new_text_len;
delta +=
(range.end as isize - range.start as isize) + new_text_len as isize;
start..end
}),
));
inserted = Some(
ranges
.iter()
.map(|range| {
let start =
(delta + range.start as isize) as usize + first_newline_ix + 1;
let end = (delta + range.start as isize) as usize + new_text_len;
delta +=
(range.end as isize - range.start as isize) + new_text_len as isize;
self.anchor_before(start)..self.anchor_after(end)
})
.collect(),
);
}
let selection_set_ids = self
@@ -1401,17 +1417,23 @@ impl Buffer {
self.pending_autoindent.take();
let was_dirty = self.is_dirty();
let old_version = self.version.clone();
let mut deferred_ops = Vec::new();
let buffer_ops = ops
.into_iter()
.filter_map(|op| match op {
Operation::Buffer(op) => Some(op),
Operation::UpdateDiagnostics(diagnostics) => {
self.apply_diagnostic_update(diagnostics, cx);
_ => {
if self.can_apply_op(&op) {
self.apply_op(op, cx);
} else {
deferred_ops.push(op);
}
None
}
})
.collect::<Vec<_>>();
self.text.apply_ops(buffer_ops)?;
self.flush_deferred_ops(cx);
self.did_edit(&old_version, was_dirty, cx);
// Notify independently of whether the buffer was edited as the operations could include a
// selection update.
@@ -1419,12 +1441,49 @@ impl Buffer {
Ok(())
}
fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
let mut deferred_ops = Vec::new();
for op in self.deferred_ops.drain().iter().cloned() {
if self.can_apply_op(&op) {
self.apply_op(op, cx);
} else {
deferred_ops.push(op);
}
}
self.deferred_ops.insert(deferred_ops);
}
fn can_apply_op(&self, operation: &Operation) -> bool {
match operation {
Operation::Buffer(_) => {
unreachable!("buffer operations should never be applied at this layer")
}
Operation::UpdateDiagnostics { diagnostics, .. } => {
diagnostics.iter().all(|diagnostic| {
self.text.can_resolve(&diagnostic.range.start)
&& self.text.can_resolve(&diagnostic.range.end)
})
}
}
}
fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
match operation {
Operation::Buffer(_) => {
unreachable!("buffer operations should never be applied at this layer")
}
Operation::UpdateDiagnostics { diagnostics, .. } => {
self.apply_diagnostic_update(diagnostics, cx);
}
}
}
fn apply_diagnostic_update(
&mut self,
diagnostics: AnchorRangeMultimap<Diagnostic>,
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
cx: &mut ModelContext<Self>,
) {
self.diagnostics = diagnostics;
self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
self.diagnostics_update_count += 1;
cx.notify();
}
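The new `deferred_ops` queue lets a remotely received `UpdateDiagnostics` wait until the buffer operations that created its anchors have arrived: `can_apply_op` checks that every anchor resolves, `apply_ops` defers what it cannot apply yet, and `flush_deferred_ops` retries the queue afterwards. A standalone sketch of that pattern with simplified stand-in types (`Op`, `State`, and the version check are illustrative, not the crate's):

```rust
/// Illustrative operation: it can only be applied once the local state
/// has reached at least `requires_version`.
#[derive(Clone, Debug)]
struct Op {
    requires_version: u64,
    payload: &'static str,
}

#[derive(Default)]
struct State {
    version: u64,
    applied: Vec<&'static str>,
    deferred: Vec<Op>,
}

impl State {
    fn can_apply(&self, op: &Op) -> bool {
        op.requires_version <= self.version
    }

    fn apply(&mut self, op: Op) {
        self.applied.push(op.payload);
    }

    /// Mirrors the shape of `apply_ops`: apply what we can, defer the rest, then retry.
    fn apply_ops(&mut self, ops: Vec<Op>, new_version: u64) {
        for op in ops {
            if self.can_apply(&op) {
                self.apply(op);
            } else {
                self.deferred.push(op);
            }
        }
        self.version = new_version;
        self.flush_deferred();
    }

    /// Mirrors the shape of `flush_deferred_ops`: drain, re-check, re-defer what still can't apply.
    fn flush_deferred(&mut self) {
        let mut still_deferred = Vec::new();
        for op in std::mem::take(&mut self.deferred) {
            if self.can_apply(&op) {
                self.apply(op);
            } else {
                still_deferred.push(op);
            }
        }
        self.deferred = still_deferred;
    }
}

fn main() {
    let mut state = State::default();
    // The operation arrives before the state it depends on; it is deferred,
    // then flushed once the version catches up.
    state.apply_ops(vec![Op { requires_version: 1, payload: "diagnostics" }], 1);
    assert_eq!(state.applied, vec!["diagnostics"]);
    assert!(state.deferred.is_empty());
}
```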
@@ -1632,19 +1691,19 @@ impl BufferSnapshot {
let mut highlights = None;
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
if let Some(theme) = theme {
for (_, range, diagnostic) in
self.diagnostics
.intersecting_ranges(range.clone(), self, true)
for entry in self
.diagnostics
.range::<_, usize>(range.clone(), self, true)
{
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: range.start,
offset: entry.range.start,
is_start: true,
severity: diagnostic.severity,
severity: entry.diagnostic.severity,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
offset: range.end,
offset: entry.range.end,
is_start: false,
severity: diagnostic.severity,
severity: entry.diagnostic.severity,
});
}
diagnostic_endpoints
@@ -1939,6 +1998,19 @@ impl ToPointUtf16 for lsp::Position {
}
}
impl operation_queue::Operation for Operation {
fn lamport_timestamp(&self) -> clock::Lamport {
match self {
Operation::Buffer(_) => {
unreachable!("buffer operations should never be deferred at this layer")
}
Operation::UpdateDiagnostics {
lamport_timestamp, ..
} => *lamport_timestamp,
}
}
}
fn diagnostic_ranges<'a>(
diagnostic: &'a lsp::Diagnostic,
abs_path: Option<&'a Path>,
@@ -1968,7 +2040,7 @@ fn diagnostic_ranges<'a>(
}
pub fn contiguous_ranges(
values: impl IntoIterator<Item = u32>,
values: impl Iterator<Item = u32>,
max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
let mut values = values.into_iter();
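The only change to `contiguous_ranges` is its parameter type, from `impl IntoIterator<Item = u32>` to `impl Iterator<Item = u32>`, which is why the tests at the end of this commit add explicit `.into_iter()` calls. For reference, a standalone sketch with the behavior the tests expect, grouping ascending row numbers into half-open ranges and splitting a group once it reaches `max_len` (an illustration matching `test_contiguous_ranges`, not the crate's exact body):

```rust
use std::ops::Range;

/// Group ascending values into contiguous half-open ranges, never letting a
/// single range cover more than `max_len` values.
fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.peekable();
    std::iter::from_fn(move || {
        let start = values.next()?;
        let mut end = start + 1;
        // Extend the range while the next value is consecutive and the cap allows it.
        while (end - start) as usize < max_len && values.peek() == Some(&end) {
            values.next();
            end += 1;
        }
        Some(start..end)
    })
}

fn main() {
    let rows = [1, 2, 3, 5, 6, 9, 10, 11, 12];
    assert_eq!(
        contiguous_ranges(rows.into_iter(), 100).collect::<Vec<_>>(),
        vec![1..4, 5..7, 9..13]
    );
    let rows = [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31];
    assert_eq!(
        contiguous_ranges(rows.into_iter(), 3).collect::<Vec<_>>(),
        vec![2..5, 5..8, 8..10, 23..26, 26..27, 30..32]
    );
}
```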

@@ -0,0 +1,165 @@
use crate::Diagnostic;
use std::{
cmp::{Ordering, Reverse},
iter,
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
#[derive(Clone, Default)]
pub struct DiagnosticSet {
diagnostics: SumTree<DiagnosticEntry<Anchor>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticEntry<T> {
pub range: Range<T>,
pub diagnostic: Diagnostic,
}
#[derive(Clone, Debug)]
pub struct Summary {
start: Anchor,
end: Anchor,
min_start: Anchor,
max_end: Anchor,
count: usize,
}
impl DiagnosticSet {
pub fn from_sorted_entries<I>(iter: I, buffer: &text::Snapshot) -> Self
where
I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
{
Self {
diagnostics: SumTree::from_iter(iter, buffer),
}
}
pub fn reset<I>(&mut self, iter: I, buffer: &text::Snapshot)
where
I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
{
let mut entries = iter.into_iter().collect::<Vec<_>>();
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
self.diagnostics = SumTree::from_iter(
entries.into_iter().map(|entry| DiagnosticEntry {
range: buffer.anchor_before(entry.range.start)
..buffer.anchor_after(entry.range.end),
diagnostic: entry.diagnostic,
}),
buffer,
);
}
pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
self.diagnostics.iter()
}
pub fn range<'a, T, O>(
&'a self,
range: Range<T>,
buffer: &'a text::Snapshot,
inclusive: bool,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + ToOffset,
O: FromAnchor,
{
let end_bias = if inclusive { Bias::Right } else { Bias::Left };
let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
let mut cursor = self.diagnostics.filter::<_, ()>(
{
move |summary: &Summary| {
let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
if inclusive {
start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
} else {
start_cmp == Ordering::Less && end_cmp == Ordering::Greater
}
}
},
buffer,
);
iter::from_fn({
move || {
if let Some(diagnostic) = cursor.item() {
cursor.next(buffer);
Some(diagnostic.resolve(buffer))
} else {
None
}
}
})
}
pub fn group<'a, O: FromAnchor>(
&'a self,
group_id: usize,
buffer: &'a text::Snapshot,
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
self.iter()
.filter(move |entry| entry.diagnostic.group_id == group_id)
.map(|entry| entry.resolve(buffer))
}
}
impl sum_tree::Item for DiagnosticEntry<Anchor> {
type Summary = Summary;
fn summary(&self) -> Self::Summary {
Summary {
start: self.range.start.clone(),
end: self.range.end.clone(),
min_start: self.range.start.clone(),
max_end: self.range.end.clone(),
count: 1,
}
}
}
impl DiagnosticEntry<Anchor> {
pub fn resolve<O: FromAnchor>(&self, buffer: &text::Snapshot) -> DiagnosticEntry<O> {
DiagnosticEntry {
range: O::from_anchor(&self.range.start, buffer)
..O::from_anchor(&self.range.end, buffer),
diagnostic: self.diagnostic.clone(),
}
}
}
impl Default for Summary {
fn default() -> Self {
Self {
start: Anchor::min(),
end: Anchor::max(),
min_start: Anchor::max(),
max_end: Anchor::min(),
count: 0,
}
}
}
impl sum_tree::Summary for Summary {
type Context = text::Snapshot;
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
if other
.min_start
.cmp(&self.min_start, buffer)
.unwrap()
.is_lt()
{
self.min_start = other.min_start.clone();
}
if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
self.max_end = other.max_end.clone();
}
self.start = other.start.clone();
self.end = other.end.clone();
self.count += other.count;
}
}
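The new `DiagnosticSet` keeps its entries in a `SumTree` whose `Summary` records the minimum start and maximum end anchor of each subtree, so `range` can filter out whole subtrees that cannot intersect the query before resolving any anchors. A standalone sketch of that pruning predicate over plain offsets (the `Summary` here is a simplified stand-in, not the sum-tree type):

```rust
use std::ops::Range;

/// Simplified per-subtree summary: the smallest start and largest end
/// of any entry beneath this node.
#[derive(Clone, Copy)]
struct Summary {
    min_start: usize,
    max_end: usize,
}

impl Summary {
    fn from_entries(entries: &[Range<usize>]) -> Self {
        Summary {
            min_start: entries.iter().map(|r| r.start).min().unwrap_or(usize::MAX),
            max_end: entries.iter().map(|r| r.end).max().unwrap_or(0),
        }
    }

    /// Mirrors the filter in `DiagnosticSet::range`: a subtree can only contain
    /// matches if the query starts before its max end and ends after its min start.
    fn may_intersect(&self, query: &Range<usize>, inclusive: bool) -> bool {
        if inclusive {
            query.start <= self.max_end && query.end >= self.min_start
        } else {
            query.start < self.max_end && query.end > self.min_start
        }
    }
}

fn main() {
    let left = Summary::from_entries(&[0..4, 2..6]);
    let right = Summary::from_entries(&[10..12, 15..20]);
    let query = 5..8;
    // Only the left subtree can hold entries overlapping 5..8.
    assert!(left.may_intersect(&query, true));
    assert!(!right.may_intersect(&query, true));
}
```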

@@ -1,4 +1,5 @@
mod buffer;
mod diagnostic_set;
mod highlight_map;
pub mod multi_buffer;
pub mod proto;
@@ -8,6 +9,7 @@ mod tests;
use anyhow::{anyhow, Result};
pub use buffer::Operation;
pub use buffer::*;
pub use diagnostic_set::DiagnosticEntry;
use gpui::{executor::Background, AppContext};
use highlight_map::HighlightMap;
use lazy_static::lazy_static;

@@ -1,6 +1,6 @@
use std::sync::Arc;
use crate::{Diagnostic, Operation};
use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use lsp::DiagnosticSeverity;
@@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
replica_id: set_id.replica_id as u32,
local_timestamp: set_id.value,
lamport_timestamp: lamport_timestamp.value,
version: selections.version().into(),
selections: selections
.full_offset_ranges()
.map(|(range, state)| proto::Selection {
id: state.id as u64,
start: range.start.0 as u64,
end: range.end.0 as u64,
reversed: state.reversed,
.iter()
.map(|selection| proto::Selection {
id: selection.id as u64,
start: Some(serialize_anchor(&selection.start)),
end: Some(serialize_anchor(&selection.end)),
reversed: selection.reversed,
})
.collect(),
}),
@@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
lamport_timestamp: lamport_timestamp.value,
},
),
Operation::UpdateDiagnostics(diagnostic_set) => {
proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
}
Operation::UpdateDiagnostics {
diagnostics,
lamport_timestamp,
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
}),
}
}
@@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
}
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
let version = set.selections.version();
let entries = set.selections.full_offset_ranges();
proto::SelectionSet {
replica_id: set.id.replica_id as u32,
lamport_timestamp: set.id.value as u32,
is_active: set.active,
version: version.into(),
selections: entries
.map(|(range, state)| proto::Selection {
id: state.id as u64,
start: range.start.0 as u64,
end: range.end.0 as u64,
reversed: state.reversed,
selections: set
.selections
.iter()
.map(|selection| proto::Selection {
id: selection.id as u64,
start: Some(serialize_anchor(&selection.start)),
end: Some(serialize_anchor(&selection.end)),
reversed: selection.reversed,
})
.collect(),
}
}
pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
proto::DiagnosticSet {
version: map.version().into(),
diagnostics: map
.full_offset_ranges()
.map(|(range, diagnostic)| proto::Diagnostic {
start: range.start.0 as u64,
end: range.end.0 as u64,
message: diagnostic.message.clone(),
severity: match diagnostic.severity {
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
_ => proto::diagnostic::Severity::None,
} as i32,
group_id: diagnostic.group_id as u64,
is_primary: diagnostic.is_primary,
})
.collect(),
pub fn serialize_diagnostics<'a>(
diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
) -> Vec<proto::Diagnostic> {
diagnostics
.into_iter()
.map(|entry| proto::Diagnostic {
start: Some(serialize_anchor(&entry.range.start)),
end: Some(serialize_anchor(&entry.range.end)),
message: entry.diagnostic.message.clone(),
severity: match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
_ => proto::diagnostic::Severity::None,
} as i32,
group_id: entry.diagnostic.group_id as u64,
is_primary: entry.diagnostic.is_primary,
})
.collect()
}
fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
proto::Anchor {
replica_id: anchor.timestamp.replica_id as u32,
local_timestamp: anchor.timestamp.value,
offset: anchor.offset as u64,
bias: match anchor.bias {
Bias::Left => proto::Bias::Left as i32,
Bias::Right => proto::Bias::Right as i32,
},
}
}
@@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
},
}),
proto::operation::Variant::UpdateSelections(message) => {
let version = message.version.into();
let entries = message
let selections = message
.selections
.iter()
.map(|selection| {
let range = FullOffset(selection.start as usize)
..FullOffset(selection.end as usize);
let state = SelectionState {
.into_iter()
.filter_map(|selection| {
Some(Selection {
id: selection.id as usize,
start: deserialize_anchor(selection.start?)?,
end: deserialize_anchor(selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
};
(range, state)
})
})
.collect();
let selections = AnchorRangeMap::from_full_offset_ranges(
version,
Bias::Left,
Bias::Left,
entries,
);
.collect::<Vec<_>>();
Operation::Buffer(text::Operation::UpdateSelections {
set_id: clock::Lamport {
@@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
},
})
}
proto::operation::Variant::UpdateDiagnostics(message) => {
Operation::UpdateDiagnostics(deserialize_diagnostics(message))
}
proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)),
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
},
},
)
}
@@ -277,36 +287,32 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
value: set.lamport_timestamp,
},
active: set.is_active,
selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
set.version.into(),
Bias::Left,
Bias::Left,
selections: Arc::from(
set.selections
.into_iter()
.map(|selection| {
let range =
FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
let state = SelectionState {
.filter_map(|selection| {
Some(Selection {
id: selection.id as usize,
start: deserialize_anchor(selection.start?)?,
end: deserialize_anchor(selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
};
(range, state)
})
})
.collect(),
)),
.collect::<Vec<_>>(),
),
}
}
pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
AnchorRangeMultimap::from_full_offset_ranges(
message.version.into(),
Bias::Left,
Bias::Right,
message.diagnostics.into_iter().filter_map(|diagnostic| {
Some((
FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
Diagnostic {
pub fn deserialize_diagnostics(
diagnostics: Vec<proto::Diagnostic>,
) -> Vec<DiagnosticEntry<Anchor>> {
diagnostics
.into_iter()
.filter_map(|diagnostic| {
Some(DiagnosticEntry {
range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
diagnostic: Diagnostic {
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
@@ -318,7 +324,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult
group_id: diagnostic.group_id as usize,
is_primary: diagnostic.is_primary,
},
))
}),
)
})
})
.collect()
}
fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
Some(Anchor {
timestamp: clock::Local {
replica_id: anchor.replica_id as ReplicaId,
value: anchor.local_timestamp,
},
offset: anchor.offset as usize,
bias: match proto::Bias::from_i32(anchor.bias)? {
proto::Bias::Left => Bias::Left,
proto::Bias::Right => Bias::Right,
},
})
}
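Selections and diagnostics are now serialized as full anchors (replica id, local timestamp, offset, and bias) instead of as offsets resolved against a version vector, and deserialization uses `filter_map` so a malformed entry is dropped rather than failing the whole message. A standalone round-trip sketch with simplified stand-in types (these are not the crate's `proto` definitions):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Bias {
    Left,
    Right,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Anchor {
    replica_id: u16,
    local_timestamp: u32,
    offset: usize,
    bias: Bias,
}

/// Wire form: every field flattened to integers, bias as an i32 tag.
#[derive(Clone, Copy, Debug)]
struct ProtoAnchor {
    replica_id: u32,
    local_timestamp: u32,
    offset: u64,
    bias: i32,
}

fn serialize_anchor(anchor: &Anchor) -> ProtoAnchor {
    ProtoAnchor {
        replica_id: anchor.replica_id as u32,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as u64,
        bias: match anchor.bias {
            Bias::Left => 0,
            Bias::Right => 1,
        },
    }
}

/// Returns `None` for an unrecognized bias tag, mirroring the `filter_map`
/// style used in `deserialize_diagnostics` and `deserialize_selection_set`.
fn deserialize_anchor(anchor: ProtoAnchor) -> Option<Anchor> {
    Some(Anchor {
        replica_id: anchor.replica_id as u16,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as usize,
        bias: match anchor.bias {
            0 => Bias::Left,
            1 => Bias::Right,
            _ => return None,
        },
    })
}

fn main() {
    let anchor = Anchor { replica_id: 1, local_timestamp: 42, offset: 10, bias: Bias::Right };
    assert_eq!(deserialize_anchor(serialize_anchor(&anchor)), Some(anchor));
}
```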

@@ -539,27 +539,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
// The diagnostics have moved down since they were created.
assert_eq!(
buffer
.diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
.diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
.collect::<Vec<_>>(),
&[
(
Point::new(3, 9)..Point::new(3, 11),
&Diagnostic {
DiagnosticEntry {
range: Point::new(3, 9)..Point::new(3, 11),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'BB'".to_string(),
group_id: 1,
is_primary: true,
},
),
(
Point::new(4, 9)..Point::new(4, 12),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(4, 9)..Point::new(4, 12),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'CCC'".to_string(),
group_id: 2,
is_primary: true,
}
)
}
]
);
assert_eq!(
@@ -606,27 +606,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.unwrap();
assert_eq!(
buffer
.diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
.diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
.collect::<Vec<_>>(),
&[
(
Point::new(2, 9)..Point::new(2, 12),
&Diagnostic {
DiagnosticEntry {
range: Point::new(2, 9)..Point::new(2, 12),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "unreachable statement".to_string(),
group_id: 1,
is_primary: true,
}
),
(
Point::new(2, 9)..Point::new(2, 10),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(2, 9)..Point::new(2, 10),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'A'".to_string(),
group_id: 0,
is_primary: true,
},
)
}
]
);
assert_eq!(
@@ -685,27 +685,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.unwrap();
assert_eq!(
buffer
.diagnostics_in_range(0..buffer.len())
.diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>(),
&[
(
Point::new(2, 21)..Point::new(2, 22),
&Diagnostic {
DiagnosticEntry {
range: Point::new(2, 21)..Point::new(2, 22),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'A'".to_string(),
group_id: 0,
is_primary: true,
}
),
(
Point::new(3, 9)..Point::new(3, 11),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(3, 9)..Point::new(3, 11),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'BB'".to_string(),
group_id: 1,
is_primary: true,
},
)
}
]
);
});
@@ -873,107 +873,107 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
.diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>(),
&[
(
Point::new(1, 8)..Point::new(1, 9),
&Diagnostic {
DiagnosticEntry {
range: Point::new(1, 8)..Point::new(1, 9),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "error 1".to_string(),
group_id: 0,
is_primary: true,
}
),
(
Point::new(1, 8)..Point::new(1, 9),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(1, 8)..Point::new(1, 9),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 1 hint 1".to_string(),
group_id: 0,
is_primary: false,
}
),
(
Point::new(1, 13)..Point::new(1, 15),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(1, 13)..Point::new(1, 15),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 1".to_string(),
group_id: 1,
is_primary: false,
}
),
(
Point::new(1, 13)..Point::new(1, 15),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(1, 13)..Point::new(1, 15),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 2".to_string(),
group_id: 1,
is_primary: false,
}
),
(
Point::new(2, 8)..Point::new(2, 17),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(2, 8)..Point::new(2, 17),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "error 2".to_string(),
group_id: 1,
is_primary: true,
}
)
}
]
);
assert_eq!(
buffer.diagnostic_group(0).collect::<Vec<_>>(),
buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
&[
(
Point::new(1, 8)..Point::new(1, 9),
&Diagnostic {
DiagnosticEntry {
range: Point::new(1, 8)..Point::new(1, 9),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "error 1".to_string(),
group_id: 0,
is_primary: true,
}
),
(
Point::new(1, 8)..Point::new(1, 9),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(1, 8)..Point::new(1, 9),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 1 hint 1".to_string(),
group_id: 0,
is_primary: false,
}
),
},
]
);
assert_eq!(
buffer.diagnostic_group(1).collect::<Vec<_>>(),
buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
&[
(
Point::new(1, 13)..Point::new(1, 15),
&Diagnostic {
DiagnosticEntry {
range: Point::new(1, 13)..Point::new(1, 15),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 1".to_string(),
group_id: 1,
is_primary: false,
}
),
(
Point::new(1, 13)..Point::new(1, 15),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(1, 13)..Point::new(1, 15),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 2".to_string(),
group_id: 1,
is_primary: false,
}
),
(
Point::new(2, 8)..Point::new(2, 17),
&Diagnostic {
},
DiagnosticEntry {
range: Point::new(2, 8)..Point::new(2, 17),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "error 2".to_string(),
group_id: 1,
is_primary: true,
}
)
}
]
);
@@ -1002,13 +1002,17 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
#[test]
fn test_contiguous_ranges() {
assert_eq!(
contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
&[1..4, 5..7, 9..13]
);
// Respects the `max_len` parameter
assert_eq!(
contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
contiguous_ranges(
[2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
3
)
.collect::<Vec<_>>(),
&[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
);
}