diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs
index 6e8b460861..2632aecce5 100644
--- a/crates/clock/src/clock.rs
+++ b/crates/clock/src/clock.rs
@@ -21,6 +21,15 @@ pub struct Lamport {
 }
 
 impl Local {
+    pub const MIN: Self = Self {
+        replica_id: ReplicaId::MIN,
+        value: Seq::MIN,
+    };
+    pub const MAX: Self = Self {
+        replica_id: ReplicaId::MAX,
+        value: Seq::MAX,
+    };
+
     pub fn new(replica_id: ReplicaId) -> Self {
         Self {
             replica_id,
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs
index ae1143d0fa..e14991614a 100644
--- a/crates/editor/src/editor.rs
+++ b/crates/editor/src/editor.rs
@@ -407,7 +407,7 @@ struct SelectNextState {
 
 #[derive(Debug)]
 struct BracketPairState {
-    ranges: AnchorRangeSet,
+    ranges: Vec<Range<Anchor>>,
     pair: BracketPair,
 }
 
@@ -1292,10 +1292,9 @@ impl Editor {
     }
 
     fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
-        let selections = self.selections::<usize>(cx);
-        let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
-            let buffer_snapshot = buffer.snapshot(cx);
-            let autoclose_pair = buffer_snapshot.language().and_then(|language| {
+        let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+        let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| {
+            let autoclose_pair = buffer.language().and_then(|language| {
                 let first_selection_start = selections.first().unwrap().start;
                 let pair = language.brackets().iter().find(|pair| {
                     buffer_snapshot.contains_str_at(
@@ -1333,15 +1332,14 @@ impl Editor {
                 if pair.end.len() == 1 {
                     let mut delta = 0;
                     Some(BracketPairState {
-                        ranges: buffer.anchor_range_set(
-                            Bias::Left,
-                            Bias::Right,
-                            selections.iter().map(move |selection| {
+                        ranges: selections
+                            .iter()
+                            .map(move |selection| {
                                 let offset = selection.start + delta;
                                 delta += 1;
-                                offset..offset
-                            }),
-                        ),
+                                buffer.anchor_before(offset)..buffer.anchor_after(offset)
+                            })
+                            .collect(),
                         pair,
                     })
                 } else {
@@ -1349,26 +1347,26 @@ impl Editor {
                 }
             })
         });
-        self.autoclose_stack.extend(new_autoclose_pair_state);
+        self.autoclose_stack.extend(new_autoclose_pair);
     }
 
     fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
-        let old_selections = self.selections::<usize>(cx);
-        let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() {
-            autoclose_pair_state
+        let old_selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+        let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
+            autoclose_pair
         } else {
             return false;
         };
-        if text != autoclose_pair_state.pair.end {
+        if text != autoclose_pair.pair.end {
             return false;
         }
 
-        debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len());
+        debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
 
         let buffer = self.buffer.read(cx).snapshot(cx);
         if old_selections
             .iter()
-            .zip(autoclose_pair_state.ranges.ranges::<usize>(&buffer))
+            .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
             .all(|(selection, autoclose_range)| {
                 let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
                 selection.is_empty() && selection.start == autoclose_range_end
@@ -2832,12 +2830,12 @@ impl Editor {
         loop {
             let next_group = buffer
                 .diagnostics_in_range::<_, usize>(search_start..buffer.len())
-                .find_map(|(range, diagnostic)| {
-                    if diagnostic.is_primary
-                        && !range.is_empty()
-                        && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end())
+                .find_map(|entry| {
+                    if entry.diagnostic.is_primary
+                        && !entry.range.is_empty()
+                        && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
                     {
-                        Some((range,
diagnostic.group_id)) + Some((entry.range, entry.diagnostic.group_id)) } else { None } @@ -2872,11 +2870,11 @@ impl Editor { let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); let is_valid = buffer .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) - .any(|(range, diagnostic)| { - diagnostic.is_primary - && !range.is_empty() - && range.start == primary_range_start - && diagnostic.message == active_diagnostics.primary_message + .any(|entry| { + entry.diagnostic.is_primary + && !entry.range.is_empty() + && entry.range.start == primary_range_start + && entry.diagnostic.message == active_diagnostics.primary_message }); if is_valid != active_diagnostics.is_valid { @@ -2907,15 +2905,15 @@ impl Editor { let mut group_end = Point::zero(); let diagnostic_group = buffer .diagnostic_group::(group_id) - .map(|(range, diagnostic)| { - if range.end > group_end { - group_end = range.end; + .map(|entry| { + if entry.range.end > group_end { + group_end = entry.range.end; } - if diagnostic.is_primary { - primary_range = Some(range.clone()); - primary_message = Some(diagnostic.message.clone()); + if entry.diagnostic.is_primary { + primary_range = Some(entry.range.clone()); + primary_message = Some(entry.diagnostic.message.clone()); } - (range, diagnostic.clone()) + entry }) .collect::>(); let primary_range = primary_range.unwrap(); @@ -2925,13 +2923,13 @@ impl Editor { let blocks = display_map .insert_blocks( - diagnostic_group.iter().map(|(range, diagnostic)| { + diagnostic_group.iter().map(|entry| { let build_settings = self.build_settings.clone(); - let diagnostic = diagnostic.clone(); + let diagnostic = entry.diagnostic.clone(); let message_height = diagnostic.message.lines().count() as u8; BlockProperties { - position: range.start, + position: entry.range.start, height: message_height, render: Arc::new(move |cx| { let settings = build_settings.borrow()(cx.cx); @@ -2944,11 +2942,7 @@ impl Editor { cx, ) .into_iter() - .zip( - diagnostic_group - .into_iter() - .map(|(_, diagnostic)| diagnostic), - ) + .zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic)) .collect(); Some(ActiveDiagnosticGroup { @@ -3171,12 +3165,12 @@ impl Editor { self.add_selections_state = None; self.select_next_state = None; self.select_larger_syntax_node_stack.clear(); - while let Some(autoclose_pair_state) = self.autoclose_stack.last() { + while let Some(autoclose_pair) = self.autoclose_stack.last() { let all_selections_inside_autoclose_ranges = - if selections.len() == autoclose_pair_state.ranges.len() { + if selections.len() == autoclose_pair.ranges.len() { selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(&buffer)) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer))) .all(|(selection, autoclose_range)| { let head = selection.head().to_point(&buffer); autoclose_range.start <= head && autoclose_range.end >= head diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 1c7b4a25f5..fe4336126a 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -324,14 +324,13 @@ impl DiagnosticMessage { fn update(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); - let cursor_position = editor.newest_selection(cx).head(); - let new_diagnostic = editor - .buffer() - .read(cx) - .diagnostics_in_range::(cursor_position..cursor_position) - .filter(|(range, _)| !range.is_empty()) - .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len())) - .map(|(_, diagnostic)| 
diagnostic.clone()); + let cursor_position = editor.newest_selection::(cx).head(); + let buffer = editor.buffer().read(cx); + let new_diagnostic = buffer + .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) + .filter(|entry| !entry.range.is_empty()) + .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .map(|entry| entry.diagnostic); if new_diagnostic != self.diagnostic { self.diagnostic = new_diagnostic; cx.notify(); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index aadf5c5d59..f2cc7f1e86 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,6 @@ +use crate::diagnostic_set::DiagnosticEntry; pub use crate::{ + diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig, PLAIN_TEXT, @@ -21,6 +23,7 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, + mem, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -28,6 +31,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; +use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -61,9 +65,10 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, language_server: Option, + deferred_ops: OperationQueue, #[cfg(test)] pub(crate) operations: Vec, } @@ -71,7 +76,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, tree: Option, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, is_parsing: bool, language: Option>, @@ -101,10 +106,13 @@ struct LanguageServerSnapshot { path: Arc, } -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum Operation { Buffer(text::Operation), - UpdateDiagnostics(AnchorRangeMultimap), + UpdateDiagnostics { + diagnostics: Arc<[DiagnosticEntry]>, + lamport_timestamp: clock::Lamport, + }, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -173,8 +181,8 @@ struct SyntaxTree { struct AutoindentRequest { selection_set_ids: HashSet, before_edit: BufferSnapshot, - edited: AnchorSet, - inserted: Option, + edited: Vec, + inserted: Option>>, } #[derive(Debug)] @@ -275,9 +283,11 @@ impl Buffer { buffer.add_raw_selection_set(set.id, set); } let mut this = Self::build(buffer, file); - if let Some(diagnostics) = message.diagnostics { - this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx); - } + this.apply_diagnostic_update( + Arc::from(proto::deserialize_diagnostics(message.diagnostics)), + cx, + ); + Ok(this) } @@ -294,7 +304,7 @@ impl Buffer { .selection_sets() .map(|(_, set)| proto::serialize_selection_set(set)) .collect(), - diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)), + diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), } } @@ -331,6 +341,7 @@ impl Buffer { diagnostics: Default::default(), diagnostics_update_count: 0, language_server: None, + deferred_ops: OperationQueue::new(), #[cfg(test)] operations: Default::default(), } @@ -690,6 +701,8 @@ impl Buffer { mut diagnostics: Vec, cx: &mut ModelContext, ) -> Result { + diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); + let version = version.map(|version| version as usize); let content = if let Some(version) = version { 
let language_server = self.language_server.as_mut().unwrap(); @@ -710,91 +723,92 @@ impl Buffer { .and_then(|language| language.disk_based_diagnostic_sources()) .unwrap_or(&empty_set); - diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); - self.diagnostics = { - let mut edits_since_save = content - .edits_since::(&self.saved_version) - .peekable(); - let mut last_edit_old_end = PointUtf16::zero(); - let mut last_edit_new_end = PointUtf16::zero(); - let mut group_ids_by_diagnostic_range = HashMap::new(); - let mut diagnostics_by_group_id = HashMap::new(); - let mut next_group_id = 0; - 'outer: for diagnostic in &diagnostics { - let mut start = diagnostic.range.start.to_point_utf16(); - let mut end = diagnostic.range.end.to_point_utf16(); - let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref(); - let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) - .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) - .copied() - .unwrap_or_else(|| { - let group_id = post_inc(&mut next_group_id); - for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { - group_ids_by_diagnostic_range.insert((source, code, range), group_id); - } - group_id - }); - - if diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)) - { - while let Some(edit) = edits_since_save.peek() { - if edit.old.end <= start { - last_edit_old_end = edit.old.end; - last_edit_new_end = edit.new.end; - edits_since_save.next(); - } else if edit.old.start <= end && edit.old.end >= start { - continue 'outer; - } else { - break; - } + let mut edits_since_save = content + .edits_since::(&self.saved_version) + .peekable(); + let mut last_edit_old_end = PointUtf16::zero(); + let mut last_edit_new_end = PointUtf16::zero(); + let mut group_ids_by_diagnostic_range = HashMap::new(); + let mut diagnostics_by_group_id = HashMap::new(); + let mut next_group_id = 0; + 'outer: for diagnostic in &diagnostics { + let mut start = diagnostic.range.start.to_point_utf16(); + let mut end = diagnostic.range.end.to_point_utf16(); + let source = diagnostic.source.as_ref(); + let code = diagnostic.code.as_ref(); + let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) + .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) + .copied() + .unwrap_or_else(|| { + let group_id = post_inc(&mut next_group_id); + for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { + group_ids_by_diagnostic_range.insert((source, code, range), group_id); } + group_id + }); - start = last_edit_new_end + (start - last_edit_old_end); - end = last_edit_new_end + (end - last_edit_old_end); - } - - let mut range = content.clip_point_utf16(start, Bias::Left) - ..content.clip_point_utf16(end, Bias::Right); - if range.start == range.end { - range.end.column += 1; - range.end = content.clip_point_utf16(range.end, Bias::Right); - if range.start == range.end && range.end.column > 0 { - range.start.column -= 1; - range.start = content.clip_point_utf16(range.start, Bias::Left); + if diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)) + { + while let Some(edit) = edits_since_save.peek() { + if edit.old.end <= start { + last_edit_old_end = edit.old.end; + last_edit_new_end = edit.new.end; + edits_since_save.next(); + } else if edit.old.start <= end && edit.old.end >= start { + continue 'outer; + } else { + break; } } - diagnostics_by_group_id - .entry(group_id) - .or_insert(Vec::new()) - 
.push(( - range, - Diagnostic { - severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: diagnostic.message.clone(), - group_id, - is_primary: false, - }, - )); + start = last_edit_new_end + (start - last_edit_old_end); + end = last_edit_new_end + (end - last_edit_old_end); } - content.anchor_range_multimap( - Bias::Left, - Bias::Right, - diagnostics_by_group_id - .into_values() - .flat_map(|mut diagnostics| { - let primary_diagnostic = - diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap(); - primary_diagnostic.1.is_primary = true; - diagnostics - }), - ) - }; + let mut range = content.clip_point_utf16(start, Bias::Left) + ..content.clip_point_utf16(end, Bias::Right); + if range.start == range.end { + range.end.column += 1; + range.end = content.clip_point_utf16(range.end, Bias::Right); + if range.start == range.end && range.end.column > 0 { + range.start.column -= 1; + range.start = content.clip_point_utf16(range.start, Bias::Left); + } + } + + diagnostics_by_group_id + .entry(group_id) + .or_insert(Vec::new()) + .push(DiagnosticEntry { + range, + diagnostic: Diagnostic { + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: diagnostic.message.clone(), + group_id, + is_primary: false, + }, + }); + } + + drop(edits_since_save); + let mut diagnostics = mem::take(&mut self.diagnostics); + diagnostics.reset( + diagnostics_by_group_id + .into_values() + .flat_map(|mut diagnostics| { + let primary = diagnostics + .iter_mut() + .min_by_key(|entry| entry.diagnostic.severity) + .unwrap(); + primary.diagnostic.is_primary = true; + diagnostics + }), + self, + ); + self.diagnostics = diagnostics; if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -811,32 +825,31 @@ impl Buffer { self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); - Ok(Operation::UpdateDiagnostics(self.diagnostics.clone())) + Ok(Operation::UpdateDiagnostics { + diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), + lamport_timestamp: self.lamport_timestamp(), + }) } pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl 'a + Iterator> where T: 'a + ToOffset, O: 'a + FromAnchor, { - self.diagnostics - .intersecting_ranges(search_range, self, true) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + self.diagnostics.range(search_range, self, true) } pub fn diagnostic_group<'a, O>( &'a self, group_id: usize, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl 'a + Iterator> where O: 'a + FromAnchor, { - self.diagnostics - .filter(self, move |diagnostic| diagnostic.group_id == group_id) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + self.diagnostics.group(group_id, self) } pub fn diagnostics_update_count(&self) -> usize { @@ -879,13 +892,13 @@ impl Buffer { for request in autoindent_requests { let old_to_new_rows = request .edited - .iter::(&request.before_edit) - .map(|point| point.row) + .iter() + .map(|anchor| anchor.summary::(&request.before_edit).row) .zip( request .edited - .iter::(&snapshot) - .map(|point| point.row), + .iter() + .map(|anchor| anchor.summary::(&snapshot).row), ) .collect::>(); @@ -947,7 +960,8 @@ impl Buffer { if let Some(inserted) = request.inserted.as_ref() { let inserted_row_ranges = contiguous_ranges( inserted - .ranges::(&snapshot) + .iter() + .map(|range| range.to_point(&snapshot)) .flat_map(|range| range.start.row..range.end.row + 1), max_rows_between_yields, ); @@ 
-1264,17 +1278,17 @@ impl Buffer { self.pending_autoindent.take(); let autoindent_request = if autoindent && self.language.is_some() { let before_edit = self.snapshot(); - let edited = self.anchor_set( - Bias::Left, - ranges.iter().filter_map(|range| { + let edited = ranges + .iter() + .filter_map(|range| { let start = range.start.to_point(self); if new_text.starts_with('\n') && start.column == self.line_len(start.row) { None } else { - Some(range.start) + Some(self.anchor_before(range.start)) } - }), - ); + }) + .collect(); Some((before_edit, edited)) } else { None @@ -1289,17 +1303,19 @@ impl Buffer { let mut inserted = None; if let Some(first_newline_ix) = first_newline_ix { let mut delta = 0isize; - inserted = Some(self.anchor_range_set( - Bias::Left, - Bias::Right, - ranges.iter().map(|range| { - let start = (delta + range.start as isize) as usize + first_newline_ix + 1; - let end = (delta + range.start as isize) as usize + new_text_len; - delta += - (range.end as isize - range.start as isize) + new_text_len as isize; - start..end - }), - )); + inserted = Some( + ranges + .iter() + .map(|range| { + let start = + (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += + (range.end as isize - range.start as isize) + new_text_len as isize; + self.anchor_before(start)..self.anchor_after(end) + }) + .collect(), + ); } let selection_set_ids = self @@ -1401,17 +1417,23 @@ impl Buffer { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); + let mut deferred_ops = Vec::new(); let buffer_ops = ops .into_iter() .filter_map(|op| match op { Operation::Buffer(op) => Some(op), - Operation::UpdateDiagnostics(diagnostics) => { - self.apply_diagnostic_update(diagnostics, cx); + _ => { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } None } }) .collect::>(); self.text.apply_ops(buffer_ops)?; + self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. @@ -1419,12 +1441,49 @@ impl Buffer { Ok(()) } + fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { + let mut deferred_ops = Vec::new(); + for op in self.deferred_ops.drain().iter().cloned() { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } + } + self.deferred_ops.insert(deferred_ops); + } + + fn can_apply_op(&self, operation: &Operation) -> bool { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. } => { + diagnostics.iter().all(|diagnostic| { + self.text.can_resolve(&diagnostic.range.start) + && self.text.can_resolve(&diagnostic.range.end) + }) + } + } + } + + fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext) { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. 
} => { + self.apply_diagnostic_update(diagnostics, cx); + } + } + } + fn apply_diagnostic_update( &mut self, - diagnostics: AnchorRangeMultimap, + diagnostics: Arc<[DiagnosticEntry]>, cx: &mut ModelContext, ) { - self.diagnostics = diagnostics; + self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); self.diagnostics_update_count += 1; cx.notify(); } @@ -1632,19 +1691,19 @@ impl BufferSnapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for (_, range, diagnostic) in - self.diagnostics - .intersecting_ranges(range.clone(), self, true) + for entry in self + .diagnostics + .range::<_, usize>(range.clone(), self, true) { diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.start, + offset: entry.range.start, is_start: true, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.end, + offset: entry.range.end, is_start: false, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); } diagnostic_endpoints @@ -1939,6 +1998,19 @@ impl ToPointUtf16 for lsp::Position { } } +impl operation_queue::Operation for Operation { + fn lamport_timestamp(&self) -> clock::Lamport { + match self { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be deferred at this layer") + } + Operation::UpdateDiagnostics { + lamport_timestamp, .. + } => *lamport_timestamp, + } + } +} + fn diagnostic_ranges<'a>( diagnostic: &'a lsp::Diagnostic, abs_path: Option<&'a Path>, @@ -1968,7 +2040,7 @@ fn diagnostic_ranges<'a>( } pub fn contiguous_ranges( - values: impl IntoIterator, + values: impl Iterator, max_len: usize, ) -> impl Iterator> { let mut values = values.into_iter(); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs new file mode 100644 index 0000000000..0a04ef17e8 --- /dev/null +++ b/crates/language/src/diagnostic_set.rs @@ -0,0 +1,165 @@ +use crate::Diagnostic; +use std::{ + cmp::{Ordering, Reverse}, + iter, + ops::Range, +}; +use sum_tree::{self, Bias, SumTree}; +use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; + +#[derive(Clone, Default)] +pub struct DiagnosticSet { + diagnostics: SumTree>, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct DiagnosticEntry { + pub range: Range, + pub diagnostic: Diagnostic, +} + +#[derive(Clone, Debug)] +pub struct Summary { + start: Anchor, + end: Anchor, + min_start: Anchor, + max_end: Anchor, + count: usize, +} + +impl DiagnosticSet { + pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self + where + I: IntoIterator>, + { + Self { + diagnostics: SumTree::from_iter(iter, buffer), + } + } + + pub fn reset(&mut self, iter: I, buffer: &text::Snapshot) + where + I: IntoIterator>, + { + let mut entries = iter.into_iter().collect::>(); + entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); + self.diagnostics = SumTree::from_iter( + entries.into_iter().map(|entry| DiagnosticEntry { + range: buffer.anchor_before(entry.range.start) + ..buffer.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }), + buffer, + ); + } + + pub fn iter(&self) -> impl Iterator> { + self.diagnostics.iter() + } + + pub fn range<'a, T, O>( + &'a self, + range: Range, + buffer: &'a text::Snapshot, + inclusive: bool, + ) -> impl 'a + Iterator> + where + T: 'a + ToOffset, + O: FromAnchor, + { + let end_bias = if inclusive { Bias::Right } else { Bias::Left }; + let range = 
buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); + let mut cursor = self.diagnostics.filter::<_, ()>( + { + move |summary: &Summary| { + let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap(); + let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap(); + if inclusive { + start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal + } else { + start_cmp == Ordering::Less && end_cmp == Ordering::Greater + } + } + }, + buffer, + ); + + iter::from_fn({ + move || { + if let Some(diagnostic) = cursor.item() { + cursor.next(buffer); + Some(diagnostic.resolve(buffer)) + } else { + None + } + } + }) + } + + pub fn group<'a, O: FromAnchor>( + &'a self, + group_id: usize, + buffer: &'a text::Snapshot, + ) -> impl 'a + Iterator> { + self.iter() + .filter(move |entry| entry.diagnostic.group_id == group_id) + .map(|entry| entry.resolve(buffer)) + } +} + +impl sum_tree::Item for DiagnosticEntry { + type Summary = Summary; + + fn summary(&self) -> Self::Summary { + Summary { + start: self.range.start.clone(), + end: self.range.end.clone(), + min_start: self.range.start.clone(), + max_end: self.range.end.clone(), + count: 1, + } + } +} + +impl DiagnosticEntry { + pub fn resolve(&self, buffer: &text::Snapshot) -> DiagnosticEntry { + DiagnosticEntry { + range: O::from_anchor(&self.range.start, buffer) + ..O::from_anchor(&self.range.end, buffer), + diagnostic: self.diagnostic.clone(), + } + } +} + +impl Default for Summary { + fn default() -> Self { + Self { + start: Anchor::min(), + end: Anchor::max(), + min_start: Anchor::max(), + max_end: Anchor::min(), + count: 0, + } + } +} + +impl sum_tree::Summary for Summary { + type Context = text::Snapshot; + + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + if other + .min_start + .cmp(&self.min_start, buffer) + .unwrap() + .is_lt() + { + self.min_start = other.min_start.clone(); + } + if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() { + self.max_end = other.max_end.clone(); + } + self.start = other.start.clone(); + self.end = other.end.clone(); + self.count += other.count; + } +} diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 3fa611ee04..001317ea12 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,4 +1,5 @@ mod buffer; +mod diagnostic_set; mod highlight_map; pub mod multi_buffer; pub mod proto; @@ -8,6 +9,7 @@ mod tests; use anyhow::{anyhow, Result}; pub use buffer::Operation; pub use buffer::*; +pub use diagnostic_set::DiagnosticEntry; use gpui::{executor::Background, AppContext}; use highlight_map::HighlightMap; use lazy_static::lazy_static; diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 3e3455c671..6f36c7dc0b 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::{Diagnostic, Operation}; +use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; @@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { replica_id: set_id.replica_id as u32, local_timestamp: set_id.value, lamport_timestamp: lamport_timestamp.value, - version: selections.version().into(), selections: selections - .full_offset_ranges() - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + .iter() + .map(|selection| 
proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), }), @@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, }, ), - Operation::UpdateDiagnostics(diagnostic_set) => { - proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set)) - } + Operation::UpdateDiagnostics { + diagnostics, + lamport_timestamp, + } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { + replica_id: lamport_timestamp.replica_id as u32, + lamport_timestamp: lamport_timestamp.value, + diagnostics: serialize_diagnostics(diagnostics.iter()), + }), }), } } @@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation:: } pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { - let version = set.selections.version(); - let entries = set.selections.full_offset_ranges(); proto::SelectionSet { replica_id: set.id.replica_id as u32, lamport_timestamp: set.id.value as u32, is_active: set.active, - version: version.into(), - selections: entries - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + selections: set + .selections + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), } } -pub fn serialize_diagnostics(map: &AnchorRangeMultimap) -> proto::DiagnosticSet { - proto::DiagnosticSet { - version: map.version().into(), - diagnostics: map - .full_offset_ranges() - .map(|(range, diagnostic)| proto::Diagnostic { - start: range.start.0 as u64, - end: range.end.0 as u64, - message: diagnostic.message.clone(), - severity: match diagnostic.severity { - DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, - DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, - DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, - DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, - _ => proto::diagnostic::Severity::None, - } as i32, - group_id: diagnostic.group_id as u64, - is_primary: diagnostic.is_primary, - }) - .collect(), +pub fn serialize_diagnostics<'a>( + diagnostics: impl IntoIterator>, +) -> Vec { + diagnostics + .into_iter() + .map(|entry| proto::Diagnostic { + start: Some(serialize_anchor(&entry.range.start)), + end: Some(serialize_anchor(&entry.range.end)), + message: entry.diagnostic.message.clone(), + severity: match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, + DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, + DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, + DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, + _ => proto::diagnostic::Severity::None, + } as i32, + group_id: entry.diagnostic.group_id as u64, + is_primary: entry.diagnostic.is_primary, + }) + .collect() +} + +fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { + proto::Anchor { + replica_id: anchor.timestamp.replica_id as u32, + local_timestamp: anchor.timestamp.value, + offset: anchor.offset as u64, + bias: match anchor.bias { + Bias::Left => proto::Bias::Left as i32, + Bias::Right => proto::Bias::Right as i32, 
+ }, } } @@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }), proto::operation::Variant::UpdateSelections(message) => { - let version = message.version.into(); - let entries = message + let selections = message .selections - .iter() - .map(|selection| { - let range = FullOffset(selection.start as usize) - ..FullOffset(selection.end as usize); - let state = SelectionState { + .into_iter() + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(); - let selections = AnchorRangeMap::from_full_offset_ranges( - version, - Bias::Left, - Bias::Left, - entries, - ); + .collect::>(); Operation::Buffer(text::Operation::UpdateSelections { set_id: clock::Lamport { @@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }) } - proto::operation::Variant::UpdateDiagnostics(message) => { - Operation::UpdateDiagnostics(deserialize_diagnostics(message)) - } + proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics { + diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)), + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }, }, ) } @@ -277,36 +287,32 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { value: set.lamport_timestamp, }, active: set.is_active, - selections: Arc::new(AnchorRangeMap::from_full_offset_ranges( - set.version.into(), - Bias::Left, - Bias::Left, + selections: Arc::from( set.selections .into_iter() - .map(|selection| { - let range = - FullOffset(selection.start as usize)..FullOffset(selection.end as usize); - let state = SelectionState { + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(), - )), + .collect::>(), + ), } } -pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap { - AnchorRangeMultimap::from_full_offset_ranges( - message.version.into(), - Bias::Left, - Bias::Right, - message.diagnostics.into_iter().filter_map(|diagnostic| { - Some(( - FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize), - Diagnostic { +pub fn deserialize_diagnostics( + diagnostics: Vec, +) -> Vec> { + diagnostics + .into_iter() + .filter_map(|diagnostic| { + Some(DiagnosticEntry { + range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?, + diagnostic: Diagnostic { severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? { proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, @@ -318,7 +324,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult group_id: diagnostic.group_id as usize, is_primary: diagnostic.is_primary, }, - )) - }), - ) + }) + }) + .collect() +} + +fn deserialize_anchor(anchor: proto::Anchor) -> Option { + Some(Anchor { + timestamp: clock::Local { + replica_id: anchor.replica_id as ReplicaId, + value: anchor.local_timestamp, + }, + offset: anchor.offset as usize, + bias: match proto::Bias::from_i32(anchor.bias)? 
{ + proto::Bias::Left => Bias::Left, + proto::Bias::Right => Bias::Right, + }, + }) } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index c7c5670103..c10c2064af 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -539,27 +539,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // The diagnostics have moved down since they were created. assert_eq!( buffer - .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) + .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ), - ( - Point::new(4, 9)..Point::new(4, 12), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(4, 9)..Point::new(4, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, } - ) + } ] ); assert_eq!( @@ -606,27 +606,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) + .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ - ( - Point::new(2, 9)..Point::new(2, 12), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), group_id: 1, is_primary: true, } - ), - ( - Point::new(2, 9)..Point::new(2, 10), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 10), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, }, - ) + } ] ); assert_eq!( @@ -685,27 +685,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(2, 21)..Point::new(2, 22), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 21)..Point::new(2, 22), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ) + } ] ); }); @@ -873,107 +873,107 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + 
range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); assert_eq!( - buffer.diagnostic_group(0).collect::>(), + buffer.diagnostic_group::(0).collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), + }, ] ); assert_eq!( - buffer.diagnostic_group(1).collect::>(), + buffer.diagnostic_group::(1).collect::>(), &[ - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); @@ -1002,13 +1002,17 @@ fn chunks_with_diagnostics( #[test] fn test_contiguous_ranges() { assert_eq!( - contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::>(), + contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::>(), &[1..4, 5..7, 9..13] ); // Respects the `max_len` parameter assert_eq!( - contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::>(), + contiguous_ranges( + [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(), + 3 + ) + .collect::>(), &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32], ); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 29bc230b97..943ab6dbd0 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3005,7 +3005,7 @@ mod tests { use anyhow::Result; use client::test::{FakeHttpClient, FakeServer}; use fs::RealFs; - use language::{tree_sitter_rust, LanguageServerConfig}; + use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig}; use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; @@ -3721,19 +3721,19 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer - .diagnostics_in_range(0..buffer.len()) + .diagnostics_in_range::<_, 
Point>(0..buffer.len()) .collect::>(); assert_eq!( diagnostics, - &[( - Point::new(0, 9)..Point::new(0, 10), - &Diagnostic { + &[DiagnosticEntry { + range: Point::new(0, 9)..Point::new(0, 10), + diagnostic: Diagnostic { severity: lsp::DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true } - )] + }] ) }); } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 775f94d595..7e7a180cd2 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -229,32 +229,44 @@ message Buffer { string content = 2; repeated Operation.Edit history = 3; repeated SelectionSet selections = 4; - DiagnosticSet diagnostics = 5; + repeated Diagnostic diagnostics = 5; } message SelectionSet { uint32 replica_id = 1; uint32 lamport_timestamp = 2; bool is_active = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message Selection { uint64 id = 1; - uint64 start = 2; - uint64 end = 3; + Anchor start = 2; + Anchor end = 3; bool reversed = 4; } -message DiagnosticSet { - repeated VectorClockEntry version = 1; - repeated Diagnostic diagnostics = 2; +message Anchor { + uint32 replica_id = 1; + uint32 local_timestamp = 2; + uint64 offset = 3; + Bias bias = 4; +} + +enum Bias { + Left = 0; + Right = 1; +} + +message UpdateDiagnostics { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Diagnostic diagnostics = 3; } message Diagnostic { - uint64 start = 1; - uint64 end = 2; + Anchor start = 1; + Anchor end = 2; Severity severity = 3; string message = 4; uint64 group_id = 5; @@ -268,8 +280,6 @@ message Diagnostic { } } - - message Operation { oneof variant { Edit edit = 1; @@ -277,7 +287,7 @@ message Operation { UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; SetActiveSelections set_active_selections = 5; - DiagnosticSet update_diagnostics = 6; + UpdateDiagnostics update_diagnostics = 6; } message Edit { @@ -308,8 +318,7 @@ message Operation { uint32 replica_id = 1; uint32 local_timestamp = 2; uint32 lamport_timestamp = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message RemoveSelections { diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 454881fece..d2f2cb2c41 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -400,7 +400,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -422,7 +422,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -453,7 +453,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } @@ -465,7 +465,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } diff --git a/crates/server/src/github.rs b/crates/server/src/github.rs index c7122b6e10..e5bcb45f30 100644 --- a/crates/server/src/github.rs +++ b/crates/server/src/github.rs @@ -208,9 +208,25 @@ impl RepoClient { "Authorization", self.installation_token_header(false).await?, ); - let client = surf::client().with(surf::middleware::Redirect::new(5)); + + let client = surf::client(); let mut response = client.send(request).await?; + // Avoid using `surf::middleware::Redirect` 
because that type forwards + // the original request headers to the redirect URI. In this case, the + // redirect will be to S3, which forbids us from supplying an + // `Authorization` header. + if response.status().is_redirection() { + if let Some(url) = response.header("location") { + let request = surf::get(url.as_str()).header("Accept", "application/octet-stream"); + response = client.send(request).await?; + } + } + + if !response.status().is_success() { + Err(anyhow!("failed to fetch release asset {} {}", tag, name))?; + } + Ok(response.take_body()) } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 970a739981..f3fd04ff74 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1705,27 +1705,27 @@ mod tests { buffer_b.read_with(&cx_b, |buffer, _| { assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(0, 4)..Point::new(0, 7), - &Diagnostic { + DiagnosticEntry { + range: Point::new(0, 4)..Point::new(0, 7), + diagnostic: Diagnostic { group_id: 0, message: "message 1".to_string(), severity: lsp::DiagnosticSeverity::ERROR, is_primary: true } - ), - ( - Point::new(0, 10)..Point::new(0, 13), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(0, 10)..Point::new(0, 13), + diagnostic: Diagnostic { group_id: 1, severity: lsp::DiagnosticSeverity::WARNING, message: "message 2".to_string(), is_primary: true } - ) + } ] ); }); diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 7799bb2ff0..cbb6f7f6f5 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> { at_end: bool, } +pub struct Iter<'a, T: Item> { + tree: &'a SumTree, + stack: ArrayVec, 16>, +} + impl<'a, T, D> Cursor<'a, T, D> where T: Item, @@ -487,6 +492,71 @@ where } } +impl<'a, T: Item> Iter<'a, T> { + pub(crate) fn new(tree: &'a SumTree) -> Self { + Self { + tree, + stack: Default::default(), + } + } +} + +impl<'a, T: Item> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option { + let mut descend = false; + + if self.stack.is_empty() { + self.stack.push(StackEntry { + tree: self.tree, + index: 0, + position: (), + }); + descend = true; + } + + while self.stack.len() > 0 { + let new_subtree = { + let entry = self.stack.last_mut().unwrap(); + match entry.tree.0.as_ref() { + Node::Internal { child_trees, .. } => { + if !descend { + entry.index += 1; + } + child_trees.get(entry.index) + } + Node::Leaf { items, .. 
} => { + if !descend { + entry.index += 1; + } + + if let Some(next_item) = items.get(entry.index) { + return Some(next_item); + } else { + None + } + } + } + }; + + if let Some(subtree) = new_subtree { + descend = true; + self.stack.push(StackEntry { + tree: subtree, + index: 0, + position: (), + }); + } else { + descend = false; + self.stack.pop(); + } + } + + None + } +} + impl<'a, T, S, D> Iterator for Cursor<'a, T, D> where T: Item, diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8b4a45519f..63fb379d53 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,8 +1,7 @@ mod cursor; use arrayvec::ArrayVec; -pub use cursor::Cursor; -pub use cursor::FilterCursor; +pub use cursor::{Cursor, FilterCursor, Iter}; use std::marker::PhantomData; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; @@ -156,6 +155,10 @@ impl SumTree { items } + pub fn iter(&self) -> Iter { + Iter::new(self) + } + pub fn cursor<'a, S>(&'a self) -> Cursor where S: Dimension<'a, T::Summary>, @@ -722,6 +725,10 @@ mod tests { }; assert_eq!(tree.items(&()), reference_items); + assert_eq!( + tree.iter().collect::>(), + tree.cursor::<()>().collect::>() + ); let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even, &()); diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 6f38593a78..2be0058999 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,94 +1,36 @@ -use super::{FromAnchor, FullOffset, Point, ToOffset}; +use super::{Point, ToOffset}; use crate::{rope::TextDimension, BufferSnapshot}; use anyhow::Result; -use std::{ - cmp::Ordering, - fmt::{Debug, Formatter}, - ops::Range, -}; -use sum_tree::{Bias, SumTree}; +use std::{cmp::Ordering, fmt::Debug, ops::Range}; +use sum_tree::Bias; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { - pub full_offset: FullOffset, + pub timestamp: clock::Local, + pub offset: usize, pub bias: Bias, - pub version: clock::Global, -} - -#[derive(Clone)] -pub struct AnchorMap { - pub(crate) version: clock::Global, - pub(crate) bias: Bias, - pub(crate) entries: Vec<(FullOffset, T)>, -} - -#[derive(Clone)] -pub struct AnchorSet(pub(crate) AnchorMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMap { - pub(crate) version: clock::Global, - pub(crate) entries: Vec<(Range, T)>, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMultimap { - pub(crate) entries: SumTree>, - pub(crate) version: clock::Global, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub(crate) struct AnchorRangeMultimapEntry { - pub(crate) range: FullOffsetRange, - pub(crate) value: T, -} - -#[derive(Clone, Debug)] -pub(crate) struct FullOffsetRange { - pub(crate) start: FullOffset, - pub(crate) end: FullOffset, -} - -#[derive(Clone, Debug)] -pub(crate) struct AnchorRangeMultimapSummary { - start: FullOffset, - end: FullOffset, - min_start: FullOffset, - max_end: FullOffset, - count: usize, } impl Anchor { pub fn min() -> Self { Self { - full_offset: FullOffset(0), + timestamp: clock::Local::MIN, + offset: usize::MIN, bias: Bias::Left, - version: Default::default(), } } pub fn max() -> Self { Self { - full_offset: FullOffset::MAX, + timestamp: clock::Local::MAX, + offset: usize::MAX, bias: Bias::Right, - version: Default::default(), } } pub fn cmp<'a>(&self, other: &Anchor, buffer: &BufferSnapshot) -> 
Result { - if self == other { - return Ok(Ordering::Equal); - } - - let offset_comparison = if self.version == other.version { - self.full_offset.cmp(&other.full_offset) + let offset_comparison = if self.timestamp == other.timestamp { + self.offset.cmp(&other.offset) } else { buffer .full_offset_for_anchor(self) @@ -122,455 +64,10 @@ impl Anchor { } } -impl AnchorMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn iter<'a, D>( - &'a self, - snapshot: &'a BufferSnapshot, - ) -> impl Iterator + 'a - where - D: TextDimension, - { - snapshot - .summaries_for_anchors( - self.version.clone(), - self.bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } -} - -impl AnchorSet { - pub fn version(&self) -> &clock::Global { - &self.0.version - } - - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn iter<'a, D>(&'a self, content: &'a BufferSnapshot) -> impl Iterator + 'a - where - D: TextDimension, - { - self.0.iter(content).map(|(position, _)| position) - } -} - -impl AnchorRangeMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: Vec<(Range, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn ranges<'a, D>( - &'a self, - content: &'a BufferSnapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: TextDimension, - { - content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } - - pub fn intersecting_ranges<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - content: &'a BufferSnapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: TextDimension, - I: ToOffset, - { - let range = content.anchor_at(range.start.0, range.start.1) - ..content.anchor_at(range.end.0, range.end.1); - - let mut probe_anchor = Anchor { - full_offset: Default::default(), - bias: self.start_bias, - version: self.version.clone(), - }; - let start_ix = self.entries.binary_search_by(|probe| { - probe_anchor.full_offset = probe.0.end; - probe_anchor.cmp(&range.start, &content).unwrap() - }); - - match start_ix { - Ok(start_ix) | Err(start_ix) => content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries[start_ix..].iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)), - } - } - - pub fn full_offset_ranges(&self) -> impl Iterator, T)> { - self.entries.iter() - } - - pub fn min_by_key<'a, D, F, K>( - &self, - content: &'a BufferSnapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: TextDimension, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .min_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - pub fn max_by_key<'a, D, F, K>( - &self, - content: &'a BufferSnapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: TextDimension, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .max_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - fn resolve_range<'a, D>( - &self, - range: &Range, - content: &'a BufferSnapshot, - ) -> Range - where - D: TextDimension, - { - let mut anchor = Anchor { - 
full_offset: range.start, - bias: self.start_bias, - version: self.version.clone(), - }; - let start = content.summary_for_anchor(&anchor); - - anchor.full_offset = range.end; - anchor.bias = self.end_bias; - let end = content.summary_for_anchor(&anchor); - - start..end - } -} - -impl PartialEq for AnchorRangeMap { - fn eq(&self, other: &Self) -> bool { - self.version == other.version && self.entries == other.entries - } -} - -impl Eq for AnchorRangeMap {} - -impl Debug for AnchorRangeMap { - fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { - let mut f = f.debug_map(); - for (range, value) in &self.entries { - f.key(range); - f.value(value); - } - f.finish() - } -} - -impl Debug for AnchorRangeSet { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let mut f = f.debug_set(); - for (range, _) in &self.0.entries { - f.entry(range); - } - f.finish() - } -} - -impl AnchorRangeSet { - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn version(&self) -> &clock::Global { - self.0.version() - } - - pub fn ranges<'a, D>( - &'a self, - content: &'a BufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension, - { - self.0.ranges(content).map(|(range, _)| range) - } -} - -impl Default for AnchorRangeMultimap { - fn default() -> Self { - Self { - entries: Default::default(), - version: Default::default(), - start_bias: Bias::Left, - end_bias: Bias::Left, - } - } -} - -impl AnchorRangeMultimap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn intersecting_ranges<'a, I, O>( - &'a self, - range: Range, - content: &'a BufferSnapshot, - inclusive: bool, - ) -> impl Iterator, &T)> + 'a - where - I: ToOffset, - O: FromAnchor, - { - let end_bias = if inclusive { Bias::Right } else { Bias::Left }; - let range = range.start.to_full_offset(&content, Bias::Left) - ..range.end.to_full_offset(&content, end_bias); - let mut cursor = self.entries.filter::<_, usize>( - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Right, - version: self.version.clone(), - }; - move |summary: &AnchorRangeMultimapSummary| { - endpoint.full_offset = summary.max_end; - endpoint.bias = self.end_bias; - let max_end = endpoint.to_full_offset(&content, self.end_bias); - let start_cmp = range.start.cmp(&max_end); - - endpoint.full_offset = summary.min_start; - endpoint.bias = self.start_bias; - let min_start = endpoint.to_full_offset(&content, self.start_bias); - let end_cmp = range.end.cmp(&min_start); - - if inclusive { - start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal - } else { - start_cmp == Ordering::Less && end_cmp == Ordering::Greater - } - } - }, - &(), - ); - - std::iter::from_fn({ - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - move || { - if let Some(item) = cursor.item() { - let ix = *cursor.start(); - endpoint.full_offset = item.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = item.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - let value = &item.value; - cursor.next(&()); - Some((ix, start..end, value)) - } else { - None - } - } - }) - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: impl Iterator, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries: SumTree::from_iter( - entries.map(|(range, value)| AnchorRangeMultimapEntry { - range: 
FullOffsetRange { - start: range.start, - end: range.end, - }, - value, - }), - &(), - ), - } - } - - pub fn full_offset_ranges(&self) -> impl Iterator, &T)> { - self.entries - .cursor::<()>() - .map(|entry| (entry.range.start..entry.range.end, &entry.value)) - } - - pub fn filter<'a, O, F>( - &'a self, - content: &'a BufferSnapshot, - mut f: F, - ) -> impl 'a + Iterator, &T)> - where - O: FromAnchor, - F: 'a + FnMut(&'a T) -> bool, - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - self.entries - .cursor::<()>() - .enumerate() - .filter_map(move |(ix, entry)| { - if f(&entry.value) { - endpoint.full_offset = entry.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = entry.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - Some((ix, start..end, &entry.value)) - } else { - None - } - }) - } -} - -impl sum_tree::Item for AnchorRangeMultimapEntry { - type Summary = AnchorRangeMultimapSummary; - - fn summary(&self) -> Self::Summary { - AnchorRangeMultimapSummary { - start: self.range.start, - end: self.range.end, - min_start: self.range.start, - max_end: self.range.end, - count: 1, - } - } -} - -impl Default for AnchorRangeMultimapSummary { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - min_start: FullOffset::MAX, - max_end: FullOffset(0), - count: 0, - } - } -} - -impl sum_tree::Summary for AnchorRangeMultimapSummary { - type Context = (); - - fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.min_start = self.min_start.min(other.min_start); - self.max_end = self.max_end.max(other.max_end); - - #[cfg(debug_assertions)] - { - let start_comparison = self.start.cmp(&other.start); - assert!(start_comparison <= Ordering::Equal); - if start_comparison == Ordering::Equal { - assert!(self.end.cmp(&other.end) >= Ordering::Equal); - } - } - - self.start = other.start; - self.end = other.end; - self.count += other.count; - } -} - -impl Default for FullOffsetRange { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - } - } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - *self += summary.count; - } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - self.start = summary.start; - self.end = summary.end; - } -} - -impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange { - fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering { - Ord::cmp(&self.start, &cursor_location.start) - .then_with(|| Ord::cmp(&cursor_location.end, &self.end)) - } -} - pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Result; fn to_offset(&self, content: &BufferSnapshot) -> Range; + fn to_point(&self, content: &BufferSnapshot) -> Range; } impl AnchorRangeExt for Range { @@ -584,4 +81,8 @@ impl AnchorRangeExt for Range { fn to_offset(&self, content: &BufferSnapshot) -> Range { self.start.to_offset(&content)..self.end.to_offset(&content) } + + fn to_point(&self, content: &BufferSnapshot) -> Range { + self.start.summary::(&content)..self.end.summary::(&content) + } } diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs new file mode 100644 
index 0000000000..249e79b6fd --- /dev/null +++ b/crates/text/src/locator.rs @@ -0,0 +1,83 @@ +use smallvec::{smallvec, SmallVec}; +use std::iter; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Locator(SmallVec<[u64; 4]>); + +impl Locator { + pub fn min() -> Self { + Self(smallvec![u64::MIN]) + } + + pub fn max() -> Self { + Self(smallvec![u64::MAX]) + } + + pub fn assign(&mut self, other: &Self) { + self.0.resize(other.0.len(), 0); + self.0.copy_from_slice(&other.0); + } + + pub fn between(lhs: &Self, rhs: &Self) -> Self { + let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX)); + let mut location = SmallVec::new(); + for (lhs, rhs) in lhs.zip(rhs) { + let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48); + location.push(mid); + if mid > lhs { + break; + } + } + Self(location) + } + + pub fn len(&self) -> usize { + self.0.len() + } +} + +impl Default for Locator { + fn default() -> Self { + Self::min() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use std::mem; + + #[gpui::test(iterations = 100)] + fn test_locators(mut rng: StdRng) { + let mut lhs = Default::default(); + let mut rhs = Default::default(); + while lhs == rhs { + lhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + rhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + } + + if lhs > rhs { + mem::swap(&mut lhs, &mut rhs); + } + + let middle = Locator::between(&lhs, &rhs); + assert!(middle > lhs); + assert!(middle < rhs); + for ix in 0..middle.0.len() - 1 { + assert!( + middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) + || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) + ); + } + } +} diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 3c3a644024..ef99faf3e2 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -1,9 +1,15 @@ -use super::Operation; use std::{fmt::Debug, ops::Add}; -use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary}; +use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary}; + +pub trait Operation: Clone + Debug { + fn lamport_timestamp(&self) -> clock::Lamport; +} #[derive(Clone, Debug)] -pub struct OperationQueue(SumTree); +struct OperationItem(T); + +#[derive(Clone, Debug)] +pub struct OperationQueue(SumTree>); #[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] pub struct OperationKey(clock::Lamport); @@ -20,7 +26,7 @@ impl OperationKey { } } -impl OperationQueue { +impl OperationQueue { pub fn new() -> Self { OperationQueue(SumTree::new()) } @@ -29,11 +35,15 @@ impl OperationQueue { self.0.summary().len } - pub fn insert(&mut self, mut ops: Vec) { + pub fn insert(&mut self, mut ops: Vec) { ops.sort_by_key(|op| op.lamport_timestamp()); ops.dedup_by_key(|op| op.lamport_timestamp()); - self.0 - .edit(ops.into_iter().map(Edit::Insert).collect(), &()); + self.0.edit( + ops.into_iter() + .map(|op| Edit::Insert(OperationItem(op))) + .collect(), + &(), + ); } pub fn drain(&mut self) -> Self { @@ -42,8 +52,8 @@ impl OperationQueue { clone } - pub fn cursor(&self) -> Cursor { - self.0.cursor() + pub fn iter(&self) -> impl Iterator { + self.0.cursor::<()>().map(|i| &i.0) } } @@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey { } } -impl Item for Operation { +impl Item for OperationItem { type Summary = OperationSummary; fn summary(&self) -> 
Self::Summary { OperationSummary { - key: OperationKey::new(self.lamport_timestamp()), + key: OperationKey::new(self.0.lamport_timestamp()), len: 1, } } } -impl KeyedItem for Operation { +impl KeyedItem for OperationItem { type Key = OperationKey; fn key(&self) -> Self::Key { - OperationKey::new(self.lamport_timestamp()) + OperationKey::new(self.0.lamport_timestamp()) } } @@ -107,21 +117,27 @@ mod tests { assert_eq!(queue.len(), 0); queue.insert(vec![ - Operation::Test(clock.tick()), - Operation::Test(clock.tick()), + TestOperation(clock.tick()), + TestOperation(clock.tick()), ]); assert_eq!(queue.len(), 2); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 3); drop(queue.drain()); assert_eq!(queue.len(), 0); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 1); } #[derive(Clone, Debug, Eq, PartialEq)] struct TestOperation(clock::Lamport); + + impl Operation for TestOperation { + fn lamport_timestamp(&self) -> clock::Lamport { + self.0 + } + } } diff --git a/crates/text/src/random_char_iter.rs b/crates/text/src/random_char_iter.rs index 94913150be..1f415b3f03 100644 --- a/crates/text/src/random_char_iter.rs +++ b/crates/text/src/random_char_iter.rs @@ -22,13 +22,13 @@ impl Iterator for RandomCharIter { match self.0.gen_range(0..100) { // whitespace - 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(), + 0..=5 => ['\n'].choose(&mut self.0).copied(), // two-byte greek letters - 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), - // three-byte characters - 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), - // four-byte characters - 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), + // 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), + // // three-byte characters + // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), + // // four-byte characters + // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), // ascii letters _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()), } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index 184118b78b..0afbf8cf74 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,4 +1,5 @@ -use crate::{rope::TextDimension, AnchorRangeMap, BufferSnapshot, ToOffset, ToPoint}; +use crate::Anchor; +use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; use sum_tree::Bias; @@ -25,7 +26,7 @@ pub struct Selection { pub struct SelectionSet { pub id: SelectionSetId, pub active: bool, - pub selections: Arc>, + pub selections: Arc<[Selection]>, } #[derive(Debug, Eq, PartialEq)] @@ -75,6 +76,21 @@ impl Selection { } } +impl Selection { + pub fn resolve<'a, D: 'a + TextDimension>( + &'a self, + snapshot: &'a BufferSnapshot, + ) -> Selection { + Selection { + id: self.id, + start: snapshot.summary_for_anchor(&self.start), + end: snapshot.summary_for_anchor(&self.end), + reversed: self.reversed, + goal: self.goal, + } + } +} + impl SelectionSet { pub fn len(&self) -> usize { self.selections.len() @@ -82,69 +98,70 @@ impl SelectionSet { pub fn selections<'a, D>( &'a self, - content: &'a BufferSnapshot, + snapshot: &'a BufferSnapshot, ) -> impl 'a + Iterator> where D: TextDimension, { - self.selections - .ranges(content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: 
range.end, - reversed: state.reversed, - goal: state.goal, - }) + let anchors = self + .selections + .iter() + .flat_map(|selection| [&selection.start, &selection.end].into_iter()); + let mut positions = snapshot.summaries_for_anchors::(anchors); + self.selections.iter().map(move |selection| Selection { + start: positions.next().unwrap(), + end: positions.next().unwrap(), + goal: selection.goal, + reversed: selection.reversed, + id: selection.id, + }) } pub fn intersecting_selections<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a BufferSnapshot, + snapshot: &'a BufferSnapshot, ) -> impl 'a + Iterator> where D: TextDimension, I: 'a + ToOffset, { - self.selections - .intersecting_ranges(range, content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + let start = snapshot.anchor_at(range.start.0, range.start.1); + let end = snapshot.anchor_at(range.end.0, range.end.1); + let start_ix = match self + .selections + .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .selections + .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + self.selections[start_ix..end_ix] + .iter() + .map(|s| s.resolve(snapshot)) } - pub fn oldest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> + pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option> where D: TextDimension, { self.selections - .min_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .min_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } - pub fn newest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> + pub fn newest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option> where D: TextDimension, { self.selections - .max_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .max_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index ff1a3d9ec8..8b2c8dbfd9 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -78,6 +78,8 @@ fn test_random_edits(mut rng: StdRng) { TextSummary::from(&reference_string[range]) ); + buffer.check_invariants(); + if rng.gen_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); } @@ -603,6 +605,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { } _ => {} } + buffer.check_invariants(); if mutation_count == 0 && network.is_idle() { break; @@ -629,6 +632,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { .all_selection_ranges::() .collect::>() ); + buffer.check_invariants(); } } @@ -644,6 +648,39 @@ struct Network { rng: R, } +impl Buffer { + fn check_invariants(&self) { + // Ensure every fragment is ordered by locator in the fragment tree and corresponds + // to an insertion fragment in the insertions tree. 
+ let mut prev_fragment_id = Locator::min(); + for fragment in self.snapshot.fragments.items(&None) { + assert!(fragment.id > prev_fragment_id); + prev_fragment_id = fragment.id.clone(); + + let insertion_fragment = self + .snapshot + .insertions + .get( + &InsertionFragmentKey { + timestamp: fragment.insertion_timestamp.local(), + split_offset: fragment.insertion_offset, + }, + &(), + ) + .unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + } + + let mut cursor = self.snapshot.fragments.cursor::>(); + for insertion_fragment in self.snapshot.insertions.cursor::<()>() { + cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); + let fragment = cursor.item().unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); + } + } +} + impl Network { fn new(rng: R) -> Self { Network { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 3c1a8aac16..ac06182939 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,5 +1,6 @@ mod anchor; -mod operation_queue; +mod locator; +pub mod operation_queue; mod patch; mod point; mod point_utf16; @@ -15,6 +16,7 @@ pub use anchor::*; use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{HashMap, HashSet}; +use locator::Locator; use operation_queue::OperationQueue; pub use patch::Patch; pub use point::*; @@ -25,7 +27,7 @@ use rope::TextDimension; pub use rope::{Chunks, Rope, TextSummary}; pub use selection::*; use std::{ - cmp::{self, Reverse}, + cmp::{self, Ordering}, iter::Iterator, ops::{self, Deref, Range, Sub}, str, @@ -41,7 +43,7 @@ pub struct Buffer { last_edit: clock::Local, history: History, selection_sets: HashMap, - deferred_ops: OperationQueue, + deferred_ops: OperationQueue, deferred_replicas: HashSet, replica_id: ReplicaId, remote_id: u64, @@ -56,6 +58,7 @@ pub struct BufferSnapshot { deleted_text: Rope, undo_map: UndoMap, fragments: SumTree, + insertions: SumTree, pub version: clock::Global, } @@ -65,8 +68,8 @@ pub struct Transaction { end: clock::Global, edits: Vec, ranges: Vec>, - selections_before: HashMap>>, - selections_after: HashMap>>, + selections_before: HashMap]>>, + selections_after: HashMap]>>, first_edit_at: Instant, last_edit_at: Instant, } @@ -153,7 +156,7 @@ impl History { fn start_transaction( &mut self, start: clock::Global, - selections_before: HashMap>>, + selections_before: HashMap]>>, now: Instant, ) { self.transaction_depth += 1; @@ -173,7 +176,7 @@ impl History { fn end_transaction( &mut self, - selections_after: HashMap>>, + selections_after: HashMap]>>, now: Instant, ) -> Option<&Transaction> { assert_ne!(self.transaction_depth, 0); @@ -344,7 +347,7 @@ impl Edit<(D1, D2)> { } } -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)] pub struct InsertionTimestamp { pub replica_id: ReplicaId, pub local: clock::Seq, @@ -369,7 +372,9 @@ impl InsertionTimestamp { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { - timestamp: InsertionTimestamp, + id: Locator, + insertion_timestamp: InsertionTimestamp, + insertion_offset: usize, len: usize, visible: bool, deletions: HashSet, @@ -379,6 +384,7 @@ struct Fragment { #[derive(Eq, PartialEq, Clone, Debug)] pub struct FragmentSummary { text: FragmentTextSummary, + max_id: Locator, max_version: clock::Global, min_insertion_version: clock::Global, max_insertion_version: clock::Global, @@ -403,6 +409,19 @@ impl<'a> sum_tree::Dimension<'a, 
FragmentSummary> for FragmentTextSummary { } } +#[derive(Eq, PartialEq, Clone, Debug)] +struct InsertionFragment { + timestamp: clock::Local, + split_offset: usize, + fragment_id: Locator, +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +struct InsertionFragmentKey { + timestamp: clock::Local, + split_offset: usize, +} + #[derive(Clone, Debug, Eq, PartialEq)] pub enum Operation { Edit(EditOperation), @@ -412,7 +431,7 @@ pub enum Operation { }, UpdateSelections { set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, lamport_timestamp: clock::Lamport, }, RemoveSelections { @@ -423,8 +442,6 @@ pub enum Operation { set_id: Option, lamport_timestamp: clock::Lamport, }, - #[cfg(test)] - Test(clock::Lamport), } #[derive(Clone, Debug, Eq, PartialEq)] @@ -446,30 +463,33 @@ pub struct UndoOperation { impl Buffer { pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer { let mut fragments = SumTree::new(); + let mut insertions = SumTree::new(); let mut local_clock = clock::Local::new(replica_id); let mut lamport_clock = clock::Lamport::new(replica_id); let mut version = clock::Global::new(); let visible_text = Rope::from(history.base_text.as_ref()); if visible_text.len() > 0 { - let timestamp = InsertionTimestamp { + let insertion_timestamp = InsertionTimestamp { replica_id: 0, local: 1, lamport: 1, }; - local_clock.observe(timestamp.local()); - lamport_clock.observe(timestamp.lamport()); - version.observe(timestamp.local()); - fragments.push( - Fragment { - timestamp, - len: visible_text.len(), - visible: true, - deletions: Default::default(), - max_undos: Default::default(), - }, - &None, - ); + local_clock.observe(insertion_timestamp.local()); + lamport_clock.observe(insertion_timestamp.lamport()); + version.observe(insertion_timestamp.local()); + let fragment_id = Locator::between(&Locator::min(), &Locator::max()); + let fragment = Fragment { + id: fragment_id, + insertion_timestamp, + insertion_offset: 0, + len: visible_text.len(), + visible: true, + deletions: Default::default(), + max_undos: Default::default(), + }; + insertions.push(InsertionFragment::new(&fragment), &()); + fragments.push(fragment, &None); } Buffer { @@ -477,6 +497,7 @@ impl Buffer { visible_text, deleted_text: Rope::new(), fragments, + insertions, version, undo_map: Default::default(), }, @@ -498,19 +519,17 @@ impl Buffer { } pub fn snapshot(&self) -> BufferSnapshot { - BufferSnapshot { - visible_text: self.visible_text.clone(), - deleted_text: self.deleted_text.clone(), - undo_map: self.undo_map.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - } + self.snapshot.clone() } pub fn replica_id(&self) -> ReplicaId { self.local_clock.replica_id } + pub fn lamport_timestamp(&self) -> clock::Lamport { + self.lamport_clock + } + pub fn remote_id(&self) -> u64 { self.remote_id } @@ -563,6 +582,8 @@ impl Buffer { ranges: Vec::with_capacity(ranges.len()), new_text: None, }; + let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -588,6 +609,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -606,6 
+629,9 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; + prefix.insertion_offset += fragment_start - old_fragments.start().visible; + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); fragment_start = range.start; @@ -618,17 +644,24 @@ impl Buffer { old: fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -640,6 +673,9 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; + intersection.insertion_offset += fragment_start - old_fragments.start().visible; + intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -651,6 +687,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -671,6 +708,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -684,6 +723,7 @@ impl Buffer { drop(old_fragments); self.snapshot.fragments = new_fragments; + self.snapshot.insertions.edit(new_insertions, &()); self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.subscriptions.publish_mut(&edits); @@ -771,8 +811,6 @@ impl Buffer { } self.lamport_clock.observe(lamport_timestamp); } - #[cfg(test)] - Operation::Test(_) => {} } Ok(()) } @@ -790,6 +828,8 @@ impl Buffer { let mut edits = Patch::default(); let cx = Some(version.clone()); + let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(); @@ -813,6 +853,9 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + 
new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -831,6 +874,8 @@ impl Buffer { if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); fragment.len = fragment_end.0 - fragment_start.0; + fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); old_fragments.next(&cx); @@ -841,7 +886,7 @@ impl Buffer { // timestamp. while let Some(fragment) = old_fragments.item() { if fragment_start == range.start - && fragment.timestamp.lamport() > timestamp.lamport() + && fragment.insertion_timestamp.lamport() > timestamp.lamport() { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); @@ -857,6 +902,9 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start.0 - fragment_start.0; + prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); fragment_start = range.start; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); @@ -873,17 +921,24 @@ impl Buffer { old: old_start..old_start, new: new_start..new_start + new_text.len(), }); + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -895,6 +950,10 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { intersection.len = intersection_end.0 - fragment_start.0; + intersection.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -908,6 +967,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -925,6 +985,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -940,6 +1002,7 @@ impl 
Buffer { self.snapshot.fragments = new_fragments; self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; + self.snapshot.insertions.edit(new_insertions, &()); self.local_clock.observe(timestamp.local()); self.lamport_clock.observe(timestamp.lamport()); self.subscriptions.publish_mut(&edits); @@ -984,7 +1047,9 @@ impl Buffer { let fragment_was_visible = fragment.visible; if fragment.was_visible(&undo.version, &self.undo_map) - || undo.counts.contains_key(&fragment.timestamp.local()) + || undo + .counts + .contains_key(&fragment.insertion_timestamp.local()) { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); @@ -1039,7 +1104,7 @@ impl Buffer { fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); - for op in self.deferred_ops.drain().cursor().cloned() { + for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { self.apply_op(op)?; } else { @@ -1058,19 +1123,23 @@ impl Buffer { match op { Operation::Edit(edit) => self.version.ge(&edit.version), Operation::Undo { undo, .. } => self.version.ge(&undo.version), - Operation::UpdateSelections { selections, .. } => { - self.version.ge(selections.version()) - } + Operation::UpdateSelections { selections, .. } => selections + .iter() + .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), Operation::RemoveSelections { .. } => true, Operation::SetActiveSelections { set_id, .. } => { set_id.map_or(true, |set_id| self.selection_sets.contains_key(&set_id)) } - #[cfg(test)] - Operation::Test(_) => true, } } } + pub fn can_resolve(&self, anchor: &Anchor) -> bool { + *anchor == Anchor::min() + || *anchor == Anchor::max() + || self.version.observed(anchor.timestamp) + } + pub fn peek_undo_stack(&self) -> Option<&Transaction> { self.history.undo_stack.last() } @@ -1203,25 +1272,22 @@ impl Buffer { self.selection_sets.iter() } - fn build_selection_anchor_range_map( + fn build_anchor_selection_set( &self, selections: &[Selection], - ) -> Arc> { - Arc::new(self.anchor_range_map( - Bias::Left, - Bias::Left, - selections.iter().map(|selection| { - let start = selection.start.to_offset(self); - let end = selection.end.to_offset(self); - let range = start..end; - let state = SelectionState { + ) -> Arc<[Selection]> { + Arc::from( + selections + .iter() + .map(|selection| Selection { id: selection.id, + start: self.anchor_before(&selection.start), + end: self.anchor_before(&selection.end), reversed: selection.reversed, goal: selection.goal, - }; - (range, state) - }), - )) + }) + .collect::>(), + ) } pub fn update_selection_set( @@ -1229,7 +1295,7 @@ impl Buffer { set_id: SelectionSetId, selections: &[Selection], ) -> Result { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set = self .selection_sets .get_mut(&set_id) @@ -1245,7 +1311,7 @@ impl Buffer { pub fn restore_selection_set( &mut self, set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, ) -> Result { let set = self .selection_sets @@ -1260,7 +1326,7 @@ impl Buffer { } pub fn add_selection_set(&mut self, selections: &[Selection]) -> Operation { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set_id = self.lamport_clock.tick(); self.selection_sets.insert( set_id, @@ -1602,25 +1668,6 @@ impl BufferSnapshot { result } - fn 
summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D - where - D: TextDimension, - { - let cx = Some(anchor.version.clone()); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.full_offset - cursor.start().0.full_offset() - } else { - 0 - }; - self.text_summary_for_range(0..cursor.start().1 + overshoot) - } - pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range) -> D where D: TextDimension, @@ -1630,68 +1677,129 @@ impl BufferSnapshot { .summary(range.end.to_offset(self)) } - fn summaries_for_anchors<'a, D, I>( - &'a self, - version: clock::Global, - bias: Bias, - ranges: I, - ) -> impl 'a + Iterator + pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator where - D: TextDimension, - I: 'a + IntoIterator, + D: 'a + TextDimension, + A: 'a + IntoIterator, { - let cx = Some(version.clone()); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |offset| { - cursor.seek_forward(&VersionedFullOffset::Offset(*offset), bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - *offset - cursor.start().0.full_offset() - } else { - 0 + let anchors = anchors.into_iter(); + let mut insertion_cursor = self.insertions.cursor::(); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut text_cursor = self.visible_text.cursor(0); + let mut position = D::default(); + + anchors.map(move |anchor| { + if *anchor == Anchor::min() { + return D::default(); + } else if *anchor == Anchor::max() { + return D::from_text_summary(&self.visible_text.summary()); + } + + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, }; - summary.add_assign(&rope_cursor.summary(cursor.start().1 + overshoot)); - summary.clone() + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; + } + + position.add_assign(&text_cursor.summary(fragment_offset)); + position.clone() }) } - fn summaries_for_anchor_ranges<'a, D, I>( - &'a self, - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - ranges: I, - ) -> impl 'a + Iterator> + fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D where D: TextDimension, - I: 'a + IntoIterator>, { - let cx = Some(version); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |range| { - 
cursor.seek_forward(&VersionedFullOffset::Offset(range.start), start_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.start - cursor.start().0.full_offset() - } else { - 0 + if *anchor == Anchor::min() { + D::default() + } else if *anchor == Anchor::max() { + D::from_text_summary(&self.visible_text.summary()) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let start_summary = summary.clone(); - - cursor.seek_forward(&VersionedFullOffset::Offset(range.end), end_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.end - cursor.start().0.full_offset() + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { + insertion_cursor.prev(&()); + } } else { - 0 - }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let end_summary = summary.clone(); + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - start_summary..end_summary - }) + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; + } + self.text_summary_for_range(0..fragment_offset) + } + } + + fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { + if *anchor == Anchor::min() { + Default::default() + } else if *anchor == Anchor::max() { + let text = self.fragments.summary().text; + FullOffset(text.visible + text.deleted) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, FullOffset)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); + fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) + } } pub fn anchor_before(&self, position: T) -> Anchor { @@ -1703,139 +1811,22 @@ impl BufferSnapshot { } pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - Anchor { - full_offset: position.to_full_offset(self, bias), - bias, - version: self.version.clone(), - } - } - - pub fn anchor_map(&self, bias: Bias, entries: E) -> AnchorMap - where - E: IntoIterator, - { - let 
version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(offset, value)| { - cursor.seek_forward(&offset, bias, &None); - let full_offset = FullOffset(cursor.start().deleted + offset); - (full_offset, value) - }) - .collect(); - - AnchorMap { - version, - bias, - entries, - } - } - - pub fn anchor_range_map( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMap - where - E: IntoIterator, T)>, - { - let version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(range, value)| { - let Range { - start: start_offset, - end: end_offset, - } = range; - cursor.seek_forward(&start_offset, start_bias, &None); - let full_start_offset = FullOffset(cursor.start().deleted + start_offset); - cursor.seek_forward(&end_offset, end_bias, &None); - let full_end_offset = FullOffset(cursor.start().deleted + end_offset); - (full_start_offset..full_end_offset, value) - }) - .collect(); - - AnchorRangeMap { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn anchor_set(&self, bias: Bias, entries: E) -> AnchorSet - where - E: IntoIterator, - { - AnchorSet(self.anchor_map(bias, entries.into_iter().map(|range| (range, ())))) - } - - pub fn anchor_range_set( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeSet - where - E: IntoIterator>, - { - AnchorRangeSet(self.anchor_range_map( - start_bias, - end_bias, - entries.into_iter().map(|range| (range, ())), - )) - } - - pub fn anchor_range_multimap( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMultimap - where - T: Clone, - E: IntoIterator, T)>, - O: ToOffset, - { - let mut entries = entries - .into_iter() - .map(|(range, value)| AnchorRangeMultimapEntry { - range: FullOffsetRange { - start: range.start.to_full_offset(self, start_bias), - end: range.end.to_full_offset(self, end_bias), - }, - value, - }) - .collect::>(); - entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end))); - AnchorRangeMultimap { - entries: SumTree::from_iter(entries, &()), - version: self.version.clone(), - start_bias, - end_bias, - } - } - - fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { - let cx = Some(anchor.version.clone()); - let mut cursor = self - .fragments - .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().is_some() { - anchor.full_offset - cursor.start().0.full_offset() + let offset = position.to_offset(self); + if bias == Bias::Left && offset == 0 { + Anchor::min() + } else if bias == Bias::Right && offset == self.len() { + Anchor::max() } else { - 0 - }; - let summary = cursor.start().1; - FullOffset(summary.visible + summary.deleted + overshoot) + let mut fragment_cursor = self.fragments.cursor::(); + fragment_cursor.seek(&offset, bias, &None); + let fragment = fragment_cursor.item().unwrap(); + let overshoot = offset - *fragment_cursor.start(); + Anchor { + timestamp: fragment.insertion_timestamp.local(), + offset: fragment.insertion_offset + overshoot, + bias, + } + } } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2052,13 +2043,13 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { - !undos.is_undone(self.timestamp.local()) + 
!undos.is_undone(self.insertion_timestamp.local()) && self.deletions.iter().all(|d| undos.is_undone(*d)) } fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool { - (version.observed(self.timestamp.local()) - && !undos.was_undone(self.timestamp.local(), version)) + (version.observed(self.insertion_timestamp.local()) + && !undos.was_undone(self.insertion_timestamp.local(), version)) && self .deletions .iter() @@ -2071,17 +2062,18 @@ impl sum_tree::Item for Fragment { fn summary(&self) -> Self::Summary { let mut max_version = clock::Global::new(); - max_version.observe(self.timestamp.local()); + max_version.observe(self.insertion_timestamp.local()); for deletion in &self.deletions { max_version.observe(*deletion); } max_version.join(&self.max_undos); let mut min_insertion_version = clock::Global::new(); - min_insertion_version.observe(self.timestamp.local()); + min_insertion_version.observe(self.insertion_timestamp.local()); let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { visible: self.len, deleted: 0, @@ -2092,6 +2084,7 @@ impl sum_tree::Item for Fragment { } } else { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { visible: 0, deleted: self.len, @@ -2108,6 +2101,7 @@ impl sum_tree::Summary for FragmentSummary { type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { + self.max_id.assign(&other.max_id); self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; self.max_version.join(&other.max_version); @@ -2121,6 +2115,7 @@ impl sum_tree::Summary for FragmentSummary { impl Default for FragmentSummary { fn default() -> Self { FragmentSummary { + max_id: Locator::min(), text: FragmentTextSummary::default(), max_version: clock::Global::new(), min_insertion_version: clock::Global::new(), @@ -2129,13 +2124,50 @@ impl Default for FragmentSummary { } } +impl sum_tree::Item for InsertionFragment { + type Summary = InsertionFragmentKey; + + fn summary(&self) -> Self::Summary { + InsertionFragmentKey { + timestamp: self.timestamp, + split_offset: self.split_offset, + } + } +} + +impl sum_tree::KeyedItem for InsertionFragment { + type Key = InsertionFragmentKey; + + fn key(&self) -> Self::Key { + sum_tree::Item::summary(self) + } +} + +impl InsertionFragment { + fn new(fragment: &Fragment) -> Self { + Self { + timestamp: fragment.insertion_timestamp.local(), + split_offset: fragment.insertion_offset, + fragment_id: fragment.id.clone(), + } + } + + fn insert_new(fragment: &Fragment) -> sum_tree::Edit { + sum_tree::Edit::Insert(Self::new(fragment)) + } +} + +impl sum_tree::Summary for InsertionFragmentKey { + type Context = (); + + fn add_summary(&mut self, summary: &Self, _: &()) { + *self = *summary; + } +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct FullOffset(pub usize); -impl FullOffset { - const MAX: Self = FullOffset(usize::MAX); -} - impl ops::AddAssign for FullOffset { fn add_assign(&mut self, rhs: usize) { self.0 += rhs; @@ -2171,6 +2203,12 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + *self = Some(&summary.max_id); + } +} + impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize { fn cmp( &self, @@ -2228,9 +2266,18 @@ impl<'a> sum_tree::SeekTarget<'a, 
FragmentSummary, Self> for VersionedFullOffset impl Operation { fn replica_id(&self) -> ReplicaId { - self.lamport_timestamp().replica_id + operation_queue::Operation::lamport_timestamp(self).replica_id } + pub fn is_edit(&self) -> bool { + match self { + Operation::Edit { .. } => true, + _ => false, + } + } +} + +impl operation_queue::Operation for Operation { fn lamport_timestamp(&self) -> clock::Lamport { match self { Operation::Edit(edit) => edit.timestamp.lamport(), @@ -2246,15 +2293,6 @@ impl Operation { Operation::SetActiveSelections { lamport_timestamp, .. } => *lamport_timestamp, - #[cfg(test)] - Operation::Test(lamport_timestamp) => *lamport_timestamp, - } - } - - pub fn is_edit(&self) -> bool { - match self { - Operation::Edit { .. } => true, - _ => false, } } } @@ -2293,33 +2331,11 @@ impl ToOffset for Anchor { fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { snapshot.summary_for_anchor(self) } - - fn to_full_offset<'a>(&self, snapshot: &BufferSnapshot, bias: Bias) -> FullOffset { - if snapshot.version == self.version { - self.full_offset - } else { - let mut cursor = snapshot - .fragments - .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); - cursor.seek( - &VersionedFullOffset::Offset(self.full_offset), - bias, - &Some(self.version.clone()), - ); - - let mut full_offset = cursor.start().1.full_offset().0; - if cursor.item().is_some() { - full_offset += self.full_offset - cursor.start().0.full_offset(); - } - - FullOffset(full_offset) - } - } } -impl<'a> ToOffset for &'a Anchor { - fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { - snapshot.summary_for_anchor(self) +impl<'a, T: ToOffset> ToOffset for &'a T { + fn to_offset(&self, content: &BufferSnapshot) -> usize { + (*self).to_offset(content) } }
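
The fragment-ordering change in this patch replaces versioned full offsets with the new Locator identifiers: every Fragment now carries an id, and FragmentSummary tracks max_id so the fragment tree can be sought by Locator. Below is a minimal sketch of how Locator::between is expected to behave; it assumes it lives inside crates/text, where the private locator module introduced above is visible, and the module and test names are illustrative only.

#[cfg(test)]
mod locator_usage_sketch {
    use crate::locator::Locator;

    #[test]
    fn between_produces_strictly_ordered_ids() {
        let min = Locator::min();
        let max = Locator::max();

        // The first fragment of a new buffer gets an id strictly between the
        // sentinels, mirroring Buffer::new in this patch.
        let first = Locator::between(&min, &max);
        assert!(min < first && first < max);

        // Splitting a fragment or inserting in front of it allocates another id
        // strictly between its neighbours, so fragments stay ordered without
        // ever renumbering existing ids.
        let earlier = Locator::between(&min, &first);
        assert!(min < earlier && earlier < first);
    }
}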
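
Anchors no longer store a versioned full offset; they record the insertion timestamp they belong to, an offset within that insertion, and a bias, and are resolved through the new insertions tree and then the fragment tree by Locator. The following is a minimal sketch of the intended round trip, assuming a populated text::Buffer (construction elided), that the text crate exports Buffer and ToOffset as in this patch, and that sum_tree is available for Bias; the function name is illustrative.

use sum_tree::Bias;
use text::{Buffer, ToOffset};

fn anchor_round_trip(buffer: &Buffer) {
    let snapshot = buffer.snapshot();
    // Clip to a char boundary so the offset is a valid anchor position.
    let offset = snapshot.clip_offset(snapshot.len() / 2, Bias::Left);

    let before = snapshot.anchor_at(offset, Bias::Left);
    let after = snapshot.anchor_at(offset, Bias::Right);

    // Resolution looks up the InsertionFragment for (timestamp, split_offset),
    // seeks the owning fragment by Locator, and adds the overshoot, so both
    // anchors map back to the original offset as long as no edits intervene.
    assert_eq!(before.to_offset(&snapshot), offset);
    assert_eq!(after.to_offset(&snapshot), offset);
}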
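
The operation queue is now generic over a small operation_queue::Operation trait rather than being hard-wired to the buffer's Operation enum, which is what allows the #[cfg(test)] Operation::Test variant to be removed. A minimal sketch of using the now-public module with a custom operation type, mirroring the TestOperation in the patch's own test (PingOperation and queue_example are illustrative names, not part of the patch):

use text::operation_queue::{Operation, OperationQueue};

#[derive(Clone, Debug)]
struct PingOperation(clock::Lamport);

impl Operation for PingOperation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        self.0
    }
}

fn queue_example() {
    let mut clock = clock::Lamport::new(1);
    let mut queue = OperationQueue::new();

    // insert() sorts and dedups by Lamport timestamp before storing the ops.
    queue.insert(vec![PingOperation(clock.tick()), PingOperation(clock.tick())]);
    assert_eq!(queue.len(), 2);

    // drain() hands back the queued ops and leaves the queue empty; iter()
    // replaces the old cursor()-based traversal.
    let drained = queue.drain();
    assert_eq!(drained.iter().count(), 2);
    assert_eq!(queue.len(), 0);
}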