From 6d9bf802e2e2749fba12b508124ff30cb4667f13 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 7 Dec 2021 12:34:55 -0800 Subject: [PATCH 01/12] Don't pass GH auth header when following redirects for release assets --- crates/server/src/github.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/crates/server/src/github.rs b/crates/server/src/github.rs index c7122b6e10..e5bcb45f30 100644 --- a/crates/server/src/github.rs +++ b/crates/server/src/github.rs @@ -208,9 +208,25 @@ impl RepoClient { "Authorization", self.installation_token_header(false).await?, ); - let client = surf::client().with(surf::middleware::Redirect::new(5)); + + let client = surf::client(); let mut response = client.send(request).await?; + // Avoid using `surf::middleware::Redirect` because that type forwards + // the original request headers to the redirect URI. In this case, the + // redirect will be to S3, which forbids us from supplying an + // `Authorization` header. + if response.status().is_redirection() { + if let Some(url) = response.header("location") { + let request = surf::get(url.as_str()).header("Accept", "application/octet-stream"); + response = client.send(request).await?; + } + } + + if !response.status().is_success() { + Err(anyhow!("failed to fetch release asset {} {}", tag, name))?; + } + Ok(response.take_body()) } From ec54010e3c69646f7ac32021c5d79b1856e5884d Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 21:04:22 -0700 Subject: [PATCH 02/12] Sketch in type-level changes to track insertion splits --- crates/text/src/locator.rs | 74 ++++++++++++++++++++++++++++++++++++++ crates/text/src/text.rs | 72 ++++++++++++++++++++++++++++++------- 2 files changed, 134 insertions(+), 12 deletions(-) create mode 100644 crates/text/src/locator.rs diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs new file mode 100644 index 0000000000..487c8c2608 --- /dev/null +++ b/crates/text/src/locator.rs @@ -0,0 +1,74 @@ +use smallvec::{smallvec, SmallVec}; +use std::iter; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Locator(SmallVec<[u32; 4]>); + +impl Locator { + pub fn min() -> Self { + Self(smallvec![u32::MIN]) + } + + pub fn max() -> Self { + Self(smallvec![u32::MAX]) + } + + pub fn between(lhs: &Self, rhs: &Self) -> Self { + let lhs = lhs.0.iter().copied().chain(iter::repeat(u32::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u32::MAX)); + let mut location = SmallVec::new(); + for (lhs, rhs) in lhs.zip(rhs) { + let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + location.push(mid); + if mid > lhs { + break; + } + } + Self(location) + } +} + +impl Default for Locator { + fn default() -> Self { + Self::min() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use std::mem; + + #[gpui::test(iterations = 100)] + fn test_locators(mut rng: StdRng) { + let mut lhs = Default::default(); + let mut rhs = Default::default(); + while lhs == rhs { + lhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + rhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + } + + if lhs > rhs { + mem::swap(&mut lhs, &mut rhs); + } + + let middle = Locator::between(&lhs, &rhs); + assert!(middle > lhs); + assert!(middle < rhs); + for ix in 0..middle.0.len() - 1 { + assert!( + middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) + || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) + ); + } + } +} diff --git a/crates/text/src/text.rs 
b/crates/text/src/text.rs index a434e97e2e..3d2f95c169 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,4 +1,5 @@ mod anchor; +mod locator; mod operation_queue; mod patch; mod point; @@ -14,6 +15,7 @@ pub use anchor::*; use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{HashMap, HashSet}; +use locator::Locator; use operation_queue::OperationQueue; use parking_lot::Mutex; pub use patch::Patch; @@ -55,6 +57,7 @@ pub struct Snapshot { deleted_text: Rope, undo_map: UndoMap, fragments: SumTree, + insertions: SumTree, pub version: clock::Global, } @@ -381,6 +384,7 @@ impl InsertionTimestamp { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { + id: Locator, timestamp: InsertionTimestamp, len: usize, visible: bool, @@ -391,6 +395,7 @@ struct Fragment { #[derive(Eq, PartialEq, Clone, Debug)] pub struct FragmentSummary { text: FragmentTextSummary, + max_id: Locator, max_version: clock::Global, min_insertion_version: clock::Global, max_insertion_version: clock::Global, @@ -402,11 +407,17 @@ struct FragmentTextSummary { deleted: usize, } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { - self.visible += summary.text.visible; - self.deleted += summary.text.deleted; - } +#[derive(Eq, PartialEq, Clone, Debug)] +struct InsertionFragment { + timestamp: InsertionTimestamp, + split_offset: usize, + fragment_id: Locator, +} + +#[derive(Clone, Debug, Default)] +struct InsertionSummary { + max_timestamp: InsertionTimestamp, + max_split_offset: usize, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -452,6 +463,7 @@ pub struct UndoOperation { impl Buffer { pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer { let mut fragments = SumTree::new(); + let mut insertions = SumTree::new(); let mut local_clock = clock::Local::new(replica_id); let mut lamport_clock = clock::Lamport::new(replica_id); @@ -466,8 +478,10 @@ impl Buffer { local_clock.observe(timestamp.local()); lamport_clock.observe(timestamp.lamport()); version.observe(timestamp.local()); + let fragment_id = Locator::between(&Locator::min(), &Locator::max()); fragments.push( Fragment { + id: fragment_id, timestamp, len: visible_text.len(), visible: true, @@ -476,6 +490,14 @@ impl Buffer { }, &None, ); + insertions.push( + InsertionFragment { + timestamp, + split_offset: 0, + fragment_id, + }, + &(), + ); } Buffer { @@ -483,6 +505,7 @@ impl Buffer { visible_text, deleted_text: Rope::new(), fragments, + insertions, version, undo_map: Default::default(), }, @@ -504,13 +527,7 @@ impl Buffer { } pub fn snapshot(&self) -> Snapshot { - Snapshot { - visible_text: self.visible_text.clone(), - deleted_text: self.deleted_text.clone(), - undo_map: self.undo_map.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - } + self.snapshot.clone() } pub fn replica_id(&self) -> ReplicaId { @@ -569,6 +586,7 @@ impl Buffer { ranges: Vec::with_capacity(ranges.len()), new_text: None, }; + let mut insertions = Vec::new(); let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -2040,6 +2058,7 @@ impl sum_tree::Item for Fragment { let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { visible: self.len, deleted: 0, @@ -2050,6 +2069,7 @@ impl sum_tree::Item for Fragment { } } else { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { 
visible: 0, deleted: self.len, @@ -2079,6 +2099,7 @@ impl sum_tree::Summary for FragmentSummary { impl Default for FragmentSummary { fn default() -> Self { FragmentSummary { + max_id: Locator::min(), text: FragmentTextSummary::default(), max_version: clock::Global::new(), min_insertion_version: clock::Global::new(), @@ -2087,6 +2108,33 @@ impl Default for FragmentSummary { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + self.visible += summary.text.visible; + self.deleted += summary.text.deleted; + } +} + +impl sum_tree::Item for InsertionFragment { + type Summary = InsertionSummary; + + fn summary(&self) -> Self::Summary { + InsertionSummary { + max_timestamp: self.timestamp, + max_split_offset: self.split_offset, + } + } +} + +impl sum_tree::Summary for InsertionSummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, cx: &()) { + self.max_timestamp = summary.max_timestamp; + self.max_split_offset = summary.max_split_offset; + } +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct FullOffset(pub usize); From dd38eb12648575584acf8a9e2840a09f75753b73 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 22:05:13 -0700 Subject: [PATCH 03/12] Start on maintaining an insertions tree I'm correctly assigning fragment ids to all fragments in the fragments tree, but I have a randomized test failure when making sure that the insertions tree matches the state of the fragments tree. --- crates/text/src/locator.rs | 10 +- crates/text/src/random_char_iter.rs | 12 +- crates/text/src/tests.rs | 23 +++- crates/text/src/text.rs | 163 ++++++++++++++++++---------- 4 files changed, 140 insertions(+), 68 deletions(-) diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 487c8c2608..0a22ea58f9 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -2,20 +2,20 @@ use smallvec::{smallvec, SmallVec}; use std::iter; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u32; 4]>); +pub struct Locator(SmallVec<[u8; 4]>); impl Locator { pub fn min() -> Self { - Self(smallvec![u32::MIN]) + Self(smallvec![u8::MIN]) } pub fn max() -> Self { - Self(smallvec![u32::MAX]) + Self(smallvec![u8::MAX]) } pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u32::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u32::MAX)); + let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { let mid = lhs + (rhs.saturating_sub(lhs)) / 2; diff --git a/crates/text/src/random_char_iter.rs b/crates/text/src/random_char_iter.rs index 244665688d..1b0e6cc64d 100644 --- a/crates/text/src/random_char_iter.rs +++ b/crates/text/src/random_char_iter.rs @@ -14,13 +14,13 @@ impl Iterator for RandomCharIter { fn next(&mut self) -> Option { match self.0.gen_range(0..100) { // whitespace - 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(), + 0..=5 => ['\n'].choose(&mut self.0).copied(), // two-byte greek letters - 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), - // three-byte characters - 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), - // four-byte characters - 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), + // 20..=32 => 
char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), + // // three-byte characters + // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), + // // four-byte characters + // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), // ascii letters _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()), } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index a13273b898..e55f478c9f 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -51,7 +51,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 1); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -78,6 +78,27 @@ fn test_random_edits(mut rng: StdRng) { TextSummary::from(&reference_string[range]) ); + // Ensure every fragment is ordered by locator in the fragment tree and corresponds + // to an insertion fragment in the insertions tree. + let mut prev_fragment_id = Locator::min(); + for fragment in buffer.snapshot.fragments.items(&None) { + assert!(fragment.id > prev_fragment_id); + prev_fragment_id = fragment.id.clone(); + + let insertion_fragment = buffer + .snapshot + .insertions + .get( + &InsertionFragmentKey { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + }, + &(), + ) + .unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + } + if rng.gen_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 3d2f95c169..c9343eb7a2 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -359,7 +359,7 @@ impl Subscription { } } -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)] pub struct InsertionTimestamp { pub replica_id: ReplicaId, pub local: clock::Seq, @@ -385,7 +385,8 @@ impl InsertionTimestamp { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { id: Locator, - timestamp: InsertionTimestamp, + insertion_timestamp: InsertionTimestamp, + insertion_offset: usize, len: usize, visible: bool, deletions: HashSet, @@ -414,10 +415,10 @@ struct InsertionFragment { fragment_id: Locator, } -#[derive(Clone, Debug, Default)] -struct InsertionSummary { - max_timestamp: InsertionTimestamp, - max_split_offset: usize, +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +struct InsertionFragmentKey { + timestamp: InsertionTimestamp, + split_offset: usize, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -470,34 +471,26 @@ impl Buffer { let mut version = clock::Global::new(); let visible_text = Rope::from(history.base_text.as_ref()); if visible_text.len() > 0 { - let timestamp = InsertionTimestamp { + let insertion_timestamp = InsertionTimestamp { replica_id: 0, local: 1, lamport: 1, }; - local_clock.observe(timestamp.local()); - lamport_clock.observe(timestamp.lamport()); - version.observe(timestamp.local()); + local_clock.observe(insertion_timestamp.local()); + lamport_clock.observe(insertion_timestamp.lamport()); + version.observe(insertion_timestamp.local()); let fragment_id = Locator::between(&Locator::min(), &Locator::max()); - fragments.push( - Fragment { - id: fragment_id, - timestamp, - len: visible_text.len(), - visible: true, - deletions: Default::default(), - max_undos: Default::default(), - }, - &None, - ); - 
insertions.push( - InsertionFragment { - timestamp, - split_offset: 0, - fragment_id, - }, - &(), - ); + let fragment = Fragment { + id: fragment_id, + insertion_timestamp, + insertion_offset: 0, + len: visible_text.len(), + visible: true, + deletions: Default::default(), + max_undos: Default::default(), + }; + insertions.push(InsertionFragment::new(&fragment), &()); + fragments.push(fragment, &None); } Buffer { @@ -586,7 +579,7 @@ impl Buffer { ranges: Vec::with_capacity(ranges.len()), new_text: None, }; - let mut insertions = Vec::new(); + let mut new_insertions = Vec::new(); let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -612,6 +605,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -630,6 +625,15 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; + prefix.insertion_offset += fragment_start - old_fragments.start().visible; + + // log::info!( + // "pushing prefix between {:?} and {:?}", + // new_fragments.summary().max_id, + // prefix.id + // ); + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); fragment_start = range.start; @@ -642,17 +646,32 @@ impl Buffer { old: fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); + + // log::info!( + // "pushing new fragment between {:?} and {:?}", + // new_fragments.summary().max_id, + // old_fragments + // .item() + // .map_or(&Locator::max(), |old_fragment| &old_fragment.id) + // ); + + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset: 0, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); } // Advance through every fragment that intersects this range, marking the intersecting @@ -664,6 +683,8 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; + intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -675,6 +696,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -695,6 +717,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - 
fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -708,6 +732,7 @@ impl Buffer { drop(old_fragments); self.snapshot.fragments = new_fragments; + self.snapshot.insertions.edit(new_insertions, &()); self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.update_subscriptions(edits); @@ -865,7 +890,7 @@ impl Buffer { // timestamp. while let Some(fragment) = old_fragments.item() { if fragment_start == range.start - && fragment.timestamp.lamport() > timestamp.lamport() + && fragment.insertion_timestamp.lamport() > timestamp.lamport() { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); @@ -900,7 +925,9 @@ impl Buffer { new_ropes.push_str(new_text); new_fragments.push( Fragment { - timestamp, + id: todo!(), + insertion_timestamp: timestamp, + insertion_offset: todo!(), len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -1008,7 +1035,9 @@ impl Buffer { let fragment_was_visible = fragment.visible; if fragment.was_visible(&undo.version, &self.undo_map) - || undo.counts.contains_key(&fragment.timestamp.local()) + || undo + .counts + .contains_key(&fragment.insertion_timestamp.local()) { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); @@ -2028,13 +2057,13 @@ impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterato impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { - !undos.is_undone(self.timestamp.local()) + !undos.is_undone(self.insertion_timestamp.local()) && self.deletions.iter().all(|d| undos.is_undone(*d)) } fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool { - (version.observed(self.timestamp.local()) - && !undos.was_undone(self.timestamp.local(), version)) + (version.observed(self.insertion_timestamp.local()) + && !undos.was_undone(self.insertion_timestamp.local(), version)) && self .deletions .iter() @@ -2047,14 +2076,14 @@ impl sum_tree::Item for Fragment { fn summary(&self) -> Self::Summary { let mut max_version = clock::Global::new(); - max_version.observe(self.timestamp.local()); + max_version.observe(self.insertion_timestamp.local()); for deletion in &self.deletions { max_version.observe(*deletion); } max_version.join(&self.max_undos); let mut min_insertion_version = clock::Global::new(); - min_insertion_version.observe(self.timestamp.local()); + min_insertion_version.observe(self.insertion_timestamp.local()); let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { @@ -2086,6 +2115,7 @@ impl sum_tree::Summary for FragmentSummary { type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { + self.max_id = other.max_id.clone(); self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; self.max_version.join(&other.max_version); @@ -2116,22 +2146,43 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { } impl sum_tree::Item for InsertionFragment { - type Summary = InsertionSummary; + type Summary = InsertionFragmentKey; fn summary(&self) -> Self::Summary { - InsertionSummary { - max_timestamp: self.timestamp, - max_split_offset: self.split_offset, + InsertionFragmentKey { + timestamp: self.timestamp, + split_offset: self.split_offset, } } } -impl sum_tree::Summary for 
InsertionSummary { +impl sum_tree::KeyedItem for InsertionFragment { + type Key = InsertionFragmentKey; + + fn key(&self) -> Self::Key { + sum_tree::Item::summary(self) + } +} + +impl InsertionFragment { + fn new(fragment: &Fragment) -> Self { + Self { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + fragment_id: fragment.id.clone(), + } + } + + fn insert_new(fragment: &Fragment) -> sum_tree::Edit { + sum_tree::Edit::Insert(Self::new(fragment)) + } +} + +impl sum_tree::Summary for InsertionFragmentKey { type Context = (); - fn add_summary(&mut self, summary: &Self, cx: &()) { - self.max_timestamp = summary.max_timestamp; - self.max_split_offset = summary.max_split_offset; + fn add_summary(&mut self, summary: &Self, _: &()) { + *self = *summary; } } From b4ebe179f938292ede0a068501b23afed104e9da Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 08:44:59 +0100 Subject: [PATCH 04/12] Make local edit randomized tests pass with locators --- crates/text/src/tests.rs | 54 ++++++++++++++++++++++++---------------- crates/text/src/text.rs | 20 +++------------ 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index e55f478c9f..e14baf47c1 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -51,7 +51,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 1); + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -78,26 +78,7 @@ fn test_random_edits(mut rng: StdRng) { TextSummary::from(&reference_string[range]) ); - // Ensure every fragment is ordered by locator in the fragment tree and corresponds - // to an insertion fragment in the insertions tree. - let mut prev_fragment_id = Locator::min(); - for fragment in buffer.snapshot.fragments.items(&None) { - assert!(fragment.id > prev_fragment_id); - prev_fragment_id = fragment.id.clone(); - - let insertion_fragment = buffer - .snapshot - .insertions - .get( - &InsertionFragmentKey { - timestamp: fragment.insertion_timestamp, - split_offset: fragment.insertion_offset, - }, - &(), - ) - .unwrap(); - assert_eq!(insertion_fragment.fragment_id, fragment.id); - } + buffer.check_invariants(); if rng.gen_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); @@ -639,6 +620,37 @@ struct Network { rng: R, } +impl Buffer { + fn check_invariants(&self) { + // Ensure every fragment is ordered by locator in the fragment tree and corresponds + // to an insertion fragment in the insertions tree. 
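
The invariant being checked here pairs every fragment with an entry in the insertions tree keyed by (insertion timestamp, split offset). As a standalone sketch of why that index is useful — not part of the patch, using a plain BTreeMap and integer keys as stand-ins for the SumTree and Locator — resolving an offset inside an insertion amounts to taking the last entry whose key is not greater than the query key:

use std::collections::BTreeMap;

// Hypothetical stand-in: keys are (insertion timestamp, split offset), values
// are the locator of the fragment that currently holds that slice of the insertion.
fn fragment_for_anchor(
    insertions: &BTreeMap<(u64, usize), Vec<u8>>,
    timestamp: u64,
    offset: usize,
) -> Option<&Vec<u8>> {
    // Last entry whose key is <= (timestamp, offset), i.e. the fragment containing it.
    insertions
        .range(..=(timestamp, offset))
        .next_back()
        .map(|(_, fragment_id)| fragment_id)
}

fn main() {
    let mut insertions = BTreeMap::new();
    // One insertion (timestamp 7) that has been split into two fragments.
    insertions.insert((7, 0), vec![64u8]);
    insertions.insert((7, 10), vec![96u8]);
    // Offset 4 of insertion 7 still lives in the first fragment; offset 10 starts the second.
    assert_eq!(fragment_for_anchor(&insertions, 7, 4), Some(&vec![64u8]));
    assert_eq!(fragment_for_anchor(&insertions, 7, 10), Some(&vec![96u8]));
}
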
+ let mut prev_fragment_id = Locator::min(); + for fragment in self.snapshot.fragments.items(&None) { + assert!(fragment.id > prev_fragment_id); + prev_fragment_id = fragment.id.clone(); + + let insertion_fragment = self + .snapshot + .insertions + .get( + &InsertionFragmentKey { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + }, + &(), + ) + .unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + } + + let insertions = self.snapshot.insertions.items(&()); + assert_eq!( + HashSet::from_iter(insertions.iter().map(|i| &i.fragment_id)).len(), + insertions.len() + ); + } +} + impl Network { fn new(rng: R) -> Self { Network { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index c9343eb7a2..fb00e4bba1 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -580,6 +580,7 @@ impl Buffer { new_text: None, }; let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -626,12 +627,6 @@ impl Buffer { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; prefix.insertion_offset += fragment_start - old_fragments.start().visible; - - // log::info!( - // "pushing prefix between {:?} and {:?}", - // new_fragments.summary().max_id, - // prefix.id - // ); prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); @@ -646,15 +641,6 @@ impl Buffer { old: fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); - - // log::info!( - // "pushing new fragment between {:?} and {:?}", - // new_fragments.summary().max_id, - // old_fragments - // .item() - // .map_or(&Locator::max(), |old_fragment| &old_fragment.id) - // ); - let fragment = Fragment { id: Locator::between( &new_fragments.summary().max_id, @@ -663,7 +649,7 @@ impl Buffer { .map_or(&Locator::max(), |old_fragment| &old_fragment.id), ), insertion_timestamp: timestamp, - insertion_offset: 0, + insertion_offset, len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -672,6 +658,7 @@ impl Buffer { new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -683,6 +670,7 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; + intersection.insertion_offset += fragment_start - old_fragments.start().visible; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); From dc81b5f57a227275ebd004200c31594124e261a9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 09:15:19 +0100 Subject: [PATCH 05/12] Make remote edit randomized tests pass with locators --- crates/text/src/tests.rs | 2 ++ crates/text/src/text.rs | 47 ++++++++++++++++++++++++++++++---------- 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index e14baf47c1..38390a210a 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -579,6 +579,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { } _ => {} } + 
buffer.check_invariants(); if mutation_count == 0 && network.is_idle() { break; @@ -605,6 +606,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { .all_selection_ranges::() .collect::>() ); + buffer.check_invariants(); } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index fb00e4bba1..a609f8ec8d 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -827,6 +827,8 @@ impl Buffer { let mut edits = Patch::default(); let cx = Some(version.clone()); + let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(); @@ -850,6 +852,9 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -868,6 +873,8 @@ impl Buffer { if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); fragment.len = fragment_end.0 - fragment_start.0; + fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); old_fragments.next(&cx); @@ -894,6 +901,9 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start.0 - fragment_start.0; + prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); fragment_start = range.start; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); @@ -910,19 +920,24 @@ impl Buffer { old: old_start..old_start, new: new_start..new_start + new_text.len(), }); + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - id: todo!(), - insertion_timestamp: timestamp, - insertion_offset: todo!(), - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -934,6 +949,10 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { intersection.len = intersection_end.0 - fragment_start.0; + intersection.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } 
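
Every split in the hunks above assigns the new fragment a locator strictly between its neighbors, which is what keeps the fragment tree ordered without renumbering existing fragments. A standalone sketch of that ordering property — not part of the patch, mirroring `Locator::between` from crates/text/src/locator.rs over plain `Vec<u8>` keys:

// Produce a key that sorts strictly between `lhs` and `rhs` (lexicographically),
// padding with u8::MIN / u8::MAX as the real Locator::between does.
fn between(lhs: &[u8], rhs: &[u8]) -> Vec<u8> {
    let lhs = lhs.iter().copied().chain(std::iter::repeat(u8::MIN));
    let rhs = rhs.iter().copied().chain(std::iter::repeat(u8::MAX));
    let mut location = Vec::new();
    for (lhs, rhs) in lhs.zip(rhs) {
        let mid = lhs + rhs.saturating_sub(lhs) / 2;
        location.push(mid);
        if mid > lhs {
            break;
        }
    }
    location
}

fn main() {
    let min = vec![u8::MIN];
    let max = vec![u8::MAX];
    let a = between(&min, &max); // e.g. [127]
    let b = between(&a, &max);   // sorts after `a`
    let c = between(&a, &b);     // squeezes between `a` and `b`
    assert!(min < a && a < c && c < b && b < max);
}

New locators can therefore be minted forever between any two existing ones; the trade-off is that keys grow longer when the same region keeps being split, which fits the variable-length SmallVec representation the patch uses.
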
@@ -947,6 +966,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -964,6 +984,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -979,6 +1001,7 @@ impl Buffer { self.snapshot.fragments = new_fragments; self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; + self.snapshot.insertions.edit(new_insertions, &()); self.local_clock.observe(timestamp.local()); self.lamport_clock.observe(timestamp.lamport()); self.update_subscriptions(edits); From b7535dfba4df92ef86194a178df56f781f2b6b4e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 09:21:52 +0100 Subject: [PATCH 06/12] Store only `clock::Local` in `InsertionFragment` --- crates/text/src/tests.rs | 2 +- crates/text/src/text.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 38390a210a..dafbd9604c 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -636,7 +636,7 @@ impl Buffer { .insertions .get( &InsertionFragmentKey { - timestamp: fragment.insertion_timestamp, + timestamp: fragment.insertion_timestamp.local(), split_offset: fragment.insertion_offset, }, &(), diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a609f8ec8d..0137a25bbe 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -410,14 +410,14 @@ struct FragmentTextSummary { #[derive(Eq, PartialEq, Clone, Debug)] struct InsertionFragment { - timestamp: InsertionTimestamp, + timestamp: clock::Local, split_offset: usize, fragment_id: Locator, } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] struct InsertionFragmentKey { - timestamp: InsertionTimestamp, + timestamp: clock::Local, split_offset: usize, } @@ -2178,7 +2178,7 @@ impl sum_tree::KeyedItem for InsertionFragment { impl InsertionFragment { fn new(fragment: &Fragment) -> Self { Self { - timestamp: fragment.insertion_timestamp, + timestamp: fragment.insertion_timestamp.local(), split_offset: fragment.insertion_offset, fragment_id: fragment.id.clone(), } From cbe136c0cb4ef310dd2ab8a89198e8e25887eeee Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 11:18:01 +0100 Subject: [PATCH 07/12] Implement anchor resolution using locators --- crates/text/src/anchor.rs | 589 ++++------------------------------- crates/text/src/selection.rs | 87 +++--- crates/text/src/tests.rs | 12 +- crates/text/src/text.rs | 370 ++++++++-------------- 4 files changed, 244 insertions(+), 814 deletions(-) diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 846c57274b..1123bd2104 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,117 +1,88 @@ use crate::{rope::TextDimension, Snapshot}; -use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset}; +use super::{Buffer, ToOffset}; use anyhow::Result; -use std::{ - cmp::Ordering, - fmt::{Debug, Formatter}, - ops::Range, -}; -use sum_tree::{Bias, SumTree}; +use std::{cmp::Ordering, fmt::Debug, 
ops::Range}; +use sum_tree::Bias; #[derive(Clone, Eq, PartialEq, Debug, Hash)] -pub struct Anchor { - pub full_offset: FullOffset, - pub bias: Bias, - pub version: clock::Global, -} - -#[derive(Clone)] -pub struct AnchorMap { - pub(crate) version: clock::Global, - pub(crate) bias: Bias, - pub(crate) entries: Vec<(FullOffset, T)>, -} - -#[derive(Clone)] -pub struct AnchorSet(pub(crate) AnchorMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMap { - pub(crate) version: clock::Global, - pub(crate) entries: Vec<(Range, T)>, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMultimap { - pub(crate) entries: SumTree>, - pub(crate) version: clock::Global, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub(crate) struct AnchorRangeMultimapEntry { - pub(crate) range: FullOffsetRange, - pub(crate) value: T, -} - -#[derive(Clone, Debug)] -pub(crate) struct FullOffsetRange { - pub(crate) start: FullOffset, - pub(crate) end: FullOffset, -} - -#[derive(Clone, Debug)] -pub(crate) struct AnchorRangeMultimapSummary { - start: FullOffset, - end: FullOffset, - min_start: FullOffset, - max_end: FullOffset, - count: usize, +pub enum Anchor { + Min, + Insertion { + timestamp: clock::Local, + offset: usize, + bias: Bias, + }, + Max, } impl Anchor { pub fn min() -> Self { - Self { - full_offset: FullOffset(0), - bias: Bias::Left, - version: Default::default(), - } + Self::Min } pub fn max() -> Self { - Self { - full_offset: FullOffset::MAX, - bias: Bias::Right, - version: Default::default(), - } + Self::Max } pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { - if self == other { - return Ok(Ordering::Equal); + match (self, other) { + (Self::Min, Self::Min) => Ok(Ordering::Equal), + (Self::Min, _) => Ok(Ordering::Less), + (_, Self::Min) => Ok(Ordering::Greater), + (Self::Max, Self::Max) => Ok(Ordering::Equal), + (Self::Max, _) => Ok(Ordering::Greater), + (_, Self::Max) => Ok(Ordering::Less), + ( + Self::Insertion { + timestamp: lhs_id, + bias: lhs_bias, + offset: lhs_offset, + }, + Self::Insertion { + timestamp: rhs_id, + bias: rhs_bias, + offset: rhs_offset, + }, + ) => { + let offset_comparison = if lhs_id == rhs_id { + lhs_offset.cmp(&rhs_offset) + } else { + buffer + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) + }; + + Ok(offset_comparison.then_with(|| lhs_bias.cmp(&rhs_bias))) + } } - - let offset_comparison = if self.version == other.version { - self.full_offset.cmp(&other.full_offset) - } else { - buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) - }; - - Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) } pub fn bias_left(&self, buffer: &Buffer) -> Anchor { - if self.bias == Bias::Left { - self.clone() - } else { - buffer.anchor_before(self) + match self { + Anchor::Min => Anchor::Min, + Anchor::Insertion { bias, .. } => { + if *bias == Bias::Left { + self.clone() + } else { + buffer.anchor_before(self) + } + } + Anchor::Max => buffer.anchor_before(self), } } pub fn bias_right(&self, buffer: &Buffer) -> Anchor { - if self.bias == Bias::Right { - self.clone() - } else { - buffer.anchor_after(self) + match self { + Anchor::Min => buffer.anchor_after(self), + Anchor::Insertion { bias, .. 
} => { + if *bias == Bias::Right { + self.clone() + } else { + buffer.anchor_after(self) + } + } + Anchor::Max => Anchor::Max, } } @@ -123,442 +94,6 @@ impl Anchor { } } -impl AnchorMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn iter<'a, D>(&'a self, snapshot: &'a Snapshot) -> impl Iterator + 'a - where - D: 'a + TextDimension<'a>, - { - snapshot - .summaries_for_anchors( - self.version.clone(), - self.bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } -} - -impl AnchorSet { - pub fn version(&self) -> &clock::Global { - &self.0.version - } - - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn iter<'a, D>(&'a self, content: &'a Snapshot) -> impl Iterator + 'a - where - D: 'a + TextDimension<'a>, - { - self.0.iter(content).map(|(position, _)| position) - } -} - -impl AnchorRangeMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: Vec<(Range, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn ranges<'a, D>( - &'a self, - content: &'a Snapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: 'a + TextDimension<'a>, - { - content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } - - pub fn intersecting_ranges<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - content: &'a Snapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: 'a + TextDimension<'a>, - I: ToOffset, - { - let range = content.anchor_at(range.start.0, range.start.1) - ..content.anchor_at(range.end.0, range.end.1); - - let mut probe_anchor = Anchor { - full_offset: Default::default(), - bias: self.start_bias, - version: self.version.clone(), - }; - let start_ix = self.entries.binary_search_by(|probe| { - probe_anchor.full_offset = probe.0.end; - probe_anchor.cmp(&range.start, &content).unwrap() - }); - - match start_ix { - Ok(start_ix) | Err(start_ix) => content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries[start_ix..].iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)), - } - } - - pub fn full_offset_ranges(&self) -> impl Iterator, T)> { - self.entries.iter() - } - - pub fn min_by_key<'a, D, F, K>( - &self, - content: &'a Snapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: 'a + TextDimension<'a>, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .min_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - pub fn max_by_key<'a, D, F, K>( - &self, - content: &'a Snapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: 'a + TextDimension<'a>, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .max_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - fn resolve_range<'a, D>(&self, range: &Range, content: &'a Snapshot) -> Range - where - D: 'a + TextDimension<'a>, - { - let mut anchor = Anchor { - full_offset: range.start, - bias: self.start_bias, - version: self.version.clone(), - }; - let start = content.summary_for_anchor(&anchor); - - anchor.full_offset = range.end; - anchor.bias 
= self.end_bias; - let end = content.summary_for_anchor(&anchor); - - start..end - } -} - -impl PartialEq for AnchorRangeMap { - fn eq(&self, other: &Self) -> bool { - self.version == other.version && self.entries == other.entries - } -} - -impl Eq for AnchorRangeMap {} - -impl Debug for AnchorRangeMap { - fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { - let mut f = f.debug_map(); - for (range, value) in &self.entries { - f.key(range); - f.value(value); - } - f.finish() - } -} - -impl Debug for AnchorRangeSet { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let mut f = f.debug_set(); - for (range, _) in &self.0.entries { - f.entry(range); - } - f.finish() - } -} - -impl AnchorRangeSet { - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn version(&self) -> &clock::Global { - self.0.version() - } - - pub fn ranges<'a, D>(&'a self, content: &'a Snapshot) -> impl 'a + Iterator> - where - D: 'a + TextDimension<'a>, - { - self.0.ranges(content).map(|(range, _)| range) - } -} - -impl Default for AnchorRangeMultimap { - fn default() -> Self { - Self { - entries: Default::default(), - version: Default::default(), - start_bias: Bias::Left, - end_bias: Bias::Left, - } - } -} - -impl AnchorRangeMultimap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn intersecting_ranges<'a, I, O>( - &'a self, - range: Range, - content: &'a Snapshot, - inclusive: bool, - ) -> impl Iterator, &T)> + 'a - where - I: ToOffset, - O: FromAnchor, - { - let end_bias = if inclusive { Bias::Right } else { Bias::Left }; - let range = range.start.to_full_offset(&content, Bias::Left) - ..range.end.to_full_offset(&content, end_bias); - let mut cursor = self.entries.filter::<_, usize>( - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Right, - version: self.version.clone(), - }; - move |summary: &AnchorRangeMultimapSummary| { - endpoint.full_offset = summary.max_end; - endpoint.bias = self.end_bias; - let max_end = endpoint.to_full_offset(&content, self.end_bias); - let start_cmp = range.start.cmp(&max_end); - - endpoint.full_offset = summary.min_start; - endpoint.bias = self.start_bias; - let min_start = endpoint.to_full_offset(&content, self.start_bias); - let end_cmp = range.end.cmp(&min_start); - - if inclusive { - start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal - } else { - start_cmp == Ordering::Less && end_cmp == Ordering::Greater - } - } - }, - &(), - ); - - std::iter::from_fn({ - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - move || { - if let Some(item) = cursor.item() { - let ix = *cursor.start(); - endpoint.full_offset = item.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = item.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - let value = &item.value; - cursor.next(&()); - Some((ix, start..end, value)) - } else { - None - } - } - }) - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: impl Iterator, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries: SumTree::from_iter( - entries.map(|(range, value)| AnchorRangeMultimapEntry { - range: FullOffsetRange { - start: range.start, - end: range.end, - }, - value, - }), - &(), - ), - } - } - - pub fn full_offset_ranges(&self) -> impl Iterator, &T)> { - self.entries - .cursor::<()>() - 
.map(|entry| (entry.range.start..entry.range.end, &entry.value)) - } - - pub fn filter<'a, O, F>( - &'a self, - content: &'a Snapshot, - mut f: F, - ) -> impl 'a + Iterator, &T)> - where - O: FromAnchor, - F: 'a + FnMut(&'a T) -> bool, - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - self.entries - .cursor::<()>() - .enumerate() - .filter_map(move |(ix, entry)| { - if f(&entry.value) { - endpoint.full_offset = entry.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = entry.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - Some((ix, start..end, &entry.value)) - } else { - None - } - }) - } -} - -impl sum_tree::Item for AnchorRangeMultimapEntry { - type Summary = AnchorRangeMultimapSummary; - - fn summary(&self) -> Self::Summary { - AnchorRangeMultimapSummary { - start: self.range.start, - end: self.range.end, - min_start: self.range.start, - max_end: self.range.end, - count: 1, - } - } -} - -impl Default for AnchorRangeMultimapSummary { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - min_start: FullOffset::MAX, - max_end: FullOffset(0), - count: 0, - } - } -} - -impl sum_tree::Summary for AnchorRangeMultimapSummary { - type Context = (); - - fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.min_start = self.min_start.min(other.min_start); - self.max_end = self.max_end.max(other.max_end); - - #[cfg(debug_assertions)] - { - let start_comparison = self.start.cmp(&other.start); - assert!(start_comparison <= Ordering::Equal); - if start_comparison == Ordering::Equal { - assert!(self.end.cmp(&other.end) >= Ordering::Equal); - } - } - - self.start = other.start; - self.end = other.end; - self.count += other.count; - } -} - -impl Default for FullOffsetRange { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - } - } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - *self += summary.count; - } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - self.start = summary.start; - self.end = summary.end; - } -} - -impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange { - fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering { - Ord::cmp(&self.start, &cursor_location.start) - .then_with(|| Ord::cmp(&cursor_location.end, &self.end)) - } -} - pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &Snapshot) -> Result; fn to_offset(&self, content: &Snapshot) -> Range; diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index eaa2409772..e9e7dd1f22 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,8 +1,8 @@ use sum_tree::Bias; -use crate::{rope::TextDimension, Snapshot}; +use crate::{rope::TextDimension, Anchor, Snapshot}; -use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint}; +use super::{Buffer, Point, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; pub type SelectionSetId = clock::Lamport; @@ -28,7 +28,7 @@ pub struct Selection { pub struct SelectionSet { pub id: SelectionSetId, pub active: bool, - pub selections: Arc>, + pub selections: Arc<[Selection]>, } 
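
Because the set now stores plain anchors ordered by position, the selection queries below can resolve each anchor on demand and use binary search instead of the old range map. A standalone sketch of that search strategy — not part of the patch, substituting already-resolved (start, end) offset pairs for the anchor comparisons that `intersecting_selections` performs against a snapshot:

// Hypothetical stand-in: `sorted` holds non-overlapping selections as resolved
// offset ranges, ordered by position. Two binary searches bound the slice of
// selections that touch `query`.
fn intersecting(sorted: &[(usize, usize)], query: (usize, usize)) -> &[(usize, usize)] {
    // First selection that ends at or after the query start...
    let start_ix = sorted.partition_point(|&(_, end)| end < query.0);
    // ...up to the first selection that starts after the query end.
    let end_ix = sorted.partition_point(|&(start, _)| start <= query.1);
    &sorted[start_ix..end_ix]
}

fn main() {
    let selections = [(0, 2), (4, 6), (9, 12)];
    // A query covering offsets 5..=10 touches the last two selections.
    assert_eq!(intersecting(&selections, (5, 10)), &selections[1..]);
}
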
#[derive(Debug, Eq, PartialEq)] @@ -98,6 +98,21 @@ impl Selection { } } +impl Selection { + pub fn resolve<'a, D: 'a + TextDimension<'a>>( + &'a self, + snapshot: &'a Snapshot, + ) -> Selection { + Selection { + id: self.id, + start: snapshot.summary_for_anchor(&self.start), + end: snapshot.summary_for_anchor(&self.end), + reversed: self.reversed, + goal: self.goal, + } + } +} + impl SelectionSet { pub fn len(&self) -> usize { self.selections.len() @@ -105,69 +120,59 @@ impl SelectionSet { pub fn selections<'a, D>( &'a self, - content: &'a Snapshot, + snapshot: &'a Snapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, { - self.selections - .ranges(content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + self.selections.iter().map(|s| s.resolve(snapshot)) } pub fn intersecting_selections<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a Snapshot, + snapshot: &'a Snapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, I: 'a + ToOffset, { - self.selections - .intersecting_ranges(range, content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + let start = snapshot.anchor_at(range.start.0, range.start.1); + let end = snapshot.anchor_at(range.end.0, range.end.1); + let start_ix = match self + .selections + .binary_search_by(|probe| probe.start.cmp(&start, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .selections + .binary_search_by(|probe| probe.end.cmp(&end, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + self.selections[start_ix..end_ix] + .iter() + .map(|s| s.resolve(snapshot)) } - pub fn oldest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a Snapshot) -> Option> where D: 'a + TextDimension<'a>, { self.selections - .min_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .min_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } - pub fn newest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn newest_selection<'a, D>(&'a self, snapshot: &'a Snapshot) -> Option> where D: 'a + TextDimension<'a>, { self.selections - .max_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .max_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index dafbd9604c..f7f307049c 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -645,11 +645,13 @@ impl Buffer { assert_eq!(insertion_fragment.fragment_id, fragment.id); } - let insertions = self.snapshot.insertions.items(&()); - assert_eq!( - HashSet::from_iter(insertions.iter().map(|i| &i.fragment_id)).len(), - insertions.len() - ); + let mut cursor = self.snapshot.fragments.cursor::(); + for insertion_fragment in self.snapshot.insertions.cursor::<()>() { + cursor.seek(&insertion_fragment.fragment_id, Bias::Left, &None); + let fragment = cursor.item().unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); + } } } diff --git a/crates/text/src/text.rs 
b/crates/text/src/text.rs index 0137a25bbe..5f54c4b8b9 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -27,7 +27,7 @@ use rope::TextDimension; pub use rope::{Chunks, Rope, TextSummary}; pub use selection::*; use std::{ - cmp::{self, Reverse}, + cmp::{self, Ordering}, iter::Iterator, ops::{self, Deref, Range, Sub}, str, @@ -67,8 +67,8 @@ pub struct Transaction { end: clock::Global, edits: Vec, ranges: Vec>, - selections_before: HashMap>>, - selections_after: HashMap>>, + selections_before: HashMap]>>, + selections_after: HashMap]>>, first_edit_at: Instant, last_edit_at: Instant, } @@ -155,7 +155,7 @@ impl History { fn start_transaction( &mut self, start: clock::Global, - selections_before: HashMap>>, + selections_before: HashMap]>>, now: Instant, ) { self.transaction_depth += 1; @@ -175,7 +175,7 @@ impl History { fn end_transaction( &mut self, - selections_after: HashMap>>, + selections_after: HashMap]>>, now: Instant, ) -> Option<&Transaction> { assert_ne!(self.transaction_depth, 0); @@ -430,7 +430,7 @@ pub enum Operation { }, UpdateSelections { set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, lamport_timestamp: clock::Lamport, }, RemoveSelections { @@ -1122,9 +1122,9 @@ impl Buffer { match op { Operation::Edit(edit) => self.version.ge(&edit.version), Operation::Undo { undo, .. } => self.version.ge(&undo.version), - Operation::UpdateSelections { selections, .. } => { - self.version.ge(selections.version()) - } + Operation::UpdateSelections { selections, .. } => selections + .iter() + .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), Operation::RemoveSelections { .. } => true, Operation::SetActiveSelections { set_id, .. } => { set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) @@ -1135,6 +1135,14 @@ impl Buffer { } } + fn can_resolve(&self, anchor: &Anchor) -> bool { + match anchor { + Anchor::Min => true, + Anchor::Insertion { timestamp, .. 
} => self.version.observed(*timestamp), + Anchor::Max => true, + } + } + pub fn peek_undo_stack(&self) -> Option<&Transaction> { self.history.undo_stack.last() } @@ -1280,25 +1288,22 @@ impl Buffer { self.selections.iter() } - fn build_selection_anchor_range_map( + fn build_anchor_selection_set( &self, selections: &[Selection], - ) -> Arc> { - Arc::new(self.anchor_range_map( - Bias::Left, - Bias::Left, - selections.iter().map(|selection| { - let start = selection.start.to_offset(self); - let end = selection.end.to_offset(self); - let range = start..end; - let state = SelectionState { + ) -> Arc<[Selection]> { + Arc::from( + selections + .iter() + .map(|selection| Selection { id: selection.id, + start: self.anchor_before(&selection.start), + end: self.anchor_before(&selection.end), reversed: selection.reversed, goal: selection.goal, - }; - (range, state) - }), - )) + }) + .collect::>(), + ) } pub fn update_selection_set( @@ -1306,7 +1311,7 @@ impl Buffer { set_id: SelectionSetId, selections: &[Selection], ) -> Result { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set = self .selections .get_mut(&set_id) @@ -1322,7 +1327,7 @@ impl Buffer { pub fn restore_selection_set( &mut self, set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, ) -> Result { let set = self .selections @@ -1337,7 +1342,7 @@ impl Buffer { } pub fn add_selection_set(&mut self, selections: &[Selection]) -> Operation { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set_id = self.lamport_clock.tick(); self.selections.insert( set_id, @@ -1675,19 +1680,81 @@ impl Snapshot { where D: TextDimension<'a>, { - let cx = Some(anchor.version.clone()); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.full_offset - cursor.start().0.full_offset() - } else { - 0 - }; - self.text_summary_for_range(0..cursor.start().1 + overshoot) + match anchor { + Anchor::Min => D::default(), + Anchor::Insertion { + timestamp, + offset, + bias, + } => { + let anchor_key = InsertionFragmentKey { + timestamp: *timestamp, + split_offset: *offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, *bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += *offset - insertion.split_offset; + } + self.text_summary_for_range(0..fragment_offset) + } + Anchor::Max => D::from_text_summary(&self.visible_text.summary()), + } + } + + fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { + match anchor { 
+ Anchor::Min => Default::default(), + Anchor::Insertion { + timestamp, + offset, + bias, + } => { + let anchor_key = InsertionFragmentKey { + timestamp: *timestamp, + split_offset: *offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, *bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + fragment_cursor.start().1 + (*offset - insertion.split_offset) + } + Anchor::Max => { + let text = self.fragments.summary().text; + FullOffset(text.visible + text.deleted) + } + } } pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range) -> D @@ -1699,70 +1766,6 @@ impl Snapshot { .summary(range.end.to_offset(self)) } - fn summaries_for_anchors<'a, D, I>( - &'a self, - version: clock::Global, - bias: Bias, - ranges: I, - ) -> impl 'a + Iterator - where - D: 'a + TextDimension<'a>, - I: 'a + IntoIterator, - { - let cx = Some(version.clone()); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |offset| { - cursor.seek_forward(&VersionedFullOffset::Offset(*offset), bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - *offset - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary(cursor.start().1 + overshoot)); - summary.clone() - }) - } - - fn summaries_for_anchor_ranges<'a, D, I>( - &'a self, - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - ranges: I, - ) -> impl 'a + Iterator> - where - D: 'a + TextDimension<'a>, - I: 'a + IntoIterator>, - { - let cx = Some(version); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |range| { - cursor.seek_forward(&VersionedFullOffset::Offset(range.start), start_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.start - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let start_summary = summary.clone(); - - cursor.seek_forward(&VersionedFullOffset::Offset(range.end), end_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.end - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let end_summary = summary.clone(); - - start_summary..end_summary - }) - } - pub fn anchor_before(&self, position: T) -> Anchor { self.anchor_at(position, Bias::Left) } @@ -1772,139 +1775,22 @@ impl Snapshot { } pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - Anchor { - full_offset: position.to_full_offset(self, bias), - bias, - version: self.version.clone(), - } - } - - pub fn anchor_map(&self, bias: Bias, entries: E) -> 
AnchorMap - where - E: IntoIterator, - { - let version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(offset, value)| { - cursor.seek_forward(&offset, bias, &None); - let full_offset = FullOffset(cursor.start().deleted + offset); - (full_offset, value) - }) - .collect(); - - AnchorMap { - version, - bias, - entries, - } - } - - pub fn anchor_range_map( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMap - where - E: IntoIterator, T)>, - { - let version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(range, value)| { - let Range { - start: start_offset, - end: end_offset, - } = range; - cursor.seek_forward(&start_offset, start_bias, &None); - let full_start_offset = FullOffset(cursor.start().deleted + start_offset); - cursor.seek_forward(&end_offset, end_bias, &None); - let full_end_offset = FullOffset(cursor.start().deleted + end_offset); - (full_start_offset..full_end_offset, value) - }) - .collect(); - - AnchorRangeMap { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn anchor_set(&self, bias: Bias, entries: E) -> AnchorSet - where - E: IntoIterator, - { - AnchorSet(self.anchor_map(bias, entries.into_iter().map(|range| (range, ())))) - } - - pub fn anchor_range_set( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeSet - where - E: IntoIterator>, - { - AnchorRangeSet(self.anchor_range_map( - start_bias, - end_bias, - entries.into_iter().map(|range| (range, ())), - )) - } - - pub fn anchor_range_multimap( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMultimap - where - T: Clone, - E: IntoIterator, T)>, - O: ToOffset, - { - let mut entries = entries - .into_iter() - .map(|(range, value)| AnchorRangeMultimapEntry { - range: FullOffsetRange { - start: range.start.to_full_offset(self, start_bias), - end: range.end.to_full_offset(self, end_bias), - }, - value, - }) - .collect::>(); - entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end))); - AnchorRangeMultimap { - entries: SumTree::from_iter(entries, &()), - version: self.version.clone(), - start_bias, - end_bias, - } - } - - fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { - let cx = Some(anchor.version.clone()); - let mut cursor = self - .fragments - .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().is_some() { - anchor.full_offset - cursor.start().0.full_offset() + let offset = position.to_offset(self); + if bias == Bias::Left && offset == 0 { + Anchor::Min + } else if bias == Bias::Right && offset == self.len() { + Anchor::Max } else { - 0 - }; - let summary = cursor.start().1; - FullOffset(summary.visible + summary.deleted + overshoot) + let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); + fragment_cursor.seek(&offset, bias, &None); + let fragment = fragment_cursor.item().unwrap(); + let overshoot = offset - fragment_cursor.start().0; + Anchor::Insertion { + timestamp: fragment.insertion_timestamp.local(), + offset: fragment.insertion_offset + overshoot, + bias, + } + } } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2200,10 +2086,6 @@ impl sum_tree::Summary for InsertionFragmentKey { #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct 
FullOffset(pub usize); -impl FullOffset { - const MAX: Self = FullOffset(usize::MAX); -} - impl ops::AddAssign for FullOffset { fn add_assign(&mut self, rhs: usize) { self.0 += rhs; @@ -2239,6 +2121,12 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Locator { + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { + *self = summary.max_id.clone(); + } +} + impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize { fn cmp( &self, @@ -2363,9 +2251,9 @@ impl ToOffset for Anchor { } } -impl<'a> ToOffset for &'a Anchor { +impl<'a, T: ToOffset> ToOffset for &'a T { fn to_offset(&self, content: &Snapshot) -> usize { - content.summary_for_anchor(self) + (*self).to_offset(content) } } From 67686dd1c2013092255e554ef83d9b9a66b1f8f0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 12:00:51 +0100 Subject: [PATCH 08/12] Don't use an enum for anchors and model min/max more implicitly This will make it easier to serialize an anchor. --- crates/clock/src/clock.rs | 9 +++ crates/text/src/anchor.rs | 110 ++++++++++++------------------ crates/text/src/text.rs | 136 ++++++++++++++++++-------------------- 3 files changed, 115 insertions(+), 140 deletions(-) diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index 6e8b460861..2632aecce5 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -21,6 +21,15 @@ pub struct Lamport { } impl Local { + pub const MIN: Self = Self { + replica_id: ReplicaId::MIN, + value: Seq::MIN, + }; + pub const MAX: Self = Self { + replica_id: ReplicaId::MAX, + value: Seq::MAX, + }; + pub fn new(replica_id: ReplicaId) -> Self { Self { replica_id, diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 1123bd2104..a781c9f887 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,88 +1,57 @@ -use crate::{rope::TextDimension, Snapshot}; - -use super::{Buffer, ToOffset}; +use super::{rope::TextDimension, Buffer, Point, Snapshot, ToOffset}; use anyhow::Result; use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::Bias; #[derive(Clone, Eq, PartialEq, Debug, Hash)] -pub enum Anchor { - Min, - Insertion { - timestamp: clock::Local, - offset: usize, - bias: Bias, - }, - Max, +pub struct Anchor { + pub timestamp: clock::Local, + pub offset: usize, + pub bias: Bias, } impl Anchor { pub fn min() -> Self { - Self::Min - } - - pub fn max() -> Self { - Self::Max - } - - pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { - match (self, other) { - (Self::Min, Self::Min) => Ok(Ordering::Equal), - (Self::Min, _) => Ok(Ordering::Less), - (_, Self::Min) => Ok(Ordering::Greater), - (Self::Max, Self::Max) => Ok(Ordering::Equal), - (Self::Max, _) => Ok(Ordering::Greater), - (_, Self::Max) => Ok(Ordering::Less), - ( - Self::Insertion { - timestamp: lhs_id, - bias: lhs_bias, - offset: lhs_offset, - }, - Self::Insertion { - timestamp: rhs_id, - bias: rhs_bias, - offset: rhs_offset, - }, - ) => { - let offset_comparison = if lhs_id == rhs_id { - lhs_offset.cmp(&rhs_offset) - } else { - buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) - }; - - Ok(offset_comparison.then_with(|| lhs_bias.cmp(&rhs_bias))) - } + Self { + timestamp: clock::Local::MIN, + offset: usize::MIN, + bias: Bias::Left, } } + pub fn max() -> Self { + Self { + timestamp: clock::Local::MAX, + offset: usize::MAX, + bias: Bias::Right, + } + } + + pub fn cmp<'a>(&self, other: 
&Anchor, buffer: &Snapshot) -> Result { + let offset_comparison = if self.timestamp == other.timestamp { + self.offset.cmp(&other.offset) + } else { + buffer + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) + }; + + Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) + } + pub fn bias_left(&self, buffer: &Buffer) -> Anchor { - match self { - Anchor::Min => Anchor::Min, - Anchor::Insertion { bias, .. } => { - if *bias == Bias::Left { - self.clone() - } else { - buffer.anchor_before(self) - } - } - Anchor::Max => buffer.anchor_before(self), + if self.bias == Bias::Left { + self.clone() + } else { + buffer.anchor_before(self) } } pub fn bias_right(&self, buffer: &Buffer) -> Anchor { - match self { - Anchor::Min => buffer.anchor_after(self), - Anchor::Insertion { bias, .. } => { - if *bias == Bias::Right { - self.clone() - } else { - buffer.anchor_after(self) - } - } - Anchor::Max => Anchor::Max, + if self.bias == Bias::Right { + self.clone() + } else { + buffer.anchor_after(self) } } @@ -97,6 +66,7 @@ impl Anchor { pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &Snapshot) -> Result; fn to_offset(&self, content: &Snapshot) -> Range; + fn to_point(&self, content: &Snapshot) -> Range; } impl AnchorRangeExt for Range { @@ -110,4 +80,8 @@ impl AnchorRangeExt for Range { fn to_offset(&self, content: &Snapshot) -> Range { self.start.to_offset(&content)..self.end.to_offset(&content) } + + fn to_point(&self, content: &Snapshot) -> Range { + self.start.summary::(&content)..self.end.summary::(&content) + } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 5f54c4b8b9..b896aa687e 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1136,11 +1136,9 @@ impl Buffer { } fn can_resolve(&self, anchor: &Anchor) -> bool { - match anchor { - Anchor::Min => true, - Anchor::Insertion { timestamp, .. 
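// ---------------------------------------------------------------------------
// Editor's aside (illustrative only, not part of the patch above or below).
// A minimal, self-contained sketch of the idea behind PATCH 08: once `Anchor`
// is a plain struct whose min/max are ordinary values built from sentinel
// clock constants, every anchor is just three serializable fields and the old
// `Min` / `Insertion` / `Max` enum cases disappear. `LocalClock` below is a
// stand-in for `clock::Local`; comparing two arbitrary anchors still needs the
// buffer (see `Anchor::cmp` in the patch), so only construction is shown.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct LocalClock {
    replica_id: u16,
    value: u32,
}

impl LocalClock {
    const MIN: Self = Self { replica_id: u16::MIN, value: u32::MIN };
    const MAX: Self = Self { replica_id: u16::MAX, value: u32::MAX };
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Bias {
    Left,
    Right,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Anchor {
    timestamp: LocalClock,
    offset: usize,
    bias: Bias,
}

impl Anchor {
    // Sentinel anchors replace the old `Anchor::Min` / `Anchor::Max` variants.
    fn min() -> Self {
        Self { timestamp: LocalClock::MIN, offset: usize::MIN, bias: Bias::Left }
    }

    fn max() -> Self {
        Self { timestamp: LocalClock::MAX, offset: usize::MAX, bias: Bias::Right }
    }
}

fn main() {
    // Every anchor, including the sentinels, is the same flat shape, which is
    // what "easier to serialize an anchor" in the commit message refers to.
    let lo = Anchor::min();
    let hi = Anchor::max();
    assert_ne!(lo, hi);
    println!("min = {:?}, max = {:?}", lo, hi);
}
// ---------------------------------------------------------------------------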
} => self.version.observed(*timestamp), - Anchor::Max => true, - } + *anchor == Anchor::min() + || *anchor == Anchor::max() + || self.version.observed(anchor.timestamp) } pub fn peek_undo_stack(&self) -> Option<&Transaction> { @@ -1680,80 +1678,74 @@ impl Snapshot { where D: TextDimension<'a>, { - match anchor { - Anchor::Min => D::default(), - Anchor::Insertion { - timestamp, - offset, - bias, - } => { - let anchor_key = InsertionFragmentKey { - timestamp: *timestamp, - split_offset: *offset, - }; - let mut insertion_cursor = self.insertions.cursor::(); - insertion_cursor.seek(&anchor_key, *bias, &()); - if let Some(insertion) = insertion_cursor.item() { - let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); - if comparison == Ordering::Greater - || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) - { - insertion_cursor.prev(&()); - } - } else { + if *anchor == Anchor::min() { + D::default() + } else if *anchor == Anchor::max() { + D::from_text_summary(&self.visible_text.summary()) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { insertion_cursor.prev(&()); } - let insertion = insertion_cursor.item().expect("invalid insertion"); - debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); - - let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); - let fragment = fragment_cursor.item().unwrap(); - let mut fragment_offset = fragment_cursor.start().1; - if fragment.visible { - fragment_offset += *offset - insertion.split_offset; - } - self.text_summary_for_range(0..fragment_offset) + } else { + insertion_cursor.prev(&()); } - Anchor::Max => D::from_text_summary(&self.visible_text.summary()), + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; + } + self.text_summary_for_range(0..fragment_offset) } } fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { - match anchor { - Anchor::Min => Default::default(), - Anchor::Insertion { - timestamp, - offset, - bias, - } => { - let anchor_key = InsertionFragmentKey { - timestamp: *timestamp, - split_offset: *offset, - }; - let mut insertion_cursor = self.insertions.cursor::(); - insertion_cursor.seek(&anchor_key, *bias, &()); - if let Some(insertion) = insertion_cursor.item() { - let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); - if comparison == Ordering::Greater - || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) - { - insertion_cursor.prev(&()); - } - } else { + if *anchor == Anchor::min() { + Default::default() + } else if *anchor == Anchor::max() { + let text = self.fragments.summary().text; + 
FullOffset(text.visible + text.deleted) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { insertion_cursor.prev(&()); } - let insertion = insertion_cursor.item().expect("invalid insertion"); - debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); - fragment_cursor.start().1 + (*offset - insertion.split_offset) - } - Anchor::Max => { - let text = self.fragments.summary().text; - FullOffset(text.visible + text.deleted) - } + let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) } } @@ -1777,15 +1769,15 @@ impl Snapshot { pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { let offset = position.to_offset(self); if bias == Bias::Left && offset == 0 { - Anchor::Min + Anchor::min() } else if bias == Bias::Right && offset == self.len() { - Anchor::Max + Anchor::max() } else { let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); let overshoot = offset - fragment_cursor.start().0; - Anchor::Insertion { + Anchor { timestamp: fragment.insertion_timestamp.local(), offset: fragment.insertion_offset + overshoot, bias, From 65711b2256cfd535e30fe9bd3e5f7b4b31888d65 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 16:38:46 +0100 Subject: [PATCH 09/12] Remove anchor collections Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 + crates/editor/src/editor.rs | 64 ++--- crates/editor/src/items.rs | 12 +- crates/language/Cargo.toml | 5 +- crates/language/src/buffer.rs | 321 +++++++++++++++----------- crates/language/src/diagnostic_set.rs | 141 +++++++++++ crates/language/src/language.rs | 1 + crates/language/src/proto.rs | 184 ++++++++------- crates/language/src/tests.rs | 24 +- crates/project/src/worktree.rs | 3 +- crates/rpc/proto/zed.proto | 39 ++-- crates/rpc/src/peer.rs | 8 +- crates/server/src/rpc.rs | 5 +- crates/sum_tree/src/cursor.rs | 70 ++++++ crates/sum_tree/src/sum_tree.rs | 11 +- crates/text/src/operation_queue.rs | 50 ++-- crates/text/src/selection.rs | 4 +- crates/text/src/text.rs | 38 ++- 18 files changed, 659 insertions(+), 322 deletions(-) create mode 100644 crates/language/src/diagnostic_set.rs diff --git a/Cargo.lock b/Cargo.lock index 0aadd18f66..a1188259ac 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2587,6 +2587,7 @@ dependencies = [ "serde", "similar", "smol", + "sum_tree", "text", "theme", "tree-sitter", diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 539736aca2..497fbb2e83 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -398,7 +398,7 @@ struct 
SelectNextState { #[derive(Debug)] struct BracketPairState { - ranges: AnchorRangeSet, + ranges: Vec>, pair: BracketPair, } @@ -1285,7 +1285,7 @@ impl Editor { fn autoclose_pairs(&mut self, cx: &mut ViewContext) { let selections = self.selections::(cx).collect::>(); - let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| { + let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| { let autoclose_pair = buffer.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start; let pair = language.brackets().iter().find(|pair| { @@ -1324,15 +1324,14 @@ impl Editor { if pair.end.len() == 1 { let mut delta = 0; Some(BracketPairState { - ranges: buffer.anchor_range_set( - Bias::Left, - Bias::Right, - selections.iter().map(move |selection| { + ranges: selections + .iter() + .map(move |selection| { let offset = selection.start + delta; delta += 1; - offset..offset - }), - ), + buffer.anchor_before(offset)..buffer.anchor_after(offset) + }) + .collect(), pair, }) } else { @@ -1340,26 +1339,26 @@ impl Editor { } }) }); - self.autoclose_stack.extend(new_autoclose_pair_state); + self.autoclose_stack.extend(new_autoclose_pair); } fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext) -> bool { let old_selections = self.selections::(cx).collect::>(); - let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() { - autoclose_pair_state + let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { + autoclose_pair } else { return false; }; - if text != autoclose_pair_state.pair.end { + if text != autoclose_pair.pair.end { return false; } - debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len()); + debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len()); let buffer = self.buffer.read(cx); if old_selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(buffer))) .all(|(selection, autoclose_range)| { let autoclose_range_end = autoclose_range.end.to_offset(buffer); selection.is_empty() && selection.start == autoclose_range_end @@ -2826,13 +2825,14 @@ impl Editor { loop { let next_group = buffer - .diagnostics_in_range::<_, usize>(search_start..buffer.len()) - .find_map(|(range, diagnostic)| { - if diagnostic.is_primary + .diagnostics_in_range(search_start..buffer.len()) + .find_map(|entry| { + let range = entry.range.to_offset(buffer); + if entry.diagnostic.is_primary && !range.is_empty() && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end()) { - Some((range, diagnostic.group_id)) + Some((range, entry.diagnostic.group_id)) } else { None } @@ -2866,12 +2866,13 @@ impl Editor { let buffer = self.buffer.read(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer); let is_valid = buffer - .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) - .any(|(range, diagnostic)| { - diagnostic.is_primary + .diagnostics_in_range(active_diagnostics.primary_range.clone()) + .any(|entry| { + let range = entry.range.to_offset(buffer); + entry.diagnostic.is_primary && !range.is_empty() && range.start == primary_range_start - && diagnostic.message == active_diagnostics.primary_message + && entry.diagnostic.message == active_diagnostics.primary_message }); if is_valid != active_diagnostics.is_valid { @@ -2901,16 +2902,17 @@ impl Editor { let mut primary_message = None; let mut group_end = Point::zero(); let diagnostic_group = buffer - 
.diagnostic_group::(group_id) - .map(|(range, diagnostic)| { + .diagnostic_group(group_id) + .map(|entry| { + let range = entry.range.to_point(buffer); if range.end > group_end { group_end = range.end; } - if diagnostic.is_primary { + if entry.diagnostic.is_primary { primary_range = Some(range.clone()); - primary_message = Some(diagnostic.message.clone()); + primary_message = Some(entry.diagnostic.message.clone()); } - (range, diagnostic.clone()) + (range, entry.diagnostic.clone()) }) .collect::>(); let primary_range = primary_range.unwrap(); @@ -3165,12 +3167,12 @@ impl Editor { self.add_selections_state = None; self.select_next_state = None; self.select_larger_syntax_node_stack.clear(); - while let Some(autoclose_pair_state) = self.autoclose_stack.last() { + while let Some(autoclose_pair) = self.autoclose_stack.last() { let all_selections_inside_autoclose_ranges = - if selections.len() == autoclose_pair_state.ranges.len() { + if selections.len() == autoclose_pair.ranges.len() { selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer))) .all(|(selection, autoclose_range)| { let head = selection.head().to_point(&*buffer); autoclose_range.start <= head && autoclose_range.end >= head diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f4261c30bb..061aece652 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -5,7 +5,7 @@ use gpui::{ MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{Buffer, Diagnostic, File as _}; +use language::{AnchorRangeExt, Buffer, Diagnostic, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; @@ -314,11 +314,11 @@ impl DiagnosticMessage { fn update(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); - let cursor_position = editor.newest_selection(cx).head(); - let new_diagnostic = editor - .buffer() - .read(cx) - .diagnostics_in_range::(cursor_position..cursor_position) + let cursor_position = editor.newest_selection::(cx).head(); + let buffer = editor.buffer().read(cx); + let new_diagnostic = buffer + .diagnostics_in_range(cursor_position..cursor_position) + .map(|entry| (entry.range.to_offset(buffer), &entry.diagnostic)) .filter(|(range, _)| !range.is_empty()) .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len())) .map(|(_, diagnostic)| diagnostic.clone()); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index f4037ee70a..d5e40456c8 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "language" version = "0.1.0" -edition = "2018" +edition = "2021" [lib] path = "src/language.rs" @@ -15,11 +15,12 @@ test-support = [ ] [dependencies] -text = { path = "../text" } clock = { path = "../clock" } gpui = { path = "../gpui" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } +sum_tree = { path = "../sum_tree" } +text = { path = "../text" } theme = { path = "../theme" } util = { path = "../util" } anyhow = "1.0.38" diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 55346fc9dd..99239a3089 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,6 @@ +use crate::diagnostic_set::DiagnosticEntry; pub use crate::{ + diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, 
LanguageServerConfig, PLAIN_TEXT, @@ -28,6 +30,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; +use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -61,9 +64,10 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, language_server: Option, + deferred_ops: OperationQueue, #[cfg(test)] pub(crate) operations: Vec, } @@ -71,7 +75,7 @@ pub struct Buffer { pub struct Snapshot { text: text::Snapshot, tree: Option, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, is_parsing: bool, language: Option>, @@ -101,10 +105,13 @@ struct LanguageServerSnapshot { path: Arc, } -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum Operation { Buffer(text::Operation), - UpdateDiagnostics(AnchorRangeMultimap), + UpdateDiagnostics { + diagnostics: Arc<[DiagnosticEntry]>, + lamport_timestamp: clock::Lamport, + }, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -173,8 +180,8 @@ struct SyntaxTree { struct AutoindentRequest { selection_set_ids: HashSet, before_edit: Snapshot, - edited: AnchorSet, - inserted: Option, + edited: Vec, + inserted: Option>>, } #[derive(Debug)] @@ -275,9 +282,11 @@ impl Buffer { buffer.add_raw_selection_set(set.id, set); } let mut this = Self::build(buffer, file); - if let Some(diagnostics) = message.diagnostics { - this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx); - } + this.apply_diagnostic_update( + Arc::from(proto::deserialize_diagnostics(message.diagnostics)), + cx, + ); + Ok(this) } @@ -294,7 +303,7 @@ impl Buffer { .selection_sets() .map(|(_, set)| proto::serialize_selection_set(set)) .collect(), - diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)), + diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), } } @@ -331,6 +340,7 @@ impl Buffer { diagnostics: Default::default(), diagnostics_update_count: 0, language_server: None, + deferred_ops: OperationQueue::new(), #[cfg(test)] operations: Default::default(), } @@ -690,6 +700,8 @@ impl Buffer { mut diagnostics: Vec, cx: &mut ModelContext, ) -> Result { + diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); + let version = version.map(|version| version as usize); let content = if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -710,81 +722,79 @@ impl Buffer { .and_then(|language| language.disk_based_diagnostic_sources()) .unwrap_or(&empty_set); - diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); - self.diagnostics = { - let mut edits_since_save = content - .edits_since::(&self.saved_version) - .peekable(); - let mut last_edit_old_end = PointUtf16::zero(); - let mut last_edit_new_end = PointUtf16::zero(); - let mut group_ids_by_diagnostic_range = HashMap::new(); - let mut diagnostics_by_group_id = HashMap::new(); - let mut next_group_id = 0; - 'outer: for diagnostic in &diagnostics { - let mut start = diagnostic.range.start.to_point_utf16(); - let mut end = diagnostic.range.end.to_point_utf16(); - let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref(); - let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) - .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) - .copied() - .unwrap_or_else(|| { - let 
group_id = post_inc(&mut next_group_id); - for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { - group_ids_by_diagnostic_range.insert((source, code, range), group_id); - } - group_id - }); - - if diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)) - { - while let Some(edit) = edits_since_save.peek() { - if edit.old.end <= start { - last_edit_old_end = edit.old.end; - last_edit_new_end = edit.new.end; - edits_since_save.next(); - } else if edit.old.start <= end && edit.old.end >= start { - continue 'outer; - } else { - break; - } + let mut edits_since_save = content + .edits_since::(&self.saved_version) + .peekable(); + let mut last_edit_old_end = PointUtf16::zero(); + let mut last_edit_new_end = PointUtf16::zero(); + let mut group_ids_by_diagnostic_range = HashMap::new(); + let mut diagnostics_by_group_id = HashMap::new(); + let mut next_group_id = 0; + 'outer: for diagnostic in &diagnostics { + let mut start = diagnostic.range.start.to_point_utf16(); + let mut end = diagnostic.range.end.to_point_utf16(); + let source = diagnostic.source.as_ref(); + let code = diagnostic.code.as_ref(); + let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) + .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) + .copied() + .unwrap_or_else(|| { + let group_id = post_inc(&mut next_group_id); + for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { + group_ids_by_diagnostic_range.insert((source, code, range), group_id); } + group_id + }); - start = last_edit_new_end + (start - last_edit_old_end); - end = last_edit_new_end + (end - last_edit_old_end); - } - - let mut range = content.clip_point_utf16(start, Bias::Left) - ..content.clip_point_utf16(end, Bias::Right); - if range.start == range.end { - range.end.column += 1; - range.end = content.clip_point_utf16(range.end, Bias::Right); - if range.start == range.end && range.end.column > 0 { - range.start.column -= 1; - range.start = content.clip_point_utf16(range.start, Bias::Left); + if diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)) + { + while let Some(edit) = edits_since_save.peek() { + if edit.old.end <= start { + last_edit_old_end = edit.old.end; + last_edit_new_end = edit.new.end; + edits_since_save.next(); + } else if edit.old.start <= end && edit.old.end >= start { + continue 'outer; + } else { + break; } } - diagnostics_by_group_id - .entry(group_id) - .or_insert(Vec::new()) - .push(( - range, - Diagnostic { - severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: diagnostic.message.clone(), - group_id, - is_primary: false, - }, - )); + start = last_edit_new_end + (start - last_edit_old_end); + end = last_edit_new_end + (end - last_edit_old_end); } - content.anchor_range_multimap( - Bias::Left, - Bias::Right, + let mut range = content.clip_point_utf16(start, Bias::Left) + ..content.clip_point_utf16(end, Bias::Right); + if range.start == range.end { + range.end.column += 1; + range.end = content.clip_point_utf16(range.end, Bias::Right); + if range.start == range.end && range.end.column > 0 { + range.start.column -= 1; + range.start = content.clip_point_utf16(range.start, Bias::Left); + } + } + + diagnostics_by_group_id + .entry(group_id) + .or_insert(Vec::new()) + .push(( + range, + Diagnostic { + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: diagnostic.message.clone(), + group_id, + is_primary: false, + }, + )); + } + + 
drop(edits_since_save); + self.diagnostics + .reset( diagnostics_by_group_id .into_values() .flat_map(|mut diagnostics| { @@ -793,8 +803,7 @@ impl Buffer { primary_diagnostic.1.is_primary = true; diagnostics }), - ) - }; + ); if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -811,32 +820,24 @@ impl Buffer { self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); - Ok(Operation::UpdateDiagnostics(self.diagnostics.clone())) + Ok(Operation::UpdateDiagnostics { + diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), + lamport_timestamp: self.lamport_timestamp(), + }) } - pub fn diagnostics_in_range<'a, T, O>( + pub fn diagnostics_in_range<'a, T>( &'a self, search_range: Range, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl Iterator where T: 'a + ToOffset, - O: 'a + FromAnchor, { - self.diagnostics - .intersecting_ranges(search_range, self, true) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + self.diagnostics.range(search_range, self, true) } - pub fn diagnostic_group<'a, O>( - &'a self, - group_id: usize, - ) -> impl Iterator, &Diagnostic)> + 'a - where - O: 'a + FromAnchor, - { - self.diagnostics - .filter(self, move |diagnostic| diagnostic.group_id == group_id) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator { + self.diagnostics.group(group_id) } pub fn diagnostics_update_count(&self) -> usize { @@ -879,13 +880,13 @@ impl Buffer { for request in autoindent_requests { let old_to_new_rows = request .edited - .iter::(&request.before_edit) - .map(|point| point.row) + .iter() + .map(|anchor| anchor.summary::(&request.before_edit).row) .zip( request .edited - .iter::(&snapshot) - .map(|point| point.row), + .iter() + .map(|anchor| anchor.summary::(&snapshot).row), ) .collect::>(); @@ -947,7 +948,8 @@ impl Buffer { if let Some(inserted) = request.inserted.as_ref() { let inserted_row_ranges = contiguous_ranges( inserted - .ranges::(&snapshot) + .iter() + .map(|range| range.to_point(&snapshot)) .flat_map(|range| range.start.row..range.end.row + 1), max_rows_between_yields, ); @@ -1264,17 +1266,17 @@ impl Buffer { self.pending_autoindent.take(); let autoindent_request = if autoindent && self.language.is_some() { let before_edit = self.snapshot(); - let edited = self.anchor_set( - Bias::Left, - ranges.iter().filter_map(|range| { + let edited = ranges + .iter() + .filter_map(|range| { let start = range.start.to_point(self); if new_text.starts_with('\n') && start.column == self.line_len(start.row) { None } else { - Some(range.start) + Some(self.anchor_before(range.start)) } - }), - ); + }) + .collect(); Some((before_edit, edited)) } else { None @@ -1289,17 +1291,19 @@ impl Buffer { let mut inserted = None; if let Some(first_newline_ix) = first_newline_ix { let mut delta = 0isize; - inserted = Some(self.anchor_range_set( - Bias::Left, - Bias::Right, - ranges.iter().map(|range| { - let start = (delta + range.start as isize) as usize + first_newline_ix + 1; - let end = (delta + range.start as isize) as usize + new_text_len; - delta += - (range.end as isize - range.start as isize) + new_text_len as isize; - start..end - }), - )); + inserted = Some( + ranges + .iter() + .map(|range| { + let start = + (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += + (range.end as isize - range.start as isize) + new_text_len as isize; + 
self.anchor_before(start)..self.anchor_after(end) + }) + .collect(), + ); } let selection_set_ids = self @@ -1401,17 +1405,23 @@ impl Buffer { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); + let mut deferred_ops = Vec::new(); let buffer_ops = ops .into_iter() .filter_map(|op| match op { Operation::Buffer(op) => Some(op), - Operation::UpdateDiagnostics(diagnostics) => { - self.apply_diagnostic_update(diagnostics, cx); + _ => { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } None } }) .collect::>(); self.text.apply_ops(buffer_ops)?; + self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. @@ -1419,12 +1429,49 @@ impl Buffer { Ok(()) } + fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { + let mut deferred_ops = Vec::new(); + for op in self.deferred_ops.drain().iter().cloned() { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } + } + self.deferred_ops.insert(deferred_ops); + } + + fn can_apply_op(&self, operation: &Operation) -> bool { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. } => { + diagnostics.iter().all(|diagnostic| { + self.text.can_resolve(&diagnostic.range.start) + && self.text.can_resolve(&diagnostic.range.end) + }) + } + } + } + + fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext) { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. } => { + self.apply_diagnostic_update(diagnostics, cx); + } + } + } + fn apply_diagnostic_update( &mut self, - diagnostics: AnchorRangeMultimap, + diagnostics: Arc<[DiagnosticEntry]>, cx: &mut ModelContext, ) { - self.diagnostics = diagnostics; + self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); self.diagnostics_update_count += 1; cx.notify(); } @@ -1632,19 +1679,16 @@ impl Snapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for (_, range, diagnostic) in - self.diagnostics - .intersecting_ranges(range.clone(), self, true) - { + for entry in self.diagnostics.range(range.clone(), self, true) { diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.start, + offset: entry.range.start.to_offset(self), is_start: true, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.end, + offset: entry.range.end.to_offset(self), is_start: false, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); } diagnostic_endpoints @@ -1939,6 +1983,19 @@ impl ToPointUtf16 for lsp::Position { } } +impl operation_queue::Operation for Operation { + fn lamport_timestamp(&self) -> clock::Lamport { + match self { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be deferred at this layer") + } + Operation::UpdateDiagnostics { + lamport_timestamp, .. 
+ } => *lamport_timestamp, + } + } +} + fn diagnostic_ranges<'a>( diagnostic: &'a lsp::Diagnostic, abs_path: Option<&'a Path>, @@ -1968,7 +2025,7 @@ fn diagnostic_ranges<'a>( } pub fn contiguous_ranges( - values: impl IntoIterator, + values: impl Iterator, max_len: usize, ) -> impl Iterator> { let mut values = values.into_iter(); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs new file mode 100644 index 0000000000..9640ded372 --- /dev/null +++ b/crates/language/src/diagnostic_set.rs @@ -0,0 +1,141 @@ +use crate::Diagnostic; +use std::{ + cmp::{Ordering, Reverse}, + iter, + ops::Range, +}; +use sum_tree::{self, Bias, SumTree}; +use text::{Anchor, PointUtf16, ToOffset}; + +#[derive(Clone, Default)] +pub struct DiagnosticSet { + diagnostics: SumTree, +} + +#[derive(Clone, Debug)] +pub struct DiagnosticEntry { + pub range: Range, + pub diagnostic: Diagnostic, +} + +#[derive(Clone, Debug)] +pub struct Summary { + start: Anchor, + end: Anchor, + min_start: Anchor, + max_end: Anchor, + count: usize, +} + +impl DiagnosticSet { + pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self + where + I: IntoIterator, + { + Self { + diagnostics: SumTree::from_iter(iter, buffer), + } + } + + pub fn reset(&mut self, iter: I) + where + I: IntoIterator, Diagnostic)>, + { + let mut entries = iter.into_iter().collect::>(); + entries.sort_unstable_by_key(|(range, _)| (range.start, Reverse(range.end))); + } + + pub fn iter(&self) -> impl Iterator { + self.diagnostics.iter() + } + + pub fn range<'a, T>( + &'a self, + range: Range, + buffer: &'a text::Snapshot, + inclusive: bool, + ) -> impl Iterator + where + T: 'a + ToOffset, + { + let end_bias = if inclusive { Bias::Right } else { Bias::Left }; + let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); + let mut cursor = self.diagnostics.filter::<_, ()>( + { + move |summary: &Summary| { + let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap(); + let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap(); + if inclusive { + start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal + } else { + start_cmp == Ordering::Less && end_cmp == Ordering::Greater + } + } + }, + buffer, + ); + + iter::from_fn({ + move || { + if let Some(diagnostic) = cursor.item() { + cursor.next(buffer); + Some(diagnostic) + } else { + None + } + } + }) + } + + pub fn group(&self, group_id: usize) -> impl Iterator { + self.iter() + .filter(move |entry| entry.diagnostic.group_id == group_id) + } +} + +impl sum_tree::Item for DiagnosticEntry { + type Summary = Summary; + + fn summary(&self) -> Self::Summary { + Summary { + start: self.range.start.clone(), + end: self.range.end.clone(), + min_start: self.range.start.clone(), + max_end: self.range.end.clone(), + count: 1, + } + } +} + +impl Default for Summary { + fn default() -> Self { + Self { + start: Anchor::min(), + end: Anchor::max(), + min_start: Anchor::max(), + max_end: Anchor::min(), + count: 0, + } + } +} + +impl sum_tree::Summary for Summary { + type Context = text::Snapshot; + + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + if other + .min_start + .cmp(&self.min_start, buffer) + .unwrap() + .is_lt() + { + self.min_start = other.min_start.clone(); + } + if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() { + self.max_end = other.max_end.clone(); + } + self.start = other.start.clone(); + self.end = other.end.clone(); + self.count += other.count; + } +} diff --git a/crates/language/src/language.rs 
b/crates/language/src/language.rs index 77d01c7ecf..619ce19689 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,4 +1,5 @@ mod buffer; +mod diagnostic_set; mod highlight_map; pub mod proto; #[cfg(test)] diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 3e3455c671..851ab76bca 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::{Diagnostic, Operation}; +use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; @@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { replica_id: set_id.replica_id as u32, local_timestamp: set_id.value, lamport_timestamp: lamport_timestamp.value, - version: selections.version().into(), selections: selections - .full_offset_ranges() - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), }), @@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, }, ), - Operation::UpdateDiagnostics(diagnostic_set) => { - proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set)) - } + Operation::UpdateDiagnostics { + diagnostics, + lamport_timestamp, + } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { + replica_id: lamport_timestamp.replica_id as u32, + lamport_timestamp: lamport_timestamp.value, + diagnostics: serialize_diagnostics(diagnostics.iter()), + }), }), } } @@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation:: } pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { - let version = set.selections.version(); - let entries = set.selections.full_offset_ranges(); proto::SelectionSet { replica_id: set.id.replica_id as u32, lamport_timestamp: set.id.value as u32, is_active: set.active, - version: version.into(), - selections: entries - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + selections: set + .selections + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), } } -pub fn serialize_diagnostics(map: &AnchorRangeMultimap) -> proto::DiagnosticSet { - proto::DiagnosticSet { - version: map.version().into(), - diagnostics: map - .full_offset_ranges() - .map(|(range, diagnostic)| proto::Diagnostic { - start: range.start.0 as u64, - end: range.end.0 as u64, - message: diagnostic.message.clone(), - severity: match diagnostic.severity { - DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, - DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, - DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, - DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, - _ => proto::diagnostic::Severity::None, - } as i32, - group_id: diagnostic.group_id as u64, - is_primary: diagnostic.is_primary, - 
}) - .collect(), +pub fn serialize_diagnostics<'a>( + diagnostics: impl IntoIterator, +) -> Vec { + diagnostics + .into_iter() + .map(|entry| proto::Diagnostic { + start: Some(serialize_anchor(&entry.range.start)), + end: Some(serialize_anchor(&entry.range.end)), + message: entry.diagnostic.message.clone(), + severity: match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, + DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, + DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, + DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, + _ => proto::diagnostic::Severity::None, + } as i32, + group_id: entry.diagnostic.group_id as u64, + is_primary: entry.diagnostic.is_primary, + }) + .collect() +} + +fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { + proto::Anchor { + replica_id: anchor.timestamp.replica_id as u32, + local_timestamp: anchor.timestamp.value, + offset: anchor.offset as u64, + bias: match anchor.bias { + Bias::Left => proto::Bias::Left as i32, + Bias::Right => proto::Bias::Right as i32, + }, } } @@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }), proto::operation::Variant::UpdateSelections(message) => { - let version = message.version.into(); - let entries = message + let selections = message .selections - .iter() - .map(|selection| { - let range = FullOffset(selection.start as usize) - ..FullOffset(selection.end as usize); - let state = SelectionState { + .into_iter() + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(); - let selections = AnchorRangeMap::from_full_offset_ranges( - version, - Bias::Left, - Bias::Left, - entries, - ); + .collect::>(); Operation::Buffer(text::Operation::UpdateSelections { set_id: clock::Lamport { @@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }) } - proto::operation::Variant::UpdateDiagnostics(message) => { - Operation::UpdateDiagnostics(deserialize_diagnostics(message)) - } + proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics { + diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)), + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }, }, ) } @@ -277,36 +287,30 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { value: set.lamport_timestamp, }, active: set.is_active, - selections: Arc::new(AnchorRangeMap::from_full_offset_ranges( - set.version.into(), - Bias::Left, - Bias::Left, + selections: Arc::from( set.selections .into_iter() - .map(|selection| { - let range = - FullOffset(selection.start as usize)..FullOffset(selection.end as usize); - let state = SelectionState { + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(), - )), + .collect::>(), + ), } } -pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap { - AnchorRangeMultimap::from_full_offset_ranges( - message.version.into(), - Bias::Left, - Bias::Right, - 
message.diagnostics.into_iter().filter_map(|diagnostic| { - Some(( - FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize), - Diagnostic { +pub fn deserialize_diagnostics(diagnostics: Vec) -> Vec { + diagnostics + .into_iter() + .filter_map(|diagnostic| { + Some(DiagnosticEntry { + range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?, + diagnostic: Diagnostic { severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? { proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, @@ -318,7 +322,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult group_id: diagnostic.group_id as usize, is_primary: diagnostic.is_primary, }, - )) - }), - ) + }) + }) + .collect() +} + +fn deserialize_anchor(anchor: proto::Anchor) -> Option { + Some(Anchor { + timestamp: clock::Local { + replica_id: anchor.replica_id as ReplicaId, + value: anchor.local_timestamp, + }, + offset: anchor.offset as usize, + bias: match proto::Bias::from_i32(anchor.bias)? { + proto::Bias::Left => Bias::Left, + proto::Bias::Right => Bias::Right, + }, + }) } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index cff74af1e3..d1f48245db 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -533,6 +533,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -600,6 +601,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -679,6 +681,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -863,7 +866,8 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { buffer.update_diagnostics(None, diagnostics, cx).unwrap(); assert_eq!( buffer - .diagnostics_in_range::<_, Point>(0..buffer.len()) + .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -915,7 +919,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ); assert_eq!( - buffer.diagnostic_group(0).collect::>(), + buffer + .diagnostic_group(0) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .collect::>(), &[ ( Point::new(1, 8)..Point::new(1, 9), @@ -938,7 +945,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ] ); assert_eq!( - buffer.diagnostic_group(1).collect::>(), + buffer + .diagnostic_group(1) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .collect::>(), &[ ( Point::new(1, 13)..Point::new(1, 15), @@ -995,13 +1005,17 @@ fn chunks_with_diagnostics( #[test] fn test_contiguous_ranges() { assert_eq!( - contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::>(), + contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::>(), &[1..4, 5..7, 9..13] ); // Respects the `max_len` parameter assert_eq!( - contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::>(), + contiguous_ranges( + [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(), + 3 + ) + 
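// ---------------------------------------------------------------------------
// Editor's aside (illustrative only, not part of the patch above or below).
// The new `serialize_anchor` / `deserialize_anchor` pair flattens an anchor
// into four plain fields. The sketch below mirrors that shape with local
// stand-in types (`ProtoAnchor` here is hypothetical; the real message lives
// in zed.proto) to show the intended round-trip property.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Bias {
    Left,
    Right,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Anchor {
    replica_id: u16,
    local_timestamp: u32,
    offset: usize,
    bias: Bias,
}

// Stand-in for the generated `proto::Anchor` message.
#[derive(Clone, Copy, Debug)]
struct ProtoAnchor {
    replica_id: u32,
    local_timestamp: u32,
    offset: u64,
    bias: i32,
}

fn serialize_anchor(anchor: &Anchor) -> ProtoAnchor {
    ProtoAnchor {
        replica_id: anchor.replica_id as u32,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as u64,
        bias: match anchor.bias {
            Bias::Left => 0,
            Bias::Right => 1,
        },
    }
}

fn deserialize_anchor(anchor: ProtoAnchor) -> Option<Anchor> {
    Some(Anchor {
        replica_id: anchor.replica_id as u16,
        local_timestamp: anchor.local_timestamp,
        offset: anchor.offset as usize,
        bias: match anchor.bias {
            0 => Bias::Left,
            1 => Bias::Right,
            _ => return None, // unknown bias values are rejected
        },
    })
}

fn main() {
    let anchor = Anchor { replica_id: 1, local_timestamp: 42, offset: 7, bias: Bias::Right };
    let round_tripped = deserialize_anchor(serialize_anchor(&anchor)).unwrap();
    assert_eq!(anchor, round_tripped);
}
// ---------------------------------------------------------------------------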
.collect::>(), &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32], ); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 29bc230b97..393e92dfb9 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3005,7 +3005,7 @@ mod tests { use anyhow::Result; use client::test::{FakeHttpClient, FakeServer}; use fs::RealFs; - use language::{tree_sitter_rust, LanguageServerConfig}; + use language::{tree_sitter_rust, AnchorRangeExt, LanguageServerConfig}; use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; @@ -3722,6 +3722,7 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(); assert_eq!( diagnostics, diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 775f94d595..7e7a180cd2 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -229,32 +229,44 @@ message Buffer { string content = 2; repeated Operation.Edit history = 3; repeated SelectionSet selections = 4; - DiagnosticSet diagnostics = 5; + repeated Diagnostic diagnostics = 5; } message SelectionSet { uint32 replica_id = 1; uint32 lamport_timestamp = 2; bool is_active = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message Selection { uint64 id = 1; - uint64 start = 2; - uint64 end = 3; + Anchor start = 2; + Anchor end = 3; bool reversed = 4; } -message DiagnosticSet { - repeated VectorClockEntry version = 1; - repeated Diagnostic diagnostics = 2; +message Anchor { + uint32 replica_id = 1; + uint32 local_timestamp = 2; + uint64 offset = 3; + Bias bias = 4; +} + +enum Bias { + Left = 0; + Right = 1; +} + +message UpdateDiagnostics { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Diagnostic diagnostics = 3; } message Diagnostic { - uint64 start = 1; - uint64 end = 2; + Anchor start = 1; + Anchor end = 2; Severity severity = 3; string message = 4; uint64 group_id = 5; @@ -268,8 +280,6 @@ message Diagnostic { } } - - message Operation { oneof variant { Edit edit = 1; @@ -277,7 +287,7 @@ message Operation { UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; SetActiveSelections set_active_selections = 5; - DiagnosticSet update_diagnostics = 6; + UpdateDiagnostics update_diagnostics = 6; } message Edit { @@ -308,8 +318,7 @@ message Operation { uint32 replica_id = 1; uint32 local_timestamp = 2; uint32 lamport_timestamp = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message RemoveSelections { diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 454881fece..d2f2cb2c41 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -400,7 +400,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -422,7 +422,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -453,7 +453,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } @@ -465,7 +465,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } diff --git 
a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 96949d05ff..6d1b1238c6 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -947,8 +947,8 @@ mod tests { editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, language::{ - tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry, - LanguageServerConfig, Point, + tree_sitter_rust, AnchorRangeExt, Diagnostic, Language, LanguageConfig, + LanguageRegistry, LanguageServerConfig, Point, }, lsp, project::{ProjectPath, Worktree}, @@ -1705,6 +1705,7 @@ mod tests { assert_eq!( buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 7799bb2ff0..cbb6f7f6f5 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> { at_end: bool, } +pub struct Iter<'a, T: Item> { + tree: &'a SumTree, + stack: ArrayVec, 16>, +} + impl<'a, T, D> Cursor<'a, T, D> where T: Item, @@ -487,6 +492,71 @@ where } } +impl<'a, T: Item> Iter<'a, T> { + pub(crate) fn new(tree: &'a SumTree) -> Self { + Self { + tree, + stack: Default::default(), + } + } +} + +impl<'a, T: Item> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option { + let mut descend = false; + + if self.stack.is_empty() { + self.stack.push(StackEntry { + tree: self.tree, + index: 0, + position: (), + }); + descend = true; + } + + while self.stack.len() > 0 { + let new_subtree = { + let entry = self.stack.last_mut().unwrap(); + match entry.tree.0.as_ref() { + Node::Internal { child_trees, .. } => { + if !descend { + entry.index += 1; + } + child_trees.get(entry.index) + } + Node::Leaf { items, .. 
} => { + if !descend { + entry.index += 1; + } + + if let Some(next_item) = items.get(entry.index) { + return Some(next_item); + } else { + None + } + } + } + }; + + if let Some(subtree) = new_subtree { + descend = true; + self.stack.push(StackEntry { + tree: subtree, + index: 0, + position: (), + }); + } else { + descend = false; + self.stack.pop(); + } + } + + None + } +} + impl<'a, T, S, D> Iterator for Cursor<'a, T, D> where T: Item, diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8b4a45519f..63fb379d53 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,8 +1,7 @@ mod cursor; use arrayvec::ArrayVec; -pub use cursor::Cursor; -pub use cursor::FilterCursor; +pub use cursor::{Cursor, FilterCursor, Iter}; use std::marker::PhantomData; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; @@ -156,6 +155,10 @@ impl SumTree { items } + pub fn iter(&self) -> Iter { + Iter::new(self) + } + pub fn cursor<'a, S>(&'a self) -> Cursor where S: Dimension<'a, T::Summary>, @@ -722,6 +725,10 @@ mod tests { }; assert_eq!(tree.items(&()), reference_items); + assert_eq!( + tree.iter().collect::>(), + tree.cursor::<()>().collect::>() + ); let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even, &()); diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 3c3a644024..ef99faf3e2 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -1,9 +1,15 @@ -use super::Operation; use std::{fmt::Debug, ops::Add}; -use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary}; +use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary}; + +pub trait Operation: Clone + Debug { + fn lamport_timestamp(&self) -> clock::Lamport; +} #[derive(Clone, Debug)] -pub struct OperationQueue(SumTree); +struct OperationItem(T); + +#[derive(Clone, Debug)] +pub struct OperationQueue(SumTree>); #[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] pub struct OperationKey(clock::Lamport); @@ -20,7 +26,7 @@ impl OperationKey { } } -impl OperationQueue { +impl OperationQueue { pub fn new() -> Self { OperationQueue(SumTree::new()) } @@ -29,11 +35,15 @@ impl OperationQueue { self.0.summary().len } - pub fn insert(&mut self, mut ops: Vec) { + pub fn insert(&mut self, mut ops: Vec) { ops.sort_by_key(|op| op.lamport_timestamp()); ops.dedup_by_key(|op| op.lamport_timestamp()); - self.0 - .edit(ops.into_iter().map(Edit::Insert).collect(), &()); + self.0.edit( + ops.into_iter() + .map(|op| Edit::Insert(OperationItem(op))) + .collect(), + &(), + ); } pub fn drain(&mut self) -> Self { @@ -42,8 +52,8 @@ impl OperationQueue { clone } - pub fn cursor(&self) -> Cursor { - self.0.cursor() + pub fn iter(&self) -> impl Iterator { + self.0.cursor::<()>().map(|i| &i.0) } } @@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey { } } -impl Item for Operation { +impl Item for OperationItem { type Summary = OperationSummary; fn summary(&self) -> Self::Summary { OperationSummary { - key: OperationKey::new(self.lamport_timestamp()), + key: OperationKey::new(self.0.lamport_timestamp()), len: 1, } } } -impl KeyedItem for Operation { +impl KeyedItem for OperationItem { type Key = OperationKey; fn key(&self) -> Self::Key { - OperationKey::new(self.lamport_timestamp()) + OperationKey::new(self.0.lamport_timestamp()) } } @@ -107,21 +117,27 @@ mod tests { assert_eq!(queue.len(), 0); queue.insert(vec![ - Operation::Test(clock.tick()), - 
Operation::Test(clock.tick()), + TestOperation(clock.tick()), + TestOperation(clock.tick()), ]); assert_eq!(queue.len(), 2); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 3); drop(queue.drain()); assert_eq!(queue.len(), 0); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 1); } #[derive(Clone, Debug, Eq, PartialEq)] struct TestOperation(clock::Lamport); + + impl Operation for TestOperation { + fn lamport_timestamp(&self) -> clock::Lamport { + self.0 + } + } } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index e9e7dd1f22..ae96e93e51 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -141,13 +141,13 @@ impl SelectionSet { let end = snapshot.anchor_at(range.end.0, range.end.1); let start_ix = match self .selections - .binary_search_by(|probe| probe.start.cmp(&start, snapshot).unwrap()) + .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) { Ok(ix) | Err(ix) => ix, }; let end_ix = match self .selections - .binary_search_by(|probe| probe.end.cmp(&end, snapshot).unwrap()) + .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) { Ok(ix) | Err(ix) => ix, }; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b896aa687e..c2e0d8e4ef 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,6 +1,6 @@ mod anchor; mod locator; -mod operation_queue; +pub mod operation_queue; mod patch; mod point; mod point_utf16; @@ -42,7 +42,7 @@ pub struct Buffer { last_edit: clock::Local, history: History, selections: HashMap, - deferred_ops: OperationQueue, + deferred_ops: OperationQueue, deferred_replicas: HashSet, replica_id: ReplicaId, remote_id: u64, @@ -441,8 +441,6 @@ pub enum Operation { set_id: Option, lamport_timestamp: clock::Lamport, }, - #[cfg(test)] - Test(clock::Lamport), } #[derive(Clone, Debug, Eq, PartialEq)] @@ -527,6 +525,10 @@ impl Buffer { self.local_clock.replica_id } + pub fn lamport_timestamp(&self) -> clock::Lamport { + self.lamport_clock + } + pub fn remote_id(&self) -> u64 { self.remote_id } @@ -808,8 +810,6 @@ impl Buffer { } self.lamport_clock.observe(lamport_timestamp); } - #[cfg(test)] - Operation::Test(_) => {} } Ok(()) } @@ -1103,7 +1103,7 @@ impl Buffer { fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); - for op in self.deferred_ops.drain().cursor().cloned() { + for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { self.apply_op(op)?; } else { @@ -1129,13 +1129,11 @@ impl Buffer { Operation::SetActiveSelections { set_id, .. } => { set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) } - #[cfg(test)] - Operation::Test(_) => true, } } } - fn can_resolve(&self, anchor: &Anchor) -> bool { + pub fn can_resolve(&self, anchor: &Anchor) -> bool { *anchor == Anchor::min() || *anchor == Anchor::max() || self.version.observed(anchor.timestamp) @@ -2176,9 +2174,18 @@ impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset impl Operation { fn replica_id(&self) -> ReplicaId { - self.lamport_timestamp().replica_id + operation_queue::Operation::lamport_timestamp(self).replica_id } + pub fn is_edit(&self) -> bool { + match self { + Operation::Edit { .. 
} => true, + _ => false, + } + } +} + +impl operation_queue::Operation for Operation { fn lamport_timestamp(&self) -> clock::Lamport { match self { Operation::Edit(edit) => edit.timestamp.lamport(), @@ -2194,15 +2201,6 @@ impl Operation { Operation::SetActiveSelections { lamport_timestamp, .. } => *lamport_timestamp, - #[cfg(test)] - Operation::Test(lamport_timestamp) => *lamport_timestamp, - } - } - - pub fn is_edit(&self) -> bool { - match self { - Operation::Edit { .. } => true, - _ => false, } } } From 91a7bbbba2e4229641edba0175e8494bad35028b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 17:53:08 +0100 Subject: [PATCH 10/12] Fix some of the diagnostic tests and make DiagnosticEntry generic Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 39 +++---- crates/editor/src/items.rs | 11 +- crates/language/src/buffer.rs | 61 +++++++---- crates/language/src/diagnostic_set.rs | 54 +++++++--- crates/language/src/language.rs | 1 + crates/language/src/proto.rs | 6 +- crates/language/src/tests.rs | 150 ++++++++++++-------------- crates/project/src/worktree.rs | 13 ++- crates/server/src/rpc.rs | 21 ++-- 9 files changed, 189 insertions(+), 167 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 497fbb2e83..e913174199 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2825,14 +2825,13 @@ impl Editor { loop { let next_group = buffer - .diagnostics_in_range(search_start..buffer.len()) + .diagnostics_in_range::<_, usize>(search_start..buffer.len()) .find_map(|entry| { - let range = entry.range.to_offset(buffer); if entry.diagnostic.is_primary - && !range.is_empty() - && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end()) + && !entry.range.is_empty() + && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end()) { - Some((range, entry.diagnostic.group_id)) + Some((entry.range, entry.diagnostic.group_id)) } else { None } @@ -2866,12 +2865,11 @@ impl Editor { let buffer = self.buffer.read(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer); let is_valid = buffer - .diagnostics_in_range(active_diagnostics.primary_range.clone()) + .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) .any(|entry| { - let range = entry.range.to_offset(buffer); entry.diagnostic.is_primary - && !range.is_empty() - && range.start == primary_range_start + && !entry.range.is_empty() + && entry.range.start == primary_range_start && entry.diagnostic.message == active_diagnostics.primary_message }); @@ -2902,17 +2900,16 @@ impl Editor { let mut primary_message = None; let mut group_end = Point::zero(); let diagnostic_group = buffer - .diagnostic_group(group_id) + .diagnostic_group::(group_id) .map(|entry| { - let range = entry.range.to_point(buffer); - if range.end > group_end { - group_end = range.end; + if entry.range.end > group_end { + group_end = entry.range.end; } if entry.diagnostic.is_primary { - primary_range = Some(range.clone()); + primary_range = Some(entry.range.clone()); primary_message = Some(entry.diagnostic.message.clone()); } - (range, entry.diagnostic.clone()) + entry }) .collect::>(); let primary_range = primary_range.unwrap(); @@ -2922,13 +2919,13 @@ impl Editor { let blocks = display_map .insert_blocks( - diagnostic_group.iter().map(|(range, diagnostic)| { + diagnostic_group.iter().map(|entry| { let build_settings = self.build_settings.clone(); - let diagnostic = diagnostic.clone(); + let diagnostic = 
entry.diagnostic.clone(); let message_height = diagnostic.message.lines().count() as u8; BlockProperties { - position: range.start, + position: entry.range.start, height: message_height, render: Arc::new(move |cx| { let settings = build_settings.borrow()(cx.cx); @@ -2941,11 +2938,7 @@ impl Editor { cx, ) .into_iter() - .zip( - diagnostic_group - .into_iter() - .map(|(_, diagnostic)| diagnostic), - ) + .zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic)) .collect(); Some(ActiveDiagnosticGroup { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 061aece652..7fa25eb884 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -5,7 +5,7 @@ use gpui::{ MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{AnchorRangeExt, Buffer, Diagnostic, File as _}; +use language::{Buffer, Diagnostic, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; @@ -317,11 +317,10 @@ impl DiagnosticMessage { let cursor_position = editor.newest_selection::(cx).head(); let buffer = editor.buffer().read(cx); let new_diagnostic = buffer - .diagnostics_in_range(cursor_position..cursor_position) - .map(|entry| (entry.range.to_offset(buffer), &entry.diagnostic)) - .filter(|(range, _)| !range.is_empty()) - .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len())) - .map(|(_, diagnostic)| diagnostic.clone()); + .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) + .filter(|entry| !entry.range.is_empty()) + .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .map(|entry| entry.diagnostic); if new_diagnostic != self.diagnostic { self.diagnostic = new_diagnostic; cx.notify(); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 99239a3089..d34528c784 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -23,6 +23,7 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, + mem, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -109,7 +110,7 @@ struct LanguageServerSnapshot { pub enum Operation { Buffer(text::Operation), UpdateDiagnostics { - diagnostics: Arc<[DiagnosticEntry]>, + diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, } @@ -781,29 +782,33 @@ impl Buffer { diagnostics_by_group_id .entry(group_id) .or_insert(Vec::new()) - .push(( + .push(DiagnosticEntry { range, - Diagnostic { + diagnostic: Diagnostic { severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), message: diagnostic.message.clone(), group_id, is_primary: false, }, - )); + }); } drop(edits_since_save); - self.diagnostics - .reset( - diagnostics_by_group_id - .into_values() - .flat_map(|mut diagnostics| { - let primary_diagnostic = - diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap(); - primary_diagnostic.1.is_primary = true; - diagnostics - }), - ); + let mut diagnostics = mem::take(&mut self.diagnostics); + diagnostics.reset( + diagnostics_by_group_id + .into_values() + .flat_map(|mut diagnostics| { + let primary = diagnostics + .iter_mut() + .min_by_key(|entry| entry.diagnostic.severity) + .unwrap(); + primary.diagnostic.is_primary = true; + diagnostics + }), + self, + ); + self.diagnostics = diagnostics; if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -826,18 +831,25 @@ impl Buffer { }) } - pub fn diagnostics_in_range<'a, T>( + pub fn diagnostics_in_range<'a, T, O>( &'a self, 
search_range: Range, - ) -> impl Iterator + ) -> impl 'a + Iterator> where T: 'a + ToOffset, + O: 'a + FromAnchor, { self.diagnostics.range(search_range, self, true) } - pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator { - self.diagnostics.group(group_id) + pub fn diagnostic_group<'a, O>( + &'a self, + group_id: usize, + ) -> impl 'a + Iterator> + where + O: 'a + FromAnchor, + { + self.diagnostics.group(group_id, self) } pub fn diagnostics_update_count(&self) -> usize { @@ -1468,7 +1480,7 @@ impl Buffer { fn apply_diagnostic_update( &mut self, - diagnostics: Arc<[DiagnosticEntry]>, + diagnostics: Arc<[DiagnosticEntry]>, cx: &mut ModelContext, ) { self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); @@ -1679,14 +1691,17 @@ impl Snapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for entry in self.diagnostics.range(range.clone(), self, true) { + for entry in self + .diagnostics + .range::<_, usize>(range.clone(), self, true) + { diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.start.to_offset(self), + offset: entry.range.start, is_start: true, severity: entry.diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.end.to_offset(self), + offset: entry.range.end, is_start: false, severity: entry.diagnostic.severity, }); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 9640ded372..0a04ef17e8 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -5,16 +5,16 @@ use std::{ ops::Range, }; use sum_tree::{self, Bias, SumTree}; -use text::{Anchor, PointUtf16, ToOffset}; +use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; #[derive(Clone, Default)] pub struct DiagnosticSet { - diagnostics: SumTree, + diagnostics: SumTree>, } -#[derive(Clone, Debug)] -pub struct DiagnosticEntry { - pub range: Range, +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct DiagnosticEntry { + pub range: Range, pub diagnostic: Diagnostic, } @@ -30,33 +30,42 @@ pub struct Summary { impl DiagnosticSet { pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self where - I: IntoIterator, + I: IntoIterator>, { Self { diagnostics: SumTree::from_iter(iter, buffer), } } - pub fn reset(&mut self, iter: I) + pub fn reset(&mut self, iter: I, buffer: &text::Snapshot) where - I: IntoIterator, Diagnostic)>, + I: IntoIterator>, { let mut entries = iter.into_iter().collect::>(); - entries.sort_unstable_by_key(|(range, _)| (range.start, Reverse(range.end))); + entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); + self.diagnostics = SumTree::from_iter( + entries.into_iter().map(|entry| DiagnosticEntry { + range: buffer.anchor_before(entry.range.start) + ..buffer.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }), + buffer, + ); } - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator> { self.diagnostics.iter() } - pub fn range<'a, T>( + pub fn range<'a, T, O>( &'a self, range: Range, buffer: &'a text::Snapshot, inclusive: bool, - ) -> impl Iterator + ) -> impl 'a + Iterator> where T: 'a + ToOffset, + O: FromAnchor, { let end_bias = if inclusive { Bias::Right } else { Bias::Left }; let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); @@ -79,7 +88,7 @@ impl DiagnosticSet { move || { if let Some(diagnostic) = cursor.item() { cursor.next(buffer); - Some(diagnostic) + 
Some(diagnostic.resolve(buffer)) } else { None } @@ -87,13 +96,18 @@ impl DiagnosticSet { }) } - pub fn group(&self, group_id: usize) -> impl Iterator { + pub fn group<'a, O: FromAnchor>( + &'a self, + group_id: usize, + buffer: &'a text::Snapshot, + ) -> impl 'a + Iterator> { self.iter() .filter(move |entry| entry.diagnostic.group_id == group_id) + .map(|entry| entry.resolve(buffer)) } } -impl sum_tree::Item for DiagnosticEntry { +impl sum_tree::Item for DiagnosticEntry { type Summary = Summary; fn summary(&self) -> Self::Summary { @@ -107,6 +121,16 @@ impl sum_tree::Item for DiagnosticEntry { } } +impl DiagnosticEntry { + pub fn resolve(&self, buffer: &text::Snapshot) -> DiagnosticEntry { + DiagnosticEntry { + range: O::from_anchor(&self.range.start, buffer) + ..O::from_anchor(&self.range.end, buffer), + diagnostic: self.diagnostic.clone(), + } + } +} + impl Default for Summary { fn default() -> Self { Self { diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 619ce19689..99161d1f5c 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -8,6 +8,7 @@ mod tests; use anyhow::{anyhow, Result}; pub use buffer::Operation; pub use buffer::*; +pub use diagnostic_set::DiagnosticEntry; use gpui::{executor::Background, AppContext}; use highlight_map::HighlightMap; use lazy_static::lazy_static; diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 851ab76bca..6f36c7dc0b 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -127,7 +127,7 @@ pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { } pub fn serialize_diagnostics<'a>( - diagnostics: impl IntoIterator, + diagnostics: impl IntoIterator>, ) -> Vec { diagnostics .into_iter() @@ -304,7 +304,9 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { } } -pub fn deserialize_diagnostics(diagnostics: Vec) -> Vec { +pub fn deserialize_diagnostics( + diagnostics: Vec, +) -> Vec> { diagnostics .into_iter() .filter_map(|diagnostic| { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index d1f48245db..cc873f253a 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -532,28 +532,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // The diagnostics have moved down since they were created. 
assert_eq!( buffer - .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ), - ( - Point::new(4, 9)..Point::new(4, 12), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(4, 9)..Point::new(4, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, } - ) + } ] ); assert_eq!( @@ -600,28 +599,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ - ( - Point::new(2, 9)..Point::new(2, 12), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), group_id: 1, is_primary: true, } - ), - ( - Point::new(2, 9)..Point::new(2, 10), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 10), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, }, - ) + } ] ); assert_eq!( @@ -680,28 +678,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(2, 21)..Point::new(2, 22), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 21)..Point::new(2, 22), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ) + } ] ); }); @@ -866,117 +863,110 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { buffer.update_diagnostics(None, diagnostics, cx).unwrap(); assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 
13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); assert_eq!( - buffer - .diagnostic_group(0) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) - .collect::>(), + buffer.diagnostic_group::(0).collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), + }, ] ); assert_eq!( - buffer - .diagnostic_group(1) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) - .collect::>(), + buffer.diagnostic_group::(1).collect::>(), &[ - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 393e92dfb9..943ab6dbd0 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3005,7 +3005,7 @@ mod tests { use anyhow::Result; use client::test::{FakeHttpClient, FakeServer}; use fs::RealFs; - use language::{tree_sitter_rust, AnchorRangeExt, LanguageServerConfig}; + use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig}; use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; @@ -3721,20 +3721,19 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(); assert_eq!( diagnostics, - &[( - Point::new(0, 9)..Point::new(0, 10), - &Diagnostic { + &[DiagnosticEntry { + range: Point::new(0, 9)..Point::new(0, 10), + diagnostic: Diagnostic { severity: lsp::DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true } - )] + }] ) }); } diff --git 
a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 6d1b1238c6..a37dc56532 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -947,7 +947,7 @@ mod tests { editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, language::{ - tree_sitter_rust, AnchorRangeExt, Diagnostic, Language, LanguageConfig, + tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, }, lsp, @@ -1704,28 +1704,27 @@ mod tests { buffer_b.read_with(&cx_b, |buffer, _| { assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(0, 4)..Point::new(0, 7), - &Diagnostic { + DiagnosticEntry { + range: Point::new(0, 4)..Point::new(0, 7), + diagnostic: Diagnostic { group_id: 0, message: "message 1".to_string(), severity: lsp::DiagnosticSeverity::ERROR, is_primary: true } - ), - ( - Point::new(0, 10)..Point::new(0, 13), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(0, 10)..Point::new(0, 13), + diagnostic: Diagnostic { group_id: 1, severity: lsp::DiagnosticSeverity::WARNING, message: "message 2".to_string(), is_primary: true } - ) + } ] ); }); From e9c385e7a6706b791e811d0b82e2b49dd0788c97 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 18:27:54 +0100 Subject: [PATCH 11/12] WIP --- crates/text/src/locator.rs | 21 +++++++++++++++------ crates/text/src/text.rs | 4 ++-- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 0a22ea58f9..249e79b6fd 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -2,23 +2,28 @@ use smallvec::{smallvec, SmallVec}; use std::iter; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u8; 4]>); +pub struct Locator(SmallVec<[u64; 4]>); impl Locator { pub fn min() -> Self { - Self(smallvec![u8::MIN]) + Self(smallvec![u64::MIN]) } pub fn max() -> Self { - Self(smallvec![u8::MAX]) + Self(smallvec![u64::MAX]) + } + + pub fn assign(&mut self, other: &Self) { + self.0.resize(other.0.len(), 0); + self.0.copy_from_slice(&other.0); } pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); + let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { - let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48); location.push(mid); if mid > lhs { break; @@ -26,6 +31,10 @@ impl Locator { } Self(location) } + + pub fn len(&self) -> usize { + self.0.len() + } } impl Default for Locator { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index c2e0d8e4ef..3985501659 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2002,7 +2002,7 @@ impl sum_tree::Summary for FragmentSummary { type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.max_id = other.max_id.clone(); + self.max_id.assign(&other.max_id); self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; self.max_version.join(&other.max_version); @@ -2113,7 +2113,7 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { impl<'a> sum_tree::Dimension<'a, 
FragmentSummary> for Locator { fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { - *self = summary.max_id.clone(); + self.assign(&summary.max_id); } } From 1ed1ec21ddb3ec44e0111c2e87827cae87db76ab Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 9 Dec 2021 10:42:44 -0800 Subject: [PATCH 12/12] Batch anchor resolution, avoid cloning fragment ids when seeking --- crates/text/src/selection.rs | 13 ++++++- crates/text/src/tests.rs | 4 +-- crates/text/src/text.rs | 68 +++++++++++++++++++++++++++++++----- 3 files changed, 73 insertions(+), 12 deletions(-) diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index ae96e93e51..5142baf7f5 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -125,7 +125,18 @@ impl SelectionSet { where D: 'a + TextDimension<'a>, { - self.selections.iter().map(|s| s.resolve(snapshot)) + let anchors = self + .selections + .iter() + .flat_map(|selection| [&selection.start, &selection.end].into_iter()); + let mut positions = snapshot.summaries_for_anchors::(anchors); + self.selections.iter().map(move |selection| Selection { + start: positions.next().unwrap(), + end: positions.next().unwrap(), + goal: selection.goal, + reversed: selection.reversed, + id: selection.id, + }) } pub fn intersecting_selections<'a, D, I>( diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index f7f307049c..5439e71af7 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -645,9 +645,9 @@ impl Buffer { assert_eq!(insertion_fragment.fragment_id, fragment.id); } - let mut cursor = self.snapshot.fragments.cursor::(); + let mut cursor = self.snapshot.fragments.cursor::>(); for insertion_fragment in self.snapshot.insertions.cursor::<()>() { - cursor.seek(&insertion_fragment.fragment_id, Bias::Left, &None); + cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); let fragment = cursor.item().unwrap(); assert_eq!(insertion_fragment.fragment_id, fragment.id); assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 3985501659..d8c9c43d5f 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1672,6 +1672,56 @@ impl Snapshot { result } + pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator + where + D: 'a + TextDimension<'a>, + A: 'a + IntoIterator, + { + let anchors = anchors.into_iter(); + let mut insertion_cursor = self.insertions.cursor::(); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut text_cursor = self.visible_text.cursor(0); + let mut position = D::default(); + + anchors.map(move |anchor| { + if *anchor == Anchor::min() { + return D::default(); + } else if *anchor == Anchor::max() { + return D::from_text_summary(&self.visible_text.summary()); + } + + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + 
fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; + } + + position.add_assign(&text_cursor.summary(fragment_offset)); + position.clone() + }) + } + fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D where D: TextDimension<'a>, @@ -1702,8 +1752,8 @@ impl Snapshot { let insertion = insertion_cursor.item().expect("invalid insertion"); debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { @@ -1741,8 +1791,8 @@ impl Snapshot { let insertion = insertion_cursor.item().expect("invalid insertion"); debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, FullOffset)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) } } @@ -1771,10 +1821,10 @@ impl Snapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::max() } else { - let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); + let mut fragment_cursor = self.fragments.cursor::(); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); - let overshoot = offset - fragment_cursor.start().0; + let overshoot = offset - *fragment_cursor.start(); Anchor { timestamp: fragment.insertion_timestamp.local(), offset: fragment.insertion_offset + overshoot, @@ -2111,9 +2161,9 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { } } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Locator { - fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { - self.assign(&summary.max_id); +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + *self = Some(&summary.max_id); } }
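
The operation_queue changes above make the queue generic over anything that can report a Lamport timestamp, instead of being tied to the text crate's Operation enum. As a rough standalone illustration of that shape only (not the SumTree-backed implementation from the patch — the Lamport struct and the BTreeMap storage below are simplified stand-ins), the same insert/dedup/drain behaviour can be sketched as:

use std::collections::BTreeMap;

// Stand-in for clock::Lamport: ordered by (value, replica_id).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Lamport {
    value: u32,
    replica_id: u16,
}

// Mirrors the shape of the operation_queue::Operation trait: anything that can
// report a Lamport timestamp can be queued and deduplicated by it.
trait Operation: Clone {
    fn lamport_timestamp(&self) -> Lamport;
}

// Simplified queue: a BTreeMap keyed by timestamp stands in for the SumTree of
// OperationItem values, giving the same ordering and deduplication.
struct OperationQueue<T: Operation>(BTreeMap<Lamport, T>);

impl<T: Operation> OperationQueue<T> {
    fn new() -> Self {
        Self(BTreeMap::new())
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    fn insert(&mut self, ops: Vec<T>) {
        // Inserting by timestamp key sorts and deduplicates in one step.
        for op in ops {
            self.0.insert(op.lamport_timestamp(), op);
        }
    }

    fn drain(&mut self) -> impl Iterator<Item = T> {
        std::mem::take(&mut self.0).into_values()
    }

    fn iter(&self) -> impl Iterator<Item = &T> {
        self.0.values()
    }
}

#[derive(Clone, Debug)]
struct TestOperation(Lamport);

impl Operation for TestOperation {
    fn lamport_timestamp(&self) -> Lamport {
        self.0
    }
}

fn main() {
    let mut queue = OperationQueue::new();
    let t = |value| Lamport { value, replica_id: 0 };
    queue.insert(vec![TestOperation(t(2)), TestOperation(t(1)), TestOperation(t(2))]);
    assert_eq!(queue.len(), 2); // duplicate timestamps are collapsed
    assert_eq!(queue.iter().count(), 2);
    let drained: Vec<_> = queue.drain().collect();
    assert_eq!(queue.len(), 0);
    assert_eq!(drained[0].0.value, 1); // drained in timestamp order
}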
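
The diagnostic changes store DiagnosticEntry<Anchor> values and let callers pick the output coordinate type when reading, via a FromAnchor conversion at the call site. A minimal sketch of that pattern follows, assuming trivial stand-ins for Anchor, Snapshot, Point, and FromAnchor rather than the text crate's real types (here an anchor is just a byte offset into a String):

use std::ops::Range;

// Stand-ins: a real Anchor carries a timestamp, offset, and bias, and a real
// Snapshot resolves it through the fragment tree.
type Anchor = usize;

struct Snapshot {
    text: String,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Point {
    row: u32,
    column: u32,
}

trait FromAnchor {
    fn from_anchor(anchor: &Anchor, snapshot: &Snapshot) -> Self;
}

impl FromAnchor for usize {
    fn from_anchor(anchor: &Anchor, _: &Snapshot) -> Self {
        *anchor
    }
}

impl FromAnchor for Point {
    fn from_anchor(anchor: &Anchor, snapshot: &Snapshot) -> Self {
        // Count newlines before the anchor to get the row, and measure the
        // trailing partial line to get the column (ASCII-only sketch).
        let prefix = &snapshot.text[..*anchor];
        let row = prefix.matches('\n').count() as u32;
        let column = prefix.rsplit('\n').next().unwrap_or("").len() as u32;
        Point { row, column }
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
struct DiagnosticEntry<T> {
    range: Range<T>,
    message: String,
}

impl DiagnosticEntry<Anchor> {
    // Mirrors the resolve method added in the patch: stored entries keep anchor
    // ranges and the caller chooses the concrete coordinate type.
    fn resolve<O: FromAnchor>(&self, snapshot: &Snapshot) -> DiagnosticEntry<O> {
        DiagnosticEntry {
            range: O::from_anchor(&self.range.start, snapshot)
                ..O::from_anchor(&self.range.end, snapshot),
            message: self.message.clone(),
        }
    }
}

fn main() {
    let snapshot = Snapshot {
        text: "fn main() {\n    let x = 1;\n}\n".to_string(),
    };
    let stored = DiagnosticEntry::<Anchor> {
        range: 20..21,
        message: "unused variable `x`".to_string(),
    };
    let as_offsets: DiagnosticEntry<usize> = stored.resolve(&snapshot);
    let as_points: DiagnosticEntry<Point> = stored.resolve(&snapshot);
    println!("{:?} {:?}", as_offsets.range, as_points.range);
}

With this shape, call sites such as diagnostics_in_range::<_, Point>(..) in the patches get ranges in the coordinate type they want without mapping every entry through to_point or to_offset by hand.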
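
Finally, the locator changes switch the dense ordering keys to u64 digits and step a fixed 2^-48 fraction of the remaining gap at each level, descending a level whenever that step is zero. A self-contained sketch of that between scheme (using a plain Vec<u64> in place of the SmallVec and omitting assign and len) is:

use std::iter;

// Dense ordering key: a sequence of u64 "digits" compared lexicographically.
// `between` always yields a key strictly between two distinct keys, extending
// the sequence when the gap at a given level is too small.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Locator(Vec<u64>);

impl Locator {
    fn min() -> Self {
        Self(vec![u64::MIN])
    }

    fn max() -> Self {
        Self(vec![u64::MAX])
    }

    fn between(lhs: &Self, rhs: &Self) -> Self {
        // Missing digits behave like MIN on the left bound and MAX on the
        // right, so shorter locators can always be extended in either direction.
        let lhs_digits = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
        let rhs_digits = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
        let mut location = Vec::new();
        for (lo, hi) in lhs_digits.zip(rhs_digits) {
            // Step 2^-48 of the gap past the left bound; if the gap is too
            // small the step is zero and we descend to the next digit.
            let mid = lo + (hi.saturating_sub(lo) >> 48);
            location.push(mid);
            if mid > lo {
                break;
            }
        }
        Self(location)
    }
}

fn main() {
    let a = Locator(vec![10]);
    let b = Locator(vec![10, 3]);
    let c = Locator::between(&a, &b);
    assert!(Locator::min() < a && a < c && c < b && b < Locator::max());
    println!("{:?}", c);
}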