Merge pull request #68 from zed-industries/optimizations-2

Introduce a few optimizations to `SumTree` and `Buffer`
This commit is contained in:
Max Brunsfeld 2021-05-18 09:14:23 -07:00 committed by GitHub
commit 45e81dc768
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 49 additions and 69 deletions

View file

@@ -23,7 +23,7 @@ scoped-pool = {path = "../scoped_pool"}
seahash = "4.1"
serde = {version = "1.0.125", features = ["derive"]}
serde_json = "1.0.64"
smallvec = "1.6.1"
smallvec = {version = "1.6", features = ["union"]}
smol = "1.2"
tiny-skia = "0.5"
tree-sitter = "0.17"

View file

@@ -36,7 +36,7 @@ seahash = "4.1"
serde = {version = "1", features = ["derive"]}
similar = "1.3"
simplelog = "0.9"
smallvec = "1.6.1"
smallvec = {version = "1.6", features = ["union"]}
smol = "1.2.5"
[dev-dependencies]

View file

@@ -1900,15 +1900,13 @@ impl Buffer {
.item()
.ok_or_else(|| anyhow!("split offset is out of range"))?;
let mut fragments_cursor = self
.fragments
.cursor::<FragmentIdRef, FragmentTextSummary>();
let mut fragments_cursor = self.fragments.cursor::<FragmentIdRef, usize>();
fragments_cursor.seek(&FragmentIdRef::new(&split.fragment_id), SeekBias::Left, &());
let fragment = fragments_cursor
.item()
.ok_or_else(|| anyhow!("fragment id does not exist"))?;
let mut ix = fragments_cursor.start().clone().visible;
let mut ix = *fragments_cursor.start();
if fragment.visible {
ix += offset - fragment.range_in_insertion.start;
}
@@ -2316,7 +2314,7 @@ impl Default for InsertionSplitSummary {
impl<'a> sum_tree::Dimension<'a, InsertionSplitSummary> for usize {
fn add_summary(&mut self, summary: &InsertionSplitSummary) {
*self += &summary.extent;
*self += summary.extent;
}
}

View file

@@ -207,27 +207,23 @@ impl<T: Item> SumTree<T> {
pub fn push(&mut self, item: T, ctx: &<T::Summary as Summary>::Context) {
let summary = item.summary();
self.push_tree(
SumTree::from_child_trees(
vec![SumTree(Arc::new(Node::Leaf {
SumTree(Arc::new(Node::Leaf {
summary: summary.clone(),
items: ArrayVec::from_iter(Some(item)),
item_summaries: ArrayVec::from_iter(Some(summary)),
}))],
})),
ctx,
),
ctx,
)
);
}
pub fn push_tree(&mut self, other: Self, ctx: &<T::Summary as Summary>::Context) {
let other_node = other.0.clone();
if !other_node.is_leaf() || other_node.items().len() > 0 {
if self.0.height() < other_node.height() {
for tree in other_node.child_trees() {
if !other.0.is_leaf() || other.0.items().len() > 0 {
if self.0.height() < other.0.height() {
for tree in other.0.child_trees() {
self.push_tree(tree.clone(), ctx);
}
} else if let Some(split_tree) = self.push_tree_recursive(other, ctx) {
*self = Self::from_child_trees(vec![self.clone(), split_tree], ctx);
*self = Self::from_child_trees(self.clone(), split_tree, ctx);
}
}
}
@@ -353,20 +349,22 @@ impl<T: Item> SumTree<T> {
}
fn from_child_trees(
child_trees: Vec<SumTree<T>>,
left: SumTree<T>,
right: SumTree<T>,
ctx: &<T::Summary as Summary>::Context,
) -> Self {
let height = child_trees[0].0.height() + 1;
let height = left.0.height() + 1;
let mut child_summaries = ArrayVec::new();
for child in &child_trees {
child_summaries.push(child.0.summary().clone());
}
let summary = sum(child_summaries.iter(), ctx);
child_summaries.push(left.0.summary().clone());
child_summaries.push(right.0.summary().clone());
let mut child_trees = ArrayVec::new();
child_trees.push(left);
child_trees.push(right);
SumTree(Arc::new(Node::Internal {
height,
summary,
summary: sum(child_summaries.iter(), ctx),
child_summaries,
child_trees: ArrayVec::from_iter(child_trees),
child_trees,
}))
}

View file

@@ -1,13 +1,9 @@
use smallvec::SmallVec;
use std::cmp::{self, Ordering};
use std::collections::HashMap;
use std::mem;
use std::ops::{Add, AddAssign};
use std::sync::Arc;
use lazy_static::lazy_static;
pub type ReplicaId = u16;
pub type Seq = u64;
pub type Seq = u32;
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq, Ord, PartialOrd)]
pub struct Local {
@@ -58,18 +54,8 @@ impl<'a> AddAssign<&'a Local> for Local {
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Global(Arc<HashMap<ReplicaId, u64>>);
lazy_static! {
static ref DEFAULT_GLOBAL: Global = Global(Arc::new(HashMap::new()));
}
impl Default for Global {
fn default() -> Self {
DEFAULT_GLOBAL.clone()
}
}
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct Global(SmallVec<[Local; 3]>);
impl Global {
pub fn new() -> Self {
@@ -77,21 +63,27 @@ impl Global {
}
pub fn get(&self, replica_id: ReplicaId) -> Seq {
*self.0.get(&replica_id).unwrap_or(&0)
self.0
.iter()
.find(|t| t.replica_id == replica_id)
.map_or(0, |t| t.value)
}
pub fn observe(&mut self, timestamp: Local) {
let map = Arc::make_mut(&mut self.0);
let value = map.entry(timestamp.replica_id).or_insert(0);
*value = cmp::max(*value, timestamp.value);
if let Some(entry) = self
.0
.iter_mut()
.find(|t| t.replica_id == timestamp.replica_id)
{
entry.value = cmp::max(entry.value, timestamp.value);
} else {
self.0.push(timestamp);
}
}
pub fn observe_all(&mut self, other: &Self) {
for (replica_id, value) in other.0.as_ref() {
self.observe(Local {
replica_id: *replica_id,
value: *value,
});
for timestamp in other.0.iter() {
self.observe(*timestamp);
}
}
@@ -100,9 +92,7 @@ impl Global {
}
pub fn changed_since(&self, other: &Self) -> bool {
self.0
.iter()
.any(|(replica_id, value)| *value > other.get(*replica_id))
self.0.iter().any(|t| t.value > other.get(t.replica_id))
}
}
@@ -110,8 +100,10 @@ impl PartialOrd for Global {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
let mut global_ordering = Ordering::Equal;
for replica_id in self.0.keys().chain(other.0.keys()) {
let ordering = self.get(*replica_id).cmp(&other.get(*replica_id));
for timestamp in self.0.iter().chain(other.0.iter()) {
let ordering = self
.get(timestamp.replica_id)
.cmp(&other.get(timestamp.replica_id));
if ordering != Ordering::Equal {
if global_ordering == Ordering::Equal {
global_ordering = ordering;
@@ -142,12 +134,4 @@ impl Lamport {
pub fn observe(&mut self, timestamp: Self) {
self.value = cmp::max(self.value, timestamp.value) + 1;
}
pub fn to_bytes(&self) -> [u8; 24] {
let mut bytes = [0; 24];
bytes[0..8].copy_from_slice(unsafe { &mem::transmute::<u64, [u8; 8]>(self.value.to_be()) });
bytes[8..10]
.copy_from_slice(unsafe { &mem::transmute::<u16, [u8; 2]>(self.replica_id.to_be()) });
bytes
}
}