Merge remote-tracking branch 'origin/master' into upload-release

Nathan Sobo 2021-05-17 18:27:19 -06:00
commit a750a09002
5 changed files with 326 additions and 296 deletions


@ -22,21 +22,18 @@ Ship a minimal text editor to investors and other insiders. It should be extreme
Establish basic infrastructure for building the app bundle and uploading an artifact. Once this is released, we should regularly distribute updates as features land.
### Minimal code editor for internal use
### Collaborative code editor for internal use
[Tracking issue](https://github.com/zed-industries/zed/issues/6)
Turn the minimal text editor into a minimal *code* editor. We define "minimal" as the features that the Zed team needs to use Zed to build Zed without net loss in developer productivity. This includes productivity-critical features such as:
Turn the minimal text editor into a collaborative *code* editor. This will include the minimal features the Zed team needs to collaborate in Zed to build Zed without a net loss in developer productivity, including productivity-critical features such as:
* Syntax highlighting and syntax-aware editing and navigation
* The ability to see and edit non-local working copies of a repository
* Language server support for Rust code navigation, refactoring, diagnostics, etc.
* Project browsing and project-wide search and replace
We don't need to implement everything, only the things that stop us from being productive. For example, maybe we skip soft wrap at first and keep editing prose in another editor.
### Minimal collaborative code editor for internal use
Once we're using Zed every day, our next goal is to *collaborate* in Zed every day. What features do we need to stop pairing over Discord screen sharing, then to stop using screen sharing entirely, then to spend less and less time talking about code in Discord at all? How much team collaboration can take place inside Zed, with code as its focus?
We want to tackle collaboration fairly early so that the rest of the design of the product can flow around that assumption. We could probably produce a single-player code editor more quickly, but at the risk of having collaboration feel more "bolted on" when we eventually add it.
### Private alpha for Rust teams on macOS


@ -25,6 +25,7 @@ use std::{
cmp,
hash::BuildHasher,
iter::{self, Iterator},
mem,
ops::Range,
str,
sync::Arc,
@ -599,8 +600,7 @@ impl Buffer {
}
pub fn text_summary_for_range(&self, range: Range<usize>) -> TextSummary {
// TODO: Use a dedicated ::summarize method in Rope.
self.visible_text.slice(range).summary()
self.visible_text.cursor(range.start).summary(range.end)
}
pub fn len(&self) -> usize {
@ -1016,24 +1016,26 @@ impl Buffer {
let start_fragment_id = self.resolve_fragment_id(start_id, start_offset)?;
let end_fragment_id = self.resolve_fragment_id(end_id, end_offset)?;
let old_fragments = self.fragments.clone();
let old_visible_text = self.visible_text.clone();
let old_deleted_text = self.deleted_text.clone();
let mut old_visible_text = Rope::new();
let mut old_deleted_text = Rope::new();
let mut old_fragments = SumTree::new();
mem::swap(&mut old_visible_text, &mut self.visible_text);
mem::swap(&mut old_deleted_text, &mut self.deleted_text);
mem::swap(&mut old_fragments, &mut self.fragments);
let mut builder = RopeBuilder::new(old_visible_text.cursor(0), new_visible_text.cursor(0));
let mut fragments_cursor = old_fragments.cursor::<FragmentIdRef, FragmentTextSummary>();
let mut new_fragments =
fragments_cursor.slice(&FragmentIdRef::new(&start_fragment_id), SeekBias::Left, &());
builder.keep_to(
new_fragments.summary().text.visible,
new_fragments.summary().text.deleted,
);
let mut new_ropes =
RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
new_ropes.push_tree(new_fragments.summary().text);
let start_fragment = fragments_cursor.item().unwrap();
if start_offset == start_fragment.range_in_insertion.end {
new_fragments.push(fragments_cursor.item().unwrap().clone(), &());
let fragment = fragments_cursor.item().unwrap().clone();
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &());
fragments_cursor.next();
}
@ -1073,35 +1075,27 @@ impl Buffer {
None
};
if let Some(fragment) = before_range {
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &());
}
if let Some(fragment) = insertion {
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_ropes.push_str(new_text.take().unwrap());
new_fragments.push(fragment, &());
new_visible_text.push(new_text.take().unwrap());
}
if let Some(mut fragment) = within_range {
let fragment_was_visible = fragment.visible;
if fragment.was_visible(&version_in_range, &self.undo_map) {
fragment.deletions.insert(local_timestamp);
if fragment.visible {
fragment.visible = false;
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + fragment.len()),
);
}
}
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &());
}
if let Some(fragment) = after_range {
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &());
}
} else {
@ -1114,31 +1108,21 @@ impl Buffer {
local_timestamp,
lamport_timestamp,
);
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_ropes.push_str(new_text);
new_fragments.push(fragment, &());
new_visible_text.push(new_text);
}
let fragment_was_visible = fragment.visible;
if fragment.id < end_fragment_id
&& fragment.was_visible(&version_in_range, &self.undo_map)
{
fragment.deletions.insert(local_timestamp);
if fragment.visible {
fragment.visible = false;
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + fragment.len()),
);
}
}
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &());
}
@ -1153,19 +1137,16 @@ impl Buffer {
local_timestamp,
lamport_timestamp,
);
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_ropes.push_str(new_text);
new_fragments.push(fragment, &());
new_visible_text.push(new_text);
}
let (visible_text, deleted_text) = new_ropes.finish();
new_fragments.push_tree(fragments_cursor.suffix(&()), &());
new_visible_text.append(visible_text_cursor.suffix());
new_deleted_text.append(deleted_text_cursor.suffix());
self.fragments = new_fragments;
self.visible_text = new_visible_text;
self.deleted_text = new_deleted_text;
self.visible_text = visible_text;
self.deleted_text = deleted_text;
self.local_clock.observe(local_timestamp);
self.lamport_clock.observe(lamport_timestamp);
Ok(())
@ -1240,8 +1221,12 @@ impl Buffer {
fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> {
let mut new_fragments;
let mut new_visible_text = Rope::new();
let mut new_deleted_text = Rope::new();
let mut old_visible_text = Rope::new();
let mut old_deleted_text = Rope::new();
mem::swap(&mut old_visible_text, &mut self.visible_text);
mem::swap(&mut old_deleted_text, &mut self.deleted_text);
let mut new_ropes =
RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
self.undo_map.insert(undo);
let edit = &self.history.ops[&undo.edit_id];
@ -1249,8 +1234,6 @@ impl Buffer {
let end_fragment_id = self.resolve_fragment_id(edit.end_id, edit.end_offset)?;
let mut fragments_cursor = self.fragments.cursor::<FragmentIdRef, ()>();
let mut visible_text_cursor = self.visible_text.cursor(0);
let mut deleted_text_cursor = self.deleted_text.cursor(0);
if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset {
let splits = &self.insertion_splits[&undo.edit_id];
@ -1259,10 +1242,7 @@ impl Buffer {
let first_split_id = insertion_splits.next().unwrap();
new_fragments =
fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &());
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_deleted_text
.append(deleted_text_cursor.slice(new_fragments.summary().text.deleted));
new_ropes.push_tree(new_fragments.summary().text);
loop {
let mut fragment = fragments_cursor.item().unwrap().clone();
@ -1270,32 +1250,15 @@ impl Buffer {
fragment.visible = fragment.is_visible(&self.undo_map);
fragment.max_undos.observe(undo.id);
if fragment.visible != was_visible {
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_deleted_text
.append(deleted_text_cursor.slice(new_fragments.summary().text.deleted));
}
new_ropes.push_fragment(&fragment, was_visible);
new_fragments.push(fragment.clone(), &());
if fragment.visible && !was_visible {
new_visible_text.append(
deleted_text_cursor
.slice(new_fragments.summary().text.deleted + fragment.len()),
);
} else if !fragment.visible && was_visible {
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + fragment.len()),
);
}
new_fragments.push(fragment, &());
fragments_cursor.next();
if let Some(split_id) = insertion_splits.next() {
new_fragments.push_tree(
fragments_cursor.slice(&FragmentIdRef::new(split_id), SeekBias::Left, &()),
&(),
);
let slice =
fragments_cursor.slice(&FragmentIdRef::new(split_id), SeekBias::Left, &());
new_ropes.push_tree(slice.summary().text);
new_fragments.push_tree(slice, &());
} else {
break;
}
@ -1306,40 +1269,22 @@ impl Buffer {
SeekBias::Left,
&(),
);
new_ropes.push_tree(new_fragments.summary().text);
while let Some(fragment) = fragments_cursor.item() {
if fragment.id > end_fragment_id {
break;
} else {
let mut fragment = fragment.clone();
let fragment_was_visible = fragment.visible;
if edit.version_in_range.observed(fragment.insertion.id)
|| fragment.insertion.id == undo.edit_id
{
let was_visible = fragment.visible;
fragment.visible = fragment.is_visible(&self.undo_map);
fragment.max_undos.observe(undo.id);
if fragment.visible != was_visible {
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
}
if fragment.visible && !was_visible {
new_visible_text.append(
deleted_text_cursor
.slice(new_fragments.summary().text.deleted + fragment.len()),
);
} else if !fragment.visible && was_visible {
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + fragment.len()),
);
}
}
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &());
fragments_cursor.next();
}
@ -1347,13 +1292,12 @@ impl Buffer {
}
new_fragments.push_tree(fragments_cursor.suffix(&()), &());
new_visible_text.append(visible_text_cursor.suffix());
new_deleted_text.append(deleted_text_cursor.suffix());
let (visible_text, deleted_text) = new_ropes.finish();
drop(fragments_cursor);
self.fragments = new_fragments;
self.visible_text = new_visible_text;
self.deleted_text = new_deleted_text;
self.visible_text = visible_text;
self.deleted_text = deleted_text;
Ok(())
}
@ -1434,24 +1378,20 @@ impl Buffer {
let mut ops = Vec::with_capacity(old_ranges.size_hint().0);
let old_fragments = self.fragments.clone();
let old_visible_text = self.visible_text.clone();
let old_deleted_text = self.deleted_text.clone();
let mut old_fragments = SumTree::new();
let mut old_visible_text = Rope::new();
let mut old_deleted_text = Rope::new();
mem::swap(&mut old_visible_text, &mut self.visible_text);
mem::swap(&mut old_deleted_text, &mut self.deleted_text);
mem::swap(&mut old_fragments, &mut self.fragments);
let mut fragments_cursor = old_fragments.cursor::<usize, usize>();
let mut visible_text_cursor = old_visible_text.cursor(0);
let mut deleted_text_cursor = old_deleted_text.cursor(0);
let mut new_fragments =
fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &());
let mut new_fragments = SumTree::new();
let mut new_visible_text = Rope::new();
let mut new_deleted_text = Rope::new();
new_fragments.push_tree(
fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &()),
&(),
);
new_visible_text.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_deleted_text.append(deleted_text_cursor.slice(new_fragments.summary().text.deleted));
let mut new_ropes =
RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0));
new_ropes.push_tree(new_fragments.summary().text);
let mut start_id = None;
let mut start_offset = None;
@ -1467,6 +1407,7 @@ impl Buffer {
let fragment_summary = fragments_cursor.item_summary().unwrap();
let mut fragment_start = *fragments_cursor.start();
let mut fragment_end = fragment_start + fragment.visible_len();
let fragment_was_visible = fragment.visible;
let old_split_tree = self
.insertion_splits
@ -1488,6 +1429,8 @@ impl Buffer {
prefix.id =
FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id);
fragment.range_in_insertion.start = prefix.range_in_insertion.end;
new_ropes.push_fragment(&prefix, prefix.visible);
new_fragments.push(prefix.clone(), &());
new_split_tree.push(
InsertionSplit {
@ -1520,11 +1463,8 @@ impl Buffer {
lamport_timestamp,
);
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_ropes.push_str(&new_text);
new_fragments.push(new_fragment, &());
new_visible_text.push(&new_text);
}
}
@ -1539,19 +1479,9 @@ impl Buffer {
if prefix.visible {
prefix.deletions.insert(local_timestamp);
prefix.visible = false;
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + prefix.len()),
);
}
fragment.range_in_insertion.start = prefix.range_in_insertion.end;
new_ropes.push_fragment(&prefix, fragment_was_visible);
new_fragments.push(prefix.clone(), &());
new_split_tree.push(
InsertionSplit {
@ -1570,17 +1500,6 @@ impl Buffer {
if fragment.visible {
fragment.deletions.insert(local_timestamp);
fragment.visible = false;
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
new_deleted_text.append(
visible_text_cursor
.slice(new_fragments.summary().text.visible + fragment.len()),
);
}
}
@ -1629,6 +1548,8 @@ impl Buffer {
);
self.insertion_splits
.insert(fragment.insertion.id, new_split_tree);
new_ropes.push_fragment(&fragment, fragment_was_visible);
new_fragments.push(fragment, &());
// Scan forward until we find a fragment that is not fully contained by the current splice.
@ -1636,6 +1557,7 @@ impl Buffer {
if let Some(range) = cur_range.clone() {
while let Some(fragment) = fragments_cursor.item() {
let fragment_summary = fragments_cursor.item_summary().unwrap();
let fragment_was_visible = fragment.visible;
fragment_start = *fragments_cursor.start();
fragment_end = fragment_start + fragment.visible_len();
if range.start < fragment_start && range.end >= fragment_end {
@ -1644,19 +1566,9 @@ impl Buffer {
if new_fragment.visible {
new_fragment.deletions.insert(local_timestamp);
new_fragment.visible = false;
new_visible_text.append(
visible_text_cursor.slice(new_fragments.summary().text.visible),
);
new_deleted_text.append(
deleted_text_cursor.slice(new_fragments.summary().text.deleted),
);
new_deleted_text.append(
visible_text_cursor.slice(
new_fragments.summary().text.visible + new_fragment.len(),
),
);
}
new_ropes.push_fragment(&new_fragment, fragment_was_visible);
new_fragments.push(new_fragment, &());
fragments_cursor.next();
@ -1698,14 +1610,13 @@ impl Buffer {
// that the cursor is parked at, we should seek to the next splice's start range
// and push all the fragments in between into the new tree.
if cur_range.as_ref().map_or(false, |r| r.start > fragment_end) {
new_fragments.push_tree(
fragments_cursor.slice(
&cur_range.as_ref().unwrap().start,
SeekBias::Right,
&(),
),
let slice = fragments_cursor.slice(
&cur_range.as_ref().unwrap().start,
SeekBias::Right,
&(),
);
new_ropes.push_tree(slice.summary().text);
new_fragments.push_tree(slice, &());
}
}
}
@ -1738,20 +1649,17 @@ impl Buffer {
lamport_timestamp,
);
new_visible_text
.append(visible_text_cursor.slice(new_fragments.summary().text.visible));
new_ropes.push_str(&new_text);
new_fragments.push(new_fragment, &());
new_visible_text.push(&new_text);
}
}
new_fragments.push_tree(fragments_cursor.suffix(&()), &());
new_visible_text.append(visible_text_cursor.suffix());
new_deleted_text.append(deleted_text_cursor.suffix());
let (visible_text, deleted_text) = new_ropes.finish();
self.fragments = new_fragments;
self.visible_text = new_visible_text;
self.deleted_text = new_deleted_text;
self.visible_text = visible_text;
self.deleted_text = deleted_text;
ops
}
@ -2043,6 +1951,58 @@ impl Clone for Buffer {
}
}
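// Incrementally rebuilds the buffer's visible and deleted ropes while a new
// fragment tree is built. The editing paths above drive it in order:
// RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)),
// then push_tree(..) for the untouched prefix of fragments, push_fragment(..)
// for every fragment the edit touches, push_str(..) for newly inserted text,
// and finally finish(), which appends the remaining suffixes of the old ropes
// and returns the new (visible, deleted) pair.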
struct RopeBuilder<'a> {
old_visible_cursor: rope::Cursor<'a>,
old_deleted_cursor: rope::Cursor<'a>,
new_visible: Rope,
new_deleted: Rope,
}
impl<'a> RopeBuilder<'a> {
fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
Self {
old_visible_cursor,
old_deleted_cursor,
new_visible: Rope::new(),
new_deleted: Rope::new(),
}
}
fn push_tree(&mut self, len: FragmentTextSummary) {
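// Text covered by an untouched run of fragments stays where it was: the
// visible portion remains visible and the deleted portion remains deleted.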
self.push(len.visible, true, true);
self.push(len.deleted, false, false);
}
fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
self.push(fragment.len(), was_visible, fragment.visible)
}
fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
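// Copy the next stretch of `len` text out of whichever old rope it was stored
// in, and append it to whichever new rope it belongs to after this edit.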
let text = if was_visible {
self.old_visible_cursor
.slice(self.old_visible_cursor.offset() + len)
} else {
self.old_deleted_cursor
.slice(self.old_deleted_cursor.offset() + len)
};
if is_visible {
self.new_visible.append(text);
} else {
self.new_deleted.append(text);
}
}
fn push_str(&mut self, text: &str) {
self.new_visible.push(text);
}
fn finish(mut self) -> (Rope, Rope) {
self.new_visible.append(self.old_visible_cursor.suffix());
self.new_deleted.append(self.old_deleted_cursor.suffix());
(self.new_visible, self.new_deleted)
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
Edited,
@ -2548,12 +2508,12 @@ mod tests {
for _i in 0..10 {
let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None);
for old_range in old_ranges.iter().rev() {
reference_string = [
&reference_string[0..old_range.start],
new_text.as_str(),
&reference_string[old_range.end..],
]
.concat();
reference_string = reference_string
.chars()
.take(old_range.start)
.chain(new_text.chars())
.chain(reference_string.chars().skip(old_range.end))
.collect();
}
assert_eq!(buffer.text(), reference_string);
@ -2588,7 +2548,12 @@ mod tests {
let range_sum = buffer.text_summary_for_range(start..end);
assert_eq!(range_sum.rightmost_point.column, *longest_column);
assert!(longest_rows.contains(&range_sum.rightmost_point.row));
let range_text = &buffer.text()[start..end];
let range_text = buffer
.text()
.chars()
.skip(start)
.take(end - start)
.collect::<String>();
assert_eq!(range_sum.chars, range_text.chars().count());
assert_eq!(range_sum.bytes, range_text.len());
}
@ -3497,9 +3462,17 @@ mod tests {
fn line_lengths_in_range(buffer: &Buffer, range: Range<usize>) -> BTreeMap<u32, HashSet<u32>> {
let mut lengths = BTreeMap::new();
for (row, line) in buffer.text()[range].lines().enumerate() {
for (row, line) in buffer
.text()
.chars()
.skip(range.start)
.take(range.len())
.collect::<String>()
.lines()
.enumerate()
{
lengths
.entry(line.len() as u32)
.entry(line.chars().count() as u32)
.or_insert(HashSet::default())
.insert(row as u32);
}
@ -3511,68 +3484,3 @@ mod tests {
lengths
}
}
struct RopeBuilder<'a> {
visible_delta: isize,
deleted_delta: isize,
old_visible_cursor: rope::Cursor<'a>,
old_deleted_cursor: rope::Cursor<'a>,
new_visible: Rope,
new_deleted: Rope,
}
impl<'a> RopeBuilder<'a> {
fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
Self {
visible_delta: 0,
deleted_delta: 0,
old_visible_cursor,
old_deleted_cursor,
new_visible: Rope::new(),
new_deleted: Rope::new(),
}
}
fn keep_to(&mut self, sum: FragmentTextSummary) {
self.new_visible.append(
self.old_visible_cursor
.slice((sum.visible as isize + self.visible_delta) as usize),
);
self.new_deleted.append(
self.old_deleted_cursor
.slice((sum.deleted as isize + self.deleted_delta) as usize),
);
}
fn delete_to(&mut self, offset: usize) {
let deleted = self
.old_visible_cursor
.slice((offset as isize + self.visible_delta) as usize);
let deleted_len = deleted.len();
self.new_deleted.append(deleted);
self.visible_delta += deleted_len as isize;
self.deleted_delta -= deleted_len as isize;
}
fn restore_to(&mut self, offset: usize) {
let restored = self
.old_deleted_cursor
.slice((offset as isize + self.deleted_delta) as usize);
let restored_len = restored.len();
self.new_visible.append(restored);
self.visible_delta -= restored_len as isize;
self.deleted_delta += restored_len as isize;
}
fn insert(&mut self, text: &str) {
let old_len = self.new_visible.len();
self.new_visible.push(text);
self.visible_delta -= (self.new_visible.len() - old_len) as isize;
}
fn finish(self) -> (Rope, Rope) {
self.new_visible.append(self.old_visible_cursor.suffix());
self.new_deleted.append(self.old_deleted_cursor.suffix());
(self.new_visible, self.new_deleted)
}
}


@ -1,12 +1,13 @@
use super::Point;
use crate::sum_tree::{self, SeekBias, SumTree};
use crate::util::byte_range_for_char_range;
use anyhow::{anyhow, Result};
use arrayvec::ArrayString;
use smallvec::SmallVec;
use std::{cmp, ops::Range, str};
use std::{cmp, iter::Skip, str};
#[cfg(test)]
const CHUNK_BASE: usize = 2;
const CHUNK_BASE: usize = 6;
#[cfg(not(test))]
const CHUNK_BASE: usize = 16;
@ -25,8 +26,12 @@ impl Rope {
let mut chunks = rope.chunks.cursor::<(), ()>();
chunks.next();
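// Only merge the chunks at the seam when one of them is under-filled;
// otherwise the appended rope's chunks are reused wholesale via push_tree.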
if let Some(chunk) = chunks.item() {
self.push(&chunk.0);
chunks.next();
if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE)
|| chunk.0.len() < CHUNK_BASE
{
self.push(&chunk.0);
chunks.next();
}
}
self.chunks.push_tree(chunks.suffix(&()), &());
@ -58,12 +63,7 @@ impl Rope {
let mut text = ArrayString::<[_; 4 * CHUNK_BASE]>::new();
text.push_str(&last_chunk.0);
text.push_str(&first_new_chunk_ref.0);
let mut midpoint = text.len() / 2;
while !text.is_char_boundary(midpoint) {
midpoint += 1;
}
let (left, right) = text.split_at(midpoint);
let (left, right) = text.split_at(find_split_ix(&text));
last_chunk.0.clear();
last_chunk.0.push_str(left);
first_new_chunk_ref.0.clear();
@ -83,19 +83,16 @@ impl Rope {
#[cfg(test)]
{
// Ensure all chunks except maybe the last one are not underflowing.
// Allow some wiggle room for multibyte characters at chunk boundaries.
let mut chunks = self.chunks.cursor::<(), ()>().peekable();
while let Some(chunk) = chunks.next() {
if chunks.peek().is_some() {
assert!(chunk.0.len() >= CHUNK_BASE);
assert!(chunk.0.len() + 3 >= CHUNK_BASE);
}
}
}
}
pub fn slice(&self, range: Range<usize>) -> Rope {
self.cursor(range.start).slice(range.end)
}
pub fn summary(&self) -> TextSummary {
self.chunks.summary()
}
@ -129,19 +126,24 @@ impl Rope {
let mut cursor = self.chunks.cursor::<usize, TextSummary>();
cursor.seek(&offset, SeekBias::Left, &());
let overshoot = offset - cursor.start().chars;
Ok(cursor.start().lines + cursor.item().unwrap().to_point(overshoot))
Ok(cursor.start().lines
+ cursor
.item()
.map_or(Point::zero(), |chunk| chunk.to_point(overshoot)))
} else {
Err(anyhow!("offset out of bounds"))
}
}
pub fn to_offset(&self, point: Point) -> Result<usize> {
// TODO: Verify the point actually exists.
if point <= self.summary().lines {
let mut cursor = self.chunks.cursor::<Point, TextSummary>();
cursor.seek(&point, SeekBias::Left, &());
let overshoot = point - cursor.start().lines;
Ok(cursor.start().chars + cursor.item().unwrap().to_offset(overshoot))
Ok(cursor.start().chars
+ cursor
.item()
.map_or(Ok(0), |chunk| chunk.to_offset(overshoot))?)
} else {
Err(anyhow!("offset out of bounds"))
}
@ -187,7 +189,8 @@ impl<'a> Cursor<'a> {
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
slice.push(&start_chunk.0[start_ix..end_ix]);
let byte_range = byte_range_for_char_range(start_chunk.0, start_ix..end_ix);
slice.push(&start_chunk.0[byte_range]);
}
if end_offset > self.chunks.end() {
@ -196,7 +199,9 @@ impl<'a> Cursor<'a> {
chunks: self.chunks.slice(&end_offset, SeekBias::Right, &()),
});
if let Some(end_chunk) = self.chunks.item() {
slice.push(&end_chunk.0[..end_offset - self.chunks.start()]);
let end_ix = end_offset - self.chunks.start();
let byte_range = byte_range_for_char_range(end_chunk.0, 0..end_ix);
slice.push(&end_chunk.0[byte_range]);
}
}
@ -204,9 +209,37 @@ impl<'a> Cursor<'a> {
slice
}
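/// Summarize the text between the cursor's current offset and `end_offset`,
/// combining chunk summaries directly instead of materializing an intermediate
/// Rope the way `slice` does.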
pub fn summary(&mut self, end_offset: usize) -> TextSummary {
debug_assert!(end_offset >= self.offset);
let mut summary = TextSummary::default();
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start();
let byte_range = byte_range_for_char_range(start_chunk.0, start_ix..end_ix);
summary = TextSummary::from(&start_chunk.0[byte_range]);
}
if end_offset > self.chunks.end() {
self.chunks.next();
summary += &self.chunks.summary(&end_offset, SeekBias::Right, &());
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
let byte_range = byte_range_for_char_range(end_chunk.0, 0..end_ix);
summary += TextSummary::from(&end_chunk.0[byte_range]);
}
}
summary
}
pub fn suffix(mut self) -> Rope {
self.slice(self.rope.chunks.extent())
}
pub fn offset(&self) -> usize {
self.offset
}
}
#[derive(Clone, Debug, Default)]
@ -232,7 +265,7 @@ impl Chunk {
point
}
fn to_offset(&self, target: Point) -> usize {
fn to_offset(&self, target: Point) -> Result<usize> {
let mut offset = 0;
let mut point = Point::new(0, 0);
for ch in self.0.chars() {
@ -248,7 +281,12 @@ impl Chunk {
}
offset += 1;
}
offset
if point == target {
Ok(offset)
} else {
Err(anyhow!("point out of bounds"))
}
}
}
@ -256,12 +294,27 @@ impl sum_tree::Item for Chunk {
type Summary = TextSummary;
fn summary(&self) -> Self::Summary {
TextSummary::from(self.0.as_str())
}
}
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
pub chars: usize,
pub bytes: usize,
pub lines: Point,
pub first_line_len: u32,
pub rightmost_point: Point,
}
impl<'a> From<&'a str> for TextSummary {
fn from(text: &'a str) -> Self {
let mut chars = 0;
let mut bytes = 0;
let mut lines = Point::new(0, 0);
let mut first_line_len = 0;
let mut rightmost_point = Point::new(0, 0);
for c in self.0.chars() {
for c in text.chars() {
chars += 1;
bytes += c.len_utf8();
if c == '\n' {
@ -288,15 +341,6 @@ impl sum_tree::Item for Chunk {
}
}
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary {
pub chars: usize,
pub bytes: usize,
pub lines: Point,
pub first_line_len: u32,
pub rightmost_point: Point,
}
impl sum_tree::Summary for TextSummary {
type Context = ();
@ -351,19 +395,19 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for Point {
pub struct Chars<'a> {
cursor: sum_tree::Cursor<'a, Chunk, usize, usize>,
chars: str::Chars<'a>,
chars: Skip<str::Chars<'a>>,
}
impl<'a> Chars<'a> {
pub fn new(rope: &'a Rope, start: usize) -> Self {
let mut cursor = rope.chunks.cursor::<usize, usize>();
cursor.slice(&start, SeekBias::Left, &());
cursor.seek(&start, SeekBias::Left, &());
let chars = if let Some(chunk) = cursor.item() {
let ix = start - cursor.start();
cursor.next();
chunk.0[ix..].chars()
chunk.0.chars().skip(ix)
} else {
"".chars()
"".chars().skip(0)
};
Self { cursor, chars }
@ -377,7 +421,7 @@ impl<'a> Iterator for Chars<'a> {
if let Some(ch) = self.chars.next() {
Some(ch)
} else if let Some(chunk) = self.cursor.item() {
self.chars = chunk.0.chars();
self.chars = chunk.0.chars().skip(0);
self.cursor.next();
Some(self.chars.next().unwrap())
} else {
@ -386,6 +430,25 @@ impl<'a> Iterator for Chars<'a> {
}
}
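/// Choose a byte index near the midpoint of `text` at which to split it into
/// two chunks: nudge forward to the next char boundary while the left half
/// stays within 2 * CHUNK_BASE, otherwise back off from just below the
/// midpoint to the previous boundary.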
fn find_split_ix(text: &str) -> usize {
let mut ix = text.len() / 2;
while !text.is_char_boundary(ix) {
if ix < 2 * CHUNK_BASE {
ix += 1;
} else {
ix = (text.len() / 2) - 1;
break;
}
}
while !text.is_char_boundary(ix) {
ix -= 1;
}
debug_assert!(ix <= 2 * CHUNK_BASE);
debug_assert!(text.len() - ix <= 2 * CHUNK_BASE);
ix
}
#[cfg(test)]
mod tests {
use crate::util::RandomCharIter;
@ -394,6 +457,14 @@ mod tests {
use rand::prelude::*;
use std::env;
#[test]
fn test_all_4_byte_chars() {
let mut rope = Rope::new();
let text = "🏀".repeat(256);
rope.push(&text);
assert_eq!(rope.text(), text);
}
#[test]
fn test_random() {
let iterations = env::var("ITERATIONS")
@ -415,9 +486,9 @@ mod tests {
let mut expected = String::new();
let mut actual = Rope::new();
for _ in 0..operations {
let end_ix = rng.gen_range(0..=expected.len());
let end_ix = rng.gen_range(0..=expected.chars().count());
let start_ix = rng.gen_range(0..=end_ix);
let len = rng.gen_range(0..=20);
let len = rng.gen_range(0..=64);
let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();
let mut new_actual = Rope::new();
@ -429,16 +500,20 @@ mod tests {
actual = new_actual;
let mut new_expected = String::new();
new_expected.push_str(&expected[..start_ix]);
new_expected.extend(expected.chars().take(start_ix));
new_expected.push_str(&new_text);
new_expected.push_str(&expected[end_ix..]);
new_expected.extend(expected.chars().skip(end_ix));
expected = new_expected;
assert_eq!(actual.text(), expected);
log::info!("text: {:?}", expected);
for _ in 0..5 {
let ix = rng.gen_range(0..=expected.len());
assert_eq!(actual.chars_at(ix).collect::<String>(), expected[ix..]);
let ix = rng.gen_range(0..=expected.chars().count());
assert_eq!(
actual.chars_at(ix).collect::<String>(),
expected.chars().skip(ix).collect::<String>()
);
}
let mut point = Point::new(0, 0);
@ -447,6 +522,10 @@ mod tests {
assert_eq!(actual.to_point(offset).unwrap(), point);
assert_eq!(actual.to_offset(point).unwrap(), offset);
if ch == '\n' {
assert!(actual
.to_offset(Point::new(point.row, point.column + 1))
.is_err());
point.row += 1;
point.column = 0
} else {
@ -454,6 +533,20 @@ mod tests {
}
offset += 1;
}
assert_eq!(actual.to_point(offset).unwrap(), point);
assert!(actual.to_point(offset + 1).is_err());
assert_eq!(actual.to_offset(point).unwrap(), offset);
assert!(actual.to_offset(Point::new(point.row + 1, 0)).is_err());
for _ in 0..5 {
let end_ix = rng.gen_range(0..=expected.chars().count());
let start_ix = rng.gen_range(0..=end_ix);
let byte_range = byte_range_for_char_range(&expected, start_ix..end_ix);
assert_eq!(
actual.cursor(start_ix).summary(end_ix),
TextSummary::from(&expected[byte_range])
);
}
}
}
}


@ -830,7 +830,7 @@ mod tests {
#[gpui::test]
fn test_random_folds(app: &mut gpui::MutableAppContext) {
use crate::editor::ToPoint;
use crate::util::RandomCharIter;
use crate::util::{byte_range_for_char_range, RandomCharIter};
use rand::prelude::*;
use std::env;
@ -905,7 +905,10 @@ mod tests {
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
next_row = fold_start.row;
expected_text.replace_range(fold_range.start..fold_range.end, "");
expected_text.replace_range(
byte_range_for_char_range(&expected_text, fold_range.start..fold_range.end),
"",
);
}
expected_buffer_rows.extend((0..=next_row).rev());
expected_buffer_rows.reverse();


@ -1,5 +1,20 @@
use rand::prelude::*;
use std::cmp::Ordering;
use std::{cmp::Ordering, ops::Range};
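/// Map a range of char indices in `text` to the corresponding range of byte
/// indices; e.g. byte_range_for_char_range("héllo", 1..3) is 1..4, the bytes
/// spanning "él".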
pub fn byte_range_for_char_range(text: impl AsRef<str>, char_range: Range<usize>) -> Range<usize> {
let text = text.as_ref();
let mut result = text.len()..text.len();
for (i, (offset, _)) in text.char_indices().enumerate() {
if i == char_range.start {
result.start = offset;
}
if i == char_range.end {
result.end = offset;
break;
}
}
result
}
pub fn post_inc(value: &mut usize) -> usize {
let prev = *value;
@ -44,7 +59,21 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
fn next(&mut self) -> Option<Self::Item> {
if self.0.gen_bool(1.0 / 5.0) {
Some('\n')
} else {
}
// two-byte greek letters
else if self.0.gen_bool(1.0 / 8.0) {
Some(std::char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))).unwrap())
}
// three-byte characters
else if self.0.gen_bool(1.0 / 10.0) {
['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).cloned()
}
// four-byte characters
else if self.0.gen_bool(1.0 / 12.0) {
['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).cloned()
}
// ascii letters
else {
Some(self.0.gen_range(b'a'..b'z' + 1).into())
}
}