diff --git a/Cargo.lock b/Cargo.lock
index 4932886028..0504fafb62 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -759,20 +759,12 @@ dependencies = [
  "arrayvec 0.7.1",
  "clock",
  "gpui",
- "lazy_static",
  "log",
- "parking_lot",
  "rand 0.8.3",
  "rpc",
  "seahash",
- "serde 1.0.125",
- "similar",
  "smallvec",
  "sum_tree",
- "theme",
- "tree-sitter",
- "tree-sitter-rust",
- "unindent",
 ]
 
 [[package]]
@@ -1632,6 +1624,7 @@ dependencies = [
  "buffer",
  "clock",
  "gpui",
+ "language",
  "lazy_static",
  "log",
  "parking_lot",
@@ -2824,6 +2817,30 @@ dependencies = [
  "log",
 ]
 
+[[package]]
+name = "language"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "buffer",
+ "clock",
+ "futures",
+ "gpui",
+ "lazy_static",
+ "log",
+ "parking_lot",
+ "rand 0.8.3",
+ "rpc",
+ "serde 1.0.125",
+ "similar",
+ "smol",
+ "theme",
+ "tree-sitter",
+ "tree-sitter-rust",
+ "unindent",
+ "util",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -3801,6 +3818,7 @@ dependencies = [
  "fuzzy",
  "gpui",
  "ignore",
+ "language",
  "lazy_static",
  "libc",
  "log",
@@ -6159,6 +6177,7 @@ dependencies = [
  "client",
  "editor",
  "gpui",
+ "language",
  "log",
  "postage",
  "project",
@@ -6229,6 +6248,7 @@ dependencies = [
  "ignore",
  "image 0.23.14",
  "indexmap",
+ "language",
  "lazy_static",
  "libc",
  "log",
diff --git a/crates/buffer/Cargo.toml b/crates/buffer/Cargo.toml
index 541c449d46..e4112c20d5 100644
--- a/crates/buffer/Cargo.toml
+++ b/crates/buffer/Cargo.toml
@@ -4,29 +4,20 @@ version = "0.1.0"
 edition = "2018"
 
 [features]
-test-support = ["rand"]
+test-support = ["rand", "seahash"]
 
 [dependencies]
 clock = { path = "../clock" }
-gpui = { path = "../gpui" }
 rpc = { path = "../rpc" }
 sum_tree = { path = "../sum_tree" }
-theme = { path = "../theme" }
 anyhow = "1.0.38"
 arrayvec = "0.7.1"
-lazy_static = "1.4"
 log = "0.4"
-parking_lot = "0.11.1"
 rand = { version = "0.8.3", optional = true }
-seahash = "4.1"
-serde = { version = "1", features = ["derive"] }
-similar = "1.3"
+seahash = { version = "4.1", optional = true }
 smallvec = { version = "1.6", features = ["union"] }
-tree-sitter = "0.19.5"
 
 [dev-dependencies]
 gpui = { path = "../gpui", features = ["test-support"] }
-
+seahash = "4.1"
 rand = "0.8.3"
-tree-sitter-rust = "0.19.0"
-unindent = "0.1.7"
diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs
index c678918824..1ac82727df 100644
--- a/crates/buffer/src/anchor.rs
+++ b/crates/buffer/src/anchor.rs
@@ -1,3 +1,5 @@
+use crate::Point;
+
 use super::{Buffer, Content};
 use anyhow::Result;
 use std::{cmp::Ordering, ops::Range};
@@ -10,6 +12,24 @@ pub struct Anchor {
     pub version: clock::Global,
 }
 
+#[derive(Clone)]
+pub struct AnchorMap<T> {
+    pub(crate) version: clock::Global,
+    pub(crate) entries: Vec<((usize, Bias), T)>,
+}
+
+#[derive(Clone)]
+pub struct AnchorSet(pub(crate) AnchorMap<()>);
+
+#[derive(Clone)]
+pub struct AnchorRangeMap<T> {
+    pub(crate) version: clock::Global,
+    pub(crate) entries: Vec<(Range<(usize, Bias)>, T)>,
+}
+
+#[derive(Clone)]
+pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
+
 impl Anchor {
     pub fn min() -> Self {
         Self {
@@ -62,6 +82,60 @@ impl Anchor {
     }
 }
 
+impl<T> AnchorMap<T> {
+    pub fn to_points<'a>(
+        &'a self,
+        content: impl Into<Content<'a>> + 'a,
+    ) -> impl Iterator<Item = (Point, &'a T)> + 'a {
+        let content = content.into();
+        content
+            .summaries_for_anchors(self)
+            .map(move |(sum, value)| (sum.lines, value))
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+}
+
+impl AnchorSet {
+    pub fn to_points<'a>(
+        &'a self,
+        content: impl Into<Content<'a>> + 'a,
+    ) -> impl Iterator<Item = Point> + 'a {
+        self.0.to_points(content).map(move |(point, _)| point)
+    }
+}
+
+impl<T> AnchorRangeMap<T> {
+    pub fn to_point_ranges<'a>(
+        &'a self,
+        content: impl Into<Content<'a>> + 'a,
+    ) -> impl Iterator<Item = (Range<Point>, &'a T)> + 'a {
+        let content = content.into();
+        content
+            .summaries_for_anchor_ranges(self)
+            .map(move |(range, value)| ((range.start.lines..range.end.lines), value))
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+}
+
+impl AnchorRangeSet {
+    pub fn to_point_ranges<'a>(
+        &'a self,
+        content: impl Into<Content<'a>> + 'a,
+    ) -> impl Iterator<Item = Range<Point>> + 'a {
+        self.0.to_point_ranges(content).map(|(range, _)| range)
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        self.0.version()
+    }
+}
+
 pub trait AnchorRangeExt {
     fn cmp<'a>(&self, b: &Range<Anchor>, buffer: impl Into<Content<'a>>) -> Result<Ordering>;
 }
diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs
index a2f34a5d78..a5771ad4c0 100644
--- a/crates/buffer/src/lib.rs
+++ b/crates/buffer/src/lib.rs
@@ -1,96 +1,45 @@
 mod anchor;
-mod highlight_map;
-mod language;
 mod operation_queue;
 mod point;
 #[cfg(any(test, feature = "test-support"))]
 pub mod random_char_iter;
 pub mod rope;
 mod selection;
+#[cfg(test)]
+mod tests;
 
 pub use anchor::*;
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
-use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
-pub use highlight_map::{HighlightId, HighlightMap};
-use language::Tree;
-pub use language::{AutoclosePair, Language, LanguageConfig, LanguageRegistry};
-use lazy_static::lazy_static;
 use operation_queue::OperationQueue;
-use parking_lot::Mutex;
 pub use point::*;
 #[cfg(any(test, feature = "test-support"))]
 pub use random_char_iter::*;
 pub use rope::{Chunks, Rope, TextSummary};
 use rpc::proto;
-use seahash::SeaHasher;
 pub use selection::*;
-use similar::{ChangeTag, TextDiff};
 use std::{
-    any::Any,
-    cell::RefCell,
     cmp,
     convert::{TryFrom, TryInto},
-    ffi::OsString,
-    hash::BuildHasher,
     iter::Iterator,
-    ops::{Deref, DerefMut, Range},
-    path::{Path, PathBuf},
+    ops::Range,
     str,
     sync::Arc,
-    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
+    time::{Duration, Instant},
 };
-use sum_tree::{Bias, FilterCursor, SumTree};
-use tree_sitter::{InputEdit, Parser, QueryCursor};
-
-pub trait File {
-    fn worktree_id(&self) -> usize;
-
-    fn entry_id(&self) -> Option<usize>;
-
-    fn set_entry_id(&mut self, entry_id: Option<usize>);
-
-    fn mtime(&self) -> SystemTime;
-
-    fn set_mtime(&mut self, mtime: SystemTime);
-
-    fn path(&self) -> &Arc<Path>;
-
-    fn set_path(&mut self, path: Arc<Path>);
-
-    fn full_path(&self, cx: &AppContext) -> PathBuf;
-
-    /// Returns the last component of this handle's absolute path. If this handle refers to the root
-    /// of its worktree, then this method will return the name of the worktree itself.
- fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option; - - fn is_deleted(&self) -> bool; - - fn save( - &self, - buffer_id: u64, - text: Rope, - version: clock::Global, - cx: &mut MutableAppContext, - ) -> Task>; - - fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); - - fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); - - fn boxed_clone(&self) -> Box; - - fn as_any(&self) -> &dyn Any; -} +pub use sum_tree::Bias; +use sum_tree::{FilterCursor, SumTree}; +#[cfg(any(test, feature = "test-support"))] #[derive(Clone, Default)] struct DeterministicState; -impl BuildHasher for DeterministicState { - type Hasher = SeaHasher; +#[cfg(any(test, feature = "test-support"))] +impl std::hash::BuildHasher for DeterministicState { + type Hasher = seahash::SeaHasher; fn build_hasher(&self) -> Self::Hasher { - SeaHasher::new() + seahash::SeaHasher::new() } } @@ -106,66 +55,15 @@ type HashMap = std::collections::HashMap; #[cfg(not(any(test, feature = "test-support")))] type HashSet = std::collections::HashSet; -thread_local! { - static PARSER: RefCell = RefCell::new(Parser::new()); -} - -lazy_static! { - static ref QUERY_CURSORS: Mutex> = Default::default(); -} - -struct QueryCursorHandle(Option); - -impl QueryCursorHandle { - fn new() -> Self { - QueryCursorHandle(Some( - QUERY_CURSORS - .lock() - .pop() - .unwrap_or_else(|| QueryCursor::new()), - )) - } -} - -impl Deref for QueryCursorHandle { - type Target = QueryCursor; - - fn deref(&self) -> &Self::Target { - self.0.as_ref().unwrap() - } -} - -impl DerefMut for QueryCursorHandle { - fn deref_mut(&mut self) -> &mut Self::Target { - self.0.as_mut().unwrap() - } -} - -impl Drop for QueryCursorHandle { - fn drop(&mut self) { - let mut cursor = self.0.take().unwrap(); - cursor.set_byte_range(0..usize::MAX); - cursor.set_point_range(Point::zero().into()..Point::MAX.into()); - QUERY_CURSORS.lock().push(cursor) - } -} - +#[derive(Clone)] pub struct Buffer { fragments: SumTree, visible_text: Rope, deleted_text: Rope, pub version: clock::Global, - saved_version: clock::Global, - saved_mtime: SystemTime, last_edit: clock::Local, undo_map: UndoMap, history: History, - file: Option>, - language: Option>, - sync_parse_timeout: Duration, - syntax_tree: Mutex>, - parsing_in_background: bool, - parse_count: usize, selections: HashMap, deferred_ops: OperationQueue, deferred_replicas: HashSet, @@ -173,8 +71,6 @@ pub struct Buffer { remote_id: u64, local_clock: clock::Local, lamport_clock: clock::Lamport, - #[cfg(test)] - operations: Vec, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -183,27 +79,23 @@ pub struct SelectionSet { pub active: bool, } -#[derive(Clone)] -struct SyntaxTree { - tree: Tree, - dirty: bool, - version: clock::Global, -} - #[derive(Clone, Debug)] -struct Transaction { +pub struct Transaction { start: clock::Global, end: clock::Global, - buffer_was_dirty: bool, edits: Vec, ranges: Vec>, - selections_before: Option<(SelectionSetId, Arc<[Selection]>)>, - selections_after: Option<(SelectionSetId, Arc<[Selection]>)>, + selections_before: HashMap>, + selections_after: HashMap>, first_edit_at: Instant, last_edit_at: Instant, } impl Transaction { + pub fn starting_selection_set_ids<'a>(&'a self) -> impl Iterator + 'a { + self.selections_before.keys().copied() + } + fn push_edit(&mut self, edit: &EditOperation) { self.edits.push(edit.timestamp.local()); self.end.observe(edit.timestamp.local()); @@ -281,8 +173,7 @@ impl History { fn start_transaction( &mut self, start: clock::Global, - 
buffer_was_dirty: bool, - selections: Option<(SelectionSetId, Arc<[Selection]>)>, + selections_before: HashMap>, now: Instant, ) { self.transaction_depth += 1; @@ -290,11 +181,10 @@ impl History { self.undo_stack.push(Transaction { start: start.clone(), end: start, - buffer_was_dirty, edits: Vec::new(), ranges: Vec::new(), - selections_before: selections, - selections_after: None, + selections_before, + selections_after: Default::default(), first_edit_at: now, last_edit_at: now, }); @@ -303,16 +193,21 @@ impl History { fn end_transaction( &mut self, - selections: Option<(SelectionSetId, Arc<[Selection]>)>, + selections_after: HashMap>, now: Instant, ) -> Option<&Transaction> { assert_ne!(self.transaction_depth, 0); self.transaction_depth -= 1; if self.transaction_depth == 0 { - let transaction = self.undo_stack.last_mut().unwrap(); - transaction.selections_after = selections; - transaction.last_edit_at = now; - Some(transaction) + if self.undo_stack.last().unwrap().ranges.is_empty() { + self.undo_stack.pop(); + None + } else { + let transaction = self.undo_stack.last_mut().unwrap(); + transaction.selections_after = selections_after; + transaction.last_edit_at = now; + Some(transaction) + } } else { None } @@ -345,7 +240,9 @@ impl History { if let Some(transaction) = transactions_to_merge.last_mut() { last_transaction.last_edit_at = transaction.last_edit_at; - last_transaction.selections_after = transaction.selections_after.take(); + last_transaction + .selections_after + .extend(transaction.selections_after.drain()); last_transaction.end = transaction.end.clone(); } } @@ -455,12 +352,6 @@ impl Edit { } } -struct Diff { - base_version: clock::Global, - new_text: Arc, - changes: Vec<(ChangeTag, usize)>, -} - #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] struct InsertionTimestamp { replica_id: ReplicaId, @@ -551,53 +442,7 @@ pub struct UndoOperation { } impl Buffer { - pub fn new>>( - replica_id: ReplicaId, - base_text: T, - cx: &mut ModelContext, - ) -> Self { - Self::build( - replica_id, - History::new(base_text.into()), - None, - cx.model_id() as u64, - None, - cx, - ) - } - - pub fn from_history( - replica_id: ReplicaId, - history: History, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - Self::build( - replica_id, - history, - file, - cx.model_id() as u64, - language, - cx, - ) - } - - fn build( - replica_id: ReplicaId, - history: History, - file: Option>, - remote_id: u64, - language: Option>, - cx: &mut ModelContext, - ) -> Self { - let saved_mtime; - if let Some(file) = file.as_ref() { - saved_mtime = file.mtime(); - } else { - saved_mtime = UNIX_EPOCH; - } - + pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer { let mut fragments = SumTree::new(); let visible_text = Rope::from(history.base_text.as_ref()); @@ -614,22 +459,14 @@ impl Buffer { ); } - let mut result = Self { + Buffer { visible_text, deleted_text: Rope::new(), fragments, version: clock::Global::new(), - saved_version: clock::Global::new(), last_edit: clock::Local::default(), undo_map: Default::default(), history, - file, - syntax_tree: Mutex::new(None), - parsing_in_background: false, - parse_count: 0, - sync_parse_timeout: Duration::from_millis(1), - language, - saved_mtime, selections: HashMap::default(), deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), @@ -637,50 +474,16 @@ impl Buffer { remote_id, local_clock: clock::Local::new(replica_id), lamport_clock: clock::Lamport::new(replica_id), - - #[cfg(test)] - operations: 
Default::default(), - }; - result.reparse(cx); - result - } - - pub fn replica_id(&self) -> ReplicaId { - self.local_clock.replica_id - } - - pub fn snapshot(&self) -> Snapshot { - Snapshot { - visible_text: self.visible_text.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - tree: self.syntax_tree(), - is_parsing: self.parsing_in_background, - language: self.language.clone(), - query_cursor: QueryCursorHandle::new(), } } - pub fn from_proto( - replica_id: ReplicaId, - message: proto::Buffer, - file: Option>, - language: Option>, - cx: &mut ModelContext, - ) -> Result { - let mut buffer = Buffer::build( - replica_id, - History::new(message.content.into()), - file, - message.id, - language, - cx, - ); + pub fn from_proto(replica_id: u16, message: proto::Buffer) -> Result { + let mut buffer = Buffer::new(replica_id, message.id, History::new(message.content.into())); let ops = message .history .into_iter() .map(|op| Operation::Edit(op.into())); - buffer.apply_ops(ops, cx)?; + buffer.apply_ops(ops)?; buffer.selections = message .selections .into_iter() @@ -704,10 +507,10 @@ impl Buffer { Ok(buffer) } - pub fn to_proto(&self, cx: &mut ModelContext) -> proto::Buffer { + pub fn to_proto(&self) -> proto::Buffer { let ops = self.history.ops.values().map(Into::into).collect(); proto::Buffer { - id: cx.model_id() as u64, + id: self.remote_id, content: self.history.base_text.to_string(), history: ops, selections: self @@ -723,354 +526,62 @@ impl Buffer { } } - pub fn file(&self) -> Option<&dyn File> { - self.file.as_deref() + pub fn version(&self) -> clock::Global { + self.version.clone() } - pub fn file_mut(&mut self) -> Option<&mut dyn File> { - self.file.as_mut().map(|f| f.deref_mut() as &mut dyn File) + pub fn snapshot(&self) -> Snapshot { + Snapshot { + visible_text: self.visible_text.clone(), + fragments: self.fragments.clone(), + version: self.version.clone(), + } } - pub fn save( - &mut self, - cx: &mut ModelContext, - ) -> Result>> { - let file = self - .file - .as_ref() - .ok_or_else(|| anyhow!("buffer has no file"))?; - let text = self.visible_text.clone(); - let version = self.version.clone(); - let save = file.save(self.remote_id, text, version, cx.as_mut()); - Ok(cx.spawn(|this, mut cx| async move { - let (version, mtime) = save.await?; - this.update(&mut cx, |this, cx| { - this.did_save(version.clone(), mtime, None, cx); - }); - Ok((version, mtime)) - })) + pub fn content<'a>(&'a self) -> Content<'a> { + self.into() } pub fn as_rope(&self) -> &Rope { &self.visible_text } - pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { - self.language = language; - self.reparse(cx); + pub fn text_summary_for_range(&self, range: Range) -> TextSummary { + self.content().text_summary_for_range(range) } - pub fn did_save( - &mut self, - version: clock::Global, - mtime: SystemTime, - new_file: Option>, - cx: &mut ModelContext, - ) { - self.saved_mtime = mtime; - self.saved_version = version; - if let Some(new_file) = new_file { - self.file = Some(new_file); - } - cx.emit(Event::Saved); + pub fn anchor_before(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Left) } - pub fn file_updated( - &mut self, - path: Arc, - mtime: SystemTime, - new_text: Option, - cx: &mut ModelContext, - ) { - let file = self.file.as_mut().unwrap(); - let mut changed = false; - if path != *file.path() { - file.set_path(path); - changed = true; - } - - if mtime != file.mtime() { - file.set_mtime(mtime); - changed = true; - if let Some(new_text) = new_text { - if 
self.version == self.saved_version { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) - .await; - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - this.saved_mtime = mtime; - cx.emit(Event::Reloaded); - } - }); - }) - .detach(); - } - } - } - - if changed { - cx.emit(Event::FileHandleChanged); - } + pub fn anchor_after(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Right) } - pub fn file_deleted(&mut self, cx: &mut ModelContext) { - if self.version == self.saved_version { - cx.emit(Event::Dirtied); - } - cx.emit(Event::FileHandleChanged); + pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { + self.content().anchor_at(position, bias) } - pub fn close(&mut self, cx: &mut ModelContext) { - cx.emit(Event::Closed); + pub fn point_for_offset(&self, offset: usize) -> Result { + self.content().point_for_offset(offset) } - pub fn language(&self) -> Option<&Arc> { - self.language.as_ref() + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + self.visible_text.clip_point(point, bias) } - pub fn parse_count(&self) -> usize { - self.parse_count + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + self.visible_text.clip_offset(offset, bias) } - pub fn syntax_tree(&self) -> Option { - if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { - let mut delta = 0_isize; - for edit in self.edits_since(syntax_tree.version.clone()) { - let start_offset = (edit.old_bytes.start as isize + delta) as usize; - let start_point = self.visible_text.to_point(start_offset); - syntax_tree.tree.edit(&InputEdit { - start_byte: start_offset, - old_end_byte: start_offset + edit.deleted_bytes(), - new_end_byte: start_offset + edit.inserted_bytes(), - start_position: start_point.into(), - old_end_position: (start_point + edit.deleted_lines()).into(), - new_end_position: self - .visible_text - .to_point(start_offset + edit.inserted_bytes()) - .into(), - }); - delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; - syntax_tree.dirty = true; - } - syntax_tree.version = self.version(); - Some(syntax_tree.tree.clone()) - } else { - None - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn is_parsing(&self) -> bool { - self.parsing_in_background - } - - #[cfg(test)] - pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { - self.sync_parse_timeout = timeout; - } - - fn reparse(&mut self, cx: &mut ModelContext) -> bool { - if self.parsing_in_background { - return false; - } - - if let Some(language) = self.language.clone() { - // The parse tree is out of date, so grab the syntax tree to synchronously - // splice all the edits that have happened since the last parse. 
- let old_tree = self.syntax_tree(); - let parsed_text = self.visible_text.clone(); - let parsed_version = self.version(); - let parse_task = cx.background().spawn({ - let language = language.clone(); - async move { Self::parse_text(&parsed_text, old_tree, &language) } - }); - - match cx - .background() - .block_with_timeout(self.sync_parse_timeout, parse_task) - { - Ok(new_tree) => { - *self.syntax_tree.lock() = Some(SyntaxTree { - tree: new_tree, - dirty: false, - version: parsed_version, - }); - self.parse_count += 1; - cx.emit(Event::Reparsed); - cx.notify(); - return true; - } - Err(parse_task) => { - self.parsing_in_background = true; - cx.spawn(move |this, mut cx| async move { - let new_tree = parse_task.await; - this.update(&mut cx, move |this, cx| { - let language_changed = - this.language.as_ref().map_or(true, |curr_language| { - !Arc::ptr_eq(curr_language, &language) - }); - let parse_again = this.version > parsed_version || language_changed; - *this.syntax_tree.lock() = Some(SyntaxTree { - tree: new_tree, - dirty: false, - version: parsed_version, - }); - this.parse_count += 1; - this.parsing_in_background = false; - - if parse_again && this.reparse(cx) { - return; - } - - cx.emit(Event::Reparsed); - cx.notify(); - }); - }) - .detach(); - } - } - } - false - } - - fn parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { - PARSER.with(|parser| { - let mut parser = parser.borrow_mut(); - parser - .set_language(language.grammar) - .expect("incompatible grammar"); - let mut chunks = text.chunks_in_range(0..text.len()); - let tree = parser - .parse_with( - &mut move |offset, _| { - chunks.seek(offset); - chunks.next().unwrap_or("").as_bytes() - }, - old_tree.as_ref(), - ) - .unwrap(); - tree - }) - } - - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { - if let Some(tree) = self.syntax_tree() { - let root = tree.root_node(); - let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut node = root.descendant_for_byte_range(range.start, range.end); - while node.map_or(false, |n| n.byte_range() == range) { - node = node.unwrap().parent(); - } - node.map(|n| n.byte_range()) - } else { - None - } - } - - pub fn enclosing_bracket_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; - let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; - let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; - - // Find bracket pairs that *inclusively* contain the given range. - let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; - let mut cursor = QueryCursorHandle::new(); - let matches = cursor.set_byte_range(range).matches( - &lang.brackets_query, - tree.root_node(), - TextProvider(&self.visible_text), - ); - - // Get the ranges of the innermost pair of brackets. - matches - .filter_map(|mat| { - let open = mat.nodes_for_capture_index(open_capture_ix).next()?; - let close = mat.nodes_for_capture_index(close_capture_ix).next()?; - Some((open.byte_range(), close.byte_range())) - }) - .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) - } - - fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { - // TODO: it would be nice to not allocate here. 
- let old_text = self.text(); - let base_version = self.version(); - cx.background().spawn(async move { - let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref()) - .iter_all_changes() - .map(|c| (c.tag(), c.value().len())) - .collect::>(); - Diff { - base_version, - new_text, - changes, - } - }) - } - - pub fn set_text_from_disk(&self, new_text: Arc, cx: &mut ModelContext) -> Task<()> { - cx.spawn(|this, mut cx| async move { - let diff = this - .read_with(&cx, |this, cx| this.diff(new_text, cx)) - .await; - - this.update(&mut cx, |this, cx| { - if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); - } - }); - }) - } - - fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { - if self.version == diff.base_version { - self.start_transaction(None).unwrap(); - let mut offset = 0; - for (tag, len) in diff.changes { - let range = offset..(offset + len); - match tag { - ChangeTag::Equal => offset += len, - ChangeTag::Delete => self.edit(Some(range), "", cx), - ChangeTag::Insert => { - self.edit(Some(offset..offset), &diff.new_text[range], cx); - offset += len; - } - } - } - self.end_transaction(None, cx).unwrap(); - true - } else { - false - } - } - - pub fn is_dirty(&self) -> bool { - self.version > self.saved_version - || self.file.as_ref().map_or(false, |file| file.is_deleted()) - } - - pub fn has_conflict(&self) -> bool { - self.version > self.saved_version - && self - .file - .as_ref() - .map_or(false, |file| file.mtime() > self.saved_mtime) + pub fn replica_id(&self) -> ReplicaId { + self.local_clock.replica_id } pub fn remote_id(&self) -> u64 { self.remote_id } - pub fn version(&self) -> clock::Global { - self.version.clone() - } - pub fn text_summary(&self) -> TextSummary { self.visible_text.summary() } @@ -1096,18 +607,29 @@ impl Buffer { } pub fn text_for_range<'a, T: ToOffset>(&'a self, range: Range) -> Chunks<'a> { - let start = range.start.to_offset(self); - let end = range.end.to_offset(self); - self.visible_text.chunks_in_range(start..end) + self.content().text_for_range(range) } pub fn chars(&self) -> impl Iterator + '_ { self.chars_at(0) } - pub fn chars_at(&self, position: T) -> impl Iterator + '_ { - let offset = position.to_offset(self); - self.visible_text.chars_at(offset) + pub fn chars_at<'a, T: 'a + ToOffset>( + &'a self, + position: T, + ) -> impl Iterator + 'a { + self.content().chars_at(position) + } + + pub fn reversed_chars_at<'a, T: 'a + ToOffset>( + &'a self, + position: T, + ) -> impl Iterator + 'a { + self.content().reversed_chars_at(position) + } + + pub fn chars_for_range(&self, range: Range) -> impl Iterator + '_ { + self.text_for_range(range).flat_map(str::chars) } pub fn bytes_at(&self, position: T) -> impl Iterator + '_ { @@ -1127,271 +649,169 @@ impl Buffer { .eq(needle.bytes()) } - pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { - let since_2 = since.clone(); - let cursor = if since == self.version { - None - } else { - Some(self.fragments.filter( - move |summary| summary.max_version.changed_since(&since_2), - &None, - )) - }; - - Edits { - visible_text: &self.visible_text, - deleted_text: &self.deleted_text, - cursor, - undos: &self.undo_map, - since, - old_offset: 0, - new_offset: 0, - old_point: Point::zero(), - new_point: Point::zero(), - } - } - pub fn deferred_ops_len(&self) -> usize { self.deferred_ops.len() } - pub fn start_transaction(&mut self, set_id: Option) -> Result<()> { - self.start_transaction_at(set_id, Instant::now()) - } - - fn start_transaction_at(&mut 
self, set_id: Option, now: Instant) -> Result<()> { - let selections = if let Some(set_id) = set_id { - let set = self - .selections - .get(&set_id) - .ok_or_else(|| anyhow!("invalid selection set {:?}", set_id))?; - Some((set_id, set.selections.clone())) - } else { - None - }; - self.history - .start_transaction(self.version.clone(), self.is_dirty(), selections, now); - Ok(()) - } - - pub fn end_transaction( - &mut self, - set_id: Option, - cx: &mut ModelContext, - ) -> Result<()> { - self.end_transaction_at(set_id, Instant::now(), cx) - } - - fn end_transaction_at( - &mut self, - set_id: Option, - now: Instant, - cx: &mut ModelContext, - ) -> Result<()> { - let selections = if let Some(set_id) = set_id { - let set = self - .selections - .get(&set_id) - .ok_or_else(|| anyhow!("invalid selection set {:?}", set_id))?; - Some((set_id, set.selections.clone())) - } else { - None - }; - - if let Some(transaction) = self.history.end_transaction(selections, now) { - let since = transaction.start.clone(); - let was_dirty = transaction.buffer_was_dirty; - self.history.group(); - - cx.notify(); - if self.edits_since(since).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - } - - Ok(()) - } - - pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + pub fn edit(&mut self, ranges: R, new_text: T) -> EditOperation where - I: IntoIterator>, + R: IntoIterator, + I: ExactSizeIterator>, S: ToOffset, T: Into, { let new_text = new_text.into(); - let new_text = if new_text.len() > 0 { + let new_text_len = new_text.len(); + let new_text = if new_text_len > 0 { Some(new_text) } else { None }; - let has_new_text = new_text.is_some(); - // Skip invalid ranges and coalesce contiguous ones. - let mut ranges: Vec> = Vec::new(); - for range in ranges_iter { - let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); - if has_new_text || !range.is_empty() { - if let Some(prev_range) = ranges.last_mut() { - if prev_range.end >= range.start { - prev_range.end = cmp::max(prev_range.end, range.end); - } else { - ranges.push(range); - } - } else { - ranges.push(range); - } - } - } - - if !ranges.is_empty() { - self.start_transaction_at(None, Instant::now()).unwrap(); - let timestamp = InsertionTimestamp { - replica_id: self.replica_id, - local: self.local_clock.tick().value, - lamport: self.lamport_clock.tick().value, - }; - let edit = self.apply_local_edit(&ranges, new_text, timestamp); - - self.history.push(edit.clone()); - self.history.push_undo(edit.timestamp.local()); - self.last_edit = edit.timestamp.local(); - self.version.observe(edit.timestamp.local()); - - self.end_transaction_at(None, Instant::now(), cx).unwrap(); - self.send_operation(Operation::Edit(edit), cx); + self.start_transaction(None).unwrap(); + let timestamp = InsertionTimestamp { + replica_id: self.replica_id, + local: self.local_clock.tick().value, + lamport: self.lamport_clock.tick().value, }; + let edit = self.apply_local_edit(ranges.into_iter(), new_text, timestamp); + + self.history.push(edit.clone()); + self.history.push_undo(edit.timestamp.local()); + self.last_edit = edit.timestamp.local(); + self.version.observe(edit.timestamp.local()); + self.end_transaction(None); + edit } - fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { - cx.emit(Event::Edited); - if !was_dirty { - cx.emit(Event::Dirtied); - } - } - - pub fn add_selection_set( + fn apply_local_edit( &mut self, - selections: impl Into>, - cx: &mut ModelContext, - ) -> SelectionSetId { - let selections = 
selections.into(); - let lamport_timestamp = self.lamport_clock.tick(); - self.selections.insert( - lamport_timestamp, - SelectionSet { - selections: selections.clone(), - active: false, - }, - ); - cx.notify(); + ranges: impl ExactSizeIterator>, + new_text: Option, + timestamp: InsertionTimestamp, + ) -> EditOperation { + let mut edit = EditOperation { + timestamp, + version: self.version(), + ranges: Vec::with_capacity(ranges.len()), + new_text: None, + }; - self.send_operation( - Operation::UpdateSelections { - set_id: lamport_timestamp, - selections: Some(selections), - lamport_timestamp, - }, - cx, - ); + let mut ranges = ranges + .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) + .peekable(); - lamport_timestamp - } + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = + old_fragments.slice(&ranges.peek().unwrap().start, Bias::Right, &None); + new_ropes.push_tree(new_fragments.summary().text); - pub fn update_selection_set( - &mut self, - set_id: SelectionSetId, - selections: impl Into>, - cx: &mut ModelContext, - ) -> Result<()> { - let selections = selections.into(); - let set = self - .selections - .get_mut(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - set.selections = selections.clone(); - let lamport_timestamp = self.lamport_clock.tick(); - cx.notify(); - self.send_operation( - Operation::UpdateSelections { - set_id, - selections: Some(selections), - lamport_timestamp, - }, - cx, - ); - Ok(()) - } + let mut fragment_start = old_fragments.start().visible; + for range in ranges { + let fragment_end = old_fragments.end(&None).visible; - pub fn set_active_selection_set( - &mut self, - set_id: Option, - cx: &mut ModelContext, - ) -> Result<()> { - if let Some(set_id) = set_id { - assert_eq!(set_id.replica_id, self.replica_id()); - } + // If the current fragment ends before this range, then jump ahead to the first fragment + // that extends past the start of this range, reusing any intervening fragments. + if fragment_end < range.start { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. + if fragment_start > old_fragments.start().visible { + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&None); + } - for (id, set) in &mut self.selections { - if id.replica_id == self.local_clock.replica_id { - if Some(*id) == set_id { - set.active = true; - } else { - set.active = false; + let slice = old_fragments.slice(&range.start, Bias::Right, &None); + new_ropes.push_tree(slice.summary().text); + new_fragments.push_tree(slice, &None); + fragment_start = old_fragments.start().visible; + } + + let full_range_start = range.start + old_fragments.start().deleted; + + // Preserve any portion of the current fragment that precedes this range. + if fragment_start < range.start { + let mut prefix = old_fragments.item().unwrap().clone(); + prefix.len = range.start - fragment_start; + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix, &None); + fragment_start = range.start; + } + + // Insert the new text before any existing fragments within the range. 
+ if let Some(new_text) = new_text.as_deref() { + new_ropes.push_str(new_text); + new_fragments.push( + Fragment { + timestamp, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, + &None, + ); + } + + // Advance through every fragment that intersects this range, marking the intersecting + // portions as deleted. + while fragment_start < range.end { + let fragment = old_fragments.item().unwrap(); + let fragment_end = old_fragments.end(&None).visible; + let mut intersection = fragment.clone(); + let intersection_end = cmp::min(range.end, fragment_end); + if fragment.visible { + intersection.len = intersection_end - fragment_start; + intersection.deletions.insert(timestamp.local()); + intersection.visible = false; + } + if intersection.len > 0 { + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); + fragment_start = intersection_end; + } + if fragment_end <= range.end { + old_fragments.next(&None); } } + + let full_range_end = range.end + old_fragments.start().deleted; + edit.ranges.push(full_range_start..full_range_end); } - let lamport_timestamp = self.lamport_clock.tick(); - self.send_operation( - Operation::SetActiveSelections { - set_id, - lamport_timestamp, - }, - cx, - ); - Ok(()) + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. + if fragment_start > old_fragments.start().visible { + let fragment_end = old_fragments.end(&None).visible; + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + old_fragments.next(&None); + } + + let suffix = old_fragments.suffix(&None); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); + let (visible_text, deleted_text) = new_ropes.finish(); + drop(old_fragments); + + self.fragments = new_fragments; + self.visible_text = visible_text; + self.deleted_text = deleted_text; + edit.new_text = new_text; + edit } - pub fn remove_selection_set( - &mut self, - set_id: SelectionSetId, - cx: &mut ModelContext, - ) -> Result<()> { - self.selections - .remove(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - let lamport_timestamp = self.lamport_clock.tick(); - cx.notify(); - self.send_operation( - Operation::UpdateSelections { - set_id, - selections: None, - lamport_timestamp, - }, - cx, - ); - Ok(()) - } - - pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { - self.selections - .get(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) - } - - pub fn selection_sets(&self) -> impl Iterator { - self.selections.iter() - } - - pub fn apply_ops>( - &mut self, - ops: I, - cx: &mut ModelContext, - ) -> Result<()> { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - + pub fn apply_ops>(&mut self, ops: I) -> Result<()> { let mut deferred_ops = Vec::new(); for op in ops { if self.can_apply_op(&op) { @@ -1403,13 +823,6 @@ impl Buffer { } self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops()?; - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - Ok(()) } @@ -1625,90 +1038,6 @@ impl Buffer { self.lamport_clock.observe(timestamp.lamport()); } - #[cfg(not(test))] - pub fn 
send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - if let Some(file) = &self.file { - file.buffer_updated(self.remote_id, operation, cx.as_mut()); - } - } - - #[cfg(test)] - pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { - self.operations.push(operation); - } - - pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { - self.selections - .retain(|set_id, _| set_id.replica_id != replica_id); - cx.notify(); - } - - pub fn undo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - if let Some(transaction) = self.history.pop_undo().cloned() { - let selections = transaction.selections_before.clone(); - self.undo_or_redo(transaction, cx).unwrap(); - if let Some((set_id, selections)) = selections { - let _ = self.update_selection_set(set_id, selections, cx); - } - } - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - } - - pub fn redo(&mut self, cx: &mut ModelContext) { - let was_dirty = self.is_dirty(); - let old_version = self.version.clone(); - - if let Some(transaction) = self.history.pop_redo().cloned() { - let selections = transaction.selections_after.clone(); - self.undo_or_redo(transaction, cx).unwrap(); - if let Some((set_id, selections)) = selections { - let _ = self.update_selection_set(set_id, selections, cx); - } - } - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - } - - fn undo_or_redo( - &mut self, - transaction: Transaction, - cx: &mut ModelContext, - ) -> Result<()> { - let mut counts = HashMap::default(); - for edit_id in transaction.edits { - counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); - } - - let undo = UndoOperation { - id: self.local_clock.tick(), - counts, - ranges: transaction.ranges, - version: transaction.start.clone(), - }; - self.apply_undo(&undo)?; - self.version.observe(undo.id); - - let operation = Operation::Undo { - undo, - lamport_timestamp: self.lamport_clock.tick(), - }; - self.send_operation(operation, cx); - - Ok(()) - } - fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { self.undo_map.insert(undo); @@ -1823,158 +1152,218 @@ impl Buffer { } } - fn apply_local_edit( + pub fn peek_undo_stack(&self) -> Option<&Transaction> { + self.history.undo_stack.last() + } + + pub fn start_transaction( &mut self, - ranges: &[Range], - new_text: Option, - timestamp: InsertionTimestamp, - ) -> EditOperation { - let mut edit = EditOperation { - timestamp, - version: self.version(), - ranges: Vec::with_capacity(ranges.len()), - new_text: None, + selection_set_ids: impl IntoIterator, + ) -> Result<()> { + self.start_transaction_at(selection_set_ids, Instant::now()) + } + + pub fn start_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + ) -> Result<()> { + let selections = selection_set_ids + .into_iter() + .map(|set_id| { + let set = self + .selections + .get(&set_id) + .expect("invalid selection set id"); + (set_id, set.selections.clone()) + }) + .collect(); + self.history + .start_transaction(self.version.clone(), selections, now); + Ok(()) + } + + pub fn end_transaction(&mut self, selection_set_ids: impl IntoIterator) { + self.end_transaction_at(selection_set_ids, Instant::now()); + } + + pub fn end_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + ) -> Option { + let selections = 
selection_set_ids + .into_iter() + .map(|set_id| { + let set = self + .selections + .get(&set_id) + .expect("invalid selection set id"); + (set_id, set.selections.clone()) + }) + .collect(); + + if let Some(transaction) = self.history.end_transaction(selections, now) { + let since = transaction.start.clone(); + self.history.group(); + Some(since) + } else { + None + } + } + + pub fn remove_peer(&mut self, replica_id: ReplicaId) { + self.selections + .retain(|set_id, _| set_id.replica_id != replica_id) + } + + pub fn undo(&mut self) -> Vec { + let mut ops = Vec::new(); + if let Some(transaction) = self.history.pop_undo().cloned() { + let selections = transaction.selections_before.clone(); + ops.push(self.undo_or_redo(transaction).unwrap()); + for (set_id, selections) in selections { + ops.extend(self.update_selection_set(set_id, selections)); + } + } + ops + } + + pub fn redo(&mut self) -> Vec { + let mut ops = Vec::new(); + if let Some(transaction) = self.history.pop_redo().cloned() { + let selections = transaction.selections_after.clone(); + ops.push(self.undo_or_redo(transaction).unwrap()); + for (set_id, selections) in selections { + ops.extend(self.update_selection_set(set_id, selections)); + } + } + ops + } + + fn undo_or_redo(&mut self, transaction: Transaction) -> Result { + let mut counts = HashMap::default(); + for edit_id in transaction.edits { + counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); + } + + let undo = UndoOperation { + id: self.local_clock.tick(), + counts, + ranges: transaction.ranges, + version: transaction.start.clone(), + }; + self.apply_undo(&undo)?; + self.version.observe(undo.id); + + Ok(Operation::Undo { + undo, + lamport_timestamp: self.lamport_clock.tick(), + }) + } + + pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { + self.selections + .get(&set_id) + .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) + } + + pub fn selection_sets(&self) -> impl Iterator { + self.selections.iter() + } + + pub fn update_selection_set( + &mut self, + set_id: SelectionSetId, + selections: impl Into>, + ) -> Result { + let selections = selections.into(); + let set = self + .selections + .get_mut(&set_id) + .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; + set.selections = selections.clone(); + Ok(Operation::UpdateSelections { + set_id, + selections: Some(selections), + lamport_timestamp: self.lamport_clock.tick(), + }) + } + + pub fn add_selection_set(&mut self, selections: impl Into>) -> Operation { + let selections = selections.into(); + let lamport_timestamp = self.lamport_clock.tick(); + self.selections.insert( + lamport_timestamp, + SelectionSet { + selections: selections.clone(), + active: false, + }, + ); + Operation::UpdateSelections { + set_id: lamport_timestamp, + selections: Some(selections), + lamport_timestamp, + } + } + + pub fn set_active_selection_set( + &mut self, + set_id: Option, + ) -> Result { + if let Some(set_id) = set_id { + assert_eq!(set_id.replica_id, self.replica_id()); + } + + for (id, set) in &mut self.selections { + if id.replica_id == self.local_clock.replica_id { + if Some(*id) == set_id { + set.active = true; + } else { + set.active = false; + } + } + } + + Ok(Operation::SetActiveSelections { + set_id, + lamport_timestamp: self.lamport_clock.tick(), + }) + } + + pub fn remove_selection_set(&mut self, set_id: SelectionSetId) -> Result { + self.selections + .remove(&set_id) + .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; + 
Ok(Operation::UpdateSelections { + set_id, + selections: None, + lamport_timestamp: self.lamport_clock.tick(), + }) + } + + pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { + let since_2 = since.clone(); + let cursor = if since == self.version { + None + } else { + Some(self.fragments.filter( + move |summary| summary.max_version.changed_since(&since_2), + &None, + )) }; - let mut new_ropes = - RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::<(usize, FragmentTextSummary)>(); - let mut new_fragments = old_fragments.slice(&ranges[0].start, Bias::Right, &None); - new_ropes.push_tree(new_fragments.summary().text); - - let mut fragment_start = old_fragments.start().1.visible; - for range in ranges { - let fragment_end = old_fragments.end(&None).1.visible; - - // If the current fragment ends before this range, then jump ahead to the first fragment - // that extends past the start of this range, reusing any intervening fragments. - if fragment_end < range.start { - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().1.visible { - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); - } - old_fragments.next(&None); - } - - let slice = old_fragments.slice(&range.start, Bias::Right, &None); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().1.visible; - } - - let full_range_start = range.start + old_fragments.start().1.deleted; - - // Preserve any portion of the current fragment that precedes this range. - if fragment_start < range.start { - let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; - new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix, &None); - fragment_start = range.start; - } - - // Insert the new text before any existing fragments within the range. - if let Some(new_text) = new_text.as_deref() { - new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); - } - - // Advance through every fragment that intersects this range, marking the intersecting - // portions as deleted. 
- while fragment_start < range.end { - let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&None).1.visible; - let mut intersection = fragment.clone(); - let intersection_end = cmp::min(range.end, fragment_end); - if fragment.visible { - intersection.len = intersection_end - fragment_start; - intersection.deletions.insert(timestamp.local()); - intersection.visible = false; - } - if intersection.len > 0 { - new_ropes.push_fragment(&intersection, fragment.visible); - new_fragments.push(intersection, &None); - fragment_start = intersection_end; - } - if fragment_end <= range.end { - old_fragments.next(&None); - } - } - - let full_range_end = range.end + old_fragments.start().1.deleted; - edit.ranges.push(full_range_start..full_range_end); + Edits { + visible_text: &self.visible_text, + deleted_text: &self.deleted_text, + cursor, + undos: &self.undo_map, + since, + old_offset: 0, + new_offset: 0, + old_point: Point::zero(), + new_point: Point::zero(), } - - // If the current fragment has been partially consumed, then consume the rest of it - // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().1.visible { - let fragment_end = old_fragments.end(&None).1.visible; - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); - } - old_fragments.next(&None); - } - - let suffix = old_fragments.suffix(&None); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); - let (visible_text, deleted_text) = new_ropes.finish(); - drop(old_fragments); - - self.fragments = new_fragments; - self.visible_text = visible_text; - self.deleted_text = deleted_text; - edit.new_text = new_text; - edit - } - - fn content<'a>(&'a self) -> Content<'a> { - self.into() - } - - pub fn text_summary_for_range(&self, range: Range) -> TextSummary { - self.content().text_summary_for_range(range) - } - - pub fn anchor_before(&self, position: T) -> Anchor { - self.anchor_at(position, Bias::Left) - } - - pub fn anchor_after(&self, position: T) -> Anchor { - self.anchor_at(position, Bias::Right) - } - - pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - self.content().anchor_at(position, bias) - } - - pub fn point_for_offset(&self, offset: usize) -> Result { - self.content().point_for_offset(offset) - } - - pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - self.visible_text.clip_point(point, bias) - } - - pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { - self.visible_text.clip_offset(offset, bias) } } @@ -1990,8 +1379,7 @@ impl Buffer { &mut self, rng: &mut T, old_range_count: usize, - cx: &mut ModelContext, - ) -> (Vec>, String) + ) -> (Vec>, String, Operation) where T: rand::Rng, { @@ -2013,21 +1401,17 @@ impl Buffer { old_ranges, new_text ); - self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx); - (old_ranges, new_text) + let op = self.edit(old_ranges.iter().cloned(), new_text.as_str()); + (old_ranges, new_text, Operation::Edit(op)) } - pub fn randomly_mutate( - &mut self, - rng: &mut T, - cx: &mut ModelContext, - ) -> (Vec>, String) + pub fn randomly_mutate(&mut self, rng: &mut T) -> Vec where T: rand::Rng, { use rand::prelude::*; - let (old_ranges, new_text) = self.randomly_edit(rng, 5, cx); + let mut ops = vec![self.randomly_edit(rng, 5).2]; // Randomly add, remove or mutate selection sets. 
let replica_selection_sets = &self @@ -2037,7 +1421,7 @@ impl Buffer { .collect::>(); let set_id = replica_selection_sets.choose(rng); if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - self.remove_selection_set(*set_id.unwrap(), cx).unwrap(); + ops.push(self.remove_selection_set(*set_id.unwrap()).unwrap()); } else { let mut ranges = Vec::new(); for _ in 0..5 { @@ -2045,20 +1429,22 @@ impl Buffer { } let new_selections = self.selections_from_ranges(ranges).unwrap(); - if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(new_selections, cx); + let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { + self.add_selection_set(new_selections) } else { - self.update_selection_set(*set_id.unwrap(), new_selections, cx) - .unwrap(); - } + self.update_selection_set(*set_id.unwrap(), new_selections) + .unwrap() + }; + ops.push(op); } - (old_ranges, new_text) + ops } - pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext) { + pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { use rand::prelude::*; + let mut ops = Vec::new(); for _ in 0..rng.gen_range(1..=5) { if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { log::info!( @@ -2066,9 +1452,10 @@ impl Buffer { self.replica_id, transaction ); - self.undo_or_redo(transaction, cx).unwrap(); + ops.push(self.undo_or_redo(transaction).unwrap()); } } + ops } fn selections_from_ranges(&self, ranges: I) -> Result> @@ -2129,76 +1516,20 @@ impl Buffer { .keys() .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap())) } - - pub fn enclosing_bracket_point_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - self.enclosing_bracket_ranges(range).map(|(start, end)| { - let point_start = start.start.to_point(self)..start.end.to_point(self); - let point_end = end.start.to_point(self)..end.end.to_point(self); - (point_start, point_end) - }) - } -} - -impl Clone for Buffer { - fn clone(&self) -> Self { - Self { - fragments: self.fragments.clone(), - visible_text: self.visible_text.clone(), - deleted_text: self.deleted_text.clone(), - version: self.version.clone(), - saved_version: self.saved_version.clone(), - saved_mtime: self.saved_mtime, - last_edit: self.last_edit.clone(), - undo_map: self.undo_map.clone(), - history: self.history.clone(), - selections: self.selections.clone(), - deferred_ops: self.deferred_ops.clone(), - file: self.file.as_ref().map(|f| f.boxed_clone()), - language: self.language.clone(), - syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), - parsing_in_background: false, - sync_parse_timeout: self.sync_parse_timeout, - parse_count: self.parse_count, - deferred_replicas: self.deferred_replicas.clone(), - replica_id: self.replica_id, - remote_id: self.remote_id.clone(), - local_clock: self.local_clock.clone(), - lamport_clock: self.lamport_clock.clone(), - - #[cfg(test)] - operations: self.operations.clone(), - } - } } +#[derive(Clone)] pub struct Snapshot { visible_text: Rope, fragments: SumTree, version: clock::Global, - tree: Option, - is_parsing: bool, - language: Option>, - query_cursor: QueryCursorHandle, -} - -impl Clone for Snapshot { - fn clone(&self) -> Self { - Self { - visible_text: self.visible_text.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - tree: self.tree.clone(), - is_parsing: self.is_parsing, - language: self.language.clone(), - query_cursor: QueryCursorHandle::new(), - } - } } impl Snapshot { + pub fn as_rope(&self) -> &Rope { + &self.visible_text + } + pub fn 
len(&self) -> usize { self.visible_text.len() } @@ -2207,6 +1538,10 @@ impl Snapshot { self.content().line_len(row) } + pub fn indent_column_for_line(&self, row: u32) -> u32 { + self.content().indent_column_for_line(row) + } + pub fn text(&self) -> Rope { self.visible_text.clone() } @@ -2219,38 +1554,11 @@ impl Snapshot { self.visible_text.max_point() } - pub fn text_for_range(&self, range: Range) -> Chunks { + pub fn text_for_range(&self, range: Range) -> Chunks { + let range = range.start.to_offset(self)..range.end.to_offset(self); self.visible_text.chunks_in_range(range) } - pub fn highlighted_text_for_range(&mut self, range: Range) -> HighlightedChunks { - let chunks = self.visible_text.chunks_in_range(range.clone()); - if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { - let captures = self.query_cursor.set_byte_range(range.clone()).captures( - &language.highlight_query, - tree.root_node(), - TextProvider(&self.visible_text), - ); - - HighlightedChunks { - range, - chunks, - highlights: Some(Highlights { - captures, - next_capture: None, - stack: Default::default(), - highlight_map: language.highlight_map(), - }), - } - } else { - HighlightedChunks { - range, - chunks, - highlights: None, - } - } - } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary where T: ToOffset, @@ -2287,7 +1595,7 @@ impl Snapshot { self.content().anchor_at(position, Bias::Right) } - fn content(&self) -> Content { + pub fn content(&self) -> Content { self.into() } } @@ -2347,6 +1655,22 @@ impl<'a> Content<'a> { self.fragments.extent::(&None) } + pub fn chars_at(&self, position: T) -> impl Iterator + 'a { + let offset = position.to_offset(self); + self.visible_text.chars_at(offset) + } + + pub fn reversed_chars_at(&self, position: T) -> impl Iterator + 'a { + let offset = position.to_offset(self); + self.visible_text.reversed_chars_at(offset) + } + + pub fn text_for_range(&self, range: Range) -> Chunks<'a> { + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); + self.visible_text.chunks_in_range(start..end) + } + fn line_len(&self, row: u32) -> u32 { let row_start_offset = Point::new(row, 0).to_offset(self); let row_end_offset = if row >= self.max_point().row { @@ -2357,6 +1681,18 @@ impl<'a> Content<'a> { (row_end_offset - row_start_offset) as u32 } + pub fn indent_column_for_line(&self, row: u32) -> u32 { + let mut result = 0; + for c in self.chars_at(Point::new(row, 0)) { + if c == ' ' { + result += 1; + } else { + break; + } + } + result + } + fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { let cx = Some(anchor.version.clone()); let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); @@ -2373,19 +1709,134 @@ impl<'a> Content<'a> { self.visible_text.cursor(range.start).summary(range.end) } + fn summaries_for_anchors( + &self, + map: &'a AnchorMap, + ) -> impl Iterator { + let cx = Some(map.version.clone()); + let mut summary = TextSummary::default(); + let mut rope_cursor = self.visible_text.cursor(0); + let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); + map.entries.iter().map(move |((offset, bias), value)| { + cursor.seek_forward(&VersionedOffset::Offset(*offset), *bias, &cx); + let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { + offset - cursor.start().0.offset() + } else { + 0 + }; + summary += rope_cursor.summary(cursor.start().1 + overshoot); + (summary.clone(), value) + }) + } + + fn summaries_for_anchor_ranges( + &self, + map: &'a AnchorRangeMap, + ) 
-> impl Iterator, &'a T)> { + let cx = Some(map.version.clone()); + let mut summary = TextSummary::default(); + let mut rope_cursor = self.visible_text.cursor(0); + let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); + map.entries.iter().map(move |(range, value)| { + let Range { + start: (start_offset, start_bias), + end: (end_offset, end_bias), + } = range; + + cursor.seek_forward(&VersionedOffset::Offset(*start_offset), *start_bias, &cx); + let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { + start_offset - cursor.start().0.offset() + } else { + 0 + }; + summary += rope_cursor.summary(cursor.start().1 + overshoot); + let start_summary = summary.clone(); + + cursor.seek_forward(&VersionedOffset::Offset(*end_offset), *end_bias, &cx); + let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { + end_offset - cursor.start().0.offset() + } else { + 0 + }; + summary += rope_cursor.summary(cursor.start().1 + overshoot); + let end_summary = summary.clone(); + + (start_summary..end_summary, value) + }) + } + fn anchor_at(&self, position: T, bias: Bias) -> Anchor { let offset = position.to_offset(self); let max_offset = self.len(); assert!(offset <= max_offset, "offset is out of range"); - let mut cursor = self.fragments.cursor::<(usize, FragmentTextSummary)>(); + let mut cursor = self.fragments.cursor::(); cursor.seek(&offset, bias, &None); Anchor { - offset: offset + cursor.start().1.deleted, + offset: offset + cursor.start().deleted, bias, version: self.version.clone(), } } + pub fn anchor_map(&self, entries: E) -> AnchorMap + where + E: IntoIterator, + { + let version = self.version.clone(); + let mut cursor = self.fragments.cursor::(); + let entries = entries + .into_iter() + .map(|((offset, bias), value)| { + cursor.seek_forward(&offset, bias, &None); + let full_offset = cursor.start().deleted + offset; + ((full_offset, bias), value) + }) + .collect(); + + AnchorMap { version, entries } + } + + pub fn anchor_range_map(&self, entries: E) -> AnchorRangeMap + where + E: IntoIterator, T)>, + { + let version = self.version.clone(); + let mut cursor = self.fragments.cursor::(); + let entries = entries + .into_iter() + .map(|(range, value)| { + let Range { + start: (start_offset, start_bias), + end: (end_offset, end_bias), + } = range; + cursor.seek_forward(&start_offset, start_bias, &None); + let full_start_offset = cursor.start().deleted + start_offset; + cursor.seek_forward(&end_offset, end_bias, &None); + let full_end_offset = cursor.start().deleted + end_offset; + ( + (full_start_offset, start_bias)..(full_end_offset, end_bias), + value, + ) + }) + .collect(); + + AnchorRangeMap { version, entries } + } + + pub fn anchor_set(&self, entries: E) -> AnchorSet + where + E: IntoIterator, + { + AnchorSet(self.anchor_map(entries.into_iter().map(|range| (range, ())))) + } + + pub fn anchor_range_set(&self, entries: E) -> AnchorRangeSet + where + E: IntoIterator>, + { + AnchorRangeSet(self.anchor_range_map(entries.into_iter().map(|range| (range, ())))) + } + fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { let cx = Some(anchor.version.clone()); let mut cursor = self @@ -2463,27 +1914,6 @@ impl<'a> RopeBuilder<'a> { } } -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum Event { - Edited, - Dirtied, - Saved, - FileHandleChanged, - Reloaded, - Reparsed, - Closed, -} - -impl Entity for Buffer { - type Event = Event; - - fn release(&mut self, cx: &mut gpui::MutableAppContext) { - if let Some(file) = self.file.as_ref() { - 
file.buffer_removed(self.remote_id, cx); - } - } -} - impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { type Item = Edit; @@ -2548,126 +1978,6 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { } } -struct ByteChunks<'a>(rope::Chunks<'a>); - -impl<'a> Iterator for ByteChunks<'a> { - type Item = &'a [u8]; - - fn next(&mut self) -> Option { - self.0.next().map(str::as_bytes) - } -} - -struct TextProvider<'a>(&'a Rope); - -impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> { - type I = ByteChunks<'a>; - - fn text(&mut self, node: tree_sitter::Node) -> Self::I { - ByteChunks(self.0.chunks_in_range(node.byte_range())) - } -} - -struct Highlights<'a> { - captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, - next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, - stack: Vec<(usize, HighlightId)>, - highlight_map: HighlightMap, -} - -pub struct HighlightedChunks<'a> { - range: Range, - chunks: Chunks<'a>, - highlights: Option>, -} - -impl<'a> HighlightedChunks<'a> { - pub fn seek(&mut self, offset: usize) { - self.range.start = offset; - self.chunks.seek(self.range.start); - if let Some(highlights) = self.highlights.as_mut() { - highlights - .stack - .retain(|(end_offset, _)| *end_offset > offset); - if let Some((mat, capture_ix)) = &highlights.next_capture { - let capture = mat.captures[*capture_ix as usize]; - if offset >= capture.node.start_byte() { - let next_capture_end = capture.node.end_byte(); - if offset < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_map.get(capture.index), - )); - } - highlights.next_capture.take(); - } - } - highlights.captures.set_byte_range(self.range.clone()); - } - } - - pub fn offset(&self) -> usize { - self.range.start - } -} - -impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); - - fn next(&mut self) -> Option { - let mut next_capture_start = usize::MAX; - - if let Some(highlights) = self.highlights.as_mut() { - while let Some((parent_capture_end, _)) = highlights.stack.last() { - if *parent_capture_end <= self.range.start { - highlights.stack.pop(); - } else { - break; - } - } - - if highlights.next_capture.is_none() { - highlights.next_capture = highlights.captures.next(); - } - - while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { - let capture = mat.captures[*capture_ix as usize]; - if self.range.start < capture.node.start_byte() { - next_capture_start = capture.node.start_byte(); - break; - } else { - let style_id = highlights.highlight_map.get(capture.index); - highlights.stack.push((capture.node.end_byte(), style_id)); - highlights.next_capture = highlights.captures.next(); - } - } - } - - if let Some(chunk) = self.chunks.peek() { - let chunk_start = self.range.start; - let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); - let mut style_id = HighlightId::default(); - if let Some((parent_capture_end, parent_style_id)) = - self.highlights.as_ref().and_then(|h| h.stack.last()) - { - chunk_end = chunk_end.min(*parent_capture_end); - style_id = *parent_style_id; - } - - let slice = - &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; - self.range.start = chunk_end; - if self.range.start == self.chunks.offset() + chunk.len() { - self.chunks.next().unwrap(); - } - - Some((slice, style_id)) - } else { - None - } - } -} - impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { !undos.is_undone(self.timestamp.local()) @@ -2753,6 +2063,16 
@@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { } } +impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize { + fn cmp( + &self, + cursor_location: &FragmentTextSummary, + _: &Option, + ) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.visible) + } +} + #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum VersionedOffset { Offset(usize), @@ -3129,983 +2449,3 @@ impl ToPoint for usize { content.into().visible_text.to_point(*self) } } - -#[cfg(test)] -mod tests { - use crate::random_char_iter::RandomCharIter; - - use super::*; - use gpui::ModelHandle; - use rand::prelude::*; - use std::{cell::RefCell, cmp::Ordering, env, mem, rc::Rc}; - - #[gpui::test] - fn test_edit(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "abc", cx); - assert_eq!(buffer.text(), "abc"); - buffer.edit(vec![3..3], "def", cx); - assert_eq!(buffer.text(), "abcdef"); - buffer.edit(vec![0..0], "ghi", cx); - assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit(vec![5..5], "jkl", cx); - assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit(vec![6..7], "", cx); - assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit(vec![4..9], "mno", cx); - assert_eq!(buffer.text(), "ghiamnoef"); - buffer - }); - } - - #[gpui::test] - fn test_edit_events(cx: &mut gpui::MutableAppContext) { - let mut now = Instant::now(); - let buffer_1_events = Rc::new(RefCell::new(Vec::new())); - let buffer_2_events = Rc::new(RefCell::new(Vec::new())); - - let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx)); - let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx)); - let buffer_ops = buffer1.update(cx, |buffer, cx| { - let buffer_1_events = buffer_1_events.clone(); - cx.subscribe(&buffer1, move |_, _, event, _| { - buffer_1_events.borrow_mut().push(event.clone()) - }) - .detach(); - let buffer_2_events = buffer_2_events.clone(); - cx.subscribe(&buffer2, move |_, _, event, _| { - buffer_2_events.borrow_mut().push(event.clone()) - }) - .detach(); - - // An edit emits an edited event, followed by a dirtied event, - // since the buffer was previously in a clean state. - buffer.edit(Some(2..4), "XYZ", cx); - - // An empty transaction does not emit any events. - buffer.start_transaction(None).unwrap(); - buffer.end_transaction(None, cx).unwrap(); - - // A transaction containing two edits emits one edited event. - now += Duration::from_secs(1); - buffer.start_transaction_at(None, now).unwrap(); - buffer.edit(Some(5..5), "u", cx); - buffer.edit(Some(6..6), "w", cx); - buffer.end_transaction_at(None, now, cx).unwrap(); - - // Undoing a transaction emits one edited event. - buffer.undo(cx); - - buffer.operations.clone() - }); - - // Incorporating a set of remote ops emits a single edited event, - // followed by a dirtied event. 
- buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer_ops, cx).unwrap(); - }); - - let buffer_1_events = buffer_1_events.borrow(); - assert_eq!( - *buffer_1_events, - vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited] - ); - - let buffer_2_events = buffer_2_events.borrow(); - assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]); - } - - #[gpui::test(iterations = 100)] - fn test_random_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let reference_string_len = rng.gen_range(0..3); - let mut reference_string = RandomCharIter::new(&mut rng) - .take(reference_string_len) - .collect::(); - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, reference_string.as_str(), cx); - buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - let mut buffer_versions = Vec::new(); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - - for _i in 0..operations { - let (old_ranges, new_text) = buffer.randomly_mutate(&mut rng, cx); - for old_range in old_ranges.iter().rev() { - reference_string.replace_range(old_range.clone(), &new_text); - } - assert_eq!(buffer.text(), reference_string); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - - if rng.gen_bool(0.25) { - buffer.randomly_undo_redo(&mut rng, cx); - reference_string = buffer.text(); - log::info!( - "buffer text {:?}, version: {:?}", - buffer.text(), - buffer.version() - ); - } - - let range = buffer.random_byte_range(0, &mut rng); - assert_eq!( - buffer.text_summary_for_range(range.clone()), - TextSummary::from(&reference_string[range]) - ); - - if rng.gen_bool(0.3) { - buffer_versions.push(buffer.clone()); - } - } - - for mut old_buffer in buffer_versions { - let edits = buffer - .edits_since(old_buffer.version.clone()) - .collect::>(); - - log::info!( - "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", - old_buffer.version(), - old_buffer.text(), - edits, - ); - - let mut delta = 0_isize; - for edit in edits { - let old_start = (edit.old_bytes.start as isize + delta) as usize; - let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); - old_buffer.edit( - Some(old_start..old_start + edit.deleted_bytes()), - new_text, - cx, - ); - delta += edit.delta(); - } - assert_eq!(old_buffer.text(), buffer.text()); - } - - buffer - }); - } - - #[gpui::test] - fn test_line_len(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefg\nhij", cx); - buffer.edit(vec![12..12], "kl\nmno", cx); - buffer.edit(vec![18..18], "\npqrs\n", cx); - buffer.edit(vec![18..21], "\nPQ", cx); - - assert_eq!(buffer.line_len(0), 4); - assert_eq!(buffer.line_len(1), 3); - assert_eq!(buffer.line_len(2), 5); - assert_eq!(buffer.line_len(3), 3); - assert_eq!(buffer.line_len(4), 4); - assert_eq!(buffer.line_len(5), 0); - buffer - }); - } - - #[gpui::test] - fn test_text_summary_for_range(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let buffer = Buffer::new(0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz", cx); - assert_eq!( - buffer.text_summary_for_range(1..3), - TextSummary { - bytes: 2, - lines: Point::new(1, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 1, - } - ); - assert_eq!( - buffer.text_summary_for_range(1..12), - TextSummary { - bytes: 11, - 
lines: Point::new(3, 0), - first_line_chars: 1, - last_line_chars: 0, - longest_row: 2, - longest_row_chars: 4, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..20), - TextSummary { - bytes: 20, - lines: Point::new(4, 1), - first_line_chars: 2, - last_line_chars: 1, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - buffer.text_summary_for_range(0..22), - TextSummary { - bytes: 22, - lines: Point::new(4, 3), - first_line_chars: 2, - last_line_chars: 3, - longest_row: 3, - longest_row_chars: 6, - } - ); - assert_eq!( - buffer.text_summary_for_range(7..22), - TextSummary { - bytes: 15, - lines: Point::new(2, 3), - first_line_chars: 4, - last_line_chars: 3, - longest_row: 1, - longest_row_chars: 6, - } - ); - buffer - }); - } - - #[gpui::test] - fn test_chars_at(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abcd\nefgh\nij", cx); - buffer.edit(vec![12..12], "kl\nmno", cx); - buffer.edit(vec![18..18], "\npqrs", cx); - buffer.edit(vec![18..21], "\nPQ", cx); - - let chars = buffer.chars_at(Point::new(0, 0)); - assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(1, 0)); - assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(2, 0)); - assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); - - let chars = buffer.chars_at(Point::new(3, 0)); - assert_eq!(chars.collect::(), "mno\nPQrs"); - - let chars = buffer.chars_at(Point::new(4, 0)); - assert_eq!(chars.collect::(), "PQrs"); - - // Regression test: - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n", cx); - buffer.edit(vec![60..60], "\n", cx); - - let chars = buffer.chars_at(Point::new(6, 0)); - assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); - - buffer - }); - } - - #[gpui::test] - fn test_anchors(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - buffer.edit(vec![0..0], "abc", cx); - let left_anchor = buffer.anchor_before(2); - let right_anchor = buffer.anchor_after(2); - - buffer.edit(vec![1..1], "def\n", cx); - assert_eq!(buffer.text(), "adef\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 6); - assert_eq!(right_anchor.to_offset(&buffer), 6); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![2..3], "", cx); - assert_eq!(buffer.text(), "adf\nbc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 5); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - - buffer.edit(vec![5..5], "ghi\n", cx); - assert_eq!(buffer.text(), "adf\nbghi\nc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 9); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - - buffer.edit(vec![7..9], "", cx); - assert_eq!(buffer.text(), "adf\nbghc"); - assert_eq!(left_anchor.to_offset(&buffer), 5); - assert_eq!(right_anchor.to_offset(&buffer), 7); - assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); - assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); - - // Ensure 
anchoring to a point is equivalent to anchoring to an offset. - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 0 }), - buffer.anchor_before(0) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 1 }), - buffer.anchor_before(1) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 2 }), - buffer.anchor_before(2) - ); - assert_eq!( - buffer.anchor_before(Point { row: 0, column: 3 }), - buffer.anchor_before(3) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 0 }), - buffer.anchor_before(4) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 1 }), - buffer.anchor_before(5) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 2 }), - buffer.anchor_before(6) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 3 }), - buffer.anchor_before(7) - ); - assert_eq!( - buffer.anchor_before(Point { row: 1, column: 4 }), - buffer.anchor_before(8) - ); - - // Comparison between anchors. - let anchor_at_offset_0 = buffer.anchor_before(0); - let anchor_at_offset_1 = buffer.anchor_before(1); - let anchor_at_offset_2 = buffer.anchor_before(2); - - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Equal - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Equal - ); - - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); - assert_eq!( - anchor_at_offset_0 - .cmp(&anchor_at_offset_2, &buffer) - .unwrap(), - Ordering::Less - ); - - assert_eq!( - anchor_at_offset_1 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_1, &buffer) - .unwrap(), - Ordering::Greater - ); - assert_eq!( - anchor_at_offset_2 - .cmp(&anchor_at_offset_0, &buffer) - .unwrap(), - Ordering::Greater - ); - buffer - }); - } - - #[gpui::test] - fn test_anchors_at_start_and_end(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "", cx); - let before_start_anchor = buffer.anchor_before(0); - let after_end_anchor = buffer.anchor_after(0); - - buffer.edit(vec![0..0], "abc", cx); - assert_eq!(buffer.text(), "abc"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_end_anchor.to_offset(&buffer), 3); - - let after_start_anchor = buffer.anchor_after(0); - let before_end_anchor = buffer.anchor_before(3); - - buffer.edit(vec![3..3], "def", cx); - buffer.edit(vec![0..0], "ghi", cx); - assert_eq!(buffer.text(), "ghiabcdef"); - assert_eq!(before_start_anchor.to_offset(&buffer), 0); - assert_eq!(after_start_anchor.to_offset(&buffer), 3); - assert_eq!(before_end_anchor.to_offset(&buffer), 6); - assert_eq!(after_end_anchor.to_offset(&buffer), 9); - buffer - }); - } - - #[gpui::test] - async fn test_apply_diff(mut cx: gpui::TestAppContext) { - let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); - - let text = "a\nccc\ndddd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); - cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); - - let text = "a\n1\n\nccc\ndd2dd\nffffff\n"; - let diff = buffer.read_with(&cx, |b, cx| 
b.diff(text.into(), cx)).await; - buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); - cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); - } - - #[gpui::test] - fn test_undo_redo(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut buffer = Buffer::new(0, "1234", cx); - // Set group interval to zero so as to not group edits in the undo stack. - buffer.history.group_interval = Duration::from_secs(0); - - buffer.edit(vec![1..1], "abx", cx); - buffer.edit(vec![3..4], "yzef", cx); - buffer.edit(vec![3..5], "cd", cx); - assert_eq!(buffer.text(), "1abcdef234"); - - let transactions = buffer.history.undo_stack.clone(); - assert_eq!(transactions.len(), 3); - - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abcdef234"); - - buffer.undo_or_redo(transactions[2].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[0].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(transactions[1].clone(), cx).unwrap(); - assert_eq!(buffer.text(), "1234"); - - buffer - }); - } - - #[gpui::test] - fn test_history(cx: &mut gpui::MutableAppContext) { - cx.add_model(|cx| { - let mut now = Instant::now(); - let mut buffer = Buffer::new(0, "123456", cx); - - let set_id = - buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap(), cx); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer.edit(vec![2..4], "cd", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "12cd56"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set( - set_id, - buffer.selections_from_ranges(vec![1..3]).unwrap(), - cx, - ) - .unwrap(); - buffer.edit(vec![4..5], "e", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - now += buffer.history.group_interval + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now).unwrap(); - buffer - .update_selection_set( - set_id, - buffer.selections_from_ranges(vec![2..2]).unwrap(), - cx, - ) - .unwrap(); - buffer.edit(vec![0..1], "a", cx); - buffer.edit(vec![1..1], "b", cx); - buffer.end_transaction_at(Some(set_id), now, cx).unwrap(); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - // Last transaction happened past the group interval, undo it on its - // own. - buffer.undo(cx); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // First two transactions happened within the group interval, undo them - // together. - buffer.undo(cx); - assert_eq!(buffer.text(), "123456"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - - // Redo the first two transactions together. 
- buffer.redo(cx); - assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - - // Redo the last transaction on its own. - buffer.redo(cx); - assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - - buffer - }); - } - - #[gpui::test] - fn test_concurrent_edits(cx: &mut gpui::MutableAppContext) { - let text = "abcdef"; - - let buffer1 = cx.add_model(|cx| Buffer::new(1, text, cx)); - let buffer2 = cx.add_model(|cx| Buffer::new(2, text, cx)); - let buffer3 = cx.add_model(|cx| Buffer::new(3, text, cx)); - - let buf1_op = buffer1.update(cx, |buffer, cx| { - buffer.edit(vec![1..2], "12", cx); - assert_eq!(buffer.text(), "a12cdef"); - buffer.operations.last().unwrap().clone() - }); - let buf2_op = buffer2.update(cx, |buffer, cx| { - buffer.edit(vec![3..4], "34", cx); - assert_eq!(buffer.text(), "abc34ef"); - buffer.operations.last().unwrap().clone() - }); - let buf3_op = buffer3.update(cx, |buffer, cx| { - buffer.edit(vec![5..6], "56", cx); - assert_eq!(buffer.text(), "abcde56"); - buffer.operations.last().unwrap().clone() - }); - - buffer1.update(cx, |buffer, _| { - buffer.apply_op(buf2_op.clone()).unwrap(); - buffer.apply_op(buf3_op.clone()).unwrap(); - }); - buffer2.update(cx, |buffer, _| { - buffer.apply_op(buf1_op.clone()).unwrap(); - buffer.apply_op(buf3_op.clone()).unwrap(); - }); - buffer3.update(cx, |buffer, _| { - buffer.apply_op(buf1_op.clone()).unwrap(); - buffer.apply_op(buf2_op.clone()).unwrap(); - }); - - assert_eq!(buffer1.read(cx).text(), "a12c34e56"); - assert_eq!(buffer2.read(cx).text(), "a12c34e56"); - assert_eq!(buffer3.read(cx).text(), "a12c34e56"); - } - - #[gpui::test(iterations = 100)] - fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { - let peers = env::var("PEERS") - .map(|i| i.parse().expect("invalid `PEERS` variable")) - .unwrap_or(5); - let operations = env::var("OPERATIONS") - .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) - .unwrap_or(10); - - let base_text_len = rng.gen_range(0..10); - let base_text = RandomCharIter::new(&mut rng) - .take(base_text_len) - .collect::(); - let mut replica_ids = Vec::new(); - let mut buffers = Vec::new(); - let mut network = Network::new(rng.clone()); - - for i in 0..peers { - let buffer = cx.add_model(|cx| { - let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx); - buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); - buf - }); - buffers.push(buffer); - replica_ids.push(i as u16); - network.add_peer(i as u16); - } - - log::info!("initial text: {:?}", base_text); - - let mut mutation_count = operations; - loop { - let replica_index = rng.gen_range(0..peers); - let replica_id = replica_ids[replica_index]; - buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) { - 0..=50 if mutation_count != 0 => { - buffer.randomly_mutate(&mut rng, cx); - network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); - log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); - mutation_count -= 1; - } - 51..=70 if mutation_count != 0 => { - buffer.randomly_undo_redo(&mut rng, cx); - network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations)); - mutation_count -= 1; - } - 71..=100 if network.has_unreceived(replica_id) => { - let ops = network.receive(replica_id); - if !ops.is_empty() { - log::info!( - "peer {} applying {} ops from the network.", - replica_id, - ops.len() - ); - buffer.apply_ops(ops, 
cx).unwrap(); - } - } - _ => {} - }); - - if mutation_count == 0 && network.is_idle() { - break; - } - } - - let first_buffer = buffers[0].read(cx); - for buffer in &buffers[1..] { - let buffer = buffer.read(cx); - assert_eq!( - buffer.text(), - first_buffer.text(), - "Replica {} text != Replica 0 text", - buffer.replica_id - ); - assert_eq!( - buffer.selection_sets().collect::>(), - first_buffer.selection_sets().collect::>() - ); - assert_eq!( - buffer.all_selection_ranges().collect::>(), - first_buffer - .all_selection_ranges() - .collect::>() - ); - } - } - - #[gpui::test] - async fn test_reparse(mut cx: gpui::TestAppContext) { - let rust_lang = rust_lang(); - let buffer = cx.add_model(|cx| { - let text = "fn a() {}".into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang.clone()), cx) - }); - - // Wait for the initial text to parse - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - assert_eq!( - get_tree_sexp(&buffer, &cx), - concat!( - "(source_file (function_item name: (identifier) ", - "parameters: (parameters) ", - "body: (block)))" - ) - ); - - buffer.update(&mut cx, |buffer, _| { - buffer.set_sync_parse_timeout(Duration::ZERO) - }); - - // Perform some edits (add parameter and variable reference) - // Parsing doesn't begin until the transaction is complete - buffer.update(&mut cx, |buf, cx| { - buf.start_transaction(None).unwrap(); - - let offset = buf.text().find(")").unwrap(); - buf.edit(vec![offset..offset], "b: C", cx); - assert!(!buf.is_parsing()); - - let offset = buf.text().find("}").unwrap(); - buf.edit(vec![offset..offset], " d; ", cx); - assert!(!buf.is_parsing()); - - buf.end_transaction(None, cx).unwrap(); - assert_eq!(buf.text(), "fn a(b: C) { d; }"); - assert!(buf.is_parsing()); - }); - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - assert_eq!( - get_tree_sexp(&buffer, &cx), - concat!( - "(source_file (function_item name: (identifier) ", - "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", - "body: (block (identifier))))" - ) - ); - - // Perform a series of edits without waiting for the current parse to complete: - // * turn identifier into a field expression - // * turn field expression into a method call - // * add a turbofish to the method call - buffer.update(&mut cx, |buf, cx| { - let offset = buf.text().find(";").unwrap(); - buf.edit(vec![offset..offset], ".e", cx); - assert_eq!(buf.text(), "fn a(b: C) { d.e; }"); - assert!(buf.is_parsing()); - }); - buffer.update(&mut cx, |buf, cx| { - let offset = buf.text().find(";").unwrap(); - buf.edit(vec![offset..offset], "(f)", cx); - assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }"); - assert!(buf.is_parsing()); - }); - buffer.update(&mut cx, |buf, cx| { - let offset = buf.text().find("(f)").unwrap(); - buf.edit(vec![offset..offset], "::", cx); - assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); - assert!(buf.is_parsing()); - }); - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - assert_eq!( - get_tree_sexp(&buffer, &cx), - concat!( - "(source_file (function_item name: (identifier) ", - "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", - "body: (block (call_expression ", - "function: (generic_function ", - "function: (field_expression value: (identifier) field: (field_identifier)) ", - "type_arguments: (type_arguments (type_identifier))) ", - "arguments: (arguments (identifier))))))", - ) - ); - - buffer.update(&mut cx, |buf, cx| { - buf.undo(cx); - 
assert_eq!(buf.text(), "fn a() {}"); - assert!(buf.is_parsing()); - }); - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - assert_eq!( - get_tree_sexp(&buffer, &cx), - concat!( - "(source_file (function_item name: (identifier) ", - "parameters: (parameters) ", - "body: (block)))" - ) - ); - - buffer.update(&mut cx, |buf, cx| { - buf.redo(cx); - assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); - assert!(buf.is_parsing()); - }); - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - assert_eq!( - get_tree_sexp(&buffer, &cx), - concat!( - "(source_file (function_item name: (identifier) ", - "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", - "body: (block (call_expression ", - "function: (generic_function ", - "function: (field_expression value: (identifier) field: (field_identifier)) ", - "type_arguments: (type_arguments (type_identifier))) ", - "arguments: (arguments (identifier))))))", - ) - ); - - fn get_tree_sexp(buffer: &ModelHandle, cx: &gpui::TestAppContext) -> String { - buffer.read_with(cx, |buffer, _| { - buffer.syntax_tree().unwrap().root_node().to_sexp() - }) - } - } - - #[gpui::test] - async fn test_enclosing_bracket_ranges(mut cx: gpui::TestAppContext) { - use unindent::Unindent as _; - - let rust_lang = rust_lang(); - let buffer = cx.add_model(|cx| { - let text = " - mod x { - mod y { - - } - } - " - .unindent() - .into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang.clone()), cx) - }); - buffer - .condition(&cx, |buffer, _| !buffer.is_parsing()) - .await; - buffer.read_with(&cx, |buf, _| { - assert_eq!( - buf.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)), - Some(( - Point::new(0, 6)..Point::new(0, 7), - Point::new(4, 0)..Point::new(4, 1) - )) - ); - assert_eq!( - buf.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)), - Some(( - Point::new(1, 10)..Point::new(1, 11), - Point::new(3, 4)..Point::new(3, 5) - )) - ); - assert_eq!( - buf.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)), - Some(( - Point::new(1, 10)..Point::new(1, 11), - Point::new(3, 4)..Point::new(3, 5) - )) - ); - }); - } - - #[derive(Clone)] - struct Envelope { - message: T, - sender: ReplicaId, - } - - struct Network { - inboxes: std::collections::BTreeMap>>, - all_messages: Vec, - rng: R, - } - - impl Network { - fn new(rng: R) -> Self { - Network { - inboxes: Default::default(), - all_messages: Vec::new(), - rng, - } - } - - fn add_peer(&mut self, id: ReplicaId) { - self.inboxes.insert(id, Vec::new()); - } - - fn is_idle(&self) -> bool { - self.inboxes.values().all(|i| i.is_empty()) - } - - fn broadcast(&mut self, sender: ReplicaId, messages: Vec) { - for (replica, inbox) in self.inboxes.iter_mut() { - if *replica != sender { - for message in &messages { - let min_index = inbox - .iter() - .enumerate() - .rev() - .find_map(|(index, envelope)| { - if sender == envelope.sender { - Some(index + 1) - } else { - None - } - }) - .unwrap_or(0); - - // Insert one or more duplicates of this message *after* the previous - // message delivered by this replica. 
- for _ in 0..self.rng.gen_range(1..4) { - let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1); - inbox.insert( - insertion_index, - Envelope { - message: message.clone(), - sender, - }, - ); - } - } - } - } - self.all_messages.extend(messages); - } - - fn has_unreceived(&self, receiver: ReplicaId) -> bool { - !self.inboxes[&receiver].is_empty() - } - - fn receive(&mut self, receiver: ReplicaId) -> Vec { - let inbox = self.inboxes.get_mut(&receiver).unwrap(); - let count = self.rng.gen_range(0..inbox.len() + 1); - inbox - .drain(0..count) - .map(|envelope| envelope.message) - .collect() - } - } - - fn rust_lang() -> Arc { - Arc::new( - Language::new( - LanguageConfig { - name: "Rust".to_string(), - path_suffixes: vec!["rs".to_string()], - ..Default::default() - }, - tree_sitter_rust::language(), - ) - .with_brackets_query(r#" ("{" @open "}" @close) "#) - .unwrap(), - ) - } -} diff --git a/crates/buffer/src/point.rs b/crates/buffer/src/point.rs index 77dd9dfe37..a2da4e4f6c 100644 --- a/crates/buffer/src/point.rs +++ b/crates/buffer/src/point.rs @@ -109,21 +109,3 @@ impl Ord for Point { } } } - -impl Into for Point { - fn into(self) -> tree_sitter::Point { - tree_sitter::Point { - row: self.row as usize, - column: self.column as usize, - } - } -} - -impl From for Point { - fn from(point: tree_sitter::Point) -> Self { - Self { - row: point.row as u32, - column: point.column as u32, - } - } -} diff --git a/crates/buffer/src/rope.rs b/crates/buffer/src/rope.rs index a5f4b905ba..a1c5714002 100644 --- a/crates/buffer/src/rope.rs +++ b/crates/buffer/src/rope.rs @@ -115,6 +115,11 @@ impl Rope { self.chunks_in_range(start..self.len()).flat_map(str::chars) } + pub fn reversed_chars_at(&self, start: usize) -> impl Iterator + '_ { + self.reversed_chunks_in_range(0..start) + .flat_map(|chunk| chunk.chars().rev()) + } + pub fn bytes_at(&self, start: usize) -> impl Iterator + '_ { self.chunks_in_range(start..self.len()).flat_map(str::bytes) } @@ -123,8 +128,12 @@ impl Rope { self.chunks_in_range(0..self.len()) } - pub fn chunks_in_range<'a>(&'a self, range: Range) -> Chunks<'a> { - Chunks::new(self, range) + pub fn chunks_in_range(&self, range: Range) -> Chunks { + Chunks::new(self, range, false) + } + + pub fn reversed_chunks_in_range(&self, range: Range) -> Chunks { + Chunks::new(self, range, true) } pub fn to_point(&self, offset: usize) -> Point { @@ -268,6 +277,7 @@ impl<'a> Cursor<'a> { } } + self.offset = end_offset; summary } @@ -283,38 +293,65 @@ impl<'a> Cursor<'a> { pub struct Chunks<'a> { chunks: sum_tree::Cursor<'a, Chunk, usize>, range: Range, + reversed: bool, } impl<'a> Chunks<'a> { - pub fn new(rope: &'a Rope, range: Range) -> Self { + pub fn new(rope: &'a Rope, range: Range, reversed: bool) -> Self { let mut chunks = rope.chunks.cursor(); - chunks.seek(&range.start, Bias::Right, &()); - Self { chunks, range } + if reversed { + chunks.seek(&range.end, Bias::Left, &()); + } else { + chunks.seek(&range.start, Bias::Right, &()); + } + Self { + chunks, + range, + reversed, + } } pub fn offset(&self) -> usize { - self.range.start.max(*self.chunks.start()) + if self.reversed { + self.range.end.min(self.chunks.end(&())) + } else { + self.range.start.max(*self.chunks.start()) + } } pub fn seek(&mut self, offset: usize) { - if offset >= self.chunks.end(&()) { - self.chunks.seek_forward(&offset, Bias::Right, &()); + let bias = if self.reversed { + Bias::Left } else { - self.chunks.seek(&offset, Bias::Right, &()); + Bias::Right + }; + + if offset >= self.chunks.end(&()) { + 
self.chunks.seek_forward(&offset, bias, &()); + } else { + self.chunks.seek(&offset, bias, &()); + } + + if self.reversed { + self.range.end = offset; + } else { + self.range.start = offset; } - self.range.start = offset; } pub fn peek(&self) -> Option<&'a str> { - if let Some(chunk) = self.chunks.item() { - let offset = *self.chunks.start(); - if self.range.end > offset { - let start = self.range.start.saturating_sub(*self.chunks.start()); - let end = self.range.end - self.chunks.start(); - return Some(&chunk.0[start..chunk.0.len().min(end)]); - } + let chunk = self.chunks.item()?; + if self.reversed && self.range.start >= self.chunks.end(&()) { + return None; } - None + let chunk_start = *self.chunks.start(); + if self.range.end <= chunk_start { + return None; + } + + let start = self.range.start.saturating_sub(chunk_start); + let end = self.range.end - chunk_start; + Some(&chunk.0[start..chunk.0.len().min(end)]) } } @@ -324,7 +361,11 @@ impl<'a> Iterator for Chunks<'a> { fn next(&mut self) -> Option { let result = self.peek(); if result.is_some() { - self.chunks.next(&()); + if self.reversed { + self.chunks.prev(&()); + } else { + self.chunks.next(&()); + } } result } @@ -570,6 +611,16 @@ mod tests { actual.chunks_in_range(start_ix..end_ix).collect::(), &expected[start_ix..end_ix] ); + + assert_eq!( + actual + .reversed_chunks_in_range(start_ix..end_ix) + .collect::>() + .into_iter() + .rev() + .collect::(), + &expected[start_ix..end_ix] + ); } let mut point = Point::new(0, 0); diff --git a/crates/buffer/src/tests.rs b/crates/buffer/src/tests.rs new file mode 100644 index 0000000000..bb29f7de98 --- /dev/null +++ b/crates/buffer/src/tests.rs @@ -0,0 +1,658 @@ +use super::*; +use clock::ReplicaId; +use rand::prelude::*; +use std::{ + cmp::Ordering, + env, + iter::Iterator, + time::{Duration, Instant}, +}; + +#[test] +fn test_edit() { + let mut buffer = Buffer::new(0, 0, History::new("abc".into())); + assert_eq!(buffer.text(), "abc"); + buffer.edit(vec![3..3], "def"); + assert_eq!(buffer.text(), "abcdef"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + buffer.edit(vec![5..5], "jkl"); + assert_eq!(buffer.text(), "ghiabjklcdef"); + buffer.edit(vec![6..7], ""); + assert_eq!(buffer.text(), "ghiabjlcdef"); + buffer.edit(vec![4..9], "mno"); + assert_eq!(buffer.text(), "ghiamnoef"); +} + +#[gpui::test(iterations = 100)] +fn test_random_edits(mut rng: StdRng) { + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let reference_string_len = rng.gen_range(0..3); + let mut reference_string = RandomCharIter::new(&mut rng) + .take(reference_string_len) + .collect::(); + let mut buffer = Buffer::new(0, 0, History::new(reference_string.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + let mut buffer_versions = Vec::new(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + + for _i in 0..operations { + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); + for old_range in old_ranges.iter().rev() { + reference_string.replace_range(old_range.clone(), &new_text); + } + assert_eq!(buffer.text(), reference_string); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + + if rng.gen_bool(0.25) { + buffer.randomly_undo_redo(&mut rng); + reference_string = buffer.text(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + } 
+ + let range = buffer.random_byte_range(0, &mut rng); + assert_eq!( + buffer.text_summary_for_range(range.clone()), + TextSummary::from(&reference_string[range]) + ); + + if rng.gen_bool(0.3) { + buffer_versions.push(buffer.clone()); + } + } + + for mut old_buffer in buffer_versions { + let edits = buffer + .edits_since(old_buffer.version.clone()) + .collect::>(); + + log::info!( + "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", + old_buffer.version(), + old_buffer.text(), + edits, + ); + + let mut delta = 0_isize; + for edit in edits { + let old_start = (edit.old_bytes.start as isize + delta) as usize; + let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); + old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text); + delta += edit.delta(); + } + assert_eq!(old_buffer.text(), buffer.text()); + } +} + +#[test] +fn test_line_len() { + let mut buffer = Buffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefg\nhij"); + buffer.edit(vec![12..12], "kl\nmno"); + buffer.edit(vec![18..18], "\npqrs\n"); + buffer.edit(vec![18..21], "\nPQ"); + + assert_eq!(buffer.line_len(0), 4); + assert_eq!(buffer.line_len(1), 3); + assert_eq!(buffer.line_len(2), 5); + assert_eq!(buffer.line_len(3), 3); + assert_eq!(buffer.line_len(4), 4); + assert_eq!(buffer.line_len(5), 0); +} + +#[test] +fn test_text_summary_for_range() { + let buffer = Buffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into())); + assert_eq!( + buffer.text_summary_for_range(1..3), + TextSummary { + bytes: 2, + lines: Point::new(1, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 1, + } + ); + assert_eq!( + buffer.text_summary_for_range(1..12), + TextSummary { + bytes: 11, + lines: Point::new(3, 0), + first_line_chars: 1, + last_line_chars: 0, + longest_row: 2, + longest_row_chars: 4, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..20), + TextSummary { + bytes: 20, + lines: Point::new(4, 1), + first_line_chars: 2, + last_line_chars: 1, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(0..22), + TextSummary { + bytes: 22, + lines: Point::new(4, 3), + first_line_chars: 2, + last_line_chars: 3, + longest_row: 3, + longest_row_chars: 6, + } + ); + assert_eq!( + buffer.text_summary_for_range(7..22), + TextSummary { + bytes: 15, + lines: Point::new(2, 3), + first_line_chars: 4, + last_line_chars: 3, + longest_row: 1, + longest_row_chars: 6, + } + ); +} + +#[test] +fn test_chars_at() { + let mut buffer = Buffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abcd\nefgh\nij"); + buffer.edit(vec![12..12], "kl\nmno"); + buffer.edit(vec![18..18], "\npqrs"); + buffer.edit(vec![18..21], "\nPQ"); + + let chars = buffer.chars_at(Point::new(0, 0)); + assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(1, 0)); + assert_eq!(chars.collect::(), "efgh\nijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(2, 0)); + assert_eq!(chars.collect::(), "ijkl\nmno\nPQrs"); + + let chars = buffer.chars_at(Point::new(3, 0)); + assert_eq!(chars.collect::(), "mno\nPQrs"); + + let chars = buffer.chars_at(Point::new(4, 0)); + assert_eq!(chars.collect::(), "PQrs"); + + // Regression test: + let mut buffer = Buffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n"); + buffer.edit(vec![60..60], "\n"); 
+ + let chars = buffer.chars_at(Point::new(6, 0)); + assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); +} + +#[test] +fn test_anchors() { + let mut buffer = Buffer::new(0, 0, History::new("".into())); + buffer.edit(vec![0..0], "abc"); + let left_anchor = buffer.anchor_before(2); + let right_anchor = buffer.anchor_after(2); + + buffer.edit(vec![1..1], "def\n"); + assert_eq!(buffer.text(), "adef\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 6); + assert_eq!(right_anchor.to_offset(&buffer), 6); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + buffer.edit(vec![2..3], ""); + assert_eq!(buffer.text(), "adf\nbc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 5); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + + buffer.edit(vec![5..5], "ghi\n"); + assert_eq!(buffer.text(), "adf\nbghi\nc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 9); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); + + buffer.edit(vec![7..9], ""); + assert_eq!(buffer.text(), "adf\nbghc"); + assert_eq!(left_anchor.to_offset(&buffer), 5); + assert_eq!(right_anchor.to_offset(&buffer), 7); + assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },); + assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 }); + + // Ensure anchoring to a point is equivalent to anchoring to an offset. + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 0 }), + buffer.anchor_before(0) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 1 }), + buffer.anchor_before(1) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 2 }), + buffer.anchor_before(2) + ); + assert_eq!( + buffer.anchor_before(Point { row: 0, column: 3 }), + buffer.anchor_before(3) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 0 }), + buffer.anchor_before(4) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 1 }), + buffer.anchor_before(5) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 2 }), + buffer.anchor_before(6) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 3 }), + buffer.anchor_before(7) + ); + assert_eq!( + buffer.anchor_before(Point { row: 1, column: 4 }), + buffer.anchor_before(8) + ); + + // Comparison between anchors. 
+ let anchor_at_offset_0 = buffer.anchor_before(0); + let anchor_at_offset_1 = buffer.anchor_before(1); + let anchor_at_offset_2 = buffer.anchor_before(2); + + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Equal + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Equal + ); + + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); + assert_eq!( + anchor_at_offset_0 + .cmp(&anchor_at_offset_2, &buffer) + .unwrap(), + Ordering::Less + ); + + assert_eq!( + anchor_at_offset_1 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_1, &buffer) + .unwrap(), + Ordering::Greater + ); + assert_eq!( + anchor_at_offset_2 + .cmp(&anchor_at_offset_0, &buffer) + .unwrap(), + Ordering::Greater + ); +} + +#[test] +fn test_anchors_at_start_and_end() { + let mut buffer = Buffer::new(0, 0, History::new("".into())); + let before_start_anchor = buffer.anchor_before(0); + let after_end_anchor = buffer.anchor_after(0); + + buffer.edit(vec![0..0], "abc"); + assert_eq!(buffer.text(), "abc"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_end_anchor.to_offset(&buffer), 3); + + let after_start_anchor = buffer.anchor_after(0); + let before_end_anchor = buffer.anchor_before(3); + + buffer.edit(vec![3..3], "def"); + buffer.edit(vec![0..0], "ghi"); + assert_eq!(buffer.text(), "ghiabcdef"); + assert_eq!(before_start_anchor.to_offset(&buffer), 0); + assert_eq!(after_start_anchor.to_offset(&buffer), 3); + assert_eq!(before_end_anchor.to_offset(&buffer), 6); + assert_eq!(after_end_anchor.to_offset(&buffer), 9); +} + +#[test] +fn test_undo_redo() { + let mut buffer = Buffer::new(0, 0, History::new("1234".into())); + // Set group interval to zero so as to not group edits in the undo stack. + buffer.history.group_interval = Duration::from_secs(0); + + buffer.edit(vec![1..1], "abx"); + buffer.edit(vec![3..4], "yzef"); + buffer.edit(vec![3..5], "cd"); + assert_eq!(buffer.text(), "1abcdef234"); + + let transactions = buffer.history.undo_stack.clone(); + assert_eq!(transactions.len(), 3); + + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1cdef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdx234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abx234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abcdef234"); + + buffer.undo_or_redo(transactions[2].clone()).unwrap(); + assert_eq!(buffer.text(), "1abyzef234"); + buffer.undo_or_redo(transactions[0].clone()).unwrap(); + assert_eq!(buffer.text(), "1yzef234"); + buffer.undo_or_redo(transactions[1].clone()).unwrap(); + assert_eq!(buffer.text(), "1234"); +} + +#[test] +fn test_history() { + let mut now = Instant::now(); + let mut buffer = Buffer::new(0, 0, History::new("123456".into())); + + let set_id = if let Operation::UpdateSelections { set_id, .. 
} = + buffer.add_selection_set(buffer.selections_from_ranges(vec![4..4]).unwrap()) + { + set_id + } else { + unreachable!() + }; + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.edit(vec![2..4], "cd"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cd56"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![1..3]).unwrap()) + .unwrap(); + buffer.edit(vec![4..5], "e"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + now += buffer.history.group_interval + Duration::from_millis(1); + buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer + .update_selection_set(set_id, buffer.selections_from_ranges(vec![2..2]).unwrap()) + .unwrap(); + buffer.edit(vec![0..1], "a"); + buffer.edit(vec![1..1], "b"); + buffer.end_transaction_at(Some(set_id), now).unwrap(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + // Last transaction happened past the group interval, undo it on its + // own. + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // First two transactions happened within the group interval, undo them + // together. + buffer.undo(); + assert_eq!(buffer.text(), "123456"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); + + // Redo the first two transactions together. + buffer.redo(); + assert_eq!(buffer.text(), "12cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); + + // Redo the last transaction on its own. 
+ buffer.redo(); + assert_eq!(buffer.text(), "ab2cde6"); + assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); + + buffer.start_transaction_at(None, now).unwrap(); + assert!(buffer.end_transaction_at(None, now).is_none()); + buffer.undo(); + assert_eq!(buffer.text(), "12cde6"); +} + +#[test] +fn test_concurrent_edits() { + let text = "abcdef"; + + let mut buffer1 = Buffer::new(1, 0, History::new(text.into())); + let mut buffer2 = Buffer::new(2, 0, History::new(text.into())); + let mut buffer3 = Buffer::new(3, 0, History::new(text.into())); + + let buf1_op = buffer1.edit(vec![1..2], "12"); + assert_eq!(buffer1.text(), "a12cdef"); + let buf2_op = buffer2.edit(vec![3..4], "34"); + assert_eq!(buffer2.text(), "abc34ef"); + let buf3_op = buffer3.edit(vec![5..6], "56"); + assert_eq!(buffer3.text(), "abcde56"); + + buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); + buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + + assert_eq!(buffer1.text(), "a12c34e56"); + assert_eq!(buffer2.text(), "a12c34e56"); + assert_eq!(buffer3.text(), "a12c34e56"); +} + +#[gpui::test(iterations = 100)] +fn test_random_concurrent_edits(mut rng: StdRng) { + let peers = env::var("PEERS") + .map(|i| i.parse().expect("invalid `PEERS` variable")) + .unwrap_or(5); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let base_text_len = rng.gen_range(0..10); + let base_text = RandomCharIter::new(&mut rng) + .take(base_text_len) + .collect::(); + let mut replica_ids = Vec::new(); + let mut buffers = Vec::new(); + let mut network = Network::new(rng.clone()); + + for i in 0..peers { + let mut buffer = Buffer::new(i as ReplicaId, 0, History::new(base_text.clone().into())); + buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + buffers.push(buffer); + replica_ids.push(i as u16); + network.add_peer(i as u16); + } + + log::info!("initial text: {:?}", base_text); + + let mut mutation_count = operations; + loop { + let replica_index = rng.gen_range(0..peers); + let replica_id = replica_ids[replica_index]; + let buffer = &mut buffers[replica_index]; + match rng.gen_range(0..=100) { + 0..=50 if mutation_count != 0 => { + let ops = buffer.randomly_mutate(&mut rng); + network.broadcast(buffer.replica_id, ops); + log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); + mutation_count -= 1; + } + 51..=70 if mutation_count != 0 => { + let ops = buffer.randomly_undo_redo(&mut rng); + network.broadcast(buffer.replica_id, ops); + mutation_count -= 1; + } + 71..=100 if network.has_unreceived(replica_id) => { + let ops = network.receive(replica_id); + if !ops.is_empty() { + log::info!( + "peer {} applying {} ops from the network.", + replica_id, + ops.len() + ); + buffer.apply_ops(ops).unwrap(); + } + } + _ => {} + } + + if mutation_count == 0 && network.is_idle() { + break; + } + } + + let first_buffer = &buffers[0]; + for buffer in &buffers[1..] 
{ + assert_eq!( + buffer.text(), + first_buffer.text(), + "Replica {} text != Replica 0 text", + buffer.replica_id + ); + assert_eq!( + buffer.selection_sets().collect::>(), + first_buffer.selection_sets().collect::>() + ); + assert_eq!( + buffer.all_selection_ranges().collect::>(), + first_buffer + .all_selection_ranges() + .collect::>() + ); + } +} + +#[derive(Clone)] +struct Envelope { + message: T, + sender: ReplicaId, +} + +struct Network { + inboxes: std::collections::BTreeMap>>, + all_messages: Vec, + rng: R, +} + +impl Network { + fn new(rng: R) -> Self { + Network { + inboxes: Default::default(), + all_messages: Vec::new(), + rng, + } + } + + fn add_peer(&mut self, id: ReplicaId) { + self.inboxes.insert(id, Vec::new()); + } + + fn is_idle(&self) -> bool { + self.inboxes.values().all(|i| i.is_empty()) + } + + fn broadcast(&mut self, sender: ReplicaId, messages: Vec) { + for (replica, inbox) in self.inboxes.iter_mut() { + if *replica != sender { + for message in &messages { + let min_index = inbox + .iter() + .enumerate() + .rev() + .find_map(|(index, envelope)| { + if sender == envelope.sender { + Some(index + 1) + } else { + None + } + }) + .unwrap_or(0); + + // Insert one or more duplicates of this message *after* the previous + // message delivered by this replica. + for _ in 0..self.rng.gen_range(1..4) { + let insertion_index = self.rng.gen_range(min_index..inbox.len() + 1); + inbox.insert( + insertion_index, + Envelope { + message: message.clone(), + sender, + }, + ); + } + } + } + } + self.all_messages.extend(messages); + } + + fn has_unreceived(&self, receiver: ReplicaId) -> bool { + !self.inboxes[&receiver].is_empty() + } + + fn receive(&mut self, receiver: ReplicaId) -> Vec { + let inbox = self.inboxes.get_mut(&receiver).unwrap(); + let count = self.rng.gen_range(0..inbox.len() + 1); + inbox + .drain(0..count) + .map(|envelope| envelope.message) + .collect() + } +} diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 5212d42526..59ed90d460 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -4,12 +4,17 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["buffer/test-support", "gpui/test-support"] +test-support = [ + "buffer/test-support", + "language/test-support", + "gpui/test-support", +] [dependencies] buffer = { path = "../buffer" } clock = { path = "../clock" } gpui = { path = "../gpui" } +language = { path = "../language" } sum_tree = { path = "../sum_tree" } theme = { path = "../theme" } util = { path = "../util" } @@ -24,6 +29,7 @@ smol = "1.2" [dev-dependencies] buffer = { path = "../buffer", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } rand = "0.8" unindent = "0.1.7" diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index d9655d9a9c..30a506ea92 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -2,9 +2,9 @@ mod fold_map; mod tab_map; mod wrap_map; -use buffer::{Anchor, Buffer, Point, ToOffset, ToPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle}; +use language::{Anchor, Buffer, Point, ToOffset, ToPoint}; use std::ops::Range; use sum_tree::Bias; use tab_map::TabMap; @@ -109,7 +109,7 @@ impl DisplayMap { } pub struct DisplayMapSnapshot { - buffer_snapshot: buffer::Snapshot, + buffer_snapshot: language::Snapshot, folds_snapshot: fold_map::Snapshot, tabs_snapshot: tab_map::Snapshot, 
wraps_snapshot: wrap_map::Snapshot, @@ -358,8 +358,8 @@ impl ToDisplayPoint for Anchor { mod tests { use super::*; use crate::{movement, test::*}; - use buffer::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal}; use gpui::{color::Color, MutableAppContext}; + use language::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal}; use rand::{prelude::StdRng, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; @@ -436,7 +436,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx)); + buffer.update(&mut cx, |buffer, _| buffer.randomly_edit(&mut rng, 5)); } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 3dc671e59d..73e032e7f3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,5 +1,5 @@ -use buffer::{Anchor, Buffer, Point, ToOffset, AnchorRangeExt, HighlightId, TextSummary}; use gpui::{AppContext, ModelHandle}; +use language::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset}; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -485,7 +485,7 @@ impl FoldMap { pub struct Snapshot { transforms: SumTree, folds: SumTree, - buffer_snapshot: buffer::Snapshot, + buffer_snapshot: language::Snapshot, pub version: usize, } @@ -994,7 +994,7 @@ impl<'a> Iterator for Chunks<'a> { pub struct HighlightedChunks<'a> { transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>, - buffer_chunks: buffer::HighlightedChunks<'a>, + buffer_chunks: language::HighlightedChunks<'a>, buffer_chunk: Option<(usize, &'a str, HighlightId)>, buffer_offset: usize, } @@ -1331,10 +1331,10 @@ mod tests { snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref())); } _ => { - let edits = buffer.update(cx, |buffer, cx| { + let edits = buffer.update(cx, |buffer, _| { let start_version = buffer.version.clone(); let edit_count = rng.gen_range(1..=5); - buffer.randomly_edit(&mut rng, edit_count, cx); + buffer.randomly_edit(&mut rng, edit_count); buffer.edits_since(start_version).collect::>() }); log::info!("editing {:?}", edits); diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 4fa684c47e..cfab4fd941 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,5 +1,5 @@ use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot}; -use buffer::{rope, HighlightId}; +use language::{rope, HighlightId}; use parking_lot::Mutex; use std::{mem, ops::Range}; use sum_tree::Bias; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index fa26685a65..897dfa01b9 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -2,8 +2,8 @@ use super::{ fold_map, tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary}, }; -use buffer::{HighlightId, Point}; use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task}; +use language::{HighlightId, Point}; use lazy_static::lazy_static; use smol::future::yield_now; use std::{collections::VecDeque, ops::Range, time::Duration}; @@ -899,7 +899,7 @@ mod tests { display_map::{fold_map::FoldMap, tab_map::TabMap}, test::Observer, }; - use buffer::{Buffer, RandomCharIter}; + use language::{Buffer, RandomCharIter}; use rand::prelude::*; use std::env; @@ -990,7 +990,7 @@ mod tests { } } _ => { - buffer.update(&mut cx, |buffer, cx| 
buffer.randomly_mutate(&mut rng, cx)); + buffer.update(&mut cx, |buffer, _| buffer.randomly_mutate(&mut rng)); } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e3e48e475c..cf0a101b0f 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2,7 +2,6 @@ use super::{ DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select, SelectPhase, Snapshot, MAX_LINE_LEN, }; -use buffer::HighlightId; use clock::ReplicaId; use gpui::{ color::Color, @@ -18,6 +17,7 @@ use gpui::{ MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle, }; use json::json; +use language::HighlightId; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -1043,7 +1043,7 @@ mod tests { test::sample_text, {Editor, EditorSettings}, }; - use buffer::Buffer; + use language::Buffer; #[gpui::test] fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) { diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index 856aa37799..61414d5dc7 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -5,7 +5,6 @@ pub mod movement; #[cfg(test)] mod test; -use buffer::*; use clock::ReplicaId; pub use display_map::DisplayPoint; use display_map::*; @@ -15,13 +14,14 @@ use gpui::{ text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle, }; +use language::*; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; use std::{ cell::RefCell, cmp::{self, Ordering}, - mem, + iter, mem, ops::{Range, RangeInclusive}, rc::Rc, sync::Arc, @@ -38,6 +38,8 @@ action!(Cancel); action!(Backspace); action!(Delete); action!(Input, String); +action!(Newline); +action!(Tab); action!(DeleteLine); action!(DeleteToPreviousWordBoundary); action!(DeleteToNextWordBoundary); @@ -95,13 +97,13 @@ pub fn init(cx: &mut MutableAppContext) { Binding::new("ctrl-h", Backspace, Some("Editor")), Binding::new("delete", Delete, Some("Editor")), Binding::new("ctrl-d", Delete, Some("Editor")), - Binding::new("enter", Input("\n".into()), Some("Editor && mode == full")), + Binding::new("enter", Newline, Some("Editor && mode == full")), Binding::new( "alt-enter", Input("\n".into()), Some("Editor && mode == auto_height"), ), - Binding::new("tab", Input("\t".into()), Some("Editor")), + Binding::new("tab", Tab, Some("Editor")), Binding::new("ctrl-shift-K", DeleteLine, Some("Editor")), Binding::new( "alt-backspace", @@ -193,8 +195,10 @@ pub fn init(cx: &mut MutableAppContext) { cx.add_action(Editor::select); cx.add_action(Editor::cancel); cx.add_action(Editor::handle_input); + cx.add_action(Editor::newline); cx.add_action(Editor::backspace); cx.add_action(Editor::delete); + cx.add_action(Editor::tab); cx.add_action(Editor::delete_line); cx.add_action(Editor::delete_to_previous_word_boundary); cx.add_action(Editor::delete_to_next_word_boundary); @@ -292,7 +296,7 @@ pub struct Editor { pending_selection: Option, next_selection_id: usize, add_selections_state: Option, - autoclose_stack: Vec, + autoclose_stack: Vec, select_larger_syntax_node_stack: Vec>, scroll_position: Vector2F, scroll_top_anchor: Anchor, @@ -320,9 +324,9 @@ struct AddSelectionsState { stack: Vec, } -struct AutoclosePairState { +struct BracketPairState { ranges: SmallVec<[Range; 32]>, - pair: AutoclosePair, + pair: BracketPair, } #[derive(Serialize, Deserialize)] @@ -750,6 +754,130 @@ impl Editor { } } + pub fn newline(&mut self, _: &Newline, cx: &mut 
ViewContext) { + self.start_transaction(cx); + let mut old_selections = SmallVec::<[_; 32]>::new(); + { + let selections = self.selections(cx); + let buffer = self.buffer.read(cx); + for selection in selections.iter() { + let start_point = selection.start.to_point(buffer); + let indent = buffer + .indent_column_for_line(start_point.row) + .min(start_point.column); + let start = selection.start.to_offset(buffer); + let end = selection.end.to_offset(buffer); + + let mut insert_extra_newline = false; + if let Some(language) = buffer.language() { + let leading_whitespace_len = buffer + .reversed_chars_at(start) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + + let trailing_whitespace_len = buffer + .chars_at(end) + .take_while(|c| c.is_whitespace() && *c != '\n') + .map(|c| c.len_utf8()) + .sum::(); + + insert_extra_newline = language.brackets().iter().any(|pair| { + let pair_start = pair.start.trim_end(); + let pair_end = pair.end.trim_start(); + + pair.newline + && buffer.contains_str_at(end + trailing_whitespace_len, pair_end) + && buffer.contains_str_at( + (start - leading_whitespace_len).saturating_sub(pair_start.len()), + pair_start, + ) + }); + } + + old_selections.push((selection.id, start..end, indent, insert_extra_newline)); + } + } + + let mut new_selections = Vec::with_capacity(old_selections.len()); + self.buffer.update(cx, |buffer, cx| { + let mut delta = 0_isize; + let mut pending_edit: Option = None; + for (_, range, indent, insert_extra_newline) in &old_selections { + if pending_edit.as_ref().map_or(false, |pending| { + pending.indent != *indent + || pending.insert_extra_newline != *insert_extra_newline + }) { + let pending = pending_edit.take().unwrap(); + let mut new_text = String::with_capacity(1 + pending.indent as usize); + new_text.push('\n'); + new_text.extend(iter::repeat(' ').take(pending.indent as usize)); + if pending.insert_extra_newline { + new_text = new_text.repeat(2); + } + buffer.edit_with_autoindent(pending.ranges, new_text, cx); + delta += pending.delta; + } + + let start = (range.start as isize + delta) as usize; + let end = (range.end as isize + delta) as usize; + let mut text_len = *indent as usize + 1; + if *insert_extra_newline { + text_len *= 2; + } + + let pending = pending_edit.get_or_insert_with(Default::default); + pending.delta += text_len as isize - (end - start) as isize; + pending.indent = *indent; + pending.insert_extra_newline = *insert_extra_newline; + pending.ranges.push(start..end); + } + + let pending = pending_edit.unwrap(); + let mut new_text = String::with_capacity(1 + pending.indent as usize); + new_text.push('\n'); + new_text.extend(iter::repeat(' ').take(pending.indent as usize)); + if pending.insert_extra_newline { + new_text = new_text.repeat(2); + } + buffer.edit_with_autoindent(pending.ranges, new_text, cx); + + let mut delta = 0_isize; + new_selections.extend(old_selections.into_iter().map( + |(id, range, indent, insert_extra_newline)| { + let start = (range.start as isize + delta) as usize; + let end = (range.end as isize + delta) as usize; + let text_before_cursor_len = indent as usize + 1; + let anchor = buffer.anchor_before(start + text_before_cursor_len); + let text_len = if insert_extra_newline { + text_before_cursor_len * 2 + } else { + text_before_cursor_len + }; + delta += text_len as isize - (end - start) as isize; + Selection { + id, + start: anchor.clone(), + end: anchor, + reversed: false, + goal: SelectionGoal::None, + } + }, + )) + }); + + 
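+        // Editor's note (descriptive comment, not part of the original patch): the closure above has
+        // rebuilt each selection as an anchor placed just after its inserted newline and indentation;
+        // the calls below apply those cursor positions and commit the transaction.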
self.update_selections(new_selections, true, cx); + self.end_transaction(cx); + + #[derive(Default)] + struct PendingEdit { + indent: u32, + insert_extra_newline: bool, + delta: isize, + ranges: SmallVec<[Range; 32]>, + } + } + fn insert(&mut self, text: &str, cx: &mut ViewContext) { self.start_transaction(cx); let mut old_selections = SmallVec::<[_; 32]>::new(); @@ -766,7 +894,7 @@ impl Editor { let mut new_selections = Vec::new(); self.buffer.update(cx, |buffer, cx| { let edit_ranges = old_selections.iter().map(|(_, range)| range.clone()); - buffer.edit(edit_ranges, text, cx); + buffer.edit_with_autoindent(edit_ranges, text, cx); let text_len = text.len() as isize; let mut delta = 0_isize; new_selections = old_selections @@ -797,7 +925,7 @@ impl Editor { let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| { let autoclose_pair = buffer.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start.to_offset(&*buffer); - let pair = language.autoclose_pairs().iter().find(|pair| { + let pair = language.brackets().iter().find(|pair| { buffer.contains_str_at( first_selection_start.saturating_sub(pair.start.len()), &pair.start, @@ -832,7 +960,7 @@ impl Editor { buffer.edit(selection_ranges, &pair.end, cx); if pair.end.len() == 1 { - Some(AutoclosePairState { + Some(BracketPairState { ranges: selections .iter() .map(|selection| { @@ -950,6 +1078,51 @@ impl Editor { self.end_transaction(cx); } + pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext) { + self.start_transaction(cx); + let tab_size = self.build_settings.borrow()(cx).tab_size; + let mut selections = self.selections(cx).to_vec(); + self.buffer.update(cx, |buffer, cx| { + let mut last_indented_row = None; + for selection in &mut selections { + let mut range = selection.point_range(buffer); + if range.is_empty() { + let char_column = buffer + .chars_for_range(Point::new(range.start.row, 0)..range.start) + .count(); + let chars_to_next_tab_stop = tab_size - (char_column % tab_size); + buffer.edit( + [range.start..range.start], + " ".repeat(chars_to_next_tab_stop), + cx, + ); + range.start.column += chars_to_next_tab_stop as u32; + + let head = buffer.anchor_before(range.start); + selection.start = head.clone(); + selection.end = head; + } else { + for row in range.start.row..=range.end.row { + if last_indented_row != Some(row) { + let char_column = buffer.indent_column_for_line(row) as usize; + let chars_to_next_tab_stop = tab_size - (char_column % tab_size); + let row_start = Point::new(row, 0); + buffer.edit( + [row_start..row_start], + " ".repeat(chars_to_next_tab_stop), + cx, + ); + last_indented_row = Some(row); + } + } + } + } + }); + + self.update_selections(selections, true, cx); + self.end_transaction(cx); + } + pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext) { self.start_transaction(cx); @@ -2488,17 +2661,17 @@ impl Editor { fn on_buffer_event( &mut self, _: ModelHandle, - event: &buffer::Event, + event: &language::Event, cx: &mut ViewContext, ) { match event { - buffer::Event::Edited => cx.emit(Event::Edited), - buffer::Event::Dirtied => cx.emit(Event::Dirtied), - buffer::Event::Saved => cx.emit(Event::Saved), - buffer::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged), - buffer::Event::Reloaded => cx.emit(Event::FileHandleChanged), - buffer::Event::Closed => cx.emit(Event::Closed), - buffer::Event::Reparsed => {} + language::Event::Edited => cx.emit(Event::Edited), + language::Event::Dirtied => cx.emit(Event::Dirtied), + language::Event::Saved => 
cx.emit(Event::Saved), + language::Event::FileHandleChanged => cx.emit(Event::FileHandleChanged), + language::Event::Reloaded => cx.emit(Event::FileHandleChanged), + language::Event::Closed => cx.emit(Event::Closed), + language::Event::Reparsed => {} } } @@ -3507,6 +3680,30 @@ mod tests { assert_eq!(buffer.read(cx).text(), "e t te our"); } + #[gpui::test] + fn test_newline(cx: &mut gpui::MutableAppContext) { + let buffer = cx.add_model(|cx| Buffer::new(0, "aaaa\n bbbb\n", cx)); + let settings = EditorSettings::test(&cx); + let (_, view) = cx.add_window(Default::default(), |cx| { + build_editor(buffer.clone(), settings, cx) + }); + + view.update(cx, |view, cx| { + view.select_display_ranges( + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6), + ], + cx, + ) + .unwrap(); + + view.newline(&Newline, cx); + assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n"); + }); + } + #[gpui::test] fn test_backspace(cx: &mut gpui::MutableAppContext) { let buffer = cx.add_model(|cx| { @@ -4355,14 +4552,18 @@ mod tests { let settings = cx.read(EditorSettings::test); let language = Arc::new(Language::new( LanguageConfig { - autoclose_pairs: vec![ - AutoclosePair { + brackets: vec![ + BracketPair { start: "{".to_string(), end: "}".to_string(), + close: true, + newline: true, }, - AutoclosePair { + BracketPair { start: "/*".to_string(), end: " */".to_string(), + close: true, + newline: true, }, ], ..Default::default() @@ -4461,6 +4662,76 @@ mod tests { }); } + #[gpui::test] + async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) { + let settings = cx.read(EditorSettings::test); + let language = Arc::new(Language::new( + LanguageConfig { + brackets: vec![ + BracketPair { + start: "{".to_string(), + end: "}".to_string(), + close: true, + newline: true, + }, + BracketPair { + start: "/* ".to_string(), + end: " */".to_string(), + close: true, + newline: true, + }, + ], + ..Default::default() + }, + tree_sitter_rust::language(), + )); + + let text = concat!( + "{ }\n", // Suppress rustfmt + " x\n", // + " /* */\n", // + "x\n", // + "{{} }\n", // + ); + + let buffer = cx.add_model(|cx| { + let history = History::new(text.into()); + Buffer::from_history(0, history, None, Some(language), cx) + }); + let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); + view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) + .await; + + view.update(&mut cx, |view, cx| { + view.select_display_ranges( + &[ + DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), + DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), + DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4), + ], + cx, + ) + .unwrap(); + view.newline(&Newline, cx); + + assert_eq!( + view.buffer().read(cx).text(), + concat!( + "{ \n", // Suppress rustfmt + "\n", // + "}\n", // + " x\n", // + " /* \n", // + " \n", // + " */\n", // + "x\n", // + "{{} \n", // + "}\n", // + ) + ); + }); + } + impl Editor { fn selection_ranges(&self, cx: &mut MutableAppContext) -> Vec> { self.selections_in_range( diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml new file mode 100644 index 0000000000..3cbfb3ae12 --- /dev/null +++ b/crates/language/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "language" +version = "0.1.0" +edition = "2018" + +[features] +test-support = ["rand", "buffer/test-support"] + +[dependencies] +buffer = { path = "../buffer" } +clock = { path = "../clock" } +gpui = { path = "../gpui" } +rpc = { path = "../rpc" } 
+theme = { path = "../theme" } +util = { path = "../util" } +anyhow = "1.0.38" +futures = "0.3" +lazy_static = "1.4" +log = "0.4" +parking_lot = "0.11.1" +rand = { version = "0.8.3", optional = true } +serde = { version = "1", features = ["derive"] } +similar = "1.3" +smol = "1.2" +tree-sitter = "0.19.5" + +[dev-dependencies] +buffer = { path = "../buffer", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } +rand = "0.8.3" +tree-sitter-rust = "0.19.0" +unindent = "0.1.7" diff --git a/crates/buffer/src/highlight_map.rs b/crates/language/src/highlight_map.rs similarity index 100% rename from crates/buffer/src/highlight_map.rs rename to crates/language/src/highlight_map.rs diff --git a/crates/buffer/src/language.rs b/crates/language/src/language.rs similarity index 73% rename from crates/buffer/src/language.rs rename to crates/language/src/language.rs index 1a9a29aac5..2260990566 100644 --- a/crates/buffer/src/language.rs +++ b/crates/language/src/language.rs @@ -11,20 +11,23 @@ pub use tree_sitter::{Parser, Tree}; pub struct LanguageConfig { pub name: String, pub path_suffixes: Vec, - pub autoclose_pairs: Vec, + pub brackets: Vec, } -#[derive(Clone, Deserialize)] -pub struct AutoclosePair { +#[derive(Clone, Debug, Deserialize)] +pub struct BracketPair { pub start: String, pub end: String, + pub close: bool, + pub newline: bool, } pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Grammar, - pub(crate) highlight_query: Query, + pub(crate) highlights_query: Query, pub(crate) brackets_query: Query, + pub(crate) indents_query: Query, pub(crate) highlight_map: Mutex, } @@ -68,19 +71,25 @@ impl Language { Self { config, brackets_query: Query::new(grammar, "").unwrap(), - highlight_query: Query::new(grammar, "").unwrap(), + highlights_query: Query::new(grammar, "").unwrap(), + indents_query: Query::new(grammar, "").unwrap(), grammar, highlight_map: Default::default(), } } - pub fn with_highlights_query(mut self, highlights_query_source: &str) -> Result { - self.highlight_query = Query::new(self.grammar, highlights_query_source)?; + pub fn with_highlights_query(mut self, source: &str) -> Result { + self.highlights_query = Query::new(self.grammar, source)?; Ok(self) } - pub fn with_brackets_query(mut self, brackets_query_source: &str) -> Result { - self.brackets_query = Query::new(self.grammar, brackets_query_source)?; + pub fn with_brackets_query(mut self, source: &str) -> Result { + self.brackets_query = Query::new(self.grammar, source)?; + Ok(self) + } + + pub fn with_indents_query(mut self, source: &str) -> Result { + self.indents_query = Query::new(self.grammar, source)?; Ok(self) } @@ -88,8 +97,8 @@ impl Language { self.config.name.as_str() } - pub fn autoclose_pairs(&self) -> &[AutoclosePair] { - &self.config.autoclose_pairs + pub fn brackets(&self) -> &[BracketPair] { + &self.config.brackets } pub fn highlight_map(&self) -> HighlightMap { @@ -97,7 +106,8 @@ impl Language { } pub fn set_theme(&self, theme: &SyntaxTheme) { - *self.highlight_map.lock() = HighlightMap::new(self.highlight_query.capture_names(), theme); + *self.highlight_map.lock() = + HighlightMap::new(self.highlights_query.capture_names(), theme); } } @@ -110,28 +120,22 @@ mod tests { let grammar = tree_sitter_rust::language(); let registry = LanguageRegistry { languages: vec![ - Arc::new(Language { - config: LanguageConfig { + Arc::new(Language::new( + LanguageConfig { name: "Rust".to_string(), path_suffixes: vec!["rs".to_string()], ..Default::default() }, grammar, - 
highlight_query: Query::new(grammar, "").unwrap(), - brackets_query: Query::new(grammar, "").unwrap(), - highlight_map: Default::default(), - }), - Arc::new(Language { - config: LanguageConfig { + )), + Arc::new(Language::new( + LanguageConfig { name: "Make".to_string(), path_suffixes: vec!["Makefile".to_string(), "mk".to_string()], ..Default::default() }, grammar, - highlight_query: Query::new(grammar, "").unwrap(), - brackets_query: Query::new(grammar, "").unwrap(), - highlight_map: Default::default(), - }), + )), ], }; diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs new file mode 100644 index 0000000000..b80eed7e33 --- /dev/null +++ b/crates/language/src/lib.rs @@ -0,0 +1,1477 @@ +mod highlight_map; +mod language; +#[cfg(test)] +mod tests; + +pub use self::{ + highlight_map::{HighlightId, HighlightMap}, + language::{BracketPair, Language, LanguageConfig, LanguageRegistry}, +}; +use anyhow::{anyhow, Result}; +pub use buffer::{Buffer as TextBuffer, *}; +use clock::ReplicaId; +use futures::FutureExt as _; +use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; +use lazy_static::lazy_static; +use parking_lot::Mutex; +use rpc::proto; +use similar::{ChangeTag, TextDiff}; +use smol::future::yield_now; +use std::{ + any::Any, + cell::RefCell, + cmp, + collections::{BTreeMap, HashMap, HashSet}, + ffi::OsString, + future::Future, + iter::Iterator, + ops::{Deref, DerefMut, Range}, + path::{Path, PathBuf}, + str, + sync::Arc, + time::{Duration, Instant, SystemTime, UNIX_EPOCH}, +}; +use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; +use util::TryFutureExt as _; + +thread_local! { + static PARSER: RefCell = RefCell::new(Parser::new()); +} + +lazy_static! { + static ref QUERY_CURSORS: Mutex> = Default::default(); +} + +// TODO - Make this configurable +const INDENT_SIZE: u32 = 4; + +pub struct Buffer { + text: TextBuffer, + file: Option>, + saved_version: clock::Global, + saved_mtime: SystemTime, + language: Option>, + autoindent_requests: Vec>, + pending_autoindent: Option>, + sync_parse_timeout: Duration, + syntax_tree: Mutex>, + parsing_in_background: bool, + parse_count: usize, + #[cfg(test)] + operations: Vec, +} + +pub struct Snapshot { + text: buffer::Snapshot, + tree: Option, + is_parsing: bool, + language: Option>, + query_cursor: QueryCursorHandle, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum Event { + Edited, + Dirtied, + Saved, + FileHandleChanged, + Reloaded, + Reparsed, + Closed, +} + +pub trait File { + fn worktree_id(&self) -> usize; + + fn entry_id(&self) -> Option; + + fn mtime(&self) -> SystemTime; + + fn path(&self) -> &Arc; + + fn full_path(&self, cx: &AppContext) -> PathBuf; + + /// Returns the last component of this handle's absolute path. If this handle refers to the root + /// of its worktree, then this method will return the name of the worktree itself. 
+ fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option; + + fn is_deleted(&self) -> bool; + + fn save( + &self, + buffer_id: u64, + text: Rope, + version: clock::Global, + cx: &mut MutableAppContext, + ) -> Task>; + + fn load_local(&self, cx: &AppContext) -> Option>>; + + fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); + + fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); + + fn boxed_clone(&self) -> Box; + + fn as_any(&self) -> &dyn Any; +} + +struct QueryCursorHandle(Option); + +#[derive(Clone)] +struct SyntaxTree { + tree: Tree, + version: clock::Global, +} + +#[derive(Clone)] +struct AutoindentRequest { + selection_set_ids: HashSet, + before_edit: Snapshot, + edited: AnchorSet, + inserted: Option, +} + +#[derive(Debug)] +struct IndentSuggestion { + basis_row: u32, + indent: bool, +} + +struct TextProvider<'a>(&'a Rope); + +struct Highlights<'a> { + captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, + next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, + stack: Vec<(usize, HighlightId)>, + highlight_map: HighlightMap, +} + +pub struct HighlightedChunks<'a> { + range: Range, + chunks: Chunks<'a>, + highlights: Option>, +} + +struct Diff { + base_version: clock::Global, + new_text: Arc, + changes: Vec<(ChangeTag, usize)>, +} + +impl Buffer { + pub fn new>>( + replica_id: ReplicaId, + base_text: T, + cx: &mut ModelContext, + ) -> Self { + Self::build( + TextBuffer::new( + replica_id, + cx.model_id() as u64, + History::new(base_text.into()), + ), + None, + None, + cx, + ) + } + + pub fn from_history( + replica_id: ReplicaId, + history: History, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Self { + Self::build( + TextBuffer::new(replica_id, cx.model_id() as u64, history), + file, + language, + cx, + ) + } + + pub fn from_proto( + replica_id: ReplicaId, + message: proto::Buffer, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Result { + Ok(Self::build( + TextBuffer::from_proto(replica_id, message)?, + file, + language, + cx, + )) + } + + fn build( + buffer: TextBuffer, + file: Option>, + language: Option>, + cx: &mut ModelContext, + ) -> Self { + let saved_mtime; + if let Some(file) = file.as_ref() { + saved_mtime = file.mtime(); + } else { + saved_mtime = UNIX_EPOCH; + } + + let mut result = Self { + text: buffer, + saved_mtime, + saved_version: clock::Global::new(), + file, + syntax_tree: Mutex::new(None), + parsing_in_background: false, + parse_count: 0, + sync_parse_timeout: Duration::from_millis(1), + autoindent_requests: Default::default(), + pending_autoindent: Default::default(), + language, + + #[cfg(test)] + operations: Default::default(), + }; + result.reparse(cx); + result + } + + pub fn snapshot(&self) -> Snapshot { + Snapshot { + text: self.text.snapshot(), + tree: self.syntax_tree(), + is_parsing: self.parsing_in_background, + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), + } + } + + pub fn file(&self) -> Option<&dyn File> { + self.file.as_deref() + } + + pub fn save( + &mut self, + cx: &mut ModelContext, + ) -> Result>> { + let file = self + .file + .as_ref() + .ok_or_else(|| anyhow!("buffer has no file"))?; + let text = self.as_rope().clone(); + let version = self.version.clone(); + let save = file.save(self.remote_id(), text, version, cx.as_mut()); + Ok(cx.spawn(|this, mut cx| async move { + let (version, mtime) = save.await?; + this.update(&mut cx, |this, cx| { + this.did_save(version.clone(), mtime, None, 
cx); + }); + Ok((version, mtime)) + })) + } + + pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { + self.language = language; + self.reparse(cx); + } + + pub fn did_save( + &mut self, + version: clock::Global, + mtime: SystemTime, + new_file: Option>, + cx: &mut ModelContext, + ) { + self.saved_mtime = mtime; + self.saved_version = version; + if let Some(new_file) = new_file { + self.file = Some(new_file); + } + cx.emit(Event::Saved); + } + + pub fn file_updated( + &mut self, + new_file: Box, + cx: &mut ModelContext, + ) -> Option> { + let old_file = self.file.as_ref()?; + let mut file_changed = false; + let mut task = None; + + if new_file.path() != old_file.path() { + file_changed = true; + } + + if new_file.is_deleted() { + if !old_file.is_deleted() { + file_changed = true; + if !self.is_dirty() { + cx.emit(Event::Dirtied); + } + } + } else { + let new_mtime = new_file.mtime(); + if new_mtime != old_file.mtime() { + file_changed = true; + + if !self.is_dirty() { + task = Some(cx.spawn(|this, mut cx| { + async move { + let new_text = this.read_with(&cx, |this, cx| { + this.file.as_ref().and_then(|file| file.load_local(cx)) + }); + if let Some(new_text) = new_text { + let new_text = new_text.await?; + let diff = this + .read_with(&cx, |this, cx| this.diff(new_text.into(), cx)) + .await; + this.update(&mut cx, |this, cx| { + if this.apply_diff(diff, cx) { + this.saved_version = this.version.clone(); + this.saved_mtime = new_mtime; + cx.emit(Event::Reloaded); + } + }); + } + Ok(()) + } + .log_err() + .map(drop) + })); + } + } + } + + if file_changed { + cx.emit(Event::FileHandleChanged); + } + self.file = Some(new_file); + task + } + + pub fn close(&mut self, cx: &mut ModelContext) { + cx.emit(Event::Closed); + } + + pub fn language(&self) -> Option<&Arc> { + self.language.as_ref() + } + + pub fn parse_count(&self) -> usize { + self.parse_count + } + + fn syntax_tree(&self) -> Option { + if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { + self.interpolate_tree(syntax_tree); + Some(syntax_tree.tree.clone()) + } else { + None + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn is_parsing(&self) -> bool { + self.parsing_in_background + } + + #[cfg(test)] + pub fn set_sync_parse_timeout(&mut self, timeout: Duration) { + self.sync_parse_timeout = timeout; + } + + fn reparse(&mut self, cx: &mut ModelContext) -> bool { + if self.parsing_in_background { + return false; + } + + if let Some(language) = self.language.clone() { + let old_tree = self.syntax_tree(); + let text = self.as_rope().clone(); + let parsed_version = self.version(); + let parse_task = cx.background().spawn({ + let language = language.clone(); + async move { Self::parse_text(&text, old_tree, &language) } + }); + + match cx + .background() + .block_with_timeout(self.sync_parse_timeout, parse_task) + { + Ok(new_tree) => { + self.did_finish_parsing(new_tree, parsed_version, cx); + return true; + } + Err(parse_task) => { + self.parsing_in_background = true; + cx.spawn(move |this, mut cx| async move { + let new_tree = parse_task.await; + this.update(&mut cx, move |this, cx| { + let language_changed = + this.language.as_ref().map_or(true, |curr_language| { + !Arc::ptr_eq(curr_language, &language) + }); + let parse_again = this.version > parsed_version || language_changed; + this.parsing_in_background = false; + this.did_finish_parsing(new_tree, parsed_version, cx); + + if parse_again && this.reparse(cx) { + return; + } + }); + }) + .detach(); + } + } + } + false + } + + fn 
parse_text(text: &Rope, old_tree: Option, language: &Language) -> Tree { + PARSER.with(|parser| { + let mut parser = parser.borrow_mut(); + parser + .set_language(language.grammar) + .expect("incompatible grammar"); + let mut chunks = text.chunks_in_range(0..text.len()); + let tree = parser + .parse_with( + &mut move |offset, _| { + chunks.seek(offset); + chunks.next().unwrap_or("").as_bytes() + }, + old_tree.as_ref(), + ) + .unwrap(); + tree + }) + } + + fn interpolate_tree(&self, tree: &mut SyntaxTree) { + let mut delta = 0_isize; + for edit in self.edits_since(tree.version.clone()) { + let start_offset = (edit.old_bytes.start as isize + delta) as usize; + let start_point = self.as_rope().to_point(start_offset); + tree.tree.edit(&InputEdit { + start_byte: start_offset, + old_end_byte: start_offset + edit.deleted_bytes(), + new_end_byte: start_offset + edit.inserted_bytes(), + start_position: start_point.to_ts_point(), + old_end_position: (start_point + edit.deleted_lines()).to_ts_point(), + new_end_position: self + .as_rope() + .to_point(start_offset + edit.inserted_bytes()) + .to_ts_point(), + }); + delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; + } + tree.version = self.version(); + } + + fn did_finish_parsing( + &mut self, + tree: Tree, + version: clock::Global, + cx: &mut ModelContext, + ) { + self.parse_count += 1; + *self.syntax_tree.lock() = Some(SyntaxTree { tree, version }); + self.request_autoindent(cx); + cx.emit(Event::Reparsed); + cx.notify(); + } + + fn request_autoindent(&mut self, cx: &mut ModelContext) { + if let Some(indent_columns) = self.compute_autoindents() { + let indent_columns = cx.background().spawn(indent_columns); + match cx + .background() + .block_with_timeout(Duration::from_micros(500), indent_columns) + { + Ok(indent_columns) => self.apply_autoindents(indent_columns, cx), + Err(indent_columns) => { + self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move { + let indent_columns = indent_columns.await; + this.update(&mut cx, |this, cx| { + this.apply_autoindents(indent_columns, cx); + }); + })); + } + } + } + } + + fn compute_autoindents(&self) -> Option>> { + let max_rows_between_yields = 100; + let snapshot = self.snapshot(); + if snapshot.language.is_none() + || snapshot.tree.is_none() + || self.autoindent_requests.is_empty() + { + return None; + } + + let autoindent_requests = self.autoindent_requests.clone(); + Some(async move { + let mut indent_columns = BTreeMap::new(); + for request in autoindent_requests { + let old_to_new_rows = request + .edited + .to_points(&request.before_edit) + .map(|point| point.row) + .zip(request.edited.to_points(&snapshot).map(|point| point.row)) + .collect::>(); + + let mut old_suggestions = HashMap::::default(); + let old_edited_ranges = + contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields); + for old_edited_range in old_edited_ranges { + let suggestions = request + .before_edit + .suggest_autoindents(old_edited_range.clone()) + .into_iter() + .flatten(); + for (old_row, suggestion) in old_edited_range.zip(suggestions) { + let indentation_basis = old_to_new_rows + .get(&suggestion.basis_row) + .and_then(|from_row| old_suggestions.get(from_row).copied()) + .unwrap_or_else(|| { + request + .before_edit + .indent_column_for_line(suggestion.basis_row) + }); + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + old_suggestions.insert( + *old_to_new_rows.get(&old_row).unwrap(), + indentation_basis + delta, + ); + } + yield_now().await; + } + + // At this 
point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the + // buffer before the edit, but keyed by the row for these lines after the edits were applied. + let new_edited_row_ranges = + contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields); + for new_edited_row_range in new_edited_row_ranges { + let suggestions = snapshot + .suggest_autoindents(new_edited_row_range.clone()) + .into_iter() + .flatten(); + for (new_row, suggestion) in new_edited_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + if old_suggestions + .get(&new_row) + .map_or(true, |old_indentation| new_indentation != *old_indentation) + { + indent_columns.insert(new_row, new_indentation); + } + } + yield_now().await; + } + + if let Some(inserted) = request.inserted.as_ref() { + let inserted_row_ranges = contiguous_ranges( + inserted + .to_point_ranges(&snapshot) + .flat_map(|range| range.start.row..range.end.row + 1), + max_rows_between_yields, + ); + for inserted_row_range in inserted_row_ranges { + let suggestions = snapshot + .suggest_autoindents(inserted_row_range.clone()) + .into_iter() + .flatten(); + for (row, suggestion) in inserted_row_range.zip(suggestions) { + let delta = if suggestion.indent { INDENT_SIZE } else { 0 }; + let new_indentation = indent_columns + .get(&suggestion.basis_row) + .copied() + .unwrap_or_else(|| { + snapshot.indent_column_for_line(suggestion.basis_row) + }) + + delta; + indent_columns.insert(row, new_indentation); + } + yield_now().await; + } + } + } + indent_columns + }) + } + + fn apply_autoindents( + &mut self, + indent_columns: BTreeMap, + cx: &mut ModelContext, + ) { + let selection_set_ids = self + .autoindent_requests + .drain(..) 
+ .flat_map(|req| req.selection_set_ids.clone()) + .collect::>(); + + self.start_transaction(selection_set_ids.iter().copied()) + .unwrap(); + for (row, indent_column) in &indent_columns { + self.set_indent_column_for_line(*row, *indent_column, cx); + } + + for selection_set_id in &selection_set_ids { + if let Ok(set) = self.selection_set(*selection_set_id) { + let new_selections = set + .selections + .iter() + .map(|selection| { + let start_point = selection.start.to_point(&self.text); + if start_point.column == 0 { + let end_point = selection.end.to_point(&self.text); + let delta = Point::new( + 0, + indent_columns.get(&start_point.row).copied().unwrap_or(0), + ); + if delta.column > 0 { + return Selection { + id: selection.id, + goal: selection.goal, + reversed: selection.reversed, + start: self + .anchor_at(start_point + delta, selection.start.bias), + end: self.anchor_at(end_point + delta, selection.end.bias), + }; + } + } + selection.clone() + }) + .collect::>(); + self.update_selection_set(*selection_set_id, new_selections, cx) + .unwrap(); + } + } + + self.end_transaction(selection_set_ids.iter().copied(), cx) + .unwrap(); + } + + pub fn indent_column_for_line(&self, row: u32) -> u32 { + self.content().indent_column_for_line(row) + } + + fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { + let current_column = self.indent_column_for_line(row); + if column > current_column { + let offset = Point::new(row, 0).to_offset(&*self); + self.edit( + [offset..offset], + " ".repeat((column - current_column) as usize), + cx, + ); + } else if column < current_column { + self.edit( + [Point::new(row, 0)..Point::new(row, current_column - column)], + "", + cx, + ); + } + } + + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + if let Some(tree) = self.syntax_tree() { + let root = tree.root_node(); + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut node = root.descendant_for_byte_range(range.start, range.end); + while node.map_or(false, |n| n.byte_range() == range) { + node = node.unwrap().parent(); + } + node.map(|n| n.byte_range()) + } else { + None + } + } + + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + let (lang, tree) = self.language.as_ref().zip(self.syntax_tree())?; + let open_capture_ix = lang.brackets_query.capture_index_for_name("open")?; + let close_capture_ix = lang.brackets_query.capture_index_for_name("close")?; + + // Find bracket pairs that *inclusively* contain the given range. + let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; + let mut cursor = QueryCursorHandle::new(); + let matches = cursor.set_byte_range(range).matches( + &lang.brackets_query, + tree.root_node(), + TextProvider(self.as_rope()), + ); + + // Get the ranges of the innermost pair of brackets. + matches + .filter_map(|mat| { + let open = mat.nodes_for_capture_index(open_capture_ix).next()?; + let close = mat.nodes_for_capture_index(close_capture_ix).next()?; + Some((open.byte_range(), close.byte_range())) + }) + .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) + } + + fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { + // TODO: it would be nice to not allocate here. 
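+        // Descriptive comment (added for clarity): the line-oriented diff is computed on a background
+        // task, and apply_diff later replays it as edits only if the buffer is still at base_version.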
+ let old_text = self.text(); + let base_version = self.version(); + cx.background().spawn(async move { + let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref()) + .iter_all_changes() + .map(|c| (c.tag(), c.value().len())) + .collect::>(); + Diff { + base_version, + new_text, + changes, + } + }) + } + + fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { + if self.version == diff.base_version { + self.start_transaction(None).unwrap(); + let mut offset = 0; + for (tag, len) in diff.changes { + let range = offset..(offset + len); + match tag { + ChangeTag::Equal => offset += len, + ChangeTag::Delete => self.edit(Some(range), "", cx), + ChangeTag::Insert => { + self.edit(Some(offset..offset), &diff.new_text[range], cx); + offset += len; + } + } + } + self.end_transaction(None, cx).unwrap(); + true + } else { + false + } + } + + pub fn is_dirty(&self) -> bool { + self.version > self.saved_version + || self.file.as_ref().map_or(false, |file| file.is_deleted()) + } + + pub fn has_conflict(&self) -> bool { + self.version > self.saved_version + && self + .file + .as_ref() + .map_or(false, |file| file.mtime() > self.saved_mtime) + } + + pub fn start_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + ) -> Result<()> { + self.start_transaction_at(selection_set_ids, Instant::now()) + } + + fn start_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + ) -> Result<()> { + self.text.start_transaction_at(selection_set_ids, now) + } + + pub fn end_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + cx: &mut ModelContext, + ) -> Result<()> { + self.end_transaction_at(selection_set_ids, Instant::now(), cx) + } + + fn end_transaction_at( + &mut self, + selection_set_ids: impl IntoIterator, + now: Instant, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) { + cx.notify(); + let was_dirty = start_version != self.saved_version; + let edited = self.edits_since(start_version).next().is_some(); + if edited { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } + Ok(()) + } + + pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, false, cx) + } + + pub fn edit_with_autoindent( + &mut self, + ranges_iter: I, + new_text: T, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, true, cx) + } + + pub fn edit_internal( + &mut self, + ranges_iter: I, + new_text: T, + autoindent: bool, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + let new_text = new_text.into(); + + // Skip invalid ranges and coalesce contiguous ones. 
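+        // Descriptive comment (added for clarity): a range that starts at or before the end of the
+        // previously collected range is merged into it; empty ranges are dropped when new_text is empty.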
+ let mut ranges: Vec> = Vec::new(); + for range in ranges_iter { + let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + if !new_text.is_empty() || !range.is_empty() { + if let Some(prev_range) = ranges.last_mut() { + if prev_range.end >= range.start { + prev_range.end = cmp::max(prev_range.end, range.end); + } else { + ranges.push(range); + } + } else { + ranges.push(range); + } + } + } + if ranges.is_empty() { + return; + } + + self.start_transaction(None).unwrap(); + self.pending_autoindent.take(); + let autoindent_request = if autoindent && self.language.is_some() { + let before_edit = self.snapshot(); + let edited = self.content().anchor_set(ranges.iter().filter_map(|range| { + let start = range.start.to_point(&*self); + if new_text.starts_with('\n') && start.column == self.line_len(start.row) { + None + } else { + Some((range.start, Bias::Left)) + } + })); + Some((before_edit, edited)) + } else { + None + }; + + let first_newline_ix = new_text.find('\n'); + let new_text_len = new_text.len(); + + let edit = self.text.edit(ranges.iter().cloned(), new_text); + + if let Some((before_edit, edited)) = autoindent_request { + let mut inserted = None; + if let Some(first_newline_ix) = first_newline_ix { + let mut delta = 0isize; + inserted = Some(self.content().anchor_range_set(ranges.iter().map(|range| { + let start = (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += (range.end as isize - range.start as isize) + new_text_len as isize; + (start, Bias::Left)..(end, Bias::Right) + }))); + } + + let selection_set_ids = self + .text + .peek_undo_stack() + .unwrap() + .starting_selection_set_ids() + .collect(); + self.autoindent_requests.push(Arc::new(AutoindentRequest { + selection_set_ids, + before_edit, + edited, + inserted, + })); + } + + self.end_transaction(None, cx).unwrap(); + self.send_operation(Operation::Edit(edit), cx); + } + + fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { + cx.emit(Event::Edited); + if !was_dirty { + cx.emit(Event::Dirtied); + } + } + + pub fn add_selection_set( + &mut self, + selections: impl Into>, + cx: &mut ModelContext, + ) -> SelectionSetId { + let operation = self.text.add_selection_set(selections); + if let Operation::UpdateSelections { set_id, .. 
} = &operation { + let set_id = *set_id; + cx.notify(); + self.send_operation(operation, cx); + set_id + } else { + unreachable!() + } + } + + pub fn update_selection_set( + &mut self, + set_id: SelectionSetId, + selections: impl Into>, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.text.update_selection_set(set_id, selections)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) + } + + pub fn set_active_selection_set( + &mut self, + set_id: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.text.set_active_selection_set(set_id)?; + self.send_operation(operation, cx); + Ok(()) + } + + pub fn remove_selection_set( + &mut self, + set_id: SelectionSetId, + cx: &mut ModelContext, + ) -> Result<()> { + let operation = self.text.remove_selection_set(set_id)?; + cx.notify(); + self.send_operation(operation, cx); + Ok(()) + } + + pub fn apply_ops>( + &mut self, + ops: I, + cx: &mut ModelContext, + ) -> Result<()> { + self.pending_autoindent.take(); + + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + self.text.apply_ops(ops)?; + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + + Ok(()) + } + + #[cfg(not(test))] + pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { + if let Some(file) = &self.file { + file.buffer_updated(self.remote_id(), operation, cx.as_mut()); + } + } + + #[cfg(test)] + pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext) { + self.operations.push(operation); + } + + pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { + self.text.remove_peer(replica_id); + cx.notify(); + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.text.undo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } + + pub fn redo(&mut self, cx: &mut ModelContext) { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + for operation in self.text.redo() { + self.send_operation(operation, cx); + } + + cx.notify(); + if self.edits_since(old_version).next().is_some() { + self.did_edit(was_dirty, cx); + self.reparse(cx); + } + } +} + +#[cfg(any(test, feature = "test-support"))] +impl Buffer { + pub fn randomly_edit(&mut self, rng: &mut T, old_range_count: usize) + where + T: rand::Rng, + { + self.text.randomly_edit(rng, old_range_count); + } + + pub fn randomly_mutate(&mut self, rng: &mut T) + where + T: rand::Rng, + { + self.text.randomly_mutate(rng); + } +} + +impl Entity for Buffer { + type Event = Event; + + fn release(&mut self, cx: &mut gpui::MutableAppContext) { + if let Some(file) = self.file.as_ref() { + file.buffer_removed(self.remote_id(), cx); + } + } +} + +impl Clone for Buffer { + fn clone(&self) -> Self { + Self { + text: self.text.clone(), + saved_version: self.saved_version.clone(), + saved_mtime: self.saved_mtime, + file: self.file.as_ref().map(|f| f.boxed_clone()), + language: self.language.clone(), + syntax_tree: Mutex::new(self.syntax_tree.lock().clone()), + parsing_in_background: false, + sync_parse_timeout: self.sync_parse_timeout, + parse_count: self.parse_count, + autoindent_requests: Default::default(), + pending_autoindent: Default::default(), + + #[cfg(test)] + operations: self.operations.clone(), 
+ } + } +} + +impl Deref for Buffer { + type Target = TextBuffer; + + fn deref(&self) -> &Self::Target { + &self.text + } +} + +impl<'a> From<&'a Buffer> for Content<'a> { + fn from(buffer: &'a Buffer) -> Self { + Self::from(&buffer.text) + } +} + +impl<'a> From<&'a mut Buffer> for Content<'a> { + fn from(buffer: &'a mut Buffer) -> Self { + Self::from(&buffer.text) + } +} + +impl<'a> From<&'a Snapshot> for Content<'a> { + fn from(snapshot: &'a Snapshot) -> Self { + Self::from(&snapshot.text) + } +} + +impl Snapshot { + fn suggest_autoindents<'a>( + &'a self, + row_range: Range, + ) -> Option + 'a> { + let mut query_cursor = QueryCursorHandle::new(); + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let prev_non_blank_row = self.prev_non_blank_row(row_range.start); + + // Get the "indentation ranges" that intersect this row range. + let indent_capture_ix = language.indents_query.capture_index_for_name("indent"); + let end_capture_ix = language.indents_query.capture_index_for_name("end"); + query_cursor.set_point_range( + Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point() + ..Point::new(row_range.end, 0).to_ts_point(), + ); + let mut indentation_ranges = Vec::<(Range, &'static str)>::new(); + for mat in query_cursor.matches( + &language.indents_query, + tree.root_node(), + TextProvider(self.as_rope()), + ) { + let mut node_kind = ""; + let mut start: Option = None; + let mut end: Option = None; + for capture in mat.captures { + if Some(capture.index) == indent_capture_ix { + node_kind = capture.node.kind(); + start.get_or_insert(Point::from_ts_point(capture.node.start_position())); + end.get_or_insert(Point::from_ts_point(capture.node.end_position())); + } else if Some(capture.index) == end_capture_ix { + end = Some(Point::from_ts_point(capture.node.start_position().into())); + } + } + + if let Some((start, end)) = start.zip(end) { + if start.row == end.row { + continue; + } + + let range = start..end; + match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) { + Err(ix) => indentation_ranges.insert(ix, (range, node_kind)), + Ok(ix) => { + let prev_range = &mut indentation_ranges[ix]; + prev_range.0.end = prev_range.0.end.max(range.end); + } + } + } + } + + let mut prev_row = prev_non_blank_row.unwrap_or(0); + Some(row_range.map(move |row| { + let row_start = Point::new(row, self.indent_column_for_line(row)); + + let mut indent_from_prev_row = false; + let mut outdent_to_row = u32::MAX; + for (range, _node_kind) in &indentation_ranges { + if range.start.row >= row { + break; + } + + if range.start.row == prev_row && range.end > row_start { + indent_from_prev_row = true; + } + if range.end.row >= prev_row && range.end <= row_start { + outdent_to_row = outdent_to_row.min(range.start.row); + } + } + + let suggestion = if outdent_to_row == prev_row { + IndentSuggestion { + basis_row: prev_row, + indent: false, + } + } else if indent_from_prev_row { + IndentSuggestion { + basis_row: prev_row, + indent: true, + } + } else if outdent_to_row < prev_row { + IndentSuggestion { + basis_row: outdent_to_row, + indent: false, + } + } else { + IndentSuggestion { + basis_row: prev_row, + indent: false, + } + }; + + prev_row = row; + suggestion + })) + } else { + None + } + } + + fn prev_non_blank_row(&self, mut row: u32) -> Option { + while row > 0 { + row -= 1; + if !self.is_line_blank(row) { + return Some(row); + } + } + None + } + + fn is_line_blank(&self, row: u32) -> bool { + self.text_for_range(Point::new(row, 
0)..Point::new(row, self.line_len(row))) + .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none()) + } + + pub fn highlighted_text_for_range( + &mut self, + range: Range, + ) -> HighlightedChunks { + let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + let chunks = self.text.as_rope().chunks_in_range(range.clone()); + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let captures = self.query_cursor.set_byte_range(range.clone()).captures( + &language.highlights_query, + tree.root_node(), + TextProvider(self.text.as_rope()), + ); + + HighlightedChunks { + range, + chunks, + highlights: Some(Highlights { + captures, + next_capture: None, + stack: Default::default(), + highlight_map: language.highlight_map(), + }), + } + } else { + HighlightedChunks { + range, + chunks, + highlights: None, + } + } + } +} + +impl Clone for Snapshot { + fn clone(&self) -> Self { + Self { + text: self.text.clone(), + tree: self.tree.clone(), + is_parsing: self.is_parsing, + language: self.language.clone(), + query_cursor: QueryCursorHandle::new(), + } + } +} + +impl Deref for Snapshot { + type Target = buffer::Snapshot; + + fn deref(&self) -> &Self::Target { + &self.text + } +} + +impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> { + type I = ByteChunks<'a>; + + fn text(&mut self, node: tree_sitter::Node) -> Self::I { + ByteChunks(self.0.chunks_in_range(node.byte_range())) + } +} + +struct ByteChunks<'a>(rope::Chunks<'a>); + +impl<'a> Iterator for ByteChunks<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option { + self.0.next().map(str::as_bytes) + } +} + +impl<'a> HighlightedChunks<'a> { + pub fn seek(&mut self, offset: usize) { + self.range.start = offset; + self.chunks.seek(self.range.start); + if let Some(highlights) = self.highlights.as_mut() { + highlights + .stack + .retain(|(end_offset, _)| *end_offset > offset); + if let Some((mat, capture_ix)) = &highlights.next_capture { + let capture = mat.captures[*capture_ix as usize]; + if offset >= capture.node.start_byte() { + let next_capture_end = capture.node.end_byte(); + if offset < next_capture_end { + highlights.stack.push(( + next_capture_end, + highlights.highlight_map.get(capture.index), + )); + } + highlights.next_capture.take(); + } + } + highlights.captures.set_byte_range(self.range.clone()); + } + } + + pub fn offset(&self) -> usize { + self.range.start + } +} + +impl<'a> Iterator for HighlightedChunks<'a> { + type Item = (&'a str, HighlightId); + + fn next(&mut self) -> Option { + let mut next_capture_start = usize::MAX; + + if let Some(highlights) = self.highlights.as_mut() { + while let Some((parent_capture_end, _)) = highlights.stack.last() { + if *parent_capture_end <= self.range.start { + highlights.stack.pop(); + } else { + break; + } + } + + if highlights.next_capture.is_none() { + highlights.next_capture = highlights.captures.next(); + } + + while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() { + let capture = mat.captures[*capture_ix as usize]; + if self.range.start < capture.node.start_byte() { + next_capture_start = capture.node.start_byte(); + break; + } else { + let style_id = highlights.highlight_map.get(capture.index); + highlights.stack.push((capture.node.end_byte(), style_id)); + highlights.next_capture = highlights.captures.next(); + } + } + } + + if let Some(chunk) = self.chunks.peek() { + let chunk_start = self.range.start; + let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); + let mut 
style_id = HighlightId::default(); + if let Some((parent_capture_end, parent_style_id)) = + self.highlights.as_ref().and_then(|h| h.stack.last()) + { + chunk_end = chunk_end.min(*parent_capture_end); + style_id = *parent_style_id; + } + + let slice = + &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()]; + self.range.start = chunk_end; + if self.range.start == self.chunks.offset() + chunk.len() { + self.chunks.next().unwrap(); + } + + Some((slice, style_id)) + } else { + None + } + } +} + +impl QueryCursorHandle { + fn new() -> Self { + QueryCursorHandle(Some( + QUERY_CURSORS + .lock() + .pop() + .unwrap_or_else(|| QueryCursor::new()), + )) + } +} + +impl Deref for QueryCursorHandle { + type Target = QueryCursor; + + fn deref(&self) -> &Self::Target { + self.0.as_ref().unwrap() + } +} + +impl DerefMut for QueryCursorHandle { + fn deref_mut(&mut self) -> &mut Self::Target { + self.0.as_mut().unwrap() + } +} + +impl Drop for QueryCursorHandle { + fn drop(&mut self) { + let mut cursor = self.0.take().unwrap(); + cursor.set_byte_range(0..usize::MAX); + cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point()); + QUERY_CURSORS.lock().push(cursor) + } +} + +trait ToTreeSitterPoint { + fn to_ts_point(self) -> tree_sitter::Point; + fn from_ts_point(point: tree_sitter::Point) -> Self; +} + +impl ToTreeSitterPoint for Point { + fn to_ts_point(self) -> tree_sitter::Point { + tree_sitter::Point::new(self.row as usize, self.column as usize) + } + + fn from_ts_point(point: tree_sitter::Point) -> Self { + Point::new(point.row as u32, point.column as u32) + } +} + +fn contiguous_ranges( + values: impl IntoIterator, + max_len: usize, +) -> impl Iterator> { + let mut values = values.into_iter(); + let mut current_range: Option> = None; + std::iter::from_fn(move || loop { + if let Some(value) = values.next() { + if let Some(range) = &mut current_range { + if value == range.end && range.len() < max_len { + range.end += 1; + continue; + } + } + + let prev_range = current_range.clone(); + current_range = Some(value..(value + 1)); + if prev_range.is_some() { + return prev_range; + } + } else { + return current_range.take(); + } + }) +} diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs new file mode 100644 index 0000000000..23cdced4c7 --- /dev/null +++ b/crates/language/src/tests.rs @@ -0,0 +1,467 @@ +use super::*; +use gpui::{ModelHandle, MutableAppContext}; +use std::rc::Rc; +use unindent::Unindent as _; + +#[gpui::test] +fn test_edit_events(cx: &mut gpui::MutableAppContext) { + let mut now = Instant::now(); + let buffer_1_events = Rc::new(RefCell::new(Vec::new())); + let buffer_2_events = Rc::new(RefCell::new(Vec::new())); + + let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx)); + let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx)); + let buffer_ops = buffer1.update(cx, |buffer, cx| { + let buffer_1_events = buffer_1_events.clone(); + cx.subscribe(&buffer1, move |_, _, event, _| { + buffer_1_events.borrow_mut().push(event.clone()) + }) + .detach(); + let buffer_2_events = buffer_2_events.clone(); + cx.subscribe(&buffer2, move |_, _, event, _| { + buffer_2_events.borrow_mut().push(event.clone()) + }) + .detach(); + + // An edit emits an edited event, followed by a dirtied event, + // since the buffer was previously in a clean state. + buffer.edit(Some(2..4), "XYZ", cx); + + // An empty transaction does not emit any events. 
+ buffer.start_transaction(None).unwrap(); + buffer.end_transaction(None, cx).unwrap(); + + // A transaction containing two edits emits one edited event. + now += Duration::from_secs(1); + buffer.start_transaction_at(None, now).unwrap(); + buffer.edit(Some(5..5), "u", cx); + buffer.edit(Some(6..6), "w", cx); + buffer.end_transaction_at(None, now, cx).unwrap(); + + // Undoing a transaction emits one edited event. + buffer.undo(cx); + + buffer.operations.clone() + }); + + // Incorporating a set of remote ops emits a single edited event, + // followed by a dirtied event. + buffer2.update(cx, |buffer, cx| { + buffer.apply_ops(buffer_ops, cx).unwrap(); + }); + + let buffer_1_events = buffer_1_events.borrow(); + assert_eq!( + *buffer_1_events, + vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited] + ); + + let buffer_2_events = buffer_2_events.borrow(); + assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]); +} + +#[gpui::test] +async fn test_apply_diff(mut cx: gpui::TestAppContext) { + let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + + let text = "a\nccc\ndddd\nffffff\n"; + let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); + + let text = "a\n1\n\nccc\ndd2dd\nffffff\n"; + let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await; + buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx)); + cx.read(|cx| assert_eq!(buffer.read(cx).text(), text)); +} + +#[gpui::test] +async fn test_reparse(mut cx: gpui::TestAppContext) { + let buffer = cx.add_model(|cx| { + let text = "fn a() {}".into(); + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx) + }); + + // Wait for the initial text to parse + buffer + .condition(&cx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &cx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters) ", + "body: (block)))" + ) + ); + + buffer.update(&mut cx, |buffer, _| { + buffer.set_sync_parse_timeout(Duration::ZERO) + }); + + // Perform some edits (add parameter and variable reference) + // Parsing doesn't begin until the transaction is complete + buffer.update(&mut cx, |buf, cx| { + buf.start_transaction(None).unwrap(); + + let offset = buf.text().find(")").unwrap(); + buf.edit(vec![offset..offset], "b: C", cx); + assert!(!buf.is_parsing()); + + let offset = buf.text().find("}").unwrap(); + buf.edit(vec![offset..offset], " d; ", cx); + assert!(!buf.is_parsing()); + + buf.end_transaction(None, cx).unwrap(); + assert_eq!(buf.text(), "fn a(b: C) { d; }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&cx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &cx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (identifier))))" + ) + ); + + // Perform a series of edits without waiting for the current parse to complete: + // * turn identifier into a field expression + // * turn field expression into a method call + // * add a turbofish to the method call + buffer.update(&mut cx, |buf, cx| { + let offset = buf.text().find(";").unwrap(); + buf.edit(vec![offset..offset], ".e", cx); + assert_eq!(buf.text(), "fn a(b: C) { d.e; }"); + assert!(buf.is_parsing()); + }); + buffer.update(&mut cx, |buf, 
cx| { + let offset = buf.text().find(";").unwrap(); + buf.edit(vec![offset..offset], "(f)", cx); + assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }"); + assert!(buf.is_parsing()); + }); + buffer.update(&mut cx, |buf, cx| { + let offset = buf.text().find("(f)").unwrap(); + buf.edit(vec![offset..offset], "::", cx); + assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&cx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &cx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (call_expression ", + "function: (generic_function ", + "function: (field_expression value: (identifier) field: (field_identifier)) ", + "type_arguments: (type_arguments (type_identifier))) ", + "arguments: (arguments (identifier))))))", + ) + ); + + buffer.update(&mut cx, |buf, cx| { + buf.undo(cx); + assert_eq!(buf.text(), "fn a() {}"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&cx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &cx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters) ", + "body: (block)))" + ) + ); + + buffer.update(&mut cx, |buf, cx| { + buf.redo(cx); + assert_eq!(buf.text(), "fn a(b: C) { d.e::(f); }"); + assert!(buf.is_parsing()); + }); + buffer + .condition(&cx, |buffer, _| !buffer.is_parsing()) + .await; + assert_eq!( + get_tree_sexp(&buffer, &cx), + concat!( + "(source_file (function_item name: (identifier) ", + "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ", + "body: (block (call_expression ", + "function: (generic_function ", + "function: (field_expression value: (identifier) field: (field_identifier)) ", + "type_arguments: (type_arguments (type_identifier))) ", + "arguments: (arguments (identifier))))))", + ) + ); + + fn get_tree_sexp(buffer: &ModelHandle, cx: &gpui::TestAppContext) -> String { + buffer.read_with(cx, |buffer, _| { + buffer.syntax_tree().unwrap().root_node().to_sexp() + }) + } +} + +#[gpui::test] +fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { + let buffer = cx.add_model(|cx| { + let text = " + mod x { + mod y { + + } + } + " + .unindent() + .into(); + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx) + }); + let buffer = buffer.read(cx); + assert_eq!( + buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)), + Some(( + Point::new(0, 6)..Point::new(0, 7), + Point::new(4, 0)..Point::new(4, 1) + )) + ); + assert_eq!( + buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)), + Some(( + Point::new(1, 10)..Point::new(1, 11), + Point::new(3, 4)..Point::new(3, 5) + )) + ); + assert_eq!( + buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)), + Some(( + Point::new(1, 10)..Point::new(1, 11), + Point::new(3, 4)..Point::new(3, 5) + )) + ); +} + +#[gpui::test] +fn test_edit_with_autoindent(cx: &mut MutableAppContext) { + cx.add_model(|cx| { + let text = "fn a() {}".into(); + let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx); + + buffer.edit_with_autoindent([8..8], "\n\n", cx); + assert_eq!(buffer.text(), "fn a() {\n \n}"); + + buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx); + assert_eq!(buffer.text(), "fn a() {\n b()\n \n}"); + + buffer.edit_with_autoindent([Point::new(2, 
4)..Point::new(2, 4)], ".c", cx); + assert_eq!(buffer.text(), "fn a() {\n b()\n .c\n}"); + + buffer + }); +} + +#[gpui::test] +fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { + cx.add_model(|cx| { + let text = History::new("fn a() {}".into()); + let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx); + + let selection_set_id = buffer.add_selection_set(Vec::new(), cx); + buffer.start_transaction(Some(selection_set_id)).unwrap(); + buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx); + buffer + .update_selection_set( + selection_set_id, + vec![ + Selection { + id: 0, + start: buffer.anchor_before(Point::new(1, 0)), + end: buffer.anchor_before(Point::new(1, 0)), + reversed: false, + goal: SelectionGoal::None, + }, + Selection { + id: 1, + start: buffer.anchor_before(Point::new(4, 0)), + end: buffer.anchor_before(Point::new(4, 0)), + reversed: false, + goal: SelectionGoal::None, + }, + ], + cx, + ) + .unwrap(); + assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n"); + + // Ending the transaction runs the auto-indent. The selection + // at the start of the auto-indented row is pushed to the right. + buffer.end_transaction(Some(selection_set_id), cx).unwrap(); + assert_eq!(buffer.text(), "fn a(\n \n) {}\n\n"); + let selection_ranges = buffer + .selection_set(selection_set_id) + .unwrap() + .selections + .iter() + .map(|selection| selection.point_range(&buffer)) + .collect::>(); + + assert_eq!(selection_ranges[0], empty(Point::new(1, 4))); + assert_eq!(selection_ranges[1], empty(Point::new(4, 0))); + + buffer + }); +} + +#[gpui::test] +fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) { + cx.add_model(|cx| { + let text = " + fn a() { + c; + d; + } + " + .unindent() + .into(); + let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx); + + // Lines 2 and 3 don't match the indentation suggestion. When editing these lines, + // their indentation is not adjusted. + buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx); + assert_eq!( + buffer.text(), + " + fn a() { + c(); + d(); + } + " + .unindent() + ); + + // When appending new content after these lines, the indentation is based on the + // preceding lines' actual indentation. + buffer.edit_with_autoindent( + [empty(Point::new(1, 1)), empty(Point::new(2, 1))], + "\n.f\n.g", + cx, + ); + assert_eq!( + buffer.text(), + " + fn a() { + c + .f + .g(); + d + .f + .g(); + } + " + .unindent() + ); + buffer + }); +} + +#[gpui::test] +fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) { + cx.add_model(|cx| { + let text = History::new( + " + fn a() {} + " + .unindent() + .into(), + ); + let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx); + + buffer.edit_with_autoindent([5..5], "\nb", cx); + assert_eq!( + buffer.text(), + " + fn a( + b) {} + " + .unindent() + ); + + // The indentation suggestion changed because `@end` node (a close paren) + // is now at the beginning of the line. 
+ buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx); + assert_eq!( + buffer.text(), + " + fn a( + ) {} + " + .unindent() + ); + + buffer + }); +} + +#[test] +fn test_contiguous_ranges() { + assert_eq!( + contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::>(), + &[1..4, 5..7, 9..13] + ); + + // Respects the `max_len` parameter + assert_eq!( + contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::>(), + &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32], + ); +} + +impl Buffer { + pub fn enclosing_bracket_point_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + self.enclosing_bracket_ranges(range).map(|(start, end)| { + let point_start = start.start.to_point(self)..start.end.to_point(self); + let point_end = end.start.to_point(self)..end.end.to_point(self); + (point_start, point_end) + }) + } +} + +fn rust_lang() -> Arc { + Arc::new( + Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + ..Default::default() + }, + tree_sitter_rust::language(), + ) + .with_indents_query( + r#" + (call_expression) @indent + (field_expression) @indent + (_ "(" ")" @end) @indent + (_ "{" "}" @end) @indent + "#, + ) + .unwrap() + .with_brackets_query(r#" ("{" @open "}" @close) "#) + .unwrap(), + ) +} + +fn empty(point: Point) -> Range { + point..point +} diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index a6d69ad954..a0ca5e6e75 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2018" [features] -test-support = [] +test-support = ["language/test-support", "buffer/test-support"] [dependencies] buffer = { path = "../buffer" } @@ -13,6 +13,7 @@ clock = { path = "../clock" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } +language = { path = "../language" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } sum_tree = { path = "../sum_tree" } @@ -34,6 +35,7 @@ toml = "0.5" [dev-dependencies] client = { path = "../client", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 184dfd4d9c..3e129c8fb8 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -3,11 +3,11 @@ mod ignore; mod worktree; use anyhow::Result; -use buffer::LanguageRegistry; use client::Client; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; +use language::LanguageRegistry; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -302,9 +302,9 @@ impl Entity for Project { #[cfg(test)] mod tests { use super::*; - use buffer::LanguageRegistry; use fs::RealFs; use gpui::TestAppContext; + use language::LanguageRegistry; use serde_json::json; use std::{os::unix, path::PathBuf}; use util::test::temp_tree; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 9b98ed9eeb..500e9d502f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -4,7 +4,6 @@ use super::{ }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Result}; -use buffer::{Buffer, History, 
LanguageRegistry, Operation, Rope}; use client::{proto, Client, PeerId, TypedEnvelope}; use clock::ReplicaId; use futures::{Stream, StreamExt}; @@ -13,6 +12,7 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, }; +use language::{Buffer, History, LanguageRegistry, Operation, Rope}; use lazy_static::lazy_static; use lsp::LanguageServer; use parking_lot::Mutex; @@ -588,54 +588,40 @@ impl Worktree { } }; + let worktree_handle = cx.handle(); let mut buffers_to_delete = Vec::new(); for (buffer_id, buffer) in open_buffers { if let Some(buffer) = buffer.upgrade(cx) { buffer.update(cx, |buffer, cx| { - let buffer_is_clean = !buffer.is_dirty(); - - if let Some(file) = buffer.file_mut() { - let mut file_changed = false; - - if let Some(entry) = file + if let Some(old_file) = buffer.file() { + let new_file = if let Some(entry) = old_file .entry_id() .and_then(|entry_id| self.entry_for_id(entry_id)) { - if entry.path != *file.path() { - file.set_path(entry.path.clone()); - file_changed = true; + File { + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), } + } else if let Some(entry) = self.entry_for_path(old_file.path().as_ref()) { + File { + entry_id: Some(entry.id), + mtime: entry.mtime, + path: entry.path.clone(), + worktree: worktree_handle.clone(), + } + } else { + File { + entry_id: None, + path: old_file.path().clone(), + mtime: old_file.mtime(), + worktree: worktree_handle.clone(), + } + }; - if entry.mtime != file.mtime() { - file.set_mtime(entry.mtime); - file_changed = true; - if let Some(worktree) = self.as_local() { - if buffer_is_clean { - let abs_path = worktree.absolutize(file.path().as_ref()); - refresh_buffer(abs_path, &worktree.fs, cx); - } - } - } - } else if let Some(entry) = self.entry_for_path(file.path().as_ref()) { - file.set_entry_id(Some(entry.id)); - file.set_mtime(entry.mtime); - if let Some(worktree) = self.as_local() { - if buffer_is_clean { - let abs_path = worktree.absolutize(file.path().as_ref()); - refresh_buffer(abs_path, &worktree.fs, cx); - } - } - file_changed = true; - } else if !file.is_deleted() { - if buffer_is_clean { - cx.emit(buffer::Event::Dirtied); - } - file.set_entry_id(None); - file_changed = true; - } - - if file_changed { - cx.emit(buffer::Event::FileHandleChanged); + if let Some(task) = buffer.file_updated(Box::new(new_file), cx) { + task.detach(); } } }); @@ -866,7 +852,7 @@ impl LocalWorktree { .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx)) .await?; let language = this.read_with(&cx, |this, cx| { - use buffer::File; + use language::File; this.languages() .select_language(file.full_path(cx)) @@ -913,7 +899,7 @@ impl LocalWorktree { .insert(buffer.id() as u64, buffer.clone()); Ok(proto::OpenBufferResponse { - buffer: Some(buffer.update(cx.as_mut(), |buffer, cx| buffer.to_proto(cx))), + buffer: Some(buffer.update(cx.as_mut(), |buffer, _| buffer.to_proto())), }) }) }) @@ -1187,24 +1173,6 @@ fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { Ok(builder.build()?) 
} -pub fn refresh_buffer(abs_path: PathBuf, fs: &Arc<dyn Fs>, cx: &mut ModelContext<Buffer>) { - let fs = fs.clone(); - cx.spawn(|buffer, mut cx| async move { - let new_text = fs.load(&abs_path).await; - match new_text { - Err(error) => log::error!("error refreshing buffer after file changed: {}", error), - Ok(new_text) => { - buffer - .update(&mut cx, |buffer, cx| { - buffer.set_text_from_disk(new_text.into(), cx) - }) - .await; - } - } - }) - .detach() -} - impl Deref for LocalWorktree { type Target = Snapshot; @@ -1283,7 +1251,7 @@ impl RemoteWorktree { .ok_or_else(|| anyhow!("worktree was closed"))?; let file = File::new(entry.id, this.clone(), entry.path, entry.mtime); let language = this.read_with(&cx, |this, cx| { - use buffer::File; + use language::File; this.languages() .select_language(file.full_path(cx)) @@ -1794,7 +1762,7 @@ impl File { } } -impl buffer::File for File { +impl language::File for File { fn worktree_id(&self) -> usize { self.worktree.id() } @@ -1803,26 +1771,14 @@ self.entry_id } - fn set_entry_id(&mut self, entry_id: Option<usize>) { - self.entry_id = entry_id; - } - fn mtime(&self) -> SystemTime { self.mtime } - fn set_mtime(&mut self, mtime: SystemTime) { - self.mtime = mtime; - } - fn path(&self) -> &Arc<Path> { &self.path } - fn set_path(&mut self, path: Arc<Path>) { - self.path = path; - } - fn full_path(&self, cx: &AppContext) -> PathBuf { let worktree = self.worktree.read(cx); let mut full_path = PathBuf::new(); @@ -1891,6 +1847,16 @@ }) } + fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>> { + let worktree = self.worktree.read(cx).as_local()?; + let abs_path = worktree.absolutize(&self.path); + let fs = worktree.fs.clone(); + Some( + cx.background() + .spawn(async move { fs.load(&abs_path).await }), + ) + } + + fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) { + self.worktree.update(cx, |worktree, cx| { + if let Some((rpc, remote_id)) = match worktree { @@ -1946,7 +1912,7 @@ }); } - fn boxed_clone(&self) -> Box<dyn buffer::File> { + fn boxed_clone(&self) -> Box<dyn language::File> { Box::new(self.clone()) } @@ -3272,7 +3238,7 @@ assert!(buffer.is_dirty()); assert_eq!( *events.borrow(), - &[buffer::Event::Edited, buffer::Event::Dirtied] + &[language::Event::Edited, language::Event::Dirtied] ); events.borrow_mut().clear(); buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx); @@ -3281,7 +3247,7 @@ // after saving, the buffer is not dirty, and emits a saved event. buffer1.update(&mut cx, |buffer, cx| { assert!(!buffer.is_dirty()); - assert_eq!(*events.borrow(), &[buffer::Event::Saved]); + assert_eq!(*events.borrow(), &[language::Event::Saved]); events.borrow_mut().clear(); buffer.edit(vec![1..1], "B", cx); @@ -3295,9 +3261,9 @@ assert_eq!( *events.borrow(), &[ - buffer::Event::Edited, - buffer::Event::Dirtied, - buffer::Event::Edited + language::Event::Edited, + language::Event::Dirtied, + language::Event::Edited ], ); events.borrow_mut().clear(); @@ -3309,7 +3275,7 @@ assert!(buffer.is_dirty()); }); - assert_eq!(*events.borrow(), &[buffer::Event::Edited]); + assert_eq!(*events.borrow(), &[language::Event::Edited]); // When a file is deleted, the buffer is considered dirty.
let events = Rc::new(RefCell::new(Vec::new())); @@ -3329,7 +3295,7 @@ mod tests { buffer2.condition(&cx, |b, _| b.is_dirty()).await; assert_eq!( *events.borrow(), - &[buffer::Event::Dirtied, buffer::Event::FileHandleChanged] + &[language::Event::Dirtied, language::Event::FileHandleChanged] ); // When a file is already dirty when deleted, we don't emit a Dirtied event. @@ -3355,7 +3321,7 @@ mod tests { buffer3 .condition(&cx, |_, _| !events.borrow().is_empty()) .await; - assert_eq!(*events.borrow(), &[buffer::Event::FileHandleChanged]); + assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]); cx.read(|cx| assert!(buffer3.read(cx).is_dirty())); } @@ -3446,12 +3412,13 @@ mod tests { buffer.update(&mut cx, |buffer, cx| { buffer.edit(vec![0..0], " ", cx); assert!(buffer.is_dirty()); + assert!(!buffer.has_conflict()); }); // Change the file on disk again, adding blank lines to the beginning. fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap(); - // Becaues the buffer is modified, it doesn't reload from disk, but is + // Because the buffer is modified, it doesn't reload from disk, but is // marked as having a conflict. buffer .condition(&cx, |buffer, _| buffer.has_conflict()) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 147a655f72..073bf5bc7c 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -976,13 +976,13 @@ mod tests { time::Duration, }; use zed::{ - buffer::LanguageRegistry, client::{ self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, EstablishConnectionError, UserStore, }, editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, + language::LanguageRegistry, people_panel::JoinWorktree, project::{ProjectPath, Worktree}, workspace::{Workspace, WorkspaceParams}, diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 3dcd3e557e..a96eb23aaf 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -8,7 +8,7 @@ test-support = [ "client/test-support", "project/test-support", "tree-sitter", - "tree-sitter-rust" + "tree-sitter-rust", ] [dependencies] @@ -16,6 +16,7 @@ buffer = { path = "../buffer" } client = { path = "../client" } editor = { path = "../editor" } gpui = { path = "../gpui" } +language = { path = "../language" } project = { path = "../project" } theme = { path = "../theme" } anyhow = "1.0.38" diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index b62858ed51..07c511602c 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -1,9 +1,9 @@ use super::{Item, ItemView}; use crate::Settings; use anyhow::Result; -use buffer::{Buffer, File as _}; use editor::{Editor, EditorSettings, Event}; use gpui::{fonts::TextStyle, AppContext, ModelHandle, Task, ViewContext}; +use language::{Buffer, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::path::Path; diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs index c227ee61bd..ec1f39e480 100644 --- a/crates/workspace/src/lib.rs +++ b/crates/workspace/src/lib.rs @@ -5,7 +5,7 @@ pub mod settings; pub mod sidebar; use anyhow::Result; -use buffer::{Buffer, LanguageRegistry}; +use language::{Buffer, LanguageRegistry}; use client::{Authenticate, ChannelList, Client, UserStore}; use gpui::{ action, elements::*, json::to_string_pretty, keymap::Binding, platform::CursorStyle, @@ -271,8 +271,8 @@ impl WorkspaceParams { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut MutableAppContext) -> Self { 
let mut languages = LanguageRegistry::new(); - languages.add(Arc::new(buffer::Language::new( - buffer::LanguageConfig { + languages.add(Arc::new(language::Language::new( + language::LanguageConfig { name: "Rust".to_string(), path_suffixes: vec!["rs".to_string()], ..Default::default() diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d6695709f0..53718d5a69 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -18,6 +18,7 @@ test-support = [ "buffer/test-support", "client/test-support", "gpui/test-support", + "language/test-support", "project/test-support", "rpc/test-support", "tempdir", @@ -33,6 +34,7 @@ fuzzy = { path = "../fuzzy" } editor = { path = "../editor" } file_finder = { path = "../file_finder" } gpui = { path = "../gpui" } +language = { path = "../language" } people_panel = { path = "../people_panel" } project = { path = "../project" } project_panel = { path = "../project_panel" } @@ -85,6 +87,7 @@ url = "2.2" buffer = { path = "../buffer", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } project = { path = "../project", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } client = { path = "../client", features = ["test-support"] } diff --git a/crates/zed/assets/themes/light.toml b/crates/zed/assets/themes/light.toml index d19b5ad4c9..677a9fd6f6 100644 --- a/crates/zed/assets/themes/light.toml +++ b/crates/zed/assets/themes/light.toml @@ -20,13 +20,13 @@ extends = "_base" [selection] host = { selection = "#3B57BC33", cursor = "$text.0.color" } guests = [ - { selection = "#FDF35133", cursor = "#FDF351" }, - { selection = "#4EACAD33", cursor = "#4EACAD" }, { selection = "#D0453B33", cursor = "#D0453B" }, { selection = "#3B874B33", cursor = "#3B874B" }, { selection = "#BD7CB433", cursor = "#BD7CB4" }, { selection = "#EE823133", cursor = "#EE8231" }, { selection = "#5A2B9233", cursor = "#5A2B92" }, + { selection = "#FDF35133", cursor = "#FDF351" }, + { selection = "#4EACAD33", cursor = "#4EACAD" } ] [status] diff --git a/crates/zed/languages/rust/config.toml b/crates/zed/languages/rust/config.toml index ece9b57ca2..11b273d137 100644 --- a/crates/zed/languages/rust/config.toml +++ b/crates/zed/languages/rust/config.toml @@ -1,9 +1,10 @@ name = "Rust" path_suffixes = ["rs"] -autoclose_pairs = [ - { start = "{", end = "}" }, - { start = "[", end = "]" }, - { start = "(", end = ")" }, - { start = "\"", end = "\"" }, - { start = "/*", end = " */" }, +brackets = [ + { start = "{", end = "}", close = true, newline = true }, + { start = "[", end = "]", close = true, newline = true }, + { start = "(", end = ")", close = true, newline = true }, + { start = "<", end = ">", close = false, newline = true }, + { start = "\"", end = "\"", close = true, newline = false }, + { start = "/*", end = " */", close = true, newline = false }, ] diff --git a/crates/zed/languages/rust/indents.scm b/crates/zed/languages/rust/indents.scm new file mode 100644 index 0000000000..a154dc665b --- /dev/null +++ b/crates/zed/languages/rust/indents.scm @@ -0,0 +1,12 @@ +[ + ((where_clause) _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) +] @indent + +(_ "[" "]" @end) @indent +(_ "<" ">" @end) @indent +(_ "{" "}" @end) @indent +(_ "(" ")" @end) @indent diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index 774d8c6502..a82f7a2cbb 
100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -1,4 +1,4 @@ -use buffer::{Language, LanguageRegistry}; +pub use language::{Language, LanguageRegistry}; use rust_embed::RustEmbed; use std::borrow::Cow; use std::{str, sync::Arc}; @@ -22,6 +22,8 @@ fn rust() -> Language { .unwrap() .with_brackets_query(load_query("rust/brackets.scm").as_ref()) .unwrap() + .with_indents_query(load_query("rust/indents.scm").as_ref()) + .unwrap() } fn load_query(path: &str) -> Cow<'static, str> { diff --git a/crates/zed/src/lib.rs b/crates/zed/src/lib.rs index f8711c7175..cec9e29aa8 100644 --- a/crates/zed/src/lib.rs +++ b/crates/zed/src/lib.rs @@ -4,8 +4,7 @@ pub mod menus; #[cfg(any(test, feature = "test-support"))] pub mod test; -pub use buffer; -use buffer::LanguageRegistry; +use self::language::LanguageRegistry; use chat_panel::ChatPanel; pub use client; pub use editor; diff --git a/crates/zed/src/test.rs b/crates/zed/src/test.rs index 3f9161a066..8a7a398910 100644 --- a/crates/zed/src/test.rs +++ b/crates/zed/src/test.rs @@ -1,7 +1,7 @@ use crate::{assets::Assets, AppState}; -use buffer::LanguageRegistry; use client::{http::ServerResponse, test::FakeHttpClient, ChannelList, Client, UserStore}; use gpui::{AssetSource, MutableAppContext}; +use language::LanguageRegistry; use parking_lot::Mutex; use postage::watch; use project::fs::FakeFs;
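The following is a minimal usage sketch, not part of the patch above: it shows one way the highlighting iterator introduced in crates/language might be consumed. The helper name collect_highlighted_runs, the snapshot parameter, and the assumption that the snapshot exposes len() and accepts a plain usize offset range are illustrative assumptions, not code from this change.

// Hypothetical sketch inside crates/language, assuming a `Snapshot` in hand and
// that `usize` offsets satisfy the crate's offset-conversion trait, as the tests suggest.
fn collect_highlighted_runs(snapshot: &mut Snapshot) -> Vec<(String, HighlightId)> {
    let len = snapshot.len();
    snapshot
        .highlighted_text_for_range(0..len)
        // Each item is a run of text paired with the HighlightId assigned by the
        // highlights query (or the default id when no capture covers it).
        .map(|(chunk, highlight_id)| (chunk.to_string(), highlight_id))
        .collect()
}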