Merge pull request #2047 from zed-industries/optimize-large-multi-buffers

Avoid stalling the UI thread when running large searches.

commit c85ad96b45
12 changed files with 249 additions and 99 deletions
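The change below avoids building the entire result set in one synchronous update: MultiBuffer::stream_excerpts_with_context_lines computes excerpt ranges on a background thread and streams the resulting anchor ranges through a bounded mpsc channel, while the project search view appends whatever has arrived before notifying. A minimal, self-contained sketch of that producer/consumer shape using plain futures primitives (the names and numbers are illustrative, not Zed's actual types):

// Sketch only: a producer streams results through a bounded channel while
// the consumer appends them incrementally, draining whatever is already
// queued before each "notify", so no single step blocks on the full set.
use futures::{channel::mpsc, executor::block_on, future::join, SinkExt, StreamExt};

fn main() {
    let (mut tx, mut rx) = mpsc::channel::<u32>(256);

    let producer = async move {
        for result in 0..10_000u32 {
            if tx.send(result).await.is_err() {
                break; // receiver dropped: the search was cancelled
            }
        }
    };

    let consumer = async move {
        let mut match_ranges = Vec::new();
        let mut notifications = 0usize;
        while let Some(first) = rx.next().await {
            match_ranges.push(first);
            // Drain anything else that is already buffered without awaiting.
            while let Ok(Some(next)) = rx.try_next() {
                match_ranges.push(next);
            }
            notifications += 1; // stand-in for cx.notify()
        }
        (match_ranges, notifications)
    };

    let (_, (match_ranges, notifications)) = block_on(join(producer, consumer));
    assert_eq!(match_ranges.len(), 10_000);
    println!("{} results in {} notifications", match_ranges.len(), notifications);
}

The try_next() drain mirrors the loop added to the project search view near the end of this diff, and the capacity of 256 matches the channel created in stream_excerpts_with_context_lines.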
Cargo.lock (generated, 5 changes)

@@ -739,8 +739,7 @@ dependencies = [
 [[package]]
 name = "bromberg_sl2"
 version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ed88064f69518b7e3ea50ecfc1b61d43f19248618a377b95ae5c8b611134d4d"
+source = "git+https://github.com/zed-industries/bromberg_sl2?rev=dac565a90e8f9245f48ff46225c915dc50f76920#dac565a90e8f9245f48ff46225c915dc50f76920"
 dependencies = [
  "digest 0.9.0",
  "lazy_static",

@@ -5511,6 +5510,7 @@ dependencies = [
  "anyhow",
  "collections",
  "editor",
+ "futures 0.3.25",
  "gpui",
  "language",
  "log",

@@ -5521,6 +5521,7 @@ dependencies = [
  "serde_json",
  "settings",
  "smallvec",
+ "smol",
  "theme",
  "unindent",
  "util",

@@ -410,10 +410,10 @@ async fn test_random_collaboration(
             guest_buffer.read_with(client_cx, |b, _| b.saved_version().clone());
         assert_eq!(guest_saved_version, host_saved_version);

-        let host_saved_version_fingerprint = host_buffer
-            .read_with(host_cx, |b, _| b.saved_version_fingerprint().to_string());
-        let guest_saved_version_fingerprint = guest_buffer
-            .read_with(client_cx, |b, _| b.saved_version_fingerprint().to_string());
+        let host_saved_version_fingerprint =
+            host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
+        let guest_saved_version_fingerprint =
+            guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
         assert_eq!(
             guest_saved_version_fingerprint,
             host_saved_version_fingerprint
@@ -8,13 +8,14 @@ use anyhow::{anyhow, Context, Result};
 use collections::HashSet;
 use futures::future::try_join_all;
 use futures::FutureExt;
 use gpui::{
     elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
     RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
-use language::proto::serialize_anchor as serialize_text_anchor;
-use language::{Bias, Buffer, OffsetRangeExt, Point, SelectionGoal};
+use language::{
+    proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, OffsetRangeExt, Point,
+    SelectionGoal,
+};
 use project::{FormatTrigger, Item as _, Project, ProjectPath};
 use rpc::proto::{self, update_view};
 use settings::Settings;

@@ -1157,9 +1158,11 @@ fn path_for_file<'a>(
 mod tests {
     use super::*;
     use gpui::MutableAppContext;
+    use language::RopeFingerprint;
     use std::{
         path::{Path, PathBuf},
         sync::Arc,
+        time::SystemTime,
     };

     #[gpui::test]

@@ -1189,7 +1192,7 @@ mod tests {
            todo!()
        }

-       fn mtime(&self) -> std::time::SystemTime {
+       fn mtime(&self) -> SystemTime {
            todo!()
        }

@@ -1208,7 +1211,7 @@ mod tests {
            _: clock::Global,
            _: project::LineEnding,
            _: &mut MutableAppContext,
-       ) -> gpui::Task<anyhow::Result<(clock::Global, String, std::time::SystemTime)>> {
+       ) -> gpui::Task<anyhow::Result<(clock::Global, RopeFingerprint, SystemTime)>> {
            todo!()
        }

@@ -4,6 +4,7 @@ pub use anchor::{Anchor, AnchorRangeExt};
 use anyhow::Result;
 use clock::ReplicaId;
 use collections::{BTreeMap, Bound, HashMap, HashSet};
+use futures::{channel::mpsc, SinkExt};
 use git::diff::DiffHunk;
 use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
 pub use language::Completion;

@@ -763,6 +764,63 @@ impl MultiBuffer {
         None
     }

+    pub fn stream_excerpts_with_context_lines(
+        &mut self,
+        excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>,
+        context_line_count: u32,
+        cx: &mut ModelContext<Self>,
+    ) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) {
+        let (mut tx, rx) = mpsc::channel(256);
+        let task = cx.spawn(|this, mut cx| async move {
+            for (buffer, ranges) in excerpts {
+                let buffer_id = buffer.id();
+                let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+
+                let mut excerpt_ranges = Vec::new();
+                let mut range_counts = Vec::new();
+                cx.background()
+                    .scoped(|scope| {
+                        scope.spawn(async {
+                            let (ranges, counts) =
+                                build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
+                            excerpt_ranges = ranges;
+                            range_counts = counts;
+                        });
+                    })
+                    .await;
+
+                let mut ranges = ranges.into_iter();
+                let mut range_counts = range_counts.into_iter();
+                for excerpt_ranges in excerpt_ranges.chunks(100) {
+                    let excerpt_ids = this.update(&mut cx, |this, cx| {
+                        this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
+                    });
+
+                    for (excerpt_id, range_count) in
+                        excerpt_ids.into_iter().zip(range_counts.by_ref())
+                    {
+                        for range in ranges.by_ref().take(range_count) {
+                            let start = Anchor {
+                                buffer_id: Some(buffer_id),
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: range.start,
+                            };
+                            let end = Anchor {
+                                buffer_id: Some(buffer_id),
+                                excerpt_id: excerpt_id.clone(),
+                                text_anchor: range.end,
+                            };
+                            if tx.send(start..end).await.is_err() {
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+        });
+        (task, rx)
+    }
+
     pub fn push_excerpts<O>(
         &mut self,
         buffer: ModelHandle<Buffer>,
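The cx.background().scoped(...) call above hands the excerpt-range computation to gpui's background executor while borrowing the local excerpt_ranges and range_counts vectors, and only proceeds once that work has finished. As a rough stand-in for readers unfamiliar with that executor, the same borrow-into-a-scope shape can be sketched with std's scoped threads (illustrative only, not gpui's API):

// Illustrative stand-in for a scoped background computation: the spawned
// thread borrows `excerpt_ranges` and `range_counts` mutably and fills them
// in, and the scope guarantees it finishes before the caller continues.
fn main() {
    let matches = vec![3_usize, 4, 9, 10, 42];

    let mut excerpt_ranges: Vec<(usize, usize)> = Vec::new();
    let mut range_counts: Vec<usize> = Vec::new();

    std::thread::scope(|scope| {
        scope.spawn(|| {
            // Pretend this is the expensive part (building excerpt ranges).
            for row in &matches {
                excerpt_ranges.push((row.saturating_sub(1), row + 1));
                range_counts.push(1);
            }
        });
    }); // joins here, like `.await` on the scoped task above

    assert_eq!(excerpt_ranges.len(), matches.len());
    println!("{excerpt_ranges:?} {range_counts:?}");
}

The structure is the point: the heavy loop runs off the calling flow of control, yet it can still write into locals because the scope joins before they are read.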
@@ -787,39 +845,8 @@ impl MultiBuffer {
     {
         let buffer_id = buffer.id();
         let buffer_snapshot = buffer.read(cx).snapshot();
-        let max_point = buffer_snapshot.max_point();
-
-        let mut range_counts = Vec::new();
-        let mut excerpt_ranges = Vec::new();
-        let mut range_iter = ranges
-            .iter()
-            .map(|range| {
-                range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot)
-            })
-            .peekable();
-        while let Some(range) = range_iter.next() {
-            let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
-            let mut excerpt_end =
-                Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
-            let mut ranges_in_excerpt = 1;
-
-            while let Some(next_range) = range_iter.peek() {
-                if next_range.start.row <= excerpt_end.row + context_line_count {
-                    excerpt_end =
-                        Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
-                    ranges_in_excerpt += 1;
-                    range_iter.next();
-                } else {
-                    break;
-                }
-            }
-
-            excerpt_ranges.push(ExcerptRange {
-                context: excerpt_start..excerpt_end,
-                primary: Some(range),
-            });
-            range_counts.push(ranges_in_excerpt);
-        }
+        let (excerpt_ranges, range_counts) =
+            build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);

         let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);

@@ -3603,9 +3630,51 @@ impl ToPointUtf16 for PointUtf16 {
     }
 }

+fn build_excerpt_ranges<T>(
+    buffer: &BufferSnapshot,
+    ranges: &[Range<T>],
+    context_line_count: u32,
+) -> (Vec<ExcerptRange<Point>>, Vec<usize>)
+where
+    T: text::ToPoint,
+{
+    let max_point = buffer.max_point();
+    let mut range_counts = Vec::new();
+    let mut excerpt_ranges = Vec::new();
+    let mut range_iter = ranges
+        .iter()
+        .map(|range| range.start.to_point(buffer)..range.end.to_point(buffer))
+        .peekable();
+    while let Some(range) = range_iter.next() {
+        let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
+        let mut excerpt_end = Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
+        let mut ranges_in_excerpt = 1;
+
+        while let Some(next_range) = range_iter.peek() {
+            if next_range.start.row <= excerpt_end.row + context_line_count {
+                excerpt_end =
+                    Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
+                ranges_in_excerpt += 1;
+                range_iter.next();
+            } else {
+                break;
+            }
+        }
+
+        excerpt_ranges.push(ExcerptRange {
+            context: excerpt_start..excerpt_end,
+            primary: Some(range),
+        });
+        range_counts.push(ranges_in_excerpt);
+    }
+
+    (excerpt_ranges, range_counts)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
+    use futures::StreamExt;
     use gpui::{MutableAppContext, TestAppContext};
     use language::{Buffer, Rope};
     use rand::prelude::*;
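For reference, build_excerpt_ranges applies the same coalescing rule the old inline code used: pad each match by context_line_count rows, then fold any following match that starts within the padded region plus the context distance into the same excerpt, recording how many matches each excerpt absorbed. A self-contained sketch of that rule on plain row numbers (types simplified, values chosen to mirror the test in the next hunk):

// Sketch of the coalescing rule used by `build_excerpt_ranges`, applied to
// plain row ranges instead of buffer points. Each output excerpt spans
// `context` rows around one or more nearby matches, and `counts[i]` records
// how many matches were folded into excerpt `i`.
fn coalesce(matches: &[(u32, u32)], context: u32, max_row: u32) -> (Vec<(u32, u32)>, Vec<usize>) {
    let mut excerpts = Vec::new();
    let mut counts = Vec::new();
    let mut iter = matches.iter().copied().peekable();
    while let Some((start, end)) = iter.next() {
        let excerpt_start = start.saturating_sub(context);
        let mut excerpt_end = (end + 1 + context).min(max_row);
        let mut count = 1;
        while let Some(&(next_start, next_end)) = iter.peek() {
            if next_start <= excerpt_end + context {
                excerpt_end = (next_end + 1 + context).min(max_row);
                count += 1;
                iter.next();
            } else {
                break;
            }
        }
        excerpts.push((excerpt_start, excerpt_end));
        counts.push(count);
    }
    (excerpts, counts)
}

fn main() {
    // Matches on rows 3-4 and 7 are close enough to share one excerpt;
    // the match on row 15 gets its own.
    let (excerpts, counts) = coalesce(&[(3, 4), (7, 7), (15, 15)], 2, 20);
    assert_eq!(excerpts, vec![(1, 10), (13, 18)]);
    assert_eq!(counts, vec![2, 1]);
}

With a context of 2, the matches at rows 3-4 and 7 share one excerpt while the match at row 15 gets its own, which is what test_stream_excerpts_with_context_lines below asserts in terms of buffer points.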
@@ -4010,6 +4079,44 @@ mod tests {
         );
     }

+    #[gpui::test]
+    async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
+        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx));
+        let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
+        let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| {
+            let snapshot = buffer.read(cx);
+            let ranges = vec![
+                snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
+                snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
+                snapshot.anchor_before(Point::new(15, 0))
+                    ..snapshot.anchor_before(Point::new(15, 0)),
+            ];
+            multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx)
+        });
+
+        let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
+        // Ensure task is finished when stream completes.
+        task.await;
+
+        let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+        assert_eq!(
+            snapshot.text(),
+            "bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
+        );
+
+        assert_eq!(
+            anchor_ranges
+                .iter()
+                .map(|range| range.to_point(&snapshot))
+                .collect::<Vec<_>>(),
+            vec![
+                Point::new(2, 2)..Point::new(3, 2),
+                Point::new(6, 1)..Point::new(6, 3),
+                Point::new(12, 0)..Point::new(12, 0)
+            ]
+        );
+    }
+
     #[gpui::test]
     fn test_empty_multibuffer(cx: &mut MutableAppContext) {
         let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
@@ -60,7 +60,7 @@ pub struct Buffer {
     git_diff_status: GitDiffStatus,
     file: Option<Arc<dyn File>>,
     saved_version: clock::Global,
-    saved_version_fingerprint: String,
+    saved_version_fingerprint: RopeFingerprint,
    saved_mtime: SystemTime,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,

@@ -221,7 +221,7 @@ pub trait File: Send + Sync {
        version: clock::Global,
        line_ending: LineEnding,
        cx: &mut MutableAppContext,
-   ) -> Task<Result<(clock::Global, String, SystemTime)>>;
+   ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>>;

    fn as_any(&self) -> &dyn Any;

@@ -238,7 +238,7 @@ pub trait LocalFile: File {
        &self,
        buffer_id: u64,
        version: &clock::Global,
-       fingerprint: String,
+       fingerprint: RopeFingerprint,
        line_ending: LineEnding,
        mtime: SystemTime,
        cx: &mut MutableAppContext,

@@ -386,7 +386,8 @@ impl Buffer {
                .ok_or_else(|| anyhow!("missing line_ending"))?,
        ));
        this.saved_version = proto::deserialize_version(message.saved_version);
-       this.saved_version_fingerprint = message.saved_version_fingerprint;
+       this.saved_version_fingerprint =
+           proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
        this.saved_mtime = message
            .saved_mtime
            .ok_or_else(|| anyhow!("invalid saved_mtime"))?

@@ -402,7 +403,7 @@ impl Buffer {
            diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
-           saved_version_fingerprint: self.saved_version_fingerprint.clone(),
+           saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
            saved_mtime: Some(self.saved_mtime.into()),
        }
    }

@@ -530,7 +531,7 @@ impl Buffer {
    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
-   ) -> Task<Result<(clock::Global, String, SystemTime)>> {
+   ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
        let file = if let Some(file) = self.file.as_ref() {
            file
        } else {

@@ -548,7 +549,7 @@ impl Buffer {
        cx.spawn(|this, mut cx| async move {
            let (version, fingerprint, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
-               this.did_save(version.clone(), fingerprint.clone(), mtime, None, cx);
+               this.did_save(version.clone(), fingerprint, mtime, None, cx);
            });
            Ok((version, fingerprint, mtime))
        })

@@ -558,8 +559,8 @@ impl Buffer {
        &self.saved_version
    }

-   pub fn saved_version_fingerprint(&self) -> &str {
-       &self.saved_version_fingerprint
+   pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
+       self.saved_version_fingerprint
    }

    pub fn saved_mtime(&self) -> SystemTime {

@@ -581,7 +582,7 @@ impl Buffer {
    pub fn did_save(
        &mut self,
        version: clock::Global,
-       fingerprint: String,
+       fingerprint: RopeFingerprint,
        mtime: SystemTime,
        new_file: Option<Arc<dyn File>>,
        cx: &mut ModelContext<Self>,

@@ -630,7 +631,7 @@ impl Buffer {
    pub fn did_reload(
        &mut self,
        version: clock::Global,
-       fingerprint: String,
+       fingerprint: RopeFingerprint,
        line_ending: LineEnding,
        mtime: SystemTime,
        cx: &mut ModelContext<Self>,

@@ -643,7 +644,7 @@ impl Buffer {
        file.buffer_reloaded(
            self.remote_id(),
            &self.saved_version,
-           self.saved_version_fingerprint.clone(),
+           self.saved_version_fingerprint,
            self.line_ending(),
            self.saved_mtime,
            cx,
@@ -11,6 +11,15 @@ use text::*;

 pub use proto::{BufferState, Operation};

+pub fn serialize_fingerprint(fingerprint: RopeFingerprint) -> String {
+    fingerprint.to_hex()
+}
+
+pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
+    RopeFingerprint::from_hex(fingerprint)
+        .map_err(|error| anyhow!("invalid fingerprint: {}", error))
+}
+
 pub fn deserialize_line_ending(message: proto::LineEnding) -> fs::LineEnding {
     match message {
         proto::LineEnding::Unix => fs::LineEnding::Unix,
@@ -22,8 +22,8 @@ use gpui::{
 use language::{
     point_to_lsp,
     proto::{
-        deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
-        serialize_version,
+        deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
+        serialize_anchor, serialize_version,
     },
     range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
     CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent,

@@ -5124,7 +5124,7 @@ impl Project {
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
-           fingerprint,
+           fingerprint: language::proto::serialize_fingerprint(fingerprint),
        })
    }

@@ -5216,7 +5216,9 @@ impl Project {
            buffer_id,
            version: language::proto::serialize_version(buffer.saved_version()),
            mtime: Some(buffer.saved_mtime().into()),
-           fingerprint: buffer.saved_version_fingerprint().into(),
+           fingerprint: language::proto::serialize_fingerprint(
+               buffer.saved_version_fingerprint(),
+           ),
            line_ending: language::proto::serialize_line_ending(
                buffer.line_ending(),
            ) as i32,

@@ -5971,6 +5973,7 @@ impl Project {
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
+       let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
        let version = deserialize_version(envelope.payload.version);
        let mtime = envelope
            .payload

@@ -5985,7 +5988,7 @@ impl Project {
            .and_then(|buffer| buffer.upgrade(cx));
        if let Some(buffer) = buffer {
            buffer.update(cx, |buffer, cx| {
-               buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
+               buffer.did_save(version, fingerprint, mtime, None, cx);
            });
        }
        Ok(())

@@ -6000,6 +6003,7 @@ impl Project {
    ) -> Result<()> {
        let payload = envelope.payload;
        let version = deserialize_version(payload.version);
+       let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
        let line_ending = deserialize_line_ending(
            proto::LineEnding::from_i32(payload.line_ending)
                .ok_or_else(|| anyhow!("missing line ending"))?,

@@ -6015,7 +6019,7 @@ impl Project {
            .and_then(|buffer| buffer.upgrade(cx));
        if let Some(buffer) = buffer {
            buffer.update(cx, |buffer, cx| {
-               buffer.did_reload(version, payload.fingerprint, line_ending, mtime, cx);
+               buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
            });
        }
        Ok(())
@@ -20,10 +20,12 @@ use gpui::{
     executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task,
 };
-use language::Unclipped;
 use language::{
-    proto::{deserialize_version, serialize_line_ending, serialize_version},
-    Buffer, DiagnosticEntry, PointUtf16, Rope,
+    proto::{
+        deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
+        serialize_version,
+    },
+    Buffer, DiagnosticEntry, PointUtf16, Rope, RopeFingerprint, Unclipped,
 };
 use parking_lot::Mutex;
 use postage::{

@@ -1863,7 +1865,7 @@ impl language::File for File {
        version: clock::Global,
        line_ending: LineEnding,
        cx: &mut MutableAppContext,
-   ) -> Task<Result<(clock::Global, String, SystemTime)>> {
+   ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
        self.worktree.update(cx, |worktree, cx| match worktree {
            Worktree::Local(worktree) => {
                let rpc = worktree.client.clone();

@@ -1878,7 +1880,7 @@ impl language::File for File {
                        buffer_id,
                        version: serialize_version(&version),
                        mtime: Some(entry.mtime.into()),
-                       fingerprint: fingerprint.clone(),
+                       fingerprint: serialize_fingerprint(fingerprint),
                    })?;
                }
                Ok((version, fingerprint, entry.mtime))

@@ -1896,11 +1898,12 @@ impl language::File for File {
                    })
                    .await?;
                    let version = deserialize_version(response.version);
+                   let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
                    let mtime = response
                        .mtime
                        .ok_or_else(|| anyhow!("missing mtime"))?
                        .into();
-                   Ok((version, response.fingerprint, mtime))
+                   Ok((version, fingerprint, mtime))
                })
            }
        })

@@ -1943,7 +1946,7 @@ impl language::LocalFile for File {
        &self,
        buffer_id: u64,
        version: &clock::Global,
-       fingerprint: String,
+       fingerprint: RopeFingerprint,
        line_ending: LineEnding,
        mtime: SystemTime,
        cx: &mut MutableAppContext,

@@ -1957,7 +1960,7 @@ impl language::LocalFile for File {
                    buffer_id,
                    version: serialize_version(version),
                    mtime: Some(mtime.into()),
-                   fingerprint,
+                   fingerprint: serialize_fingerprint(fingerprint),
                    line_ending: serialize_line_ending(line_ending) as i32,
                })
                .log_err();
@@ -7,7 +7,7 @@ edition = "2021"
 path = "src/rope.rs"

 [dependencies]
-bromberg_sl2 = "0.6"
+bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "dac565a90e8f9245f48ff46225c915dc50f76920" }
 smallvec = { version = "1.6", features = ["union"] }
 sum_tree = { path = "../sum_tree" }
 arrayvec = "0.7.1"
@@ -4,7 +4,7 @@ mod point_utf16;
 mod unclipped;

 use arrayvec::ArrayString;
-use bromberg_sl2::{DigestString, HashMatrix};
+use bromberg_sl2::HashMatrix;
 use smallvec::SmallVec;
 use std::{
     cmp, fmt, io, mem,

@@ -25,6 +25,12 @@ const CHUNK_BASE: usize = 6;
 #[cfg(not(test))]
 const CHUNK_BASE: usize = 16;

+/// Type alias to [HashMatrix], an implementation of a homomorphic hash function. Two [Rope] instances
+/// containing the same text will produce the same fingerprint. This hash function is special in that
+/// it allows us to hash individual chunks and aggregate them up the [Rope]'s tree, with the resulting
+/// hash being equivalent to hashing all the text contained in the [Rope] at once.
+pub type RopeFingerprint = HashMatrix;
+
 #[derive(Clone, Default, Debug)]
 pub struct Rope {
     chunks: SumTree<Chunk>,
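The fingerprint works because the hash is a monoid homomorphism from byte strings under concatenation to matrices under multiplication: hash(a ++ b) == hash(a) * hash(b), which is what lets chunk fingerprints be combined up the rope's sum tree instead of rehashing all the text. A toy illustration of that property, mapping each bit to one of two generator matrices over a prime field (purely illustrative, not the bromberg_sl2 implementation and not cryptographically meaningful):

// Toy SL2-style hash: map every bit of the input to one of two generator
// matrices and multiply them in order, working modulo a prime. Because
// matrix multiplication is associative, hashing a concatenation equals
// multiplying the hashes of the pieces, which is the property the rope's
// sum tree exploits when it aggregates chunk fingerprints.
const P: u64 = 4_294_967_291; // a prime small enough that products fit in u128 math

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Mat([[u64; 2]; 2]);

impl Mat {
    const IDENTITY: Mat = Mat([[1, 0], [0, 1]]);

    fn mul(self, other: Mat) -> Mat {
        let mut out = [[0u64; 2]; 2];
        for i in 0..2 {
            for j in 0..2 {
                let mut sum = 0u128;
                for k in 0..2 {
                    sum += self.0[i][k] as u128 * other.0[k][j] as u128;
                }
                out[i][j] = (sum % P as u128) as u64;
            }
        }
        Mat(out)
    }
}

fn hash(bytes: &[u8]) -> Mat {
    // Standard generators of SL2: one matrix for a 0 bit, another for a 1 bit.
    let zero = Mat([[1, 1], [0, 1]]);
    let one = Mat([[1, 0], [1, 1]]);
    let mut acc = Mat::IDENTITY;
    for byte in bytes {
        for bit in (0..8).rev() {
            acc = acc.mul(if (*byte >> bit) & 1 == 0 { zero } else { one });
        }
    }
    acc
}

fn main() {
    let left = b"The quick brown fox ";
    let right = b"jumps over the lazy dog";
    let whole = b"The quick brown fox jumps over the lazy dog";

    // Homomorphic property: hashing chunks and combining them gives the
    // same fingerprint as hashing the whole text at once.
    assert_eq!(hash(left).mul(hash(right)), hash(whole));
    println!("chunked and whole-text fingerprints agree");
}

The pinned bromberg_sl2 crate implements this kind of SL2 construction for real; the toy above only shows why per-chunk fingerprints can be aggregated without touching the rest of the rope.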
@@ -361,8 +367,8 @@ impl Rope {
             .column
     }

-    pub fn fingerprint(&self) -> String {
-        self.chunks.summary().fingerprint.to_hex()
+    pub fn fingerprint(&self) -> RopeFingerprint {
+        self.chunks.summary().fingerprint
     }
 }

@@ -856,7 +862,7 @@ impl sum_tree::Item for Chunk {
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct ChunkSummary {
     text: TextSummary,
-    fingerprint: HashMatrix,
+    fingerprint: RopeFingerprint,
 }

 impl<'a> From<&'a str> for ChunkSummary {
@@ -19,10 +19,12 @@ theme = { path = "../theme" }
 util = { path = "../util" }
 workspace = { path = "../workspace" }
 anyhow = "1.0"
+futures = "0.3"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 postage = { version = "0.4.1", features = ["futures-traits"] }
 serde = { version = "1.0", features = ["derive", "rc"] }
 smallvec = { version = "1.6", features = ["union"] }
+smol = "1.2"

 [dev-dependencies]
 editor = { path = "../editor", features = ["test-support"] }
@@ -7,6 +7,7 @@ use editor::{
     items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
     SelectAll, MAX_TAB_TITLE_LEN,
 };
+use futures::StreamExt;
 use gpui::{
     actions, elements::*, platform::CursorStyle, Action, AnyViewHandle, AppContext, ElementBox,
     Entity, ModelContext, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription,

@@ -17,6 +18,7 @@ use project::{search::SearchQuery, Project};
 use settings::Settings;
 use std::{
     any::{Any, TypeId},
+    mem,
     ops::Range,
     path::PathBuf,
     sync::Arc,

@@ -66,6 +68,7 @@ struct ProjectSearch {
     pending_search: Option<Task<Option<()>>>,
     match_ranges: Vec<Range<Anchor>>,
     active_query: Option<SearchQuery>,
+    search_id: usize,
 }

 pub struct ProjectSearchView {

@@ -77,6 +80,7 @@ pub struct ProjectSearchView {
     regex: bool,
     query_contains_error: bool,
     active_match_index: Option<usize>,
+    search_id: usize,
 }

 pub struct ProjectSearchBar {

@@ -97,6 +101,7 @@ impl ProjectSearch {
            pending_search: Default::default(),
            match_ranges: Default::default(),
            active_query: None,
+           search_id: 0,
        }
    }

@@ -109,6 +114,7 @@ impl ProjectSearch {
            pending_search: Default::default(),
            match_ranges: self.match_ranges.clone(),
            active_query: self.active_query.clone(),
+           search_id: self.search_id,
        })
    }

@@ -116,32 +122,37 @@
        let search = self
            .project
            .update(cx, |project, cx| project.search(query.clone(), cx));
+       self.search_id += 1;
        self.active_query = Some(query);
        self.match_ranges.clear();
        self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
            let matches = search.await.log_err()?;
-           if let Some(this) = this.upgrade(&cx) {
+           let this = this.upgrade(&cx)?;
+           let mut matches = matches.into_iter().collect::<Vec<_>>();
+           let (_task, mut match_ranges) = this.update(&mut cx, |this, cx| {
+               this.match_ranges.clear();
+               matches.sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
+               this.excerpts.update(cx, |excerpts, cx| {
+                   excerpts.clear(cx);
+                   excerpts.stream_excerpts_with_context_lines(matches, 1, cx)
+               })
+           });
+
+           while let Some(match_range) = match_ranges.next().await {
                this.update(&mut cx, |this, cx| {
-                   this.match_ranges.clear();
-                   let mut matches = matches.into_iter().collect::<Vec<_>>();
-                   matches
-                       .sort_by_key(|(buffer, _)| buffer.read(cx).file().map(|file| file.path()));
-                   this.excerpts.update(cx, |excerpts, cx| {
-                       excerpts.clear(cx);
-                       for (buffer, buffer_matches) in matches {
-                           let ranges_to_highlight = excerpts.push_excerpts_with_context_lines(
-                               buffer,
-                               buffer_matches.clone(),
-                               1,
-                               cx,
-                           );
-                           this.match_ranges.extend(ranges_to_highlight);
-                       }
-                   });
-                   this.pending_search.take();
+                   this.match_ranges.push(match_range);
+                   while let Ok(Some(match_range)) = match_ranges.try_next() {
+                       this.match_ranges.push(match_range);
+                   }
                    cx.notify();
                });
            }
+
+           this.update(&mut cx, |this, cx| {
+               this.pending_search.take();
+               cx.notify();
+           });
+
            None
        }));
        cx.notify();

@@ -393,7 +404,7 @@ impl ProjectSearchView {
                whole_word = active_query.whole_word();
            }
        }
-       cx.observe(&model, |this, _, cx| this.model_changed(true, cx))
+       cx.observe(&model, |this, _, cx| this.model_changed(cx))
            .detach();

        let query_editor = cx.add_view(|cx| {

@@ -428,6 +439,7 @@ impl ProjectSearchView {
            .detach();

        let mut this = ProjectSearchView {
+           search_id: model.read(cx).search_id,
            model,
            query_editor,
            results_editor,

@@ -437,7 +449,7 @@ impl ProjectSearchView {
            query_contains_error: false,
            active_match_index: None,
        };
-       this.model_changed(false, cx);
+       this.model_changed(cx);
        this
    }

@@ -557,11 +569,13 @@ impl ProjectSearchView {
        cx.focus(&self.results_editor);
    }

-   fn model_changed(&mut self, reset_selections: bool, cx: &mut ViewContext<Self>) {
+   fn model_changed(&mut self, cx: &mut ViewContext<Self>) {
        let match_ranges = self.model.read(cx).match_ranges.clone();
        if match_ranges.is_empty() {
            self.active_match_index = None;
        } else {
+           let prev_search_id = mem::replace(&mut self.search_id, self.model.read(cx).search_id);
+           let reset_selections = self.search_id != prev_search_id;
            self.results_editor.update(cx, |editor, cx| {
                if reset_selections {
                    editor.change_selections(Some(Autoscroll::fit()), cx, |s| {

@@ -935,13 +949,13 @@ impl ToolbarItemView for ProjectSearchBar {
 mod tests {
     use super::*;
     use editor::DisplayPoint;
-    use gpui::{color::Color, TestAppContext};
+    use gpui::{color::Color, executor::Deterministic, TestAppContext};
     use project::FakeFs;
     use serde_json::json;
     use std::sync::Arc;

     #[gpui::test]
-    async fn test_project_search(cx: &mut TestAppContext) {
+    async fn test_project_search(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
         let fonts = cx.font_cache();
         let mut theme = gpui::fonts::with_font_cache(fonts.clone(), theme::Theme::default);
         theme.search.match_background = Color::red();

@@ -973,7 +987,7 @@ mod tests {
                .update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
            search_view.search(cx);
        });
-       search_view.next_notification(cx).await;
+       deterministic.run_until_parked();
        search_view.update(cx, |search_view, cx| {
            assert_eq!(
                search_view