Avoid extra smol::channel when iterating through snapshot paths

Antonio Scandurra 2022-02-25 11:49:33 +01:00
parent 6a0cca7178
commit 561123d6de
2 changed files with 71 additions and 60 deletions
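
Instead of streaming file paths to the search workers through a second bounded smol::channel, the search task now counts the visible files up front and statically assigns each worker a contiguous slice of the combined file index, which each worker then translates into per-snapshot sub-ranges. Below is a minimal, self-contained sketch of that partitioning arithmetic; the worker_ranges helper and the plain per-snapshot file counts are illustrative stand-ins for Zed's LocalSnapshot and executor types, not code from the repository.

// Sketch: split `path_count` files across `workers` using ceiling division,
// then map one worker's global slice onto (snapshot index, local range) pairs.
fn worker_ranges(
    file_counts: &[usize], // visible_file_count() of each snapshot, in order
    workers: usize,
    worker_ix: usize,
) -> Vec<(usize, std::ops::Range<usize>)> {
    let path_count: usize = file_counts.iter().sum();
    // Same ceiling division as the diff: (path_count + workers - 1) / workers.
    let paths_per_worker = (path_count + workers - 1) / workers;
    let worker_start_ix = worker_ix * paths_per_worker;
    let worker_end_ix = worker_start_ix + paths_per_worker;

    let mut ranges = Vec::new();
    let mut snapshot_start_ix = 0;
    for (snapshot_ix, &count) in file_counts.iter().enumerate() {
        let snapshot_end_ix = snapshot_start_ix + count;
        if worker_end_ix <= snapshot_start_ix {
            // This worker's slice ends before the current snapshot begins.
            break;
        } else if worker_start_ix > snapshot_end_ix {
            // This worker's slice starts after the current snapshot ends.
            snapshot_start_ix = snapshot_end_ix;
            continue;
        } else {
            // Clamp the worker's global slice into this snapshot's local index space.
            let start_in_snapshot = worker_start_ix.saturating_sub(snapshot_start_ix);
            let end_in_snapshot = worker_end_ix.min(snapshot_end_ix) - snapshot_start_ix;
            ranges.push((snapshot_ix, start_in_snapshot..end_in_snapshot));
            snapshot_start_ix = snapshot_end_ix;
        }
    }
    ranges
}

fn main() {
    // Three snapshots with 5, 2, and 9 visible files, searched on 4 CPUs.
    let counts: [usize; 3] = [5, 2, 9];
    let path_count: usize = counts.iter().sum();
    let workers = 4usize.min(path_count);
    for worker_ix in 0..workers {
        println!("worker {worker_ix}: {:?}", worker_ranges(&counts, workers, worker_ix));
    }
}

Because the slices come from ceiling division over one shared global index, every visible file falls into exactly one worker's range, which is what makes the dedicated paths_to_search channel unnecessary.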

@@ -28,6 +28,7 @@ use sha2::{Digest, Sha256};
 use smol::block_on;
 use std::{
     cell::RefCell,
+    cmp,
     convert::TryInto,
     hash::Hash,
     mem,
@@ -2050,33 +2051,18 @@ impl Project {
         cx: &mut ModelContext<Self>,
     ) -> Task<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>> {
         if self.is_local() {
-            let (paths_to_search_tx, paths_to_search_rx) = smol::channel::bounded(1024);
             let snapshots = self
                 .strong_worktrees(cx)
                 .filter_map(|tree| {
                     let tree = tree.read(cx).as_local()?;
-                    Some((tree.abs_path().clone(), tree.snapshot()))
+                    Some(tree.snapshot())
                 })
                 .collect::<Vec<_>>();
-            cx.background()
-                .spawn(async move {
-                    for (snapshot_abs_path, snapshot) in snapshots {
-                        for file in snapshot.files(false, 0) {
-                            if paths_to_search_tx
-                                .send((snapshot.id(), snapshot_abs_path.clone(), file.path.clone()))
-                                .await
-                                .is_err()
-                            {
-                                return;
-                            }
-                        }
-                    }
-                })
-                .detach();
-
+
+            let background = cx.background().clone();
+            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+            let workers = background.num_cpus().min(path_count);
             let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
-            let workers = cx.background().num_cpus();
             cx.background()
                 .spawn({
                     let fs = self.fs.clone();
@@ -2086,41 +2072,64 @@ impl Project {
                         let fs = &fs;
                         let query = &query;
                         let matching_paths_tx = &matching_paths_tx;
+                        let paths_per_worker = (path_count + workers - 1) / workers;
+                        let snapshots = &snapshots;
                         background
                             .scoped(|scope| {
-                                for _ in 0..workers {
-                                    let mut paths_to_search_rx = paths_to_search_rx.clone();
+                                for worker_ix in 0..workers {
+                                    let worker_start_ix = worker_ix * paths_per_worker;
+                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                     scope.spawn(async move {
-                                        let mut path = PathBuf::new();
-                                        while let Some((
-                                            worktree_id,
-                                            snapshot_abs_path,
-                                            file_path,
-                                        )) = paths_to_search_rx.next().await
-                                        {
-                                            if matching_paths_tx.is_closed() {
+                                        let mut snapshot_start_ix = 0;
+                                        let mut abs_path = PathBuf::new();
+                                        for snapshot in snapshots {
+                                            let snapshot_end_ix =
+                                                snapshot_start_ix + snapshot.visible_file_count();
+                                            if worker_end_ix <= snapshot_start_ix {
                                                 break;
-                                            }
-
-                                            path.clear();
-                                            path.push(&snapshot_abs_path);
-                                            path.push(&file_path);
-                                            let matches = if let Some(file) =
-                                                fs.open_sync(&path).await.log_err()
-                                            {
-                                                query.detect(file).unwrap_or(false)
+                                            } else if worker_start_ix > snapshot_end_ix {
+                                                snapshot_start_ix = snapshot_end_ix;
+                                                continue;
                                             } else {
-                                                false
-                                            };
+                                                let start_in_snapshot = worker_start_ix
+                                                    .saturating_sub(snapshot_start_ix);
+                                                let end_in_snapshot =
+                                                    cmp::min(worker_end_ix, snapshot_end_ix)
+                                                        - snapshot_start_ix;

-                                            if matches {
-                                                if matching_paths_tx
-                                                    .send((worktree_id, file_path))
-                                                    .await
-                                                    .is_err()
+                                                for entry in snapshot
+                                                    .files(false, start_in_snapshot)
+                                                    .take(end_in_snapshot - start_in_snapshot)
                                                 {
-                                                    break;
+                                                    if matching_paths_tx.is_closed() {
+                                                        break;
+                                                    }
+
+                                                    abs_path.clear();
+                                                    abs_path.push(&snapshot.abs_path());
+                                                    abs_path.push(&entry.path);
+                                                    let matches = if let Some(file) =
+                                                        fs.open_sync(&abs_path).await.log_err()
+                                                    {
+                                                        query.detect(file).unwrap_or(false)
+                                                    } else {
+                                                        false
+                                                    };
+
+                                                    if matches {
+                                                        let project_path =
+                                                            (snapshot.id(), entry.path.clone());
+                                                        if matching_paths_tx
+                                                            .send(project_path)
+                                                            .await
+                                                            .is_err()
+                                                        {
+                                                            break;
+                                                        }
+                                                    }
                                                 }
+
+                                                snapshot_start_ix = snapshot_end_ix;
                                             }
                                         }
                                     });
@@ -2175,14 +2184,16 @@ impl Project {
                         let mut buffers_rx = buffers_rx.clone();
                         scope.spawn(async move {
                             while let Some((buffer, snapshot)) = buffers_rx.next().await {
-                                for range in query.search(snapshot.as_rope()).await {
-                                    let range = snapshot.anchor_before(range.start)
-                                        ..snapshot.anchor_after(range.end);
-                                    worker_matched_buffers
-                                        .entry(buffer.clone())
-                                        .or_insert(Vec::new())
-                                        .push(range);
-                                }
+                                let buffer_matches = query
+                                    .search(snapshot.as_rope())
+                                    .await
+                                    .iter()
+                                    .map(|range| {
+                                        snapshot.anchor_before(range.start)
+                                            ..snapshot.anchor_after(range.end)
+                                    })
+                                    .collect();
+                                worker_matched_buffers.insert(buffer.clone(), buffer_matches);
                             }
                         });
                     }
@@ -4888,7 +4899,7 @@ mod tests {
            .await;

        assert_eq!(
-            search(&project, SearchQuery::text("TWO", false, false), &mut cx).await,
+            search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
@@ -4906,7 +4917,7 @@ mod tests {
        });

        assert_eq!(
-            search(&project, SearchQuery::text("TWO", false, false), &mut cx).await,
+            search(&project, SearchQuery::text("TWO", false, true), &mut cx).await,
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),

@@ -554,10 +554,6 @@ impl LocalWorktree {
         Ok((tree, scan_states_tx))
     }

-    pub fn abs_path(&self) -> &Arc<Path> {
-        &self.abs_path
-    }
-
     pub fn contains_abs_path(&self, path: &Path) -> bool {
         path.starts_with(&self.abs_path)
     }
@@ -1017,6 +1013,10 @@ impl Snapshot {
 }

 impl LocalSnapshot {
+    pub fn abs_path(&self) -> &Arc<Path> {
+        &self.abs_path
+    }
+
     #[cfg(test)]
     pub(crate) fn to_proto(
         &self,