Replace the old worktree with the new one
parent
5648c67d54
commit
358fad8242
11 changed files with 105 additions and 1456 deletions
|
@@ -15,7 +15,7 @@ use crate::{
     sum_tree::{self, Cursor, FilterCursor, SeekBias, SumTree},
     time::{self, ReplicaId},
     util::RandomCharIter,
-    worktree_old::FileHandle,
+    worktree::FileHandle,
 };
 use anyhow::{anyhow, Result};
 use gpui::{AppContext, Entity, ModelContext};
@@ -3,7 +3,7 @@ use crate::{
     settings::Settings,
     util, watch,
     workspace::{Workspace, WorkspaceView},
-    worktree_old::{match_paths, PathMatch, Worktree},
+    worktree::{match_paths, PathMatch, Worktree},
 };
 use gpui::{
     color::{ColorF, ColorU},
@@ -140,19 +140,16 @@ impl FileFinder {
         let entry_id = path_match.entry_id;

         self.worktree(tree_id, app).map(|tree| {
-            let path = tree.entry_path(entry_id).unwrap();
+            let path = tree
+                .path_for_inode(entry_id, path_match.include_root)
+                .unwrap();
             let file_name = path
                 .file_name()
                 .unwrap_or_default()
                 .to_string_lossy()
                 .to_string();

-            let mut path = path.to_string_lossy().to_string();
-            if path_match.skipped_prefix_len > 0 {
-                let mut i = 0;
-                path.retain(|_| util::post_inc(&mut i) >= path_match.skipped_prefix_len)
-            }
+            let path = path.to_string_lossy().to_string();

             let path_positions = path_match.positions.clone();
             let file_name_start = path.chars().count() - file_name.chars().count();
             let mut file_name_positions = Vec::new();
@@ -345,11 +342,25 @@ impl FileFinder {
     }

     fn spawn_search(&mut self, query: String, ctx: &mut ViewContext<Self>) {
-        let worktrees = self.worktrees(ctx.as_ref());
+        let snapshots = self
+            .workspace
+            .read(ctx)
+            .worktrees()
+            .iter()
+            .map(|tree| tree.read(ctx).snapshot())
+            .collect::<Vec<_>>();
         let search_id = util::post_inc(&mut self.search_count);
         let pool = ctx.as_ref().thread_pool().clone();
         let task = ctx.background_executor().spawn(async move {
-            let matches = match_paths(worktrees.as_slice(), &query, false, false, 100, pool);
+            let matches = match_paths(
+                snapshots.iter(),
+                &query,
+                snapshots.len() > 1,
+                false,
+                false,
+                100,
+                pool,
+            );
             (search_id, matches)
         });

@@ -377,15 +388,6 @@ impl FileFinder {
             .get(&tree_id)
             .map(|worktree| worktree.read(app))
     }
-
-    fn worktrees(&self, app: &AppContext) -> Vec<Worktree> {
-        self.workspace
-            .read(app)
-            .worktrees()
-            .iter()
-            .map(|worktree| worktree.read(app).clone())
-            .collect()
-    }
 }

 #[cfg(test)]
@@ -13,4 +13,3 @@ mod util;
 pub mod watch;
 pub mod workspace;
 mod worktree;
-mod worktree_old;
@@ -4,7 +4,7 @@ use crate::{
     settings::Settings,
     time::ReplicaId,
     watch,
-    worktree_old::{Worktree, WorktreeHandle as _},
+    worktree::{Worktree, WorktreeHandle as _},
 };
 use anyhow::anyhow;
 use gpui::{AppContext, Entity, Handle, ModelContext, ModelHandle, MutableAppContext, ViewContext};
@@ -117,7 +117,7 @@ impl Workspace {
             }
         }

-        let worktree = ctx.add_model(|ctx| Worktree::new(ctx.model_id(), path, ctx));
+        let worktree = ctx.add_model(|ctx| Worktree::new(path, ctx));
         ctx.observe(&worktree, Self::on_worktree_updated);
         self.worktrees.insert(worktree);
         ctx.notify();
@@ -211,9 +211,7 @@ impl WorkspaceHandle for ModelHandle<Workspace> {
             .iter()
             .flat_map(|tree| {
                 let tree_id = tree.id();
-                tree.read(app)
-                    .files()
-                    .map(move |file| (tree_id, file.entry_id))
+                tree.read(app).files().map(move |inode| (tree_id, inode))
             })
             .collect::<Vec<_>>()
     }
@@ -241,8 +239,8 @@ mod tests {

         // Get the first file entry.
         let tree = app.read(|ctx| workspace.read(ctx).worktrees.iter().next().unwrap().clone());
-        let entry_id = app.read(|ctx| tree.read(ctx).files().next().unwrap().entry_id);
-        let entry = (tree.id(), entry_id);
+        let file_inode = app.read(|ctx| tree.read(ctx).files().next().unwrap());
+        let entry = (tree.id(), file_inode);

         // Open the same entry twice before it finishes loading.
         let (future_1, future_2) = workspace.update(&mut app, |w, app| {
@@ -5,7 +5,7 @@ use gpui::{
     color::rgbu, elements::*, json::to_string_pretty, keymap::Binding, AnyViewHandle, AppContext,
     ClipboardItem, Entity, ModelHandle, MutableAppContext, View, ViewContext, ViewHandle,
 };
-use log::{error, info};
+use log::error;
 use std::{collections::HashSet, path::PathBuf};

 pub fn init(app: &mut MutableAppContext) {
@@ -227,19 +227,6 @@ impl WorkspaceView {
         }
     }

-    pub fn open_example_entry(&mut self, ctx: &mut ViewContext<Self>) {
-        if let Some(tree) = self.workspace.read(ctx).worktrees().iter().next() {
-            if let Some(file) = tree.read(ctx).files().next() {
-                info!("open_entry ({}, {})", tree.id(), file.entry_id);
-                self.open_entry((tree.id(), file.entry_id), ctx);
-            } else {
-                error!("No example file found for worktree {}", tree.id());
-            }
-        } else {
-            error!("No worktree found while opening example entry");
-        }
-    }
-
     pub fn save_active_item(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
         self.active_pane.update(ctx, |pane, ctx| {
             if let Some(item) = pane.active_item() {
@@ -2,7 +2,7 @@ mod char_bag;
 mod fuzzy;

 use crate::{
-    editor::Snapshot as BufferSnapshot,
+    editor::{History, Snapshot as BufferSnapshot},
     sum_tree::{self, Edit, SumTree},
 };
 use anyhow::{anyhow, Result};
@@ -36,11 +36,6 @@ enum ScanState {
     Err(io::Error),
 }

-pub struct FilesIterItem {
-    pub ino: u64,
-    pub path: PathBuf,
-}
-
 pub struct Worktree {
     id: usize,
     path: Arc<Path>,
@@ -85,14 +80,6 @@ impl Worktree {
         tree
     }

-    pub fn snapshot(&self) -> Snapshot {
-        Snapshot {
-            id: self.id,
-            root_inode: self.root_inode(),
-            entries: self.entries.clone(),
-        }
-    }
-
     fn observe_scan_state(&mut self, scan_state: ScanState, ctx: &mut ModelContext<Self>) {
         self.scan_state = scan_state;
         self.poll_entries(ctx);
@@ -129,28 +116,27 @@ impl Worktree {
         }
     }

-    fn files<'a>(&'a self) -> impl Iterator<Item = FilesIterItem> + 'a {
-        self.entries.cursor::<(), ()>().filter_map(|e| {
-            if let Entry::File { path, ino, .. } = e {
-                Some(FilesIterItem {
-                    ino: *ino,
-                    path: PathBuf::from(path.path.iter().collect::<String>()),
-                })
-            } else {
-                None
-            }
-        })
-    }
-
-    fn root_entry(&self) -> Option<&Entry> {
-        self.root_inode().and_then(|ino| self.entries.get(&ino))
-    }
-
-    fn file_count(&self) -> usize {
+    pub fn snapshot(&self) -> Snapshot {
+        Snapshot {
+            id: self.id,
+            root_inode: self.root_inode(),
+            entries: self.entries.clone(),
+        }
+    }
+
+    pub fn contains_path(&self, path: &Path) -> bool {
+        path.starts_with(&self.path)
+    }
+
+    pub fn has_inode(&self, inode: u64) -> bool {
+        self.entries.get(&inode).is_some()
+    }
+
+    pub fn file_count(&self) -> usize {
         self.entries.summary().file_count
     }

-    fn abs_path_for_inode(&self, ino: u64) -> Result<PathBuf> {
+    pub fn abs_path_for_inode(&self, ino: u64) -> Result<PathBuf> {
         let mut result = self.path.to_path_buf();
         result.push(self.path_for_inode(ino, false)?);
         Ok(result)
@@ -199,13 +185,17 @@ impl Worktree {
         })
     }

-    pub fn load_file(&self, ino: u64, ctx: &AppContext) -> impl Future<Output = Result<String>> {
+    pub fn load_history(
+        &self,
+        ino: u64,
+        ctx: &AppContext,
+    ) -> impl Future<Output = Result<History>> {
         let path = self.abs_path_for_inode(ino);
         ctx.background_executor().spawn(async move {
             let mut file = std::fs::File::open(&path?)?;
             let mut base_text = String::new();
             file.read_to_string(&mut base_text)?;
-            Ok(base_text)
+            Ok(History::new(Arc::from(base_text)))
         })
     }

@@ -253,6 +243,17 @@ impl Worktree {
             ),
         }
     }
+
+    #[cfg(test)]
+    pub fn files<'a>(&'a self) -> impl Iterator<Item = u64> + 'a {
+        self.entries.cursor::<(), ()>().filter_map(|entry| {
+            if let Entry::File { inode, .. } = entry {
+                Some(*inode)
+            } else {
+                None
+            }
+        })
+    }
 }

 impl Entity for Worktree {
@@ -287,8 +288,8 @@ impl FileHandle {
             .unwrap()
     }

-    pub fn load(&self, ctx: &AppContext) -> impl Future<Output = Result<String>> {
-        self.worktree.read(ctx).load_file(self.inode, ctx)
+    pub fn load_history(&self, ctx: &AppContext) -> impl Future<Output = Result<History>> {
+        self.worktree.read(ctx).load_history(self.inode, ctx)
     }

     pub fn save<'a>(&self, content: BufferSnapshot, ctx: &AppContext) -> Task<Result<()>> {
@@ -306,7 +307,7 @@ pub enum Entry {
     Dir {
         parent: Option<u64>,
         name: Arc<OsStr>,
-        ino: u64,
+        inode: u64,
         is_symlink: bool,
         is_ignored: bool,
         children: Arc<[u64]>,
@@ -316,7 +317,7 @@ pub enum Entry {
         parent: Option<u64>,
         name: Arc<OsStr>,
         path: PathEntry,
-        ino: u64,
+        inode: u64,
         is_symlink: bool,
         is_ignored: bool,
     },
@@ -325,8 +326,8 @@ pub enum Entry {
 impl Entry {
     fn ino(&self) -> u64 {
         match self {
-            Entry::Dir { ino, .. } => *ino,
-            Entry::File { ino, .. } => *ino,
+            Entry::Dir { inode: ino, .. } => *ino,
+            Entry::File { inode: ino, .. } => *ino,
         }
     }

@@ -468,7 +469,7 @@ impl BackgroundScanner {
         let dir_entry = Entry::Dir {
             parent: None,
             name,
-            ino,
+            inode: ino,
             is_symlink,
             is_ignored,
             children: Arc::from([]),
@@ -511,7 +512,7 @@ impl BackgroundScanner {
             parent: None,
             name,
             path: PathEntry::new(ino, &relative_path, is_ignored),
-            ino,
+            inode: ino,
             is_symlink,
             is_ignored,
         }));
@@ -555,7 +556,7 @@ impl BackgroundScanner {
         let dir_entry = Entry::Dir {
             parent: Some(job.ino),
             name,
-            ino,
+            inode: ino,
             is_symlink,
             is_ignored,
             children: Arc::from([]),
@@ -579,7 +580,7 @@ impl BackgroundScanner {
             parent: Some(job.ino),
             name,
             path: PathEntry::new(ino, &relative_path, is_ignored),
-            ino,
+            inode: ino,
             is_symlink,
             is_ignored,
         });
@@ -621,6 +622,23 @@ struct ScanJob {
     scan_queue: crossbeam_channel::Sender<io::Result<ScanJob>>,
 }

+pub trait WorktreeHandle {
+    fn file(&self, entry_id: u64, app: &AppContext) -> Result<FileHandle>;
+}
+
+impl WorktreeHandle for ModelHandle<Worktree> {
+    fn file(&self, inode: u64, app: &AppContext) -> Result<FileHandle> {
+        if self.read(app).has_inode(inode) {
+            Ok(FileHandle {
+                worktree: self.clone(),
+                inode,
+            })
+        } else {
+            Err(anyhow!("entry does not exist in tree"))
+        }
+    }
+}
+
 trait UnwrapIgnoreTuple {
     fn unwrap(self) -> Ignore;
 }
@@ -705,22 +723,28 @@ mod tests {

             let buffer = Buffer::new(1, "a line of text.\n".repeat(10 * 1024));

-            let entry = app.read(|ctx| {
-                let entry = tree.read(ctx).files().next().unwrap();
-                assert_eq!(entry.path.file_name().unwrap(), "file1");
-                entry
+            let file_inode = app.read(|ctx| {
+                let tree = tree.read(ctx);
+                let inode = tree.files().next().unwrap();
+                assert_eq!(
+                    tree.path_for_inode(inode, false)
+                        .unwrap()
+                        .file_name()
+                        .unwrap(),
+                    "file1"
+                );
+                inode
             });
-            let file_ino = entry.ino;

             tree.update(&mut app, |tree, ctx| {
-                smol::block_on(tree.save(file_ino, buffer.snapshot(), ctx.as_ref())).unwrap()
+                smol::block_on(tree.save(file_inode, buffer.snapshot(), ctx.as_ref())).unwrap()
             });

-            let loaded_text = app
-                .read(|ctx| tree.read(ctx).load_file(file_ino, ctx))
+            let loaded_history = app
+                .read(|ctx| tree.read(ctx).load_history(file_inode, ctx))
                 .await
                 .unwrap();
-            assert_eq!(loaded_text, buffer.text());
+            assert_eq!(loaded_history.base_text.as_ref(), buffer.text());
         });
     }

@@ -2,7 +2,7 @@ use gpui::scoped_pool;

 use crate::sum_tree::SeekBias;

-use super::{char_bag::CharBag, Entry, FileCount, Snapshot, Worktree};
+use super::{char_bag::CharBag, Entry, FileCount, Snapshot};

 use std::{
     cmp::{max, min, Ordering, Reverse},
@@ -47,7 +47,7 @@ pub struct PathMatch {
     pub positions: Vec<usize>,
     pub tree_id: usize,
     pub entry_id: u64,
-    pub skipped_prefix_len: usize,
+    pub include_root: bool,
 }

 impl PartialEq for PathMatch {
@@ -237,7 +237,7 @@ fn match_single_tree_paths<'a>(
                 entry_id: path_entry.ino,
                 score,
                 positions: match_positions.clone(),
-                skipped_prefix_len,
+                include_root: skipped_prefix_len == 0,
             }));
             if results.len() == max_results {
                 *min_score = results.peek().unwrap().0.score;
@@ -1,44 +0,0 @@
-#[derive(Copy, Clone, Debug)]
-pub struct CharBag(u64);
-
-impl CharBag {
-    pub fn is_superset(self, other: CharBag) -> bool {
-        self.0 & other.0 == other.0
-    }
-
-    fn insert(&mut self, c: char) {
-        if c >= 'a' && c <= 'z' {
-            let mut count = self.0;
-            let idx = c as u8 - 'a' as u8;
-            count = count >> (idx * 2);
-            count = ((count << 1) | 1) & 3;
-            count = count << idx * 2;
-            self.0 |= count;
-        } else if c >= '0' && c <= '9' {
-            let idx = c as u8 - '0' as u8;
-            self.0 |= 1 << (idx + 52);
-        } else if c == '-' {
-            self.0 |= 1 << 62;
-        }
-    }
-}
-
-impl From<&str> for CharBag {
-    fn from(s: &str) -> Self {
-        let mut bag = Self(0);
-        for c in s.chars() {
-            bag.insert(c);
-        }
-        bag
-    }
-}
-
-impl From<&[char]> for CharBag {
-    fn from(chars: &[char]) -> Self {
-        let mut bag = Self(0);
-        for c in chars {
-            bag.insert(*c);
-        }
-        bag
-    }
-}
@ -1,501 +0,0 @@
|
||||||
use gpui::scoped_pool;
|
|
||||||
|
|
||||||
use super::char_bag::CharBag;
|
|
||||||
|
|
||||||
use std::{
|
|
||||||
cmp::{max, min, Ordering, Reverse},
|
|
||||||
collections::BinaryHeap,
|
|
||||||
};
|
|
||||||
|
|
||||||
const BASE_DISTANCE_PENALTY: f64 = 0.6;
|
|
||||||
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
|
|
||||||
const MIN_DISTANCE_PENALTY: f64 = 0.2;
|
|
||||||
|
|
||||||
pub struct PathEntry {
|
|
||||||
pub ino: u64,
|
|
||||||
pub path_chars: CharBag,
|
|
||||||
pub path: Vec<char>,
|
|
||||||
pub lowercase_path: Vec<char>,
|
|
||||||
pub is_ignored: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct PathMatch {
|
|
||||||
pub score: f64,
|
|
||||||
pub positions: Vec<usize>,
|
|
||||||
pub tree_id: usize,
|
|
||||||
pub entry_id: u64,
|
|
||||||
pub skipped_prefix_len: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for PathMatch {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.score.eq(&other.score)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for PathMatch {}
|
|
||||||
|
|
||||||
impl PartialOrd for PathMatch {
|
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
|
||||||
self.score.partial_cmp(&other.score)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ord for PathMatch {
|
|
||||||
fn cmp(&self, other: &Self) -> Ordering {
|
|
||||||
self.partial_cmp(other).unwrap_or(Ordering::Equal)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn match_paths(
|
|
||||||
paths_by_tree_id: &[(usize, usize, &[PathEntry])],
|
|
||||||
query: &str,
|
|
||||||
include_ignored: bool,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
pool: scoped_pool::Pool,
|
|
||||||
) -> Vec<PathMatch> {
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
let lowercase_query = &lowercase_query;
|
|
||||||
let query = &query;
|
|
||||||
let query_chars = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let cpus = num_cpus::get();
|
|
||||||
let path_count = paths_by_tree_id
|
|
||||||
.iter()
|
|
||||||
.fold(0, |sum, (_, _, paths)| sum + paths.len());
|
|
||||||
let segment_size = (path_count + cpus - 1) / cpus;
|
|
||||||
let mut segment_results = (0..cpus).map(|_| BinaryHeap::new()).collect::<Vec<_>>();
|
|
||||||
|
|
||||||
pool.scoped(|scope| {
|
|
||||||
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
|
|
||||||
scope.execute(move || {
|
|
||||||
let segment_start = segment_idx * segment_size;
|
|
||||||
let segment_end = segment_start + segment_size;
|
|
||||||
|
|
||||||
let mut min_score = 0.0;
|
|
||||||
let mut last_positions = Vec::new();
|
|
||||||
last_positions.resize(query.len(), 0);
|
|
||||||
let mut match_positions = Vec::new();
|
|
||||||
match_positions.resize(query.len(), 0);
|
|
||||||
let mut score_matrix = Vec::new();
|
|
||||||
let mut best_position_matrix = Vec::new();
|
|
||||||
|
|
||||||
let mut tree_start = 0;
|
|
||||||
for (tree_id, skipped_prefix_len, paths) in paths_by_tree_id {
|
|
||||||
let tree_end = tree_start + paths.len();
|
|
||||||
if tree_start < segment_end && segment_start < tree_end {
|
|
||||||
let start = max(tree_start, segment_start) - tree_start;
|
|
||||||
let end = min(tree_end, segment_end) - tree_start;
|
|
||||||
|
|
||||||
match_single_tree_paths(
|
|
||||||
*tree_id,
|
|
||||||
*skipped_prefix_len,
|
|
||||||
paths,
|
|
||||||
start,
|
|
||||||
end,
|
|
||||||
query,
|
|
||||||
lowercase_query,
|
|
||||||
query_chars,
|
|
||||||
include_ignored,
|
|
||||||
smart_case,
|
|
||||||
results,
|
|
||||||
max_results,
|
|
||||||
&mut min_score,
|
|
||||||
&mut match_positions,
|
|
||||||
&mut last_positions,
|
|
||||||
&mut score_matrix,
|
|
||||||
&mut best_position_matrix,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if tree_end >= segment_end {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
tree_start = tree_end;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut results = segment_results
|
|
||||||
.into_iter()
|
|
||||||
.flatten()
|
|
||||||
.map(|r| r.0)
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
results.sort_unstable_by(|a, b| b.score.partial_cmp(&a.score).unwrap());
|
|
||||||
results.truncate(max_results);
|
|
||||||
results
|
|
||||||
}
|
|
||||||
|
|
||||||
fn match_single_tree_paths(
|
|
||||||
tree_id: usize,
|
|
||||||
skipped_prefix_len: usize,
|
|
||||||
path_entries: &[PathEntry],
|
|
||||||
start: usize,
|
|
||||||
end: usize,
|
|
||||||
query: &[char],
|
|
||||||
lowercase_query: &[char],
|
|
||||||
query_chars: CharBag,
|
|
||||||
include_ignored: bool,
|
|
||||||
smart_case: bool,
|
|
||||||
results: &mut BinaryHeap<Reverse<PathMatch>>,
|
|
||||||
max_results: usize,
|
|
||||||
min_score: &mut f64,
|
|
||||||
match_positions: &mut Vec<usize>,
|
|
||||||
last_positions: &mut Vec<usize>,
|
|
||||||
score_matrix: &mut Vec<Option<f64>>,
|
|
||||||
best_position_matrix: &mut Vec<usize>,
|
|
||||||
) {
|
|
||||||
for i in start..end {
|
|
||||||
let path_entry = unsafe { &path_entries.get_unchecked(i) };
|
|
||||||
|
|
||||||
if !include_ignored && path_entry.is_ignored {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !path_entry.path_chars.is_superset(query_chars) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if !find_last_positions(
|
|
||||||
last_positions,
|
|
||||||
skipped_prefix_len,
|
|
||||||
&path_entry.lowercase_path,
|
|
||||||
&lowercase_query[..],
|
|
||||||
) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let matrix_len = query.len() * (path_entry.path.len() - skipped_prefix_len);
|
|
||||||
score_matrix.clear();
|
|
||||||
score_matrix.resize(matrix_len, None);
|
|
||||||
best_position_matrix.clear();
|
|
||||||
best_position_matrix.resize(matrix_len, skipped_prefix_len);
|
|
||||||
|
|
||||||
let score = score_match(
|
|
||||||
&query[..],
|
|
||||||
&lowercase_query[..],
|
|
||||||
&path_entry.path,
|
|
||||||
&path_entry.lowercase_path,
|
|
||||||
skipped_prefix_len,
|
|
||||||
smart_case,
|
|
||||||
&last_positions,
|
|
||||||
score_matrix,
|
|
||||||
best_position_matrix,
|
|
||||||
match_positions,
|
|
||||||
*min_score,
|
|
||||||
);
|
|
||||||
|
|
||||||
if score > 0.0 {
|
|
||||||
results.push(Reverse(PathMatch {
|
|
||||||
tree_id,
|
|
||||||
entry_id: path_entry.ino,
|
|
||||||
score,
|
|
||||||
positions: match_positions.clone(),
|
|
||||||
skipped_prefix_len,
|
|
||||||
}));
|
|
||||||
if results.len() == max_results {
|
|
||||||
*min_score = results.peek().unwrap().0.score;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn find_last_positions(
|
|
||||||
last_positions: &mut Vec<usize>,
|
|
||||||
skipped_prefix_len: usize,
|
|
||||||
path: &[char],
|
|
||||||
query: &[char],
|
|
||||||
) -> bool {
|
|
||||||
let mut path = path.iter();
|
|
||||||
for (i, char) in query.iter().enumerate().rev() {
|
|
||||||
if let Some(j) = path.rposition(|c| c == char) {
|
|
||||||
if j >= skipped_prefix_len {
|
|
||||||
last_positions[i] = j;
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn score_match(
|
|
||||||
query: &[char],
|
|
||||||
query_cased: &[char],
|
|
||||||
path: &[char],
|
|
||||||
path_cased: &[char],
|
|
||||||
skipped_prefix_len: usize,
|
|
||||||
smart_case: bool,
|
|
||||||
last_positions: &[usize],
|
|
||||||
score_matrix: &mut [Option<f64>],
|
|
||||||
best_position_matrix: &mut [usize],
|
|
||||||
match_positions: &mut [usize],
|
|
||||||
min_score: f64,
|
|
||||||
) -> f64 {
|
|
||||||
let score = recursive_score_match(
|
|
||||||
query,
|
|
||||||
query_cased,
|
|
||||||
path,
|
|
||||||
path_cased,
|
|
||||||
skipped_prefix_len,
|
|
||||||
smart_case,
|
|
||||||
last_positions,
|
|
||||||
score_matrix,
|
|
||||||
best_position_matrix,
|
|
||||||
min_score,
|
|
||||||
0,
|
|
||||||
skipped_prefix_len,
|
|
||||||
query.len() as f64,
|
|
||||||
) * query.len() as f64;
|
|
||||||
|
|
||||||
if score <= 0.0 {
|
|
||||||
return 0.0;
|
|
||||||
}
|
|
||||||
|
|
||||||
let path_len = path.len() - skipped_prefix_len;
|
|
||||||
let mut cur_start = 0;
|
|
||||||
for i in 0..query.len() {
|
|
||||||
match_positions[i] = best_position_matrix[i * path_len + cur_start] - skipped_prefix_len;
|
|
||||||
cur_start = match_positions[i] + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
score
|
|
||||||
}
|
|
||||||
|
|
||||||
fn recursive_score_match(
|
|
||||||
query: &[char],
|
|
||||||
query_cased: &[char],
|
|
||||||
path: &[char],
|
|
||||||
path_cased: &[char],
|
|
||||||
skipped_prefix_len: usize,
|
|
||||||
smart_case: bool,
|
|
||||||
last_positions: &[usize],
|
|
||||||
score_matrix: &mut [Option<f64>],
|
|
||||||
best_position_matrix: &mut [usize],
|
|
||||||
min_score: f64,
|
|
||||||
query_idx: usize,
|
|
||||||
path_idx: usize,
|
|
||||||
cur_score: f64,
|
|
||||||
) -> f64 {
|
|
||||||
if query_idx == query.len() {
|
|
||||||
return 1.0;
|
|
||||||
}
|
|
||||||
|
|
||||||
let path_len = path.len() - skipped_prefix_len;
|
|
||||||
|
|
||||||
if let Some(memoized) = score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] {
|
|
||||||
return memoized;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut score = 0.0;
|
|
||||||
let mut best_position = 0;
|
|
||||||
|
|
||||||
let query_char = query_cased[query_idx];
|
|
||||||
let limit = last_positions[query_idx];
|
|
||||||
|
|
||||||
let mut last_slash = 0;
|
|
||||||
for j in path_idx..=limit {
|
|
||||||
let path_char = path_cased[j];
|
|
||||||
let is_path_sep = path_char == '/' || path_char == '\\';
|
|
||||||
|
|
||||||
if query_idx == 0 && is_path_sep {
|
|
||||||
last_slash = j;
|
|
||||||
}
|
|
||||||
|
|
||||||
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
|
|
||||||
let mut char_score = 1.0;
|
|
||||||
if j > path_idx {
|
|
||||||
let last = path[j - 1];
|
|
||||||
let curr = path[j];
|
|
||||||
|
|
||||||
if last == '/' {
|
|
||||||
char_score = 0.9;
|
|
||||||
} else if last == '-' || last == '_' || last == ' ' || last.is_numeric() {
|
|
||||||
char_score = 0.8;
|
|
||||||
} else if last.is_lowercase() && curr.is_uppercase() {
|
|
||||||
char_score = 0.8;
|
|
||||||
} else if last == '.' {
|
|
||||||
char_score = 0.7;
|
|
||||||
} else if query_idx == 0 {
|
|
||||||
char_score = BASE_DISTANCE_PENALTY;
|
|
||||||
} else {
|
|
||||||
char_score = MIN_DISTANCE_PENALTY.max(
|
|
||||||
BASE_DISTANCE_PENALTY
|
|
||||||
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply a severe penalty if the case doesn't match.
|
|
||||||
// This will make the exact matches have higher score than the case-insensitive and the
|
|
||||||
// path insensitive matches.
|
|
||||||
if (smart_case || path[j] == '/') && query[query_idx] != path[j] {
|
|
||||||
char_score *= 0.001;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut multiplier = char_score;
|
|
||||||
|
|
||||||
// Scale the score based on how deep within the patch we found the match.
|
|
||||||
if query_idx == 0 {
|
|
||||||
multiplier /= (path.len() - last_slash) as f64;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut next_score = 1.0;
|
|
||||||
if min_score > 0.0 {
|
|
||||||
next_score = cur_score * multiplier;
|
|
||||||
// Scores only decrease. If we can't pass the previous best, bail
|
|
||||||
if next_score < min_score {
|
|
||||||
// Ensure that score is non-zero so we use it in the memo table.
|
|
||||||
if score == 0.0 {
|
|
||||||
score = 1e-18;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let new_score = recursive_score_match(
|
|
||||||
query,
|
|
||||||
query_cased,
|
|
||||||
path,
|
|
||||||
path_cased,
|
|
||||||
skipped_prefix_len,
|
|
||||||
smart_case,
|
|
||||||
last_positions,
|
|
||||||
score_matrix,
|
|
||||||
best_position_matrix,
|
|
||||||
min_score,
|
|
||||||
query_idx + 1,
|
|
||||||
j + 1,
|
|
||||||
next_score,
|
|
||||||
) * multiplier;
|
|
||||||
|
|
||||||
if new_score > score {
|
|
||||||
score = new_score;
|
|
||||||
best_position = j;
|
|
||||||
// Optimization: can't score better than 1.
|
|
||||||
if new_score == 1.0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if best_position != 0 {
|
|
||||||
best_position_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = best_position;
|
|
||||||
}
|
|
||||||
|
|
||||||
score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = Some(score);
|
|
||||||
score
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_match_path_entries() {
|
|
||||||
let paths = vec![
|
|
||||||
"",
|
|
||||||
"a",
|
|
||||||
"ab",
|
|
||||||
"abC",
|
|
||||||
"abcd",
|
|
||||||
"alphabravocharlie",
|
|
||||||
"AlphaBravoCharlie",
|
|
||||||
"thisisatestdir",
|
|
||||||
"/////ThisIsATestDir",
|
|
||||||
"/this/is/a/test/dir",
|
|
||||||
"/test/tiatd",
|
|
||||||
];
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
match_query("abc", false, &paths),
|
|
||||||
vec![
|
|
||||||
("abC", vec![0, 1, 2]),
|
|
||||||
("abcd", vec![0, 1, 2]),
|
|
||||||
("AlphaBravoCharlie", vec![0, 5, 10]),
|
|
||||||
("alphabravocharlie", vec![4, 5, 10]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
match_query("t/i/a/t/d", false, &paths),
|
|
||||||
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
match_query("tiatd", false, &paths),
|
|
||||||
vec![
|
|
||||||
("/test/tiatd", vec![6, 7, 8, 9, 10]),
|
|
||||||
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
|
|
||||||
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
|
|
||||||
("thisisatestdir", vec![0, 2, 6, 7, 11]),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn match_query<'a>(
|
|
||||||
query: &str,
|
|
||||||
smart_case: bool,
|
|
||||||
paths: &Vec<&'a str>,
|
|
||||||
) -> Vec<(&'a str, Vec<usize>)> {
|
|
||||||
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let query = query.chars().collect::<Vec<_>>();
|
|
||||||
let query_chars = CharBag::from(&lowercase_query[..]);
|
|
||||||
|
|
||||||
let mut path_entries = Vec::new();
|
|
||||||
for (i, path) in paths.iter().enumerate() {
|
|
||||||
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let path_chars = CharBag::from(&lowercase_path[..]);
|
|
||||||
let path = path.chars().collect();
|
|
||||||
path_entries.push(PathEntry {
|
|
||||||
ino: i as u64,
|
|
||||||
path_chars,
|
|
||||||
path,
|
|
||||||
lowercase_path,
|
|
||||||
is_ignored: false,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut match_positions = Vec::new();
|
|
||||||
let mut last_positions = Vec::new();
|
|
||||||
match_positions.resize(query.len(), 0);
|
|
||||||
last_positions.resize(query.len(), 0);
|
|
||||||
|
|
||||||
let mut results = BinaryHeap::new();
|
|
||||||
match_single_tree_paths(
|
|
||||||
0,
|
|
||||||
0,
|
|
||||||
&path_entries,
|
|
||||||
0,
|
|
||||||
path_entries.len(),
|
|
||||||
&query[..],
|
|
||||||
&lowercase_query[..],
|
|
||||||
query_chars,
|
|
||||||
true,
|
|
||||||
smart_case,
|
|
||||||
&mut results,
|
|
||||||
100,
|
|
||||||
&mut 0.0,
|
|
||||||
&mut match_positions,
|
|
||||||
&mut last_positions,
|
|
||||||
&mut Vec::new(),
|
|
||||||
&mut Vec::new(),
|
|
||||||
);
|
|
||||||
|
|
||||||
results
|
|
||||||
.into_iter()
|
|
||||||
.rev()
|
|
||||||
.map(|result| {
|
|
||||||
(
|
|
||||||
paths[result.0.entry_id as usize].clone(),
|
|
||||||
result.0.positions,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,5 +0,0 @@
-mod char_bag;
-mod fuzzy;
-mod worktree;
-
-pub use worktree::{match_paths, FileHandle, PathMatch, Worktree, WorktreeHandle};
@ -1,811 +0,0 @@
|
||||||
pub use super::fuzzy::PathMatch;
|
|
||||||
use super::{
|
|
||||||
char_bag::CharBag,
|
|
||||||
fuzzy::{self, PathEntry},
|
|
||||||
};
|
|
||||||
use crate::{
|
|
||||||
editor::{History, Snapshot},
|
|
||||||
throttle::throttled,
|
|
||||||
util::post_inc,
|
|
||||||
};
|
|
||||||
use anyhow::{anyhow, Result};
|
|
||||||
use crossbeam_channel as channel;
|
|
||||||
use easy_parallel::Parallel;
|
|
||||||
use gpui::{scoped_pool, AppContext, Entity, ModelContext, ModelHandle, Task};
|
|
||||||
use ignore::dir::{Ignore, IgnoreBuilder};
|
|
||||||
use parking_lot::RwLock;
|
|
||||||
use postage::watch;
|
|
||||||
use smol::prelude::*;
|
|
||||||
use std::{
|
|
||||||
collections::HashMap,
|
|
||||||
ffi::{OsStr, OsString},
|
|
||||||
fmt, fs,
|
|
||||||
io::{self, Write},
|
|
||||||
os::unix::fs::MetadataExt,
|
|
||||||
path::Path,
|
|
||||||
path::PathBuf,
|
|
||||||
sync::Arc,
|
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Worktree(Arc<RwLock<WorktreeState>>);
|
|
||||||
|
|
||||||
struct WorktreeState {
|
|
||||||
id: usize,
|
|
||||||
path: PathBuf,
|
|
||||||
root_ino: Option<u64>,
|
|
||||||
entries: HashMap<u64, Entry>,
|
|
||||||
file_paths: Vec<PathEntry>,
|
|
||||||
histories: HashMap<u64, History>,
|
|
||||||
scan_state: watch::Sender<ScanState>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
enum ScanState {
|
|
||||||
Scanning,
|
|
||||||
Idle,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct DirToScan {
|
|
||||||
ino: u64,
|
|
||||||
path: PathBuf,
|
|
||||||
relative_path: PathBuf,
|
|
||||||
ignore: Option<Ignore>,
|
|
||||||
dirs_to_scan: channel::Sender<io::Result<DirToScan>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Worktree {
|
|
||||||
pub fn new<T>(id: usize, path: T, ctx: &mut ModelContext<Self>) -> Self
|
|
||||||
where
|
|
||||||
T: Into<PathBuf>,
|
|
||||||
{
|
|
||||||
let scan_state = watch::channel_with(ScanState::Scanning);
|
|
||||||
|
|
||||||
let tree = Self(Arc::new(RwLock::new(WorktreeState {
|
|
||||||
id,
|
|
||||||
path: path.into(),
|
|
||||||
root_ino: None,
|
|
||||||
entries: HashMap::new(),
|
|
||||||
file_paths: Vec::new(),
|
|
||||||
histories: HashMap::new(),
|
|
||||||
scan_state: scan_state.0,
|
|
||||||
})));
|
|
||||||
|
|
||||||
{
|
|
||||||
let tree = tree.clone();
|
|
||||||
ctx.as_ref().thread_pool().spawn(move || {
|
|
||||||
if let Err(error) = tree.scan_dirs() {
|
|
||||||
log::error!("error scanning worktree: {}", error);
|
|
||||||
}
|
|
||||||
tree.set_scan_state(ScanState::Idle);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx.spawn_stream(
|
|
||||||
throttled(Duration::from_millis(100), scan_state.1),
|
|
||||||
Self::observe_scan_state,
|
|
||||||
|_, _| {},
|
|
||||||
)
|
|
||||||
.detach();
|
|
||||||
|
|
||||||
tree
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_scan_state(&self, state: ScanState) {
|
|
||||||
*self.0.write().scan_state.borrow_mut() = state;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn scan_dirs(&self) -> io::Result<()> {
|
|
||||||
let path = self.0.read().path.clone();
|
|
||||||
let metadata = fs::metadata(&path)?;
|
|
||||||
let ino = metadata.ino();
|
|
||||||
let is_symlink = fs::symlink_metadata(&path)?.file_type().is_symlink();
|
|
||||||
let name = path
|
|
||||||
.file_name()
|
|
||||||
.map(|name| OsString::from(name))
|
|
||||||
.unwrap_or(OsString::from("/"));
|
|
||||||
let relative_path = PathBuf::from(&name);
|
|
||||||
|
|
||||||
let mut ignore = IgnoreBuilder::new().build().add_parents(&path).unwrap();
|
|
||||||
if metadata.is_dir() {
|
|
||||||
ignore = ignore.add_child(&path).unwrap();
|
|
||||||
}
|
|
||||||
let is_ignored = ignore.matched(&path, metadata.is_dir()).is_ignore();
|
|
||||||
|
|
||||||
if metadata.file_type().is_dir() {
|
|
||||||
let is_ignored = is_ignored || name == ".git";
|
|
||||||
self.insert_dir(None, name, ino, is_symlink, is_ignored);
|
|
||||||
let (tx, rx) = channel::unbounded();
|
|
||||||
|
|
||||||
tx.send(Ok(DirToScan {
|
|
||||||
ino,
|
|
||||||
path,
|
|
||||||
relative_path,
|
|
||||||
ignore: Some(ignore),
|
|
||||||
dirs_to_scan: tx.clone(),
|
|
||||||
}))
|
|
||||||
.unwrap();
|
|
||||||
drop(tx);
|
|
||||||
|
|
||||||
Parallel::<io::Result<()>>::new()
|
|
||||||
.each(0..16, |_| {
|
|
||||||
while let Ok(result) = rx.recv() {
|
|
||||||
self.scan_dir(result?)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
})
|
|
||||||
.run()
|
|
||||||
.into_iter()
|
|
||||||
.collect::<io::Result<()>>()?;
|
|
||||||
} else {
|
|
||||||
self.insert_file(None, name, ino, is_symlink, is_ignored, relative_path);
|
|
||||||
}
|
|
||||||
self.0.write().root_ino = Some(ino);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn scan_dir(&self, to_scan: DirToScan) -> io::Result<()> {
|
|
||||||
let mut new_children = Vec::new();
|
|
||||||
|
|
||||||
for child_entry in fs::read_dir(&to_scan.path)? {
|
|
||||||
let child_entry = child_entry?;
|
|
||||||
let name = child_entry.file_name();
|
|
||||||
let relative_path = to_scan.relative_path.join(&name);
|
|
||||||
let metadata = child_entry.metadata()?;
|
|
||||||
let ino = metadata.ino();
|
|
||||||
let is_symlink = metadata.file_type().is_symlink();
|
|
||||||
|
|
||||||
if metadata.is_dir() {
|
|
||||||
let path = to_scan.path.join(&name);
|
|
||||||
let mut is_ignored = true;
|
|
||||||
let mut ignore = None;
|
|
||||||
|
|
||||||
if let Some(parent_ignore) = to_scan.ignore.as_ref() {
|
|
||||||
let child_ignore = parent_ignore.add_child(&path).unwrap();
|
|
||||||
is_ignored = child_ignore.matched(&path, true).is_ignore() || name == ".git";
|
|
||||||
if !is_ignored {
|
|
||||||
ignore = Some(child_ignore);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.insert_dir(Some(to_scan.ino), name, ino, is_symlink, is_ignored);
|
|
||||||
new_children.push(ino);
|
|
||||||
|
|
||||||
let dirs_to_scan = to_scan.dirs_to_scan.clone();
|
|
||||||
let _ = to_scan.dirs_to_scan.send(Ok(DirToScan {
|
|
||||||
ino,
|
|
||||||
path,
|
|
||||||
relative_path,
|
|
||||||
ignore,
|
|
||||||
dirs_to_scan,
|
|
||||||
}));
|
|
||||||
} else {
|
|
||||||
let is_ignored = to_scan.ignore.as_ref().map_or(true, |i| {
|
|
||||||
i.matched(to_scan.path.join(&name), false).is_ignore()
|
|
||||||
});
|
|
||||||
|
|
||||||
self.insert_file(
|
|
||||||
Some(to_scan.ino),
|
|
||||||
name,
|
|
||||||
ino,
|
|
||||||
is_symlink,
|
|
||||||
is_ignored,
|
|
||||||
relative_path,
|
|
||||||
);
|
|
||||||
new_children.push(ino);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(Entry::Dir { children, .. }) = &mut self.0.write().entries.get_mut(&to_scan.ino)
|
|
||||||
{
|
|
||||||
*children = new_children.clone();
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_dir(
|
|
||||||
&self,
|
|
||||||
parent: Option<u64>,
|
|
||||||
name: OsString,
|
|
||||||
ino: u64,
|
|
||||||
is_symlink: bool,
|
|
||||||
is_ignored: bool,
|
|
||||||
) {
|
|
||||||
let mut state = self.0.write();
|
|
||||||
let entries = &mut state.entries;
|
|
||||||
entries.insert(
|
|
||||||
ino,
|
|
||||||
Entry::Dir {
|
|
||||||
parent,
|
|
||||||
name,
|
|
||||||
ino,
|
|
||||||
is_symlink,
|
|
||||||
is_ignored,
|
|
||||||
children: Vec::new(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
*state.scan_state.borrow_mut() = ScanState::Scanning;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert_file(
|
|
||||||
&self,
|
|
||||||
parent: Option<u64>,
|
|
||||||
name: OsString,
|
|
||||||
ino: u64,
|
|
||||||
is_symlink: bool,
|
|
||||||
is_ignored: bool,
|
|
||||||
path: PathBuf,
|
|
||||||
) {
|
|
||||||
let path = path.to_string_lossy();
|
|
||||||
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
|
|
||||||
let path = path.chars().collect::<Vec<_>>();
|
|
||||||
let path_chars = CharBag::from(&path[..]);
|
|
||||||
|
|
||||||
let mut state = self.0.write();
|
|
||||||
state.entries.insert(
|
|
||||||
ino,
|
|
||||||
Entry::File {
|
|
||||||
parent,
|
|
||||||
name,
|
|
||||||
ino,
|
|
||||||
is_symlink,
|
|
||||||
is_ignored,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
state.file_paths.push(PathEntry {
|
|
||||||
ino,
|
|
||||||
path_chars,
|
|
||||||
path,
|
|
||||||
lowercase_path,
|
|
||||||
is_ignored,
|
|
||||||
});
|
|
||||||
*state.scan_state.borrow_mut() = ScanState::Scanning;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn entry_path(&self, mut entry_id: u64) -> Result<PathBuf> {
|
|
||||||
let state = self.0.read();
|
|
||||||
|
|
||||||
let mut entries = Vec::new();
|
|
||||||
loop {
|
|
||||||
let entry = state
|
|
||||||
.entries
|
|
||||||
.get(&entry_id)
|
|
||||||
.ok_or_else(|| anyhow!("entry does not exist in worktree"))?;
|
|
||||||
entries.push(entry);
|
|
||||||
if let Some(parent_id) = entry.parent() {
|
|
||||||
entry_id = parent_id;
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut path = PathBuf::new();
|
|
||||||
for entry in entries.into_iter().rev() {
|
|
||||||
path.push(entry.name());
|
|
||||||
}
|
|
||||||
Ok(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn abs_entry_path(&self, entry_id: u64) -> Result<PathBuf> {
|
|
||||||
let mut path = self.0.read().path.clone();
|
|
||||||
path.pop();
|
|
||||||
Ok(path.join(self.entry_path(entry_id)?))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
|
|
||||||
let path = path.as_ref();
|
|
||||||
let state = self.0.read();
|
|
||||||
state.root_ino.and_then(|mut ino| {
|
|
||||||
'components: for component in path {
|
|
||||||
if let Entry::Dir { children, .. } = &state.entries[&ino] {
|
|
||||||
for child in children {
|
|
||||||
if state.entries[child].name() == component {
|
|
||||||
ino = *child;
|
|
||||||
continue 'components;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return None;
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(ino)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn fmt_entry(&self, f: &mut fmt::Formatter<'_>, entry_id: u64, indent: usize) -> fmt::Result {
|
|
||||||
match &self.0.read().entries[&entry_id] {
|
|
||||||
Entry::Dir { name, children, .. } => {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}{}/ ({})\n",
|
|
||||||
" ".repeat(indent),
|
|
||||||
name.to_string_lossy(),
|
|
||||||
entry_id
|
|
||||||
)?;
|
|
||||||
for child_id in children.iter() {
|
|
||||||
self.fmt_entry(f, *child_id, indent + 2)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
Entry::File { name, .. } => write!(
|
|
||||||
f,
|
|
||||||
"{}{} ({})\n",
|
|
||||||
" ".repeat(indent),
|
|
||||||
name.to_string_lossy(),
|
|
||||||
entry_id
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn path(&self) -> PathBuf {
|
|
||||||
PathBuf::from(&self.0.read().path)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn contains_path(&self, path: &Path) -> bool {
|
|
||||||
path.starts_with(self.path())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn iter(&self) -> Iter {
|
|
||||||
Iter {
|
|
||||||
tree: self.clone(),
|
|
||||||
stack: Vec::new(),
|
|
||||||
started: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn files(&self) -> FilesIter {
|
|
||||||
FilesIter {
|
|
||||||
iter: self.iter(),
|
|
||||||
path: PathBuf::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn has_entry(&self, entry_id: u64) -> bool {
|
|
||||||
self.0.read().entries.contains_key(&entry_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn entry_count(&self) -> usize {
|
|
||||||
self.0.read().entries.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_count(&self) -> usize {
|
|
||||||
self.0.read().file_paths.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn load_history(&self, entry_id: u64) -> impl Future<Output = Result<History>> {
|
|
||||||
let tree = self.clone();
|
|
||||||
|
|
||||||
async move {
|
|
||||||
if let Some(history) = tree.0.read().histories.get(&entry_id) {
|
|
||||||
return Ok(history.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = tree.abs_entry_path(entry_id)?;
|
|
||||||
|
|
||||||
let mut file = smol::fs::File::open(&path).await?;
|
|
||||||
let mut base_text = String::new();
|
|
||||||
file.read_to_string(&mut base_text).await?;
|
|
||||||
let history = History::new(Arc::from(base_text));
|
|
||||||
tree.0.write().histories.insert(entry_id, history.clone());
|
|
||||||
Ok(history)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn save<'a>(&self, entry_id: u64, content: Snapshot, ctx: &AppContext) -> Task<Result<()>> {
|
|
||||||
let path = self.abs_entry_path(entry_id);
|
|
||||||
ctx.background_executor().spawn(async move {
|
|
||||||
let buffer_size = content.text_summary().bytes.min(10 * 1024);
|
|
||||||
let file = std::fs::File::create(&path?)?;
|
|
||||||
let mut writer = std::io::BufWriter::with_capacity(buffer_size, file);
|
|
||||||
for chunk in content.fragments() {
|
|
||||||
writer.write(chunk.as_bytes())?;
|
|
||||||
}
|
|
||||||
writer.flush()?;
|
|
||||||
Ok(())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn observe_scan_state(&mut self, _: ScanState, ctx: &mut ModelContext<Self>) {
|
|
||||||
// log::info!("observe {:?}", std::time::Instant::now());
|
|
||||||
ctx.notify()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Debug for Worktree {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
if self.entry_count() == 0 {
|
|
||||||
write!(f, "Empty tree\n")
|
|
||||||
} else {
|
|
||||||
self.fmt_entry(f, 0, 0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Entity for Worktree {
|
|
||||||
type Event = ();
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WorktreeState {
|
|
||||||
fn root_entry(&self) -> Option<&Entry> {
|
|
||||||
self.root_ino
|
|
||||||
.and_then(|root_ino| self.entries.get(&root_ino))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait WorktreeHandle {
|
|
||||||
fn file(&self, entry_id: u64, app: &AppContext) -> Result<FileHandle>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WorktreeHandle for ModelHandle<Worktree> {
|
|
||||||
fn file(&self, entry_id: u64, app: &AppContext) -> Result<FileHandle> {
|
|
||||||
if self.read(app).has_entry(entry_id) {
|
|
||||||
Ok(FileHandle {
|
|
||||||
worktree: self.clone(),
|
|
||||||
entry_id,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
Err(anyhow!("entry does not exist in tree"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum Entry {
|
|
||||||
Dir {
|
|
||||||
parent: Option<u64>,
|
|
||||||
name: OsString,
|
|
||||||
ino: u64,
|
|
||||||
is_symlink: bool,
|
|
||||||
is_ignored: bool,
|
|
||||||
children: Vec<u64>,
|
|
||||||
},
|
|
||||||
File {
|
|
||||||
parent: Option<u64>,
|
|
||||||
name: OsString,
|
|
||||||
ino: u64,
|
|
||||||
is_symlink: bool,
|
|
||||||
is_ignored: bool,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Entry {
|
|
||||||
fn parent(&self) -> Option<u64> {
|
|
||||||
match self {
|
|
||||||
Entry::Dir { parent, .. } | Entry::File { parent, .. } => *parent,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ino(&self) -> u64 {
|
|
||||||
match self {
|
|
||||||
Entry::Dir { ino, .. } | Entry::File { ino, .. } => *ino,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn name(&self) -> &OsStr {
|
|
||||||
match self {
|
|
||||||
Entry::Dir { name, .. } | Entry::File { name, .. } => name,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct FileHandle {
|
|
||||||
worktree: ModelHandle<Worktree>,
|
|
||||||
entry_id: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileHandle {
|
|
||||||
pub fn path(&self, app: &AppContext) -> PathBuf {
|
|
||||||
self.worktree.read(app).entry_path(self.entry_id).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn load_history(&self, app: &AppContext) -> impl Future<Output = Result<History>> {
|
|
||||||
self.worktree.read(app).load_history(self.entry_id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn save<'a>(&self, content: Snapshot, ctx: &AppContext) -> Task<Result<()>> {
|
|
||||||
let worktree = self.worktree.read(ctx);
|
|
||||||
worktree.save(self.entry_id, content, ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn entry_id(&self) -> (usize, u64) {
|
|
||||||
(self.worktree.id(), self.entry_id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
struct IterStackEntry {
|
|
||||||
entry_id: u64,
|
|
||||||
child_idx: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Iter {
|
|
||||||
tree: Worktree,
|
|
||||||
stack: Vec<IterStackEntry>,
|
|
||||||
started: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Iterator for Iter {
|
|
||||||
type Item = Traversal;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
let state = self.tree.0.read();
|
|
||||||
|
|
||||||
if !self.started {
|
|
||||||
self.started = true;
|
|
||||||
|
|
||||||
return if let Some(entry) = state.root_entry().cloned() {
|
|
||||||
self.stack.push(IterStackEntry {
|
|
||||||
entry_id: entry.ino(),
|
|
||||||
child_idx: 0,
|
|
||||||
});
|
|
||||||
|
|
||||||
Some(Traversal::Push {
|
|
||||||
entry_id: entry.ino(),
|
|
||||||
entry,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
while let Some(parent) = self.stack.last_mut() {
|
|
||||||
if let Some(Entry::Dir { children, .. }) = &state.entries.get(&parent.entry_id) {
|
|
||||||
if parent.child_idx < children.len() {
|
|
||||||
let child_id = children[post_inc(&mut parent.child_idx)];
|
|
||||||
|
|
||||||
self.stack.push(IterStackEntry {
|
|
||||||
entry_id: child_id,
|
|
||||||
child_idx: 0,
|
|
||||||
});
|
|
||||||
|
|
||||||
return Some(Traversal::Push {
|
|
||||||
entry_id: child_id,
|
|
||||||
entry: state.entries[&child_id].clone(),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
self.stack.pop();
|
|
||||||
|
|
||||||
return Some(Traversal::Pop);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.stack.pop();
|
|
||||||
|
|
||||||
return Some(Traversal::Pop);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Traversal {
|
|
||||||
Push { entry_id: u64, entry: Entry },
|
|
||||||
Pop,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct FilesIter {
|
|
||||||
iter: Iter,
|
|
||||||
path: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct FilesIterItem {
|
|
||||||
pub entry_id: u64,
|
|
||||||
pub path: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Iterator for FilesIter {
|
|
||||||
type Item = FilesIterItem;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
loop {
|
|
||||||
match self.iter.next() {
|
|
||||||
Some(Traversal::Push {
|
|
||||||
entry_id, entry, ..
|
|
||||||
}) => match entry {
|
|
||||||
Entry::Dir { name, .. } => {
|
|
||||||
self.path.push(name);
|
|
||||||
}
|
|
||||||
Entry::File { name, .. } => {
|
|
||||||
self.path.push(name);
|
|
||||||
return Some(FilesIterItem {
|
|
||||||
entry_id,
|
|
||||||
path: self.path.clone(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Some(Traversal::Pop) => {
|
|
||||||
self.path.pop();
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
trait UnwrapIgnoreTuple {
|
|
||||||
fn unwrap(self) -> Ignore;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnwrapIgnoreTuple for (Ignore, Option<ignore::Error>) {
|
|
||||||
fn unwrap(self) -> Ignore {
|
|
||||||
if let Some(error) = self.1 {
|
|
||||||
log::error!("error loading gitignore data: {}", error);
|
|
||||||
}
|
|
||||||
self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn match_paths(
|
|
||||||
trees: &[Worktree],
|
|
||||||
query: &str,
|
|
||||||
include_ignored: bool,
|
|
||||||
smart_case: bool,
|
|
||||||
max_results: usize,
|
|
||||||
pool: scoped_pool::Pool,
|
|
||||||
) -> Vec<PathMatch> {
|
|
||||||
let tree_states = trees.iter().map(|tree| tree.0.read()).collect::<Vec<_>>();
|
|
||||||
fuzzy::match_paths(
|
|
||||||
&tree_states
|
|
||||||
.iter()
|
|
||||||
.map(|tree| {
|
|
||||||
let skip_prefix = if trees.len() == 1 {
|
|
||||||
if let Some(Entry::Dir { name, .. }) = tree.root_entry() {
|
|
||||||
let name = name.to_string_lossy();
|
|
||||||
if name == "/" {
|
|
||||||
1
|
|
||||||
} else {
|
|
||||||
name.chars().count() + 1
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
0
|
|
||||||
};
|
|
||||||
|
|
||||||
(tree.id, skip_prefix, &tree.file_paths[..])
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>()[..],
|
|
||||||
query,
|
|
||||||
include_ignored,
|
|
||||||
smart_case,
|
|
||||||
max_results,
|
|
||||||
pool,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
use crate::editor::Buffer;
|
|
||||||
use crate::test::*;
|
|
||||||
use anyhow::Result;
|
|
||||||
use gpui::App;
|
|
||||||
use serde_json::json;
|
|
||||||
use std::os::unix;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_populate_and_search() {
|
|
||||||
App::test_async((), |mut app| async move {
|
|
||||||
let dir = temp_tree(json!({
|
|
||||||
"root": {
|
|
||||||
"apple": "",
|
|
||||||
"banana": {
|
|
||||||
"carrot": {
|
|
||||||
"date": "",
|
|
||||||
"endive": "",
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"fennel": {
|
|
||||||
"grape": "",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
let root_link_path = dir.path().join("root_link");
|
|
||||||
unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
|
|
||||||
|
|
||||||
let tree = app.add_model(|ctx| Worktree::new(1, root_link_path, ctx));
|
|
||||||
app.finish_pending_tasks().await;
|
|
||||||
|
|
||||||
app.read(|ctx| {
|
|
||||||
let tree = tree.read(ctx);
|
|
||||||
assert_eq!(tree.file_count(), 4);
|
|
||||||
let results = match_paths(
|
|
||||||
&[tree.clone()],
|
|
||||||
"bna",
|
|
||||||
false,
|
|
||||||
false,
|
|
||||||
10,
|
|
||||||
ctx.thread_pool().clone(),
|
|
||||||
)
|
|
||||||
.iter()
|
|
||||||
.map(|result| tree.entry_path(result.entry_id))
|
|
||||||
.collect::<Result<Vec<PathBuf>, _>>()
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
results,
|
|
||||||
vec![
|
|
||||||
PathBuf::from("root_link/banana/carrot/date"),
|
|
||||||
PathBuf::from("root_link/banana/carrot/endive"),
|
|
||||||
]
|
|
||||||
);
|
|
||||||
})
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_save_file() {
|
|
||||||
App::test_async((), |mut app| async move {
|
|
||||||
let dir = temp_tree(json!({
|
|
||||||
"file1": "the old contents",
|
|
||||||
}));
|
|
||||||
|
|
||||||
let tree = app.add_model(|ctx| Worktree::new(1, dir.path(), ctx));
|
|
||||||
app.finish_pending_tasks().await;
|
|
||||||
|
|
||||||
let buffer = Buffer::new(1, "a line of text.\n".repeat(10 * 1024));
|
|
||||||
|
|
||||||
let entry = app.read(|ctx| {
|
|
||||||
let entry = tree.read(ctx).files().next().unwrap();
|
|
||||||
assert_eq!(entry.path.file_name().unwrap(), "file1");
|
|
||||||
entry
|
|
||||||
});
|
|
||||||
let file_id = entry.entry_id;
|
|
||||||
|
|
||||||
tree.update(&mut app, |tree, ctx| {
|
|
||||||
smol::block_on(tree.save(file_id, buffer.snapshot(), ctx.as_ref())).unwrap()
|
|
||||||
});
|
|
||||||
|
|
||||||
let history = app
|
|
||||||
.read(|ctx| tree.read(ctx).load_history(file_id))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(history.base_text.as_ref(), buffer.text());
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_rescan() {
|
|
||||||
App::test_async((), |mut app| async move {
|
|
||||||
let dir = temp_tree(json!({
|
|
||||||
"dir1": {
|
|
||||||
"file": "contents"
|
|
||||||
},
|
|
||||||
"dir2": {
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
let tree = app.add_model(|ctx| Worktree::new(1, dir.path(), ctx));
|
|
||||||
app.finish_pending_tasks().await;
|
|
||||||
|
|
||||||
let file_entry = app.read(|ctx| tree.read(ctx).entry_for_path("dir1/file").unwrap());
|
|
||||||
|
|
||||||
app.read(|ctx| {
|
|
||||||
let tree = tree.read(ctx);
|
|
||||||
assert_eq!(
|
|
||||||
tree.abs_entry_path(file_entry).unwrap(),
|
|
||||||
tree.path().join("dir1/file")
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
std::fs::rename(dir.path().join("dir1/file"), dir.path().join("dir2/file")).unwrap();
|
|
||||||
|
|
||||||
assert_condition(1, 300, || {
|
|
||||||
app.read(|ctx| {
|
|
||||||
let tree = tree.read(ctx);
|
|
||||||
tree.abs_entry_path(file_entry).unwrap() == tree.path().join("dir2/file")
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|