Include ignored files in fuzzy search when root entry is ignored
parent 32c6ae3188
commit 56f9c7bc1b
3 changed files with 74 additions and 99 deletions
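Before this change the file finder always searched with include_ignored set to false, so a worktree whose root entry is itself gitignored yielded no fuzzy-search results at all. The diffs below turn that flag into a per-worktree decision. Here is a minimal, self-contained sketch of that decision; Worktree and Entry are simplified stand-ins for the real worktree types, kept only so the snippet compiles on its own.

    // Simplified stand-ins; the real Worktree/Entry types live in the
    // worktree/project crates and carry much more state.
    struct Entry {
        is_ignored: bool,
    }

    struct Worktree {
        root: Option<Entry>,
    }

    impl Worktree {
        fn root_entry(&self) -> Option<&Entry> {
            self.root.as_ref()
        }
    }

    // Mirrors the flag computed per worktree in FileFinder::spawn_search below:
    // include gitignored files only when the worktree root itself is ignored,
    // because then every file in the tree is ignored and the search would
    // otherwise return nothing.
    fn include_ignored(worktree: &Worktree) -> bool {
        worktree
            .root_entry()
            .map_or(false, |entry| entry.is_ignored)
    }

    fn main() {
        let ignored_root = Worktree { root: Some(Entry { is_ignored: true }) };
        let normal_root = Worktree { root: Some(Entry { is_ignored: false }) };
        assert!(include_ignored(&ignored_root));
        assert!(!include_ignored(&normal_root));
    }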
Changed file 1 of 3 — the file finder (impl FileFinder):

@@ -4,7 +4,7 @@ use gpui::{
     RenderContext, Task, View, ViewContext, ViewHandle,
 };
 use picker::{Picker, PickerDelegate};
-use project::{Project, ProjectPath, WorktreeId};
+use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
 use settings::Settings;
 use std::{
     path::Path,
@@ -134,17 +134,40 @@ impl FileFinder {
     }
 
     fn spawn_search(&mut self, query: String, cx: &mut ViewContext<Self>) -> Task<()> {
+        let worktrees = self
+            .project
+            .read(cx)
+            .visible_worktrees(cx)
+            .collect::<Vec<_>>();
+        let include_root_name = worktrees.len() > 1;
+        let candidate_sets = worktrees
+            .into_iter()
+            .map(|worktree| {
+                let worktree = worktree.read(cx);
+                PathMatchCandidateSet {
+                    snapshot: worktree.snapshot(),
+                    include_ignored: worktree
+                        .root_entry()
+                        .map_or(false, |entry| entry.is_ignored),
+                    include_root_name,
+                }
+            })
+            .collect::<Vec<_>>();
+
         let search_id = util::post_inc(&mut self.search_count);
         self.cancel_flag.store(true, atomic::Ordering::Relaxed);
         self.cancel_flag = Arc::new(AtomicBool::new(false));
         let cancel_flag = self.cancel_flag.clone();
-        let project = self.project.clone();
         cx.spawn(|this, mut cx| async move {
-            let matches = project
-                .read_with(&cx, |project, cx| {
-                    project.match_paths(&query, false, false, 100, cancel_flag.as_ref(), cx)
-                })
-                .await;
+            let matches = fuzzy::match_paths(
+                candidate_sets.as_slice(),
+                &query,
+                false,
+                100,
+                &cancel_flag,
+                cx.background(),
+            )
+            .await;
             let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed);
             this.update(&mut cx, |this, cx| {
                 this.set_matches(search_id, did_cancel, query, matches, cx)
@@ -475,4 +498,34 @@ mod tests {
             assert_eq!(f.selected_index(), 0);
         });
     }
+
+    #[gpui::test]
+    async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
+        let app_state = cx.update(AppState::test);
+        app_state
+            .fs
+            .as_fake()
+            .insert_tree(
+                "/root",
+                json!({
+                    "dir1": {},
+                    "dir2": {
+                        "dir3": {}
+                    }
+                }),
+            )
+            .await;
+
+        let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+        let (_, workspace) = cx.add_window(|cx| Workspace::new(project, cx));
+        let (_, finder) =
+            cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
+        finder
+            .update(cx, |f, cx| f.spawn_search("dir".into(), cx))
+            .await;
+        cx.read(|cx| {
+            let finder = finder.read(cx);
+            assert_eq!(finder.matches.len(), 0);
+        });
+    }
 }
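Design note: the finder now reads the project's visible worktrees itself, builds one PathMatchCandidateSet per worktree, and calls fuzzy::match_paths directly on the background executor, which is what lets Project::match_paths be deleted in the next file. Below is an annotated reading of the new call site; it is a sketch, not compilable on its own, and the argument meanings are inferred from the deleted Project::match_paths signature further down, not from fuzzy's own documentation.

    // Argument-by-argument reading of the call in spawn_search (inferred):
    let matches = fuzzy::match_paths(
        candidate_sets.as_slice(), // one PathMatchCandidateSet per visible worktree
        &query,                    // the query typed into the finder
        false,                     // smart_case (kept off, as before)
        100,                       // max_results
        &cancel_flag,              // shared AtomicBool; set when a newer search starts
        cx.background(),           // run the matching on the background executor
    )
    .await;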
Changed file 2 of 3 — the project crate (impl Project and the candidate-set types):

@@ -13,7 +13,6 @@ use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
 use clock::ReplicaId;
 use collections::{hash_map, BTreeMap, HashMap, HashSet};
 use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt};
-use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
 use gpui::{
     AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
     MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
@@ -58,7 +57,7 @@ use std::{
     rc::Rc,
     str,
     sync::{
-        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
+        atomic::{AtomicUsize, Ordering::SeqCst},
         Arc,
     },
     time::Instant,
@@ -5678,43 +5677,6 @@ impl Project {
         })
     }
 
-    pub fn match_paths<'a>(
-        &self,
-        query: &'a str,
-        include_ignored: bool,
-        smart_case: bool,
-        max_results: usize,
-        cancel_flag: &'a AtomicBool,
-        cx: &AppContext,
-    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
-        let worktrees = self
-            .worktrees(cx)
-            .filter(|worktree| worktree.read(cx).is_visible())
-            .collect::<Vec<_>>();
-        let include_root_name = worktrees.len() > 1;
-        let candidate_sets = worktrees
-            .into_iter()
-            .map(|worktree| CandidateSet {
-                snapshot: worktree.read(cx).snapshot(),
-                include_ignored,
-                include_root_name,
-            })
-            .collect::<Vec<_>>();
-
-        let background = cx.background().clone();
-        async move {
-            fuzzy::match_paths(
-                candidate_sets.as_slice(),
-                query,
-                smart_case,
-                max_results,
-                cancel_flag,
-                background,
-            )
-            .await
-        }
-    }
-
     fn edits_from_lsp(
         &mut self,
         buffer: &ModelHandle<Buffer>,
@@ -5942,14 +5904,14 @@ impl OpenBuffer {
     }
 }
 
-struct CandidateSet {
-    snapshot: Snapshot,
-    include_ignored: bool,
-    include_root_name: bool,
+pub struct PathMatchCandidateSet {
+    pub snapshot: Snapshot,
+    pub include_ignored: bool,
+    pub include_root_name: bool,
 }
 
-impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
-    type Candidates = CandidateSetIter<'a>;
+impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
+    type Candidates = PathMatchCandidateSetIter<'a>;
 
     fn id(&self) -> usize {
         self.snapshot.id().to_usize()
@@ -5974,23 +5936,23 @@ impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
     }
 
     fn candidates(&'a self, start: usize) -> Self::Candidates {
-        CandidateSetIter {
+        PathMatchCandidateSetIter {
            traversal: self.snapshot.files(self.include_ignored, start),
        }
    }
 }
 
-struct CandidateSetIter<'a> {
+pub struct PathMatchCandidateSetIter<'a> {
     traversal: Traversal<'a>,
 }
 
-impl<'a> Iterator for CandidateSetIter<'a> {
-    type Item = PathMatchCandidate<'a>;
+impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
+    type Item = fuzzy::PathMatchCandidate<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         self.traversal.next().map(|entry| {
             if let EntryKind::File(char_bag) = entry.kind {
-                PathMatchCandidate {
+                fuzzy::PathMatchCandidate {
                    path: &entry.path,
                    char_bag,
                }
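PathMatchCandidateSet is now a public type of the project crate (with public fields) that implements the fuzzy::PathMatchCandidateSet trait; its candidates(start) method walks snapshot.files(self.include_ignored, start), so the per-set flag is what actually decides whether gitignored entries ever reach the matcher. The sketch below illustrates only that gating, using made-up stand-in types; it is not the real Snapshot/Traversal API.

    // Stand-in types for illustration; the real set delegates to
    // Snapshot::files(include_ignored, start), which yields a Traversal.
    struct FakeEntry {
        path: &'static str,
        is_ignored: bool,
    }

    struct FakeCandidateSet {
        entries: Vec<FakeEntry>,
        include_ignored: bool,
    }

    impl FakeCandidateSet {
        // Analogue of candidates(start): skip gitignored entries unless the
        // set was built with include_ignored = true.
        fn candidates(&self, start: usize) -> impl Iterator<Item = &FakeEntry> {
            let include_ignored = self.include_ignored;
            self.entries
                .iter()
                .filter(move |entry| include_ignored || !entry.is_ignored)
                .skip(start)
        }
    }

    fn main() {
        let entries = vec![
            FakeEntry { path: "src/main.rs", is_ignored: false },
            FakeEntry { path: "target/debug/app", is_ignored: true },
        ];

        let strict = FakeCandidateSet { entries, include_ignored: false };
        assert_eq!(strict.candidates(0).count(), 1);

        let lenient = FakeCandidateSet { include_ignored: true, ..strict };
        assert_eq!(lenient.candidates(0).count(), 2);
        assert_eq!(lenient.candidates(0).last().unwrap().path, "target/debug/app");
    }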
Changed file 3 of 3 — the project crate's tests:

@@ -8,12 +8,12 @@ use language::{
 };
 use lsp::Url;
 use serde_json::json;
-use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
+use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
 use unindent::Unindent as _;
 use util::{assert_set_eq, test::temp_tree};
 
 #[gpui::test]
-async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
+async fn test_symlinks(cx: &mut gpui::TestAppContext) {
     let dir = temp_tree(json!({
         "root": {
             "apple": "",
@@ -38,7 +38,6 @@ async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
     .unwrap();
 
     let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
-
     project.read_with(cx, |project, cx| {
         let tree = project.worktrees(cx).next().unwrap().read(cx);
         assert_eq!(tree.file_count(), 5);
@@ -47,23 +46,6 @@ async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
             tree.inode_for_path("finnochio/grape")
         );
     });
-
-    let cancel_flag = Default::default();
-    let results = project
-        .read_with(cx, |project, cx| {
-            project.match_paths("bna", false, false, 10, &cancel_flag, cx)
-        })
-        .await;
-    assert_eq!(
-        results
-            .into_iter()
-            .map(|result| result.path)
-            .collect::<Vec<Arc<Path>>>(),
-        vec![
-            PathBuf::from("banana/carrot/date").into(),
-            PathBuf::from("banana/carrot/endive").into(),
-        ]
-    );
 }
 
 #[gpui::test]
@@ -1645,28 +1627,6 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
     chunks
 }
 
-#[gpui::test]
-async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
-    let dir = temp_tree(json!({
-        "root": {
-            "dir1": {},
-            "dir2": {
-                "dir3": {}
-            }
-        }
-    }));
-
-    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
-    let cancel_flag = Default::default();
-    let results = project
-        .read_with(cx, |project, cx| {
-            project.match_paths("dir", false, false, 10, &cancel_flag, cx)
-        })
-        .await;
-
-    assert!(results.is_empty());
-}
-
 #[gpui::test(iterations = 10)]
 async fn test_definition(cx: &mut gpui::TestAppContext) {
     let mut language = Language::new(