Allow excluding files from worktrees (#3356)

* Part of https://github.com/zed-industries/community/issues/70

Allows fully removing certain files or file groups from Zed: no matching items
will be scanned or added into worktrees, so nothing will be shown in the
project tree, project search and "go to file" will not see them, and the
corresponding FS events will be ignored.

One exception is .git files: those are still not shown or accessible by
default, yet they remain tracked in the worktrees.

By default, this is configured as:
```json
  "file_scan_exclusions": [
    "**/.git",
    "**/.svn",
    "**/.hg",
    "**/CVS",
    "**/.DS_Store",
    "**/Thumbs.db",
    "**/.classpath",
    "**/.settings"
  ],
```

* In addition, contains code preparations for "search in included files"
feature: new SearchOptions variant, search crate and RPC adjustments

Release Notes:

- Added a `file_scan_exclusions` section to project settings to completely
ignore certain files in Zed.
This commit is contained in:
Kirill Bulatov 2023-11-17 23:06:47 +02:00 committed by GitHub
commit 12b59daa1e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 3422 additions and 2417 deletions

View file

@ -268,6 +268,19 @@
// Whether to show warnings or not by default. // Whether to show warnings or not by default.
"include_warnings": true "include_warnings": true
}, },
// Add files or globs of files that will be excluded by Zed entirely:
// they will be skipped during FS scan(s), file tree and file search
// will lack the corresponding file entries.
"file_scan_exclusions": [
"**/.git",
"**/.svn",
"**/.hg",
"**/CVS",
"**/.DS_Store",
"**/Thumbs.db",
"**/.classpath",
"**/.settings"
],
// Git gutter behavior configuration. // Git gutter behavior configuration.
"git": { "git": {
// Control whether the git gutter is shown. May take 2 values: // Control whether the git gutter is shown. May take 2 values:

View file

@ -5052,7 +5052,7 @@ async fn test_project_search(
let mut results = HashMap::default(); let mut results = HashMap::default();
let mut search_rx = project_b.update(cx_b, |project, cx| { let mut search_rx = project_b.update(cx_b, |project, cx| {
project.search( project.search(
SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
cx, cx,
) )
}); });

View file

@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let mut search = project.update(cx, |project, cx| { let mut search = project.update(cx, |project, cx| {
project.search( project.search(
SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
.unwrap(),
cx, cx,
) )
}); });

View file

@ -4599,7 +4599,7 @@ async fn test_project_search(
let mut results = HashMap::default(); let mut results = HashMap::default();
let mut search_rx = project_b.update(cx_b, |project, cx| { let mut search_rx = project_b.update(cx_b, |project, cx| {
project.search( project.search(
SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
cx, cx,
) )
}); });

View file

@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let mut search = project.update(cx, |project, cx| { let mut search = project.update(cx, |project, cx| {
project.search( project.search(
SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
.unwrap(),
cx, cx,
) )
}); });

View file

@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
lazy_static! { lazy_static! {
static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( static ref MENTIONS_SEARCH: SearchQuery =
"@[-_\\w]+", SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
false,
false,
Default::default(),
Default::default()
)
.unwrap();
} }
pub struct MessageEditor { pub struct MessageEditor {

View file

@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
lazy_static! { lazy_static! {
static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( static ref MENTIONS_SEARCH: SearchQuery =
"@[-_\\w]+", SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
false,
false,
Default::default(),
Default::default()
)
.unwrap();
} }
pub struct MessageEditor { pub struct MessageEditor {

View file

@ -20,10 +20,6 @@ impl IgnoreStack {
Arc::new(Self::All) Arc::new(Self::All)
} }
pub fn is_all(&self) -> bool {
matches!(self, IgnoreStack::All)
}
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> { pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
match self.as_ref() { match self.as_ref() {
IgnoreStack::All => self, IgnoreStack::All => self,

View file

@ -5548,7 +5548,16 @@ impl Project {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let background = cx.background().clone(); let background = cx.background().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); let path_count: usize = snapshots
.iter()
.map(|s| {
if query.include_ignored() {
s.file_count()
} else {
s.visible_file_count()
}
})
.sum();
if path_count == 0 { if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024); let (_, rx) = smol::channel::bounded(1024);
return rx; return rx;
@ -5561,8 +5570,16 @@ impl Project {
.iter() .iter()
.filter_map(|(_, b)| { .filter_map(|(_, b)| {
let buffer = b.upgrade(cx)?; let buffer = b.upgrade(cx)?;
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
if let Some(path) = snapshot.file().map(|file| file.path()) { let is_ignored = buffer
.project_path(cx)
.and_then(|path| self.entry_for_path(&path, cx))
.map_or(false, |entry| entry.is_ignored);
(is_ignored, buffer.snapshot())
});
if is_ignored && !query.include_ignored() {
return None;
} else if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot))) Some((path.clone(), (buffer, snapshot)))
} else { } else {
unnamed_files.push(buffer); unnamed_files.push(buffer);
@ -5735,7 +5752,12 @@ impl Project {
let mut snapshot_start_ix = 0; let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new(); let mut abs_path = PathBuf::new();
for snapshot in snapshots { for snapshot in snapshots {
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); let snapshot_end_ix = snapshot_start_ix
+ if query.include_ignored() {
snapshot.file_count()
} else {
snapshot.visible_file_count()
};
if worker_end_ix <= snapshot_start_ix { if worker_end_ix <= snapshot_start_ix {
break; break;
} else if worker_start_ix > snapshot_end_ix { } else if worker_start_ix > snapshot_end_ix {
@ -5748,7 +5770,7 @@ impl Project {
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot for entry in snapshot
.files(false, start_in_snapshot) .files(query.include_ignored(), start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot) .take(end_in_snapshot - start_in_snapshot)
{ {
if matching_paths_tx.is_closed() { if matching_paths_tx.is_closed() {

View file

@ -10,6 +10,8 @@ pub struct ProjectSettings {
pub lsp: HashMap<Arc<str>, LspSettings>, pub lsp: HashMap<Arc<str>, LspSettings>,
#[serde(default)] #[serde(default)]
pub git: GitSettings, pub git: GitSettings,
#[serde(default)]
pub file_scan_exclusions: Option<Vec<String>>,
} }
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]

View file

@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!( assert_eq!(
search( search(
&project, &project,
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx cx
) )
.await .await
@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!( assert_eq!(
search( search(
&project, &project,
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx cx
) )
.await .await
@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
Vec::new() Vec::new()
) )
@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.rs").unwrap()], vec![PathMatcher::new("*.rs").unwrap()],
Vec::new() Vec::new()
) )
@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(), PathMatcher::new("*.odd").unwrap(),
@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
) )
@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![PathMatcher::new("*.rs").unwrap()], vec![PathMatcher::new("*.rs").unwrap()],
) )
@ -3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![ vec![
PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.rs").unwrap(),
@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
) )
@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()],
).unwrap(), ).unwrap(),
@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap() PathMatcher::new("*.odd").unwrap()
@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap() PathMatcher::new("*.odd").unwrap()

View file

@ -39,6 +39,7 @@ pub enum SearchQuery {
replacement: Option<String>, replacement: Option<String>,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
inner: SearchInputs, inner: SearchInputs,
}, },
@ -48,6 +49,7 @@ pub enum SearchQuery {
multiline: bool, multiline: bool,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
inner: SearchInputs, inner: SearchInputs,
}, },
} }
@ -57,6 +59,7 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
files_to_include: Vec<PathMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
@ -74,6 +77,7 @@ impl SearchQuery {
replacement: None, replacement: None,
whole_word, whole_word,
case_sensitive, case_sensitive,
include_ignored,
inner, inner,
}) })
} }
@ -82,6 +86,7 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
files_to_include: Vec<PathMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
@ -111,6 +116,7 @@ impl SearchQuery {
multiline, multiline,
whole_word, whole_word,
case_sensitive, case_sensitive,
include_ignored,
inner, inner,
}) })
} }
@ -121,6 +127,7 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
@ -129,6 +136,7 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
@ -156,6 +164,7 @@ impl SearchQuery {
regex: self.is_regex(), regex: self.is_regex(),
whole_word: self.whole_word(), whole_word: self.whole_word(),
case_sensitive: self.case_sensitive(), case_sensitive: self.case_sensitive(),
include_ignored: self.include_ignored(),
files_to_include: self files_to_include: self
.files_to_include() .files_to_include()
.iter() .iter()
@ -336,6 +345,17 @@ impl SearchQuery {
} }
} }
pub fn include_ignored(&self) -> bool {
match self {
Self::Text {
include_ignored, ..
} => *include_ignored,
Self::Regex {
include_ignored, ..
} => *include_ignored,
}
}
pub fn is_regex(&self) -> bool { pub fn is_regex(&self) -> bool {
matches!(self, Self::Regex { .. }) matches!(self, Self::Regex { .. })
} }

View file

@ -1,5 +1,6 @@
use crate::{ use crate::{
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
ProjectEntryId, RemoveOptions,
}; };
use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
@ -21,7 +22,10 @@ use futures::{
}; };
use fuzzy::CharBag; use fuzzy::CharBag;
use git::{DOT_GIT, GITIGNORE}; use git::{DOT_GIT, GITIGNORE};
use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; use gpui::{
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
};
use itertools::Itertools;
use language::{ use language::{
proto::{ proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@ -36,6 +40,7 @@ use postage::{
prelude::{Sink as _, Stream as _}, prelude::{Sink as _, Stream as _},
watch, watch,
}; };
use settings::SettingsStore;
use smol::channel::{self, Sender}; use smol::channel::{self, Sender};
use std::{ use std::{
any::Any, any::Any,
@ -55,7 +60,10 @@ use std::{
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use util::{paths::HOME, ResultExt}; use util::{
paths::{PathMatcher, HOME},
ResultExt,
};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize); pub struct WorktreeId(usize);
@ -70,7 +78,8 @@ pub struct LocalWorktree {
scan_requests_tx: channel::Sender<ScanRequest>, scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>, path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>), is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_task: Task<()>, _settings_subscription: Subscription,
_background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>, share: Option<ShareState>,
diagnostics: HashMap< diagnostics: HashMap<
Arc<Path>, Arc<Path>,
@ -216,6 +225,7 @@ pub struct LocalSnapshot {
/// All of the git repositories in the worktree, indexed by the project entry /// All of the git repositories in the worktree, indexed by the project entry
/// id of their parent directory. /// id of their parent directory.
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>, git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
file_scan_exclusions: Vec<PathMatcher>,
} }
struct BackgroundScannerState { struct BackgroundScannerState {
@ -299,17 +309,54 @@ impl Worktree {
.await .await
.context("failed to stat worktree path")?; .context("failed to stat worktree path")?;
let closure_fs = Arc::clone(&fs);
let closure_next_entry_id = Arc::clone(&next_entry_id);
let closure_abs_path = abs_path.to_path_buf();
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| { Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
if let Self::Local(this) = this {
let new_file_scan_exclusions =
file_scan_exclusions(settings::get::<ProjectSettings>(cx));
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
log::info!(
"Re-scanning directories, new scan exclude files: {:?}",
this.snapshot
.file_scan_exclusions
.iter()
.map(ToString::to_string)
.collect::<Vec<_>>()
);
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
channel::unbounded();
this.scan_requests_tx = scan_requests_tx;
this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
this._background_scanner_tasks = start_background_scan_tasks(
&closure_abs_path,
this.snapshot(),
scan_requests_rx,
path_prefixes_to_scan_rx,
Arc::clone(&closure_next_entry_id),
Arc::clone(&closure_fs),
cx,
);
this.is_scanning = watch::channel_with(true);
}
}
});
let root_name = abs_path let root_name = abs_path
.file_name() .file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string()); .map_or(String::new(), |f| f.to_string_lossy().to_string());
let mut snapshot = LocalSnapshot { let mut snapshot = LocalSnapshot {
file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
ignores_by_parent_abs_path: Default::default(), ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(), git_repositories: Default::default(),
snapshot: Snapshot { snapshot: Snapshot {
id: WorktreeId::from_usize(cx.model_id()), id: WorktreeId::from_usize(cx.model_id()),
abs_path: abs_path.clone(), abs_path: abs_path.to_path_buf().into(),
root_name: root_name.clone(), root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(), entries_by_path: Default::default(),
@ -334,60 +381,23 @@ impl Worktree {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let task_snapshot = snapshot.clone();
cx.spawn_weak(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap();
match state {
ScanState::Started => {
*this.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
this.set_snapshot(snapshot, changes, cx);
drop(barrier);
}
}
cx.notify();
});
}
})
.detach();
let background_scanner_task = cx.background().spawn({
let fs = fs.clone();
let snapshot = snapshot.clone();
let background = cx.background().clone();
async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(
snapshot,
next_entry_id,
fs,
scan_states_tx,
background,
scan_requests_rx,
path_prefixes_to_scan_rx,
)
.run(events)
.await;
}
});
Worktree::Local(LocalWorktree { Worktree::Local(LocalWorktree {
snapshot, snapshot,
is_scanning: watch::channel_with(true), is_scanning: watch::channel_with(true),
share: None, share: None,
scan_requests_tx, scan_requests_tx,
path_prefixes_to_scan_tx, path_prefixes_to_scan_tx,
_background_scanner_task: background_scanner_task, _settings_subscription: settings_subscription,
_background_scanner_tasks: start_background_scan_tasks(
&abs_path,
task_snapshot,
scan_requests_rx,
path_prefixes_to_scan_rx,
Arc::clone(&next_entry_id),
Arc::clone(&fs),
cx,
),
diagnostics: Default::default(), diagnostics: Default::default(),
diagnostic_summaries: Default::default(), diagnostic_summaries: Default::default(),
client, client,
@ -584,6 +594,76 @@ impl Worktree {
} }
} }
fn start_background_scan_tasks(
abs_path: &Path,
snapshot: LocalSnapshot,
scan_requests_rx: channel::Receiver<ScanRequest>,
path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
next_entry_id: Arc<AtomicUsize>,
fs: Arc<dyn Fs>,
cx: &mut ModelContext<'_, Worktree>,
) -> Vec<Task<()>> {
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
let background_scanner = cx.background().spawn({
let abs_path = abs_path.to_path_buf();
let background = cx.background().clone();
async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(
snapshot,
next_entry_id,
fs,
scan_states_tx,
background,
scan_requests_rx,
path_prefixes_to_scan_rx,
)
.run(events)
.await;
}
});
let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap();
match state {
ScanState::Started => {
*this.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
this.set_snapshot(snapshot, changes, cx);
drop(barrier);
}
}
cx.notify();
});
}
});
vec![background_scanner, scan_state_updater]
}
fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
.sorted()
.filter_map(|pattern| {
PathMatcher::new(pattern)
.map(Some)
.unwrap_or_else(|e| {
log::error!(
"Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
);
None
})
})
.collect()
}
impl LocalWorktree { impl LocalWorktree {
pub fn contains_abs_path(&self, path: &Path) -> bool { pub fn contains_abs_path(&self, path: &Path) -> bool {
path.starts_with(&self.abs_path) path.starts_with(&self.abs_path)
@ -1481,7 +1561,7 @@ impl Snapshot {
self.entries_by_id.get(&entry_id, &()).is_some() self.entries_by_id.get(&entry_id, &()).is_some()
} }
pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> { fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, entry))?; let entry = Entry::try_from((&self.root_char_bag, entry))?;
let old_entry = self.entries_by_id.insert_or_replace( let old_entry = self.entries_by_id.insert_or_replace(
PathEntry { PathEntry {
@ -2145,6 +2225,12 @@ impl LocalSnapshot {
paths.sort_by(|a, b| a.0.cmp(b.0)); paths.sort_by(|a, b| a.0.cmp(b.0));
paths paths
} }
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
}
} }
impl BackgroundScannerState { impl BackgroundScannerState {
@ -2167,7 +2253,7 @@ impl BackgroundScannerState {
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
let mut containing_repository = None; let mut containing_repository = None;
if !ignore_stack.is_all() { if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
containing_repository = Some(( containing_repository = Some((
@ -2378,18 +2464,30 @@ impl BackgroundScannerState {
// Remove any git repositories whose .git entry no longer exists. // Remove any git repositories whose .git entry no longer exists.
let snapshot = &mut self.snapshot; let snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories); let mut ids_to_preserve = HashSet::default();
let mut repository_entries = mem::take(&mut snapshot.repository_entries); for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
repositories.retain(|work_directory_id, _| { let exists_in_snapshot = snapshot
snapshot .entry_for_id(work_directory_id)
.entry_for_id(*work_directory_id)
.map_or(false, |entry| { .map_or(false, |entry| {
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
})
}); });
repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); if exists_in_snapshot {
snapshot.git_repositories = repositories; ids_to_preserve.insert(work_directory_id);
snapshot.repository_entries = repository_entries; } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
if snapshot.is_abs_path_excluded(&git_dir_abs_path)
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
ids_to_preserve.insert(work_directory_id);
}
}
}
snapshot
.git_repositories
.retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
snapshot
.repository_entries
.retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
} }
fn build_git_repository( fn build_git_repository(
@ -3094,7 +3192,7 @@ impl BackgroundScanner {
let ignore_stack = state let ignore_stack = state
.snapshot .snapshot
.ignore_stack_for_abs_path(&root_abs_path, true); .ignore_stack_for_abs_path(&root_abs_path, true);
if ignore_stack.is_all() { if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
root_entry.is_ignored = true; root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref()); state.insert_entry(root_entry.clone(), self.fs.as_ref());
} }
@ -3231,6 +3329,7 @@ impl BackgroundScanner {
return false; return false;
}; };
if !is_git_related(&abs_path) {
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
snapshot snapshot
.entry_for_path(parent) .entry_for_path(parent)
@ -3240,6 +3339,13 @@ impl BackgroundScanner {
log::debug!("ignoring event {relative_path:?} within unloaded directory"); log::debug!("ignoring event {relative_path:?} within unloaded directory");
return false; return false;
} }
if snapshot.is_abs_path_excluded(abs_path) {
log::debug!(
"ignoring FS event for path {relative_path:?} within excluded directory"
);
return false;
}
}
relative_paths.push(relative_path); relative_paths.push(relative_path);
true true
@ -3401,18 +3507,26 @@ impl BackgroundScanner {
} }
async fn scan_dir(&self, job: &ScanJob) -> Result<()> { async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
log::debug!("scan directory {:?}", job.path); let root_abs_path;
let mut ignore_stack;
let mut ignore_stack = job.ignore_stack.clone(); let mut new_ignore;
let mut new_ignore = None; let root_char_bag;
let (root_abs_path, root_char_bag, next_entry_id) = { let next_entry_id;
let snapshot = &self.state.lock().snapshot; {
( let state = self.state.lock();
snapshot.abs_path().clone(), let snapshot = &state.snapshot;
snapshot.root_char_bag, root_abs_path = snapshot.abs_path().clone();
self.next_entry_id.clone(), if snapshot.is_abs_path_excluded(&job.abs_path) {
) log::error!("skipping excluded directory {:?}", job.path);
}; return Ok(());
}
log::debug!("scanning directory {:?}", job.path);
ignore_stack = job.ignore_stack.clone();
new_ignore = None;
root_char_bag = snapshot.root_char_bag;
next_entry_id = self.next_entry_id.clone();
drop(state);
}
let mut dotgit_path = None; let mut dotgit_path = None;
let mut root_canonical_path = None; let mut root_canonical_path = None;
@ -3427,18 +3541,8 @@ impl BackgroundScanner {
continue; continue;
} }
}; };
let child_name = child_abs_path.file_name().unwrap(); let child_name = child_abs_path.file_name().unwrap();
let child_path: Arc<Path> = job.path.join(child_name).into(); let child_path: Arc<Path> = job.path.join(child_name).into();
let child_metadata = match self.fs.metadata(&child_abs_path).await {
Ok(Some(metadata)) => metadata,
Ok(None) => continue,
Err(err) => {
log::error!("error processing {:?}: {:?}", child_abs_path, err);
continue;
}
};
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
if child_name == *GITIGNORE { if child_name == *GITIGNORE {
match build_gitignore(&child_abs_path, self.fs.as_ref()).await { match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@ -3482,6 +3586,26 @@ impl BackgroundScanner {
dotgit_path = Some(child_path.clone()); dotgit_path = Some(child_path.clone());
} }
{
let mut state = self.state.lock();
if state.snapshot.is_abs_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name);
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
}
drop(state);
}
let child_metadata = match self.fs.metadata(&child_abs_path).await {
Ok(Some(metadata)) => metadata,
Ok(None) => continue,
Err(err) => {
log::error!("error processing {child_abs_path:?}: {err:?}");
continue;
}
};
let mut child_entry = Entry::new( let mut child_entry = Entry::new(
child_path.clone(), child_path.clone(),
&child_metadata, &child_metadata,
@ -3662,14 +3786,12 @@ impl BackgroundScanner {
self.next_entry_id.as_ref(), self.next_entry_id.as_ref(),
state.snapshot.root_char_bag, state.snapshot.root_char_bag,
); );
fs_entry.is_ignored = ignore_stack.is_all(); let is_dir = fs_entry.is_dir();
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
if !fs_entry.is_ignored { if !is_dir && !fs_entry.is_ignored {
if !fs_entry.is_dir() { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
if let Some((work_dir, repo)) =
state.snapshot.local_repo_for_path(&path)
{
if let Ok(repo_path) = path.strip_prefix(work_dir.0) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
let repo_path = RepoPath(repo_path.into()); let repo_path = RepoPath(repo_path.into());
let repo = repo.repo_ptr.lock(); let repo = repo.repo_ptr.lock();
@ -3677,7 +3799,6 @@ impl BackgroundScanner {
} }
} }
} }
}
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
if state.should_scan_directory(&fs_entry) { if state.should_scan_directory(&fs_entry) {
@ -3833,8 +3954,7 @@ impl BackgroundScanner {
ignore_stack.clone() ignore_stack.clone()
}; };
// Scan any directories that were previously ignored and weren't // Scan any directories that were previously ignored and weren't previously scanned.
// previously scanned.
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
let state = self.state.lock(); let state = self.state.lock();
if state.should_scan_directory(&entry) { if state.should_scan_directory(&entry) {
@ -4010,6 +4130,12 @@ impl BackgroundScanner {
} }
} }
fn is_git_related(abs_path: &Path) -> bool {
abs_path
.components()
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
}
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag; let mut result = root_char_bag;
result.extend( result.extend(

View file

@ -1,6 +1,7 @@
use crate::{ use crate::{
project_settings::ProjectSettings,
worktree::{Event, Snapshot, WorktreeModelHandle}, worktree::{Event, Snapshot, WorktreeModelHandle},
Entry, EntryKind, PathChange, Worktree, Entry, EntryKind, PathChange, Project, Worktree,
}; };
use anyhow::Result; use anyhow::Result;
use client::Client; use client::Client;
@ -12,6 +13,7 @@ use postage::stream::Stream;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use rand::prelude::*; use rand::prelude::*;
use serde_json::json; use serde_json::json;
use settings::SettingsStore;
use std::{ use std::{
env, env,
fmt::Write, fmt::Write,
@ -23,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
#[gpui::test] #[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) { async fn test_traversal(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -78,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) { async fn test_descendent_entries(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -185,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
#[gpui::test(iterations = 10)] #[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) { async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -264,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
#[gpui::test] #[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -439,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) { async fn test_open_gitignored_files(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -599,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -722,6 +730,14 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
#[gpui::test(iterations = 10)] #[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(Vec::new());
});
});
});
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -827,6 +843,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) { async fn test_write_file(cx: &mut TestAppContext) {
init_test(cx);
let dir = temp_tree(json!({ let dir = temp_tree(json!({
".git": {}, ".git": {},
".gitignore": "ignored-dir\n", ".gitignore": "ignored-dir\n",
@ -877,8 +894,105 @@ async fn test_write_file(cx: &mut TestAppContext) {
}); });
} }
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
init_test(cx);
let dir = temp_tree(json!({
".gitignore": "**/target\n/node_modules\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
"bar": {
"bar.rs": "// bar",
},
"lib.rs": "mod foo;\nmod bar;\n",
},
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
});
});
});
let tree = Worktree::local(
build_client(cx),
dir.path(),
true,
Arc::new(RealFs),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
"src/foo/foo.rs",
"src/foo/another.rs",
"node_modules/.DS_Store",
"src/.DS_Store",
".DS_Store",
],
&["target", "node_modules"],
&["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
)
});
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["**/node_modules/**".to_string()]);
});
});
});
tree.flush_fs_events(cx).await;
cx.foreground().run_until_parked();
tree.read_with(cx, |tree, _| {
check_worktree_entries(
tree,
&[
"node_modules/prettier/package.json",
"node_modules/.DS_Store",
"node_modules",
],
&["target"],
&[
".gitignore",
"src/lib.rs",
"src/bar/bar.rs",
"src/foo/foo.rs",
"src/foo/another.rs",
"src/.DS_Store",
".DS_Store",
],
)
});
}
#[gpui::test(iterations = 30)] #[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -938,6 +1052,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx);
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_fake = FakeFs::new(cx.background()); let fs_fake = FakeFs::new(cx.background());
@ -1054,6 +1169,7 @@ async fn test_random_worktree_operations_during_initial_scan(
cx: &mut TestAppContext, cx: &mut TestAppContext,
mut rng: StdRng, mut rng: StdRng,
) { ) {
init_test(cx);
let operations = env::var("OPERATIONS") let operations = env::var("OPERATIONS")
.map(|o| o.parse().unwrap()) .map(|o| o.parse().unwrap())
.unwrap_or(5); .unwrap_or(5);
@ -1143,6 +1259,7 @@ async fn test_random_worktree_operations_during_initial_scan(
#[gpui::test(iterations = 100)] #[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
init_test(cx);
let operations = env::var("OPERATIONS") let operations = env::var("OPERATIONS")
.map(|o| o.parse().unwrap()) .map(|o| o.parse().unwrap())
.unwrap_or(40); .unwrap_or(40);
@ -1557,6 +1674,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
#[gpui::test] #[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) { async fn test_rename_work_directory(cx: &mut TestAppContext) {
init_test(cx);
let root = temp_tree(json!({ let root = temp_tree(json!({
"projects": { "projects": {
"project1": { "project1": {
@ -1627,6 +1745,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) { async fn test_git_repository_for_path(cx: &mut TestAppContext) {
init_test(cx);
let root = temp_tree(json!({ let root = temp_tree(json!({
"c.txt": "", "c.txt": "",
"dir1": { "dir1": {
@ -1747,6 +1866,15 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) { async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
});
});
});
const IGNORE_RULE: &'static str = "**/target"; const IGNORE_RULE: &'static str = "**/target";
let root = temp_tree(json!({ let root = temp_tree(json!({
@ -1935,6 +2063,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
#[gpui::test] #[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) { async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
fs.insert_tree( fs.insert_tree(
"/root", "/root",
@ -2139,3 +2268,44 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Sta
.map(|status| (status.path().unwrap().to_string(), status.status())) .map(|status| (status.path().unwrap().to_string(), status.status()))
.collect() .collect()
} }
#[track_caller]
fn check_worktree_entries(
tree: &Worktree,
expected_excluded_paths: &[&str],
expected_ignored_paths: &[&str],
expected_tracked_paths: &[&str],
) {
for path in expected_excluded_paths {
let entry = tree.entry_for_path(path);
assert!(
entry.is_none(),
"expected path '{path}' to be excluded, but got entry: {entry:?}",
);
}
for path in expected_ignored_paths {
let entry = tree
.entry_for_path(path)
.unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
assert!(
entry.is_ignored,
"expected path '{path}' to be ignored, but got entry: {entry:?}",
);
}
for path in expected_tracked_paths {
let entry = tree
.entry_for_path(path)
.unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
assert!(
!entry.is_ignored,
"expected path '{path}' to be tracked, but got entry: {entry:?}",
);
}
}
fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
cx.set_global(SettingsStore::test(cx));
Project::init_settings(cx);
});
}

View file

@ -20,10 +20,6 @@ impl IgnoreStack {
Arc::new(Self::All) Arc::new(Self::All)
} }
pub fn is_all(&self) -> bool {
matches!(self, IgnoreStack::All)
}
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> { pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
match self.as_ref() { match self.as_ref() {
IgnoreStack::All => self, IgnoreStack::All => self,

View file

@ -5618,7 +5618,16 @@ impl Project {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let background = cx.background_executor().clone(); let background = cx.background_executor().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); let path_count: usize = snapshots
.iter()
.map(|s| {
if query.include_ignored() {
s.file_count()
} else {
s.visible_file_count()
}
})
.sum();
if path_count == 0 { if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024); let (_, rx) = smol::channel::bounded(1024);
return rx; return rx;
@ -5631,8 +5640,16 @@ impl Project {
.iter() .iter()
.filter_map(|(_, b)| { .filter_map(|(_, b)| {
let buffer = b.upgrade()?; let buffer = b.upgrade()?;
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
if let Some(path) = snapshot.file().map(|file| file.path()) { let is_ignored = buffer
.project_path(cx)
.and_then(|path| self.entry_for_path(&path, cx))
.map_or(false, |entry| entry.is_ignored);
(is_ignored, buffer.snapshot())
});
if is_ignored && !query.include_ignored() {
return None;
} else if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot))) Some((path.clone(), (buffer, snapshot)))
} else { } else {
unnamed_files.push(buffer); unnamed_files.push(buffer);
@ -5806,7 +5823,12 @@ impl Project {
let mut snapshot_start_ix = 0; let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new(); let mut abs_path = PathBuf::new();
for snapshot in snapshots { for snapshot in snapshots {
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); let snapshot_end_ix = snapshot_start_ix
+ if query.include_ignored() {
snapshot.file_count()
} else {
snapshot.visible_file_count()
};
if worker_end_ix <= snapshot_start_ix { if worker_end_ix <= snapshot_start_ix {
break; break;
} else if worker_start_ix > snapshot_end_ix { } else if worker_start_ix > snapshot_end_ix {
@ -5819,7 +5841,7 @@ impl Project {
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot for entry in snapshot
.files(false, start_in_snapshot) .files(query.include_ignored(), start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot) .take(end_in_snapshot - start_in_snapshot)
{ {
if matching_paths_tx.is_closed() { if matching_paths_tx.is_closed() {

View file

@ -11,6 +11,8 @@ pub struct ProjectSettings {
pub lsp: HashMap<Arc<str>, LspSettings>, pub lsp: HashMap<Arc<str>, LspSettings>,
#[serde(default)] #[serde(default)]
pub git: GitSettings, pub git: GitSettings,
#[serde(default)]
pub file_scan_exclusions: Option<Vec<String>>,
} }
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]

View file

@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!( assert_eq!(
search( search(
&project, &project,
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx cx
) )
.await .await
@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!( assert_eq!(
search( search(
&project, &project,
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx cx
) )
.await .await
@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
Vec::new() Vec::new()
) )
@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.rs").unwrap()], vec![PathMatcher::new("*.rs").unwrap()],
Vec::new() Vec::new()
) )
@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(), PathMatcher::new("*.odd").unwrap(),
@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
) )
@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![PathMatcher::new("*.rs").unwrap()], vec![PathMatcher::new("*.rs").unwrap()],
) )
@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
false,
Vec::new(), Vec::new(),
vec![ vec![
PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.rs").unwrap(),
@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()],
) )
@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()],
).unwrap(), ).unwrap(),
@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap() PathMatcher::new("*.odd").unwrap()
@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
false,
vec![ vec![
PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap() PathMatcher::new("*.odd").unwrap()

View file

@ -39,6 +39,7 @@ pub enum SearchQuery {
replacement: Option<String>, replacement: Option<String>,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
inner: SearchInputs, inner: SearchInputs,
}, },
@ -48,6 +49,7 @@ pub enum SearchQuery {
multiline: bool, multiline: bool,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
inner: SearchInputs, inner: SearchInputs,
}, },
} }
@ -57,6 +59,7 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
files_to_include: Vec<PathMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
@ -74,6 +77,7 @@ impl SearchQuery {
replacement: None, replacement: None,
whole_word, whole_word,
case_sensitive, case_sensitive,
include_ignored,
inner, inner,
}) })
} }
@ -82,6 +86,7 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
include_ignored: bool,
files_to_include: Vec<PathMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
@ -111,6 +116,7 @@ impl SearchQuery {
multiline, multiline,
whole_word, whole_word,
case_sensitive, case_sensitive,
include_ignored,
inner, inner,
}) })
} }
@ -121,6 +127,7 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
@ -129,6 +136,7 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
@ -156,6 +164,7 @@ impl SearchQuery {
regex: self.is_regex(), regex: self.is_regex(),
whole_word: self.whole_word(), whole_word: self.whole_word(),
case_sensitive: self.case_sensitive(), case_sensitive: self.case_sensitive(),
include_ignored: self.include_ignored(),
files_to_include: self files_to_include: self
.files_to_include() .files_to_include()
.iter() .iter()
@ -336,6 +345,17 @@ impl SearchQuery {
} }
} }
pub fn include_ignored(&self) -> bool {
match self {
Self::Text {
include_ignored, ..
} => *include_ignored,
Self::Regex {
include_ignored, ..
} => *include_ignored,
}
}
pub fn is_regex(&self) -> bool { pub fn is_regex(&self) -> bool {
matches!(self, Self::Regex { .. }) matches!(self, Self::Regex { .. })
} }

View file

@ -1,5 +1,6 @@
use crate::{ use crate::{
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
ProjectEntryId, RemoveOptions,
}; };
use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result}; use anyhow::{anyhow, Context as _, Result};
@ -25,6 +26,7 @@ use gpui::{
AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext, AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
Task, Task,
}; };
use itertools::Itertools;
use language::{ use language::{
proto::{ proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@ -39,6 +41,7 @@ use postage::{
prelude::{Sink as _, Stream as _}, prelude::{Sink as _, Stream as _},
watch, watch,
}; };
use settings::{Settings, SettingsStore};
use smol::channel::{self, Sender}; use smol::channel::{self, Sender};
use std::{ use std::{
any::Any, any::Any,
@ -58,7 +61,10 @@ use std::{
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use util::{paths::HOME, ResultExt}; use util::{
paths::{PathMatcher, HOME},
ResultExt,
};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize); pub struct WorktreeId(usize);
@ -73,7 +79,7 @@ pub struct LocalWorktree {
scan_requests_tx: channel::Sender<ScanRequest>, scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>, path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>), is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_task: Task<()>, _background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>, share: Option<ShareState>,
diagnostics: HashMap< diagnostics: HashMap<
Arc<Path>, Arc<Path>,
@ -219,6 +225,7 @@ pub struct LocalSnapshot {
/// All of the git repositories in the worktree, indexed by the project entry /// All of the git repositories in the worktree, indexed by the project entry
/// id of their parent directory. /// id of their parent directory.
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>, git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
file_scan_exclusions: Vec<PathMatcher>,
} }
struct BackgroundScannerState { struct BackgroundScannerState {
@ -302,17 +309,56 @@ impl Worktree {
.await .await
.context("failed to stat worktree path")?; .context("failed to stat worktree path")?;
let closure_fs = Arc::clone(&fs);
let closure_next_entry_id = Arc::clone(&next_entry_id);
let closure_abs_path = abs_path.to_path_buf();
cx.build_model(move |cx: &mut ModelContext<Worktree>| { cx.build_model(move |cx: &mut ModelContext<Worktree>| {
cx.observe_global::<SettingsStore>(move |this, cx| {
if let Self::Local(this) = this {
let new_file_scan_exclusions =
file_scan_exclusions(ProjectSettings::get_global(cx));
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
log::info!(
"Re-scanning directories, new scan exclude files: {:?}",
this.snapshot
.file_scan_exclusions
.iter()
.map(ToString::to_string)
.collect::<Vec<_>>()
);
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
channel::unbounded();
this.scan_requests_tx = scan_requests_tx;
this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
this._background_scanner_tasks = start_background_scan_tasks(
&closure_abs_path,
this.snapshot(),
scan_requests_rx,
path_prefixes_to_scan_rx,
Arc::clone(&closure_next_entry_id),
Arc::clone(&closure_fs),
cx,
);
this.is_scanning = watch::channel_with(true);
}
}
})
.detach();
let root_name = abs_path let root_name = abs_path
.file_name() .file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string()); .map_or(String::new(), |f| f.to_string_lossy().to_string());
let mut snapshot = LocalSnapshot { let mut snapshot = LocalSnapshot {
file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
ignores_by_parent_abs_path: Default::default(), ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(), git_repositories: Default::default(),
snapshot: Snapshot { snapshot: Snapshot {
id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize), id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
abs_path: abs_path.clone(), abs_path: abs_path.to_path_buf().into(),
root_name: root_name.clone(), root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(), entries_by_path: Default::default(),
@ -337,61 +383,22 @@ impl Worktree {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let task_snapshot = snapshot.clone();
cx.spawn(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap();
match state {
ScanState::Started => {
*this.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
this.set_snapshot(snapshot, changes, cx);
drop(barrier);
}
}
cx.notify();
})
.ok();
}
})
.detach();
let background_scanner_task = cx.background_executor().spawn({
let fs = fs.clone();
let snapshot = snapshot.clone();
let background = cx.background_executor().clone();
async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(
snapshot,
next_entry_id,
fs,
scan_states_tx,
background,
scan_requests_rx,
path_prefixes_to_scan_rx,
)
.run(events)
.await;
}
});
Worktree::Local(LocalWorktree { Worktree::Local(LocalWorktree {
snapshot, snapshot,
is_scanning: watch::channel_with(true), is_scanning: watch::channel_with(true),
share: None, share: None,
scan_requests_tx, scan_requests_tx,
path_prefixes_to_scan_tx, path_prefixes_to_scan_tx,
_background_scanner_task: background_scanner_task, _background_scanner_tasks: start_background_scan_tasks(
&abs_path,
task_snapshot,
scan_requests_rx,
path_prefixes_to_scan_rx,
Arc::clone(&next_entry_id),
Arc::clone(&fs),
cx,
),
diagnostics: Default::default(), diagnostics: Default::default(),
diagnostic_summaries: Default::default(), diagnostic_summaries: Default::default(),
client, client,
@ -584,6 +591,77 @@ impl Worktree {
} }
} }
fn start_background_scan_tasks(
abs_path: &Path,
snapshot: LocalSnapshot,
scan_requests_rx: channel::Receiver<ScanRequest>,
path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
next_entry_id: Arc<AtomicUsize>,
fs: Arc<dyn Fs>,
cx: &mut ModelContext<'_, Worktree>,
) -> Vec<Task<()>> {
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
let background_scanner = cx.background_executor().spawn({
let abs_path = abs_path.to_path_buf();
let background = cx.background_executor().clone();
async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(
snapshot,
next_entry_id,
fs,
scan_states_tx,
background,
scan_requests_rx,
path_prefixes_to_scan_rx,
)
.run(events)
.await;
}
});
let scan_state_updater = cx.spawn(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap();
match state {
ScanState::Started => {
*this.is_scanning.0.borrow_mut() = true;
}
ScanState::Updated {
snapshot,
changes,
barrier,
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
this.set_snapshot(snapshot, changes, cx);
drop(barrier);
}
}
cx.notify();
})
.ok();
}
});
vec![background_scanner, scan_state_updater]
}
fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
.sorted()
.filter_map(|pattern| {
PathMatcher::new(pattern)
.map(Some)
.unwrap_or_else(|e| {
log::error!(
"Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
);
None
})
})
.collect()
}
impl LocalWorktree { impl LocalWorktree {
pub fn contains_abs_path(&self, path: &Path) -> bool { pub fn contains_abs_path(&self, path: &Path) -> bool {
path.starts_with(&self.abs_path) path.starts_with(&self.abs_path)
@ -1482,7 +1560,7 @@ impl Snapshot {
self.entries_by_id.get(&entry_id, &()).is_some() self.entries_by_id.get(&entry_id, &()).is_some()
} }
pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> { fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, entry))?; let entry = Entry::try_from((&self.root_char_bag, entry))?;
let old_entry = self.entries_by_id.insert_or_replace( let old_entry = self.entries_by_id.insert_or_replace(
PathEntry { PathEntry {
@ -2143,6 +2221,12 @@ impl LocalSnapshot {
paths.sort_by(|a, b| a.0.cmp(b.0)); paths.sort_by(|a, b| a.0.cmp(b.0));
paths paths
} }
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
}
} }
impl BackgroundScannerState { impl BackgroundScannerState {
@ -2165,7 +2249,7 @@ impl BackgroundScannerState {
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
let mut containing_repository = None; let mut containing_repository = None;
if !ignore_stack.is_all() { if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
containing_repository = Some(( containing_repository = Some((
@ -2376,18 +2460,30 @@ impl BackgroundScannerState {
// Remove any git repositories whose .git entry no longer exists. // Remove any git repositories whose .git entry no longer exists.
let snapshot = &mut self.snapshot; let snapshot = &mut self.snapshot;
let mut repositories = mem::take(&mut snapshot.git_repositories); let mut ids_to_preserve = HashSet::default();
let mut repository_entries = mem::take(&mut snapshot.repository_entries); for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
repositories.retain(|work_directory_id, _| { let exists_in_snapshot = snapshot
snapshot .entry_for_id(work_directory_id)
.entry_for_id(*work_directory_id)
.map_or(false, |entry| { .map_or(false, |entry| {
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
})
}); });
repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); if exists_in_snapshot {
snapshot.git_repositories = repositories; ids_to_preserve.insert(work_directory_id);
snapshot.repository_entries = repository_entries; } else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
if snapshot.is_abs_path_excluded(&git_dir_abs_path)
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
ids_to_preserve.insert(work_directory_id);
}
}
}
snapshot
.git_repositories
.retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
snapshot
.repository_entries
.retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
} }
fn build_git_repository( fn build_git_repository(
@ -3085,7 +3181,7 @@ impl BackgroundScanner {
let ignore_stack = state let ignore_stack = state
.snapshot .snapshot
.ignore_stack_for_abs_path(&root_abs_path, true); .ignore_stack_for_abs_path(&root_abs_path, true);
if ignore_stack.is_all() { if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
root_entry.is_ignored = true; root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref()); state.insert_entry(root_entry.clone(), self.fs.as_ref());
} }
@ -3222,6 +3318,7 @@ impl BackgroundScanner {
return false; return false;
}; };
if !is_git_related(&abs_path) {
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
snapshot snapshot
.entry_for_path(parent) .entry_for_path(parent)
@ -3231,6 +3328,13 @@ impl BackgroundScanner {
log::debug!("ignoring event {relative_path:?} within unloaded directory"); log::debug!("ignoring event {relative_path:?} within unloaded directory");
return false; return false;
} }
if snapshot.is_abs_path_excluded(abs_path) {
log::debug!(
"ignoring FS event for path {relative_path:?} within excluded directory"
);
return false;
}
}
relative_paths.push(relative_path); relative_paths.push(relative_path);
true true
@ -3392,18 +3496,26 @@ impl BackgroundScanner {
} }
async fn scan_dir(&self, job: &ScanJob) -> Result<()> { async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
log::debug!("scan directory {:?}", job.path); let root_abs_path;
let mut ignore_stack;
let mut ignore_stack = job.ignore_stack.clone(); let mut new_ignore;
let mut new_ignore = None; let root_char_bag;
let (root_abs_path, root_char_bag, next_entry_id) = { let next_entry_id;
let snapshot = &self.state.lock().snapshot; {
( let state = self.state.lock();
snapshot.abs_path().clone(), let snapshot = &state.snapshot;
snapshot.root_char_bag, root_abs_path = snapshot.abs_path().clone();
self.next_entry_id.clone(), if snapshot.is_abs_path_excluded(&job.abs_path) {
) log::error!("skipping excluded directory {:?}", job.path);
}; return Ok(());
}
log::debug!("scanning directory {:?}", job.path);
ignore_stack = job.ignore_stack.clone();
new_ignore = None;
root_char_bag = snapshot.root_char_bag;
next_entry_id = self.next_entry_id.clone();
drop(state);
}
let mut dotgit_path = None; let mut dotgit_path = None;
let mut root_canonical_path = None; let mut root_canonical_path = None;
@ -3418,18 +3530,8 @@ impl BackgroundScanner {
continue; continue;
} }
}; };
let child_name = child_abs_path.file_name().unwrap(); let child_name = child_abs_path.file_name().unwrap();
let child_path: Arc<Path> = job.path.join(child_name).into(); let child_path: Arc<Path> = job.path.join(child_name).into();
let child_metadata = match self.fs.metadata(&child_abs_path).await {
Ok(Some(metadata)) => metadata,
Ok(None) => continue,
Err(err) => {
log::error!("error processing {:?}: {:?}", child_abs_path, err);
continue;
}
};
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
if child_name == *GITIGNORE { if child_name == *GITIGNORE {
match build_gitignore(&child_abs_path, self.fs.as_ref()).await { match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@ -3473,6 +3575,26 @@ impl BackgroundScanner {
dotgit_path = Some(child_path.clone()); dotgit_path = Some(child_path.clone());
} }
{
let mut state = self.state.lock();
if state.snapshot.is_abs_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name);
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
}
drop(state);
}
let child_metadata = match self.fs.metadata(&child_abs_path).await {
Ok(Some(metadata)) => metadata,
Ok(None) => continue,
Err(err) => {
log::error!("error processing {child_abs_path:?}: {err:?}");
continue;
}
};
let mut child_entry = Entry::new( let mut child_entry = Entry::new(
child_path.clone(), child_path.clone(),
&child_metadata, &child_metadata,
@ -3653,14 +3775,12 @@ impl BackgroundScanner {
self.next_entry_id.as_ref(), self.next_entry_id.as_ref(),
state.snapshot.root_char_bag, state.snapshot.root_char_bag,
); );
fs_entry.is_ignored = ignore_stack.is_all(); let is_dir = fs_entry.is_dir();
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
if !fs_entry.is_ignored { if !is_dir && !fs_entry.is_ignored {
if !fs_entry.is_dir() { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
if let Some((work_dir, repo)) =
state.snapshot.local_repo_for_path(&path)
{
if let Ok(repo_path) = path.strip_prefix(work_dir.0) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
let repo_path = RepoPath(repo_path.into()); let repo_path = RepoPath(repo_path.into());
let repo = repo.repo_ptr.lock(); let repo = repo.repo_ptr.lock();
@ -3668,7 +3788,6 @@ impl BackgroundScanner {
} }
} }
} }
}
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
if state.should_scan_directory(&fs_entry) { if state.should_scan_directory(&fs_entry) {
@ -3824,8 +3943,7 @@ impl BackgroundScanner {
ignore_stack.clone() ignore_stack.clone()
}; };
// Scan any directories that were previously ignored and weren't // Scan any directories that were previously ignored and weren't previously scanned.
// previously scanned.
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
let state = self.state.lock(); let state = self.state.lock();
if state.should_scan_directory(&entry) { if state.should_scan_directory(&entry) {
@ -4001,6 +4119,12 @@ impl BackgroundScanner {
} }
} }
fn is_git_related(abs_path: &Path) -> bool {
abs_path
.components()
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
}
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag; let mut result = root_char_bag;
result.extend( result.extend(

File diff suppressed because it is too large Load diff

View file

@ -1732,7 +1732,7 @@ mod tests {
use super::*; use super::*;
use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle}; use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::FakeFs; use project::{project_settings::ProjectSettings, FakeFs};
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use std::{ use std::{
@ -1832,6 +1832,123 @@ mod tests {
); );
} }
#[gpui::test]
async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
});
});
});
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root1",
json!({
".dockerignore": "",
".git": {
"HEAD": "",
},
"a": {
"0": { "q": "", "r": "", "s": "" },
"1": { "t": "", "u": "" },
"2": { "v": "", "w": "", "x": "", "y": "" },
},
"b": {
"3": { "Q": "" },
"4": { "R": "", "S": "", "T": "", "U": "" },
},
"C": {
"5": {},
"6": { "V": "", "W": "" },
"7": { "X": "" },
"8": { "Y": {}, "Z": "" }
}
}),
)
.await;
fs.insert_tree(
"/root2",
json!({
"d": {
"4": ""
},
"e": {}
}),
)
.await;
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
let workspace = cx
.add_window(|cx| Workspace::test_new(project.clone(), cx))
.root(cx);
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" > b",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
toggle_expand_dir(&panel, "root1/b", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b <== selected",
" > 3",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
toggle_expand_dir(&panel, "root2/d", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b",
" > 3",
" > C",
" .dockerignore",
"v root2",
" v d <== selected",
" > e",
]
);
toggle_expand_dir(&panel, "root2/e", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b",
" > 3",
" > C",
" .dockerignore",
"v root2",
" v d",
" v e <== selected",
]
);
}
#[gpui::test(iterations = 30)] #[gpui::test(iterations = 30)]
async fn test_editing_files(cx: &mut gpui::TestAppContext) { async fn test_editing_files(cx: &mut gpui::TestAppContext) {
init_test(cx); init_test(cx);
@ -2929,6 +3046,12 @@ mod tests {
workspace::init_settings(cx); workspace::init_settings(cx);
client::init_settings(cx); client::init_settings(cx);
Project::init_settings(cx); Project::init_settings(cx);
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(Vec::new());
});
});
}); });
} }

View file

@ -1571,7 +1571,7 @@ mod tests {
use super::*; use super::*;
use gpui::{TestAppContext, View, VisualTestContext, WindowHandle}; use gpui::{TestAppContext, View, VisualTestContext, WindowHandle};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::FakeFs; use project::{project_settings::ProjectSettings, FakeFs};
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use std::{ use std::{
@ -1672,6 +1672,124 @@ mod tests {
); );
} }
#[gpui::test]
async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
});
});
});
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/root1",
json!({
".dockerignore": "",
".git": {
"HEAD": "",
},
"a": {
"0": { "q": "", "r": "", "s": "" },
"1": { "t": "", "u": "" },
"2": { "v": "", "w": "", "x": "", "y": "" },
},
"b": {
"3": { "Q": "" },
"4": { "R": "", "S": "", "T": "", "U": "" },
},
"C": {
"5": {},
"6": { "V": "", "W": "" },
"7": { "X": "" },
"8": { "Y": {}, "Z": "" }
}
}),
)
.await;
fs.insert_tree(
"/root2",
json!({
"d": {
"4": ""
},
"e": {}
}),
)
.await;
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let panel = workspace
.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
.unwrap();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" > b",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
toggle_expand_dir(&panel, "root1/b", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b <== selected",
" > 3",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
toggle_expand_dir(&panel, "root2/d", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b",
" > 3",
" > C",
" .dockerignore",
"v root2",
" v d <== selected",
" > e",
]
);
toggle_expand_dir(&panel, "root2/e", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
"v root1",
" > a",
" v b",
" > 3",
" > C",
" .dockerignore",
"v root2",
" v d",
" v e <== selected",
]
);
}
#[gpui::test(iterations = 30)] #[gpui::test(iterations = 30)]
async fn test_editing_files(cx: &mut gpui::TestAppContext) { async fn test_editing_files(cx: &mut gpui::TestAppContext) {
init_test(cx); init_test(cx);
@ -2792,6 +2910,12 @@ mod tests {
workspace::init_settings(cx); workspace::init_settings(cx);
client::init_settings(cx); client::init_settings(cx);
Project::init_settings(cx); Project::init_settings(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(Vec::new());
});
});
}); });
} }

View file

@ -884,6 +884,7 @@ message SearchProject {
bool case_sensitive = 5; bool case_sensitive = 5;
string files_to_include = 6; string files_to_include = 6;
string files_to_exclude = 7; string files_to_exclude = 7;
bool include_ignored = 8;
} }
message SearchProjectResponse { message SearchProjectResponse {

View file

@ -884,6 +884,7 @@ message SearchProject {
bool case_sensitive = 5; bool case_sensitive = 5;
string files_to_include = 6; string files_to_include = 6;
string files_to_exclude = 7; string files_to_exclude = 7;
bool include_ignored = 8;
} }
message SearchProjectResponse { message SearchProjectResponse {

View file

@ -805,6 +805,7 @@ impl BufferSearchBar {
query, query,
self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE), self.search_options.contains(SearchOptions::CASE_SENSITIVE),
false,
Vec::new(), Vec::new(),
Vec::new(), Vec::new(),
) { ) {
@ -820,6 +821,7 @@ impl BufferSearchBar {
query, query,
self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE), self.search_options.contains(SearchOptions::CASE_SENSITIVE),
false,
Vec::new(), Vec::new(),
Vec::new(), Vec::new(),
) { ) {

View file

@ -4,7 +4,7 @@ use crate::{
search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button}, search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery, ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery,
PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch, PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch,
ToggleCaseSensitive, ToggleReplace, ToggleWholeWord, ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord,
}; };
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use collections::HashMap; use collections::HashMap;
@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) {
cx.capture_action(ProjectSearchView::replace_next); cx.capture_action(ProjectSearchView::replace_next);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx); add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleIncludeIgnored>(SearchOptions::INCLUDE_IGNORED, cx);
add_toggle_filters_action::<ToggleFilters>(cx); add_toggle_filters_action::<ToggleFilters>(cx);
} }
@ -1192,6 +1193,7 @@ impl ProjectSearchView {
text, text,
self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE), self.search_options.contains(SearchOptions::CASE_SENSITIVE),
self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
included_files, included_files,
excluded_files, excluded_files,
) { ) {
@ -1210,6 +1212,7 @@ impl ProjectSearchView {
text, text,
self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE), self.search_options.contains(SearchOptions::CASE_SENSITIVE),
self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
included_files, included_files,
excluded_files, excluded_files,
) { ) {
@ -1764,6 +1767,17 @@ impl View for ProjectSearchBar {
render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx) render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx)
}); });
let mut include_ignored = is_semantic_disabled.then(|| {
render_option_button_icon(
// TODO proper icon
"icons/case_insensitive.svg",
SearchOptions::INCLUDE_IGNORED,
cx,
)
});
// TODO not implemented yet
let _ = include_ignored.take();
let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| { let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
let is_active = if let Some(search) = self.active_project_search.as_ref() { let is_active = if let Some(search) = self.active_project_search.as_ref() {
let search = search.read(cx); let search = search.read(cx);
@ -1877,9 +1891,17 @@ impl View for ProjectSearchBar {
.flex(1., false), .flex(1., false),
) )
.with_children(search.filters_enabled.then(|| { .with_children(search.filters_enabled.then(|| {
Flex::row()
.with_child(
Flex::row() Flex::row()
.with_child( .with_child(
ChildView::new(&search.included_files_editor, cx) ChildView::new(&search.included_files_editor, cx)
.contained()
.constrained()
.with_height(theme.search.search_bar_row_height)
.flex(1., true),
)
.with_children(include_ignored)
.contained() .contained()
.with_style(include_container_style) .with_style(include_container_style)
.constrained() .constrained()

View file

@ -29,6 +29,7 @@ actions!(
CycleMode, CycleMode,
ToggleWholeWord, ToggleWholeWord,
ToggleCaseSensitive, ToggleCaseSensitive,
ToggleIncludeIgnored,
ToggleReplace, ToggleReplace,
SelectNextMatch, SelectNextMatch,
SelectPrevMatch, SelectPrevMatch,
@ -49,31 +50,35 @@ bitflags! {
const NONE = 0b000; const NONE = 0b000;
const WHOLE_WORD = 0b001; const WHOLE_WORD = 0b001;
const CASE_SENSITIVE = 0b010; const CASE_SENSITIVE = 0b010;
const INCLUDE_IGNORED = 0b100;
} }
} }
impl SearchOptions { impl SearchOptions {
pub fn label(&self) -> &'static str { pub fn label(&self) -> &'static str {
match *self { match *self {
SearchOptions::WHOLE_WORD => "Match Whole Word", Self::WHOLE_WORD => "Match Whole Word",
SearchOptions::CASE_SENSITIVE => "Match Case", Self::CASE_SENSITIVE => "Match Case",
_ => panic!("{:?} is not a named SearchOption", self), Self::INCLUDE_IGNORED => "Include Ignored",
_ => panic!("{self:?} is not a named SearchOption"),
} }
} }
pub fn icon(&self) -> &'static str { pub fn icon(&self) -> &'static str {
match *self { match *self {
SearchOptions::WHOLE_WORD => "icons/word_search.svg", Self::WHOLE_WORD => "icons/word_search.svg",
SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg", Self::CASE_SENSITIVE => "icons/case_insensitive.svg",
_ => panic!("{:?} is not a named SearchOption", self), Self::INCLUDE_IGNORED => "icons/case_insensitive.svg",
_ => panic!("{self:?} is not a named SearchOption"),
} }
} }
pub fn to_toggle_action(&self) -> Box<dyn Action> { pub fn to_toggle_action(&self) -> Box<dyn Action> {
match *self { match *self {
SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord), Self::WHOLE_WORD => Box::new(ToggleWholeWord),
SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive), Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
_ => panic!("{:?} is not a named SearchOption", self), Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored),
_ => panic!("{self:?} is not a named SearchOption"),
} }
} }
@ -85,6 +90,7 @@ impl SearchOptions {
let mut options = SearchOptions::NONE; let mut options = SearchOptions::NONE;
options.set(SearchOptions::WHOLE_WORD, query.whole_word()); options.set(SearchOptions::WHOLE_WORD, query.whole_word());
options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive()); options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive());
options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored());
options options
} }

View file

@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher {
} }
} }
impl PartialEq for PathMatcher {
fn eq(&self, other: &Self) -> bool {
self.maybe_path.eq(&other.maybe_path)
}
}
impl Eq for PathMatcher {}
impl PathMatcher { impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> { pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher { Ok(PathMatcher {
@ -211,7 +219,19 @@ impl PathMatcher {
} }
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool { pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) other.as_ref().starts_with(&self.maybe_path)
|| self.glob.is_match(&other)
|| self.check_with_end_separator(other.as_ref())
}
fn check_with_end_separator(&self, path: &Path) -> bool {
let path_str = path.to_string_lossy();
let separator = std::path::MAIN_SEPARATOR_STR;
if path_str.ends_with(separator) {
self.glob.is_match(path)
} else {
self.glob.is_match(path_str.to_string() + separator)
}
} }
} }
@ -388,4 +408,14 @@ mod tests {
let path = Path::new("/a/b/c/.eslintrc.js"); let path = Path::new("/a/b/c/.eslintrc.js");
assert_eq!(path.extension_or_hidden_file_name(), Some("js")); assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
} }
#[test]
fn edge_of_glob() {
let path = Path::new("/work/node_modules");
let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
assert!(
path_matcher.is_match(&path),
"Path matcher {path_matcher} should match {path:?}"
);
}
} }