Allow excluding files from worktrees (#3356)
* Part of https://github.com/zed-industries/community/issues/70 Allows fully removing certain files or file groups from Zed: no matching items will be scanned or added into worktrees, so nothing will be shown in the project tree, project search and go-to-file will not see them, and the corresponding FS events will be ignored. One exception is `.git` files: those are still not shown or accessible by default, yet remain tracked in the worktrees. By default, the setting is configured to ```json "file_scan_exclusions": [ "**/.git", "**/.svn", "**/.hg", "**/CVS", "**/.DS_Store", "**/Thumbs.db", "**/.classpath", "**/.settings" ], ``` * In addition, this contains code preparations for a "search in included files" feature: a new SearchOptions variant, plus search crate and RPC adjustments Release Notes: - Added a `file_scan_exclusions` section to project settings to completely ignore certain files in Zed
This commit is contained in:
commit
12b59daa1e
29 changed files with 3422 additions and 2417 deletions
|
@ -268,6 +268,19 @@
|
|||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true
|
||||
},
|
||||
// Add files or globs of files that will be excluded by Zed entirely:
|
||||
// they will be skipped during FS scan(s), file tree and file search
|
||||
// will lack the corresponding file entries.
|
||||
"file_scan_exclusions": [
|
||||
"**/.git",
|
||||
"**/.svn",
|
||||
"**/.hg",
|
||||
"**/CVS",
|
||||
"**/.DS_Store",
|
||||
"**/Thumbs.db",
|
||||
"**/.classpath",
|
||||
"**/.settings"
|
||||
],
|
||||
// Git gutter behavior configuration.
|
||||
"git": {
|
||||
// Control whether the git gutter is shown. May take 2 values:
|
||||
|
|
|
@ -5052,7 +5052,7 @@ async fn test_project_search(
|
|||
let mut results = HashMap::default();
|
||||
let mut search_rx = project_b.update(cx_b, |project, cx| {
|
||||
project.search(
|
||||
SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
|
|
@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest {
|
|||
|
||||
let mut search = project.update(cx, |project, cx| {
|
||||
project.search(
|
||||
SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
|
||||
.unwrap(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
|
|
@ -4599,7 +4599,7 @@ async fn test_project_search(
|
|||
let mut results = HashMap::default();
|
||||
let mut search_rx = project_b.update(cx_b, |project, cx| {
|
||||
project.search(
|
||||
SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
|
|
@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest {
|
|||
|
||||
let mut search = project.update(cx, |project, cx| {
|
||||
project.search(
|
||||
SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
|
||||
.unwrap(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
|
|
@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
|
|||
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
|
||||
|
||||
lazy_static! {
|
||||
static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
|
||||
"@[-_\\w]+",
|
||||
false,
|
||||
false,
|
||||
Default::default(),
|
||||
Default::default()
|
||||
)
|
||||
.unwrap();
|
||||
static ref MENTIONS_SEARCH: SearchQuery =
|
||||
SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
|
||||
}
|
||||
|
||||
pub struct MessageEditor {
|
||||
|
|
|
@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
|
|||
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
|
||||
|
||||
lazy_static! {
|
||||
static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
|
||||
"@[-_\\w]+",
|
||||
false,
|
||||
false,
|
||||
Default::default(),
|
||||
Default::default()
|
||||
)
|
||||
.unwrap();
|
||||
static ref MENTIONS_SEARCH: SearchQuery =
|
||||
SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
|
||||
}
|
||||
|
||||
pub struct MessageEditor {
|
||||
|
|
|
@ -20,10 +20,6 @@ impl IgnoreStack {
|
|||
Arc::new(Self::All)
|
||||
}
|
||||
|
||||
pub fn is_all(&self) -> bool {
|
||||
matches!(self, IgnoreStack::All)
|
||||
}
|
||||
|
||||
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
|
||||
match self.as_ref() {
|
||||
IgnoreStack::All => self,
|
||||
|
|
|
@ -5548,7 +5548,16 @@ impl Project {
|
|||
.collect::<Vec<_>>();
|
||||
|
||||
let background = cx.background().clone();
|
||||
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
|
||||
let path_count: usize = snapshots
|
||||
.iter()
|
||||
.map(|s| {
|
||||
if query.include_ignored() {
|
||||
s.file_count()
|
||||
} else {
|
||||
s.visible_file_count()
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
if path_count == 0 {
|
||||
let (_, rx) = smol::channel::bounded(1024);
|
||||
return rx;
|
||||
|
@ -5561,8 +5570,16 @@ impl Project {
|
|||
.iter()
|
||||
.filter_map(|(_, b)| {
|
||||
let buffer = b.upgrade(cx)?;
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
if let Some(path) = snapshot.file().map(|file| file.path()) {
|
||||
let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
|
||||
let is_ignored = buffer
|
||||
.project_path(cx)
|
||||
.and_then(|path| self.entry_for_path(&path, cx))
|
||||
.map_or(false, |entry| entry.is_ignored);
|
||||
(is_ignored, buffer.snapshot())
|
||||
});
|
||||
if is_ignored && !query.include_ignored() {
|
||||
return None;
|
||||
} else if let Some(path) = snapshot.file().map(|file| file.path()) {
|
||||
Some((path.clone(), (buffer, snapshot)))
|
||||
} else {
|
||||
unnamed_files.push(buffer);
|
||||
|
@ -5735,7 +5752,12 @@ impl Project {
|
|||
let mut snapshot_start_ix = 0;
|
||||
let mut abs_path = PathBuf::new();
|
||||
for snapshot in snapshots {
|
||||
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
|
||||
let snapshot_end_ix = snapshot_start_ix
|
||||
+ if query.include_ignored() {
|
||||
snapshot.file_count()
|
||||
} else {
|
||||
snapshot.visible_file_count()
|
||||
};
|
||||
if worker_end_ix <= snapshot_start_ix {
|
||||
break;
|
||||
} else if worker_start_ix > snapshot_end_ix {
|
||||
|
@ -5748,7 +5770,7 @@ impl Project {
|
|||
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
|
||||
|
||||
for entry in snapshot
|
||||
.files(false, start_in_snapshot)
|
||||
.files(query.include_ignored(), start_in_snapshot)
|
||||
.take(end_in_snapshot - start_in_snapshot)
|
||||
{
|
||||
if matching_paths_tx.is_closed() {
|
||||
|
|
|
@ -10,6 +10,8 @@ pub struct ProjectSettings {
|
|||
pub lsp: HashMap<Arc<str>, LspSettings>,
|
||||
#[serde(default)]
|
||||
pub git: GitSettings,
|
||||
#[serde(default)]
|
||||
pub file_scan_exclusions: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
|
|
|
@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
|
@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
|
@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
Vec::new()
|
||||
)
|
||||
|
@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.rs").unwrap()],
|
||||
Vec::new()
|
||||
)
|
||||
|
@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap(),
|
||||
|
@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.rs").unwrap(),
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
|
@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
)
|
||||
|
@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![PathMatcher::new("*.rs").unwrap()],
|
||||
)
|
||||
|
@ -3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
|
@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![
|
||||
PathMatcher::new("*.rs").unwrap(),
|
||||
|
@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
)
|
||||
|
@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
).unwrap(),
|
||||
|
@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap()
|
||||
|
@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap()
|
||||
|
|
|
@ -39,6 +39,7 @@ pub enum SearchQuery {
|
|||
replacement: Option<String>,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
inner: SearchInputs,
|
||||
},
|
||||
|
||||
|
@ -48,6 +49,7 @@ pub enum SearchQuery {
|
|||
multiline: bool,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
inner: SearchInputs,
|
||||
},
|
||||
}
|
||||
|
@ -57,6 +59,7 @@ impl SearchQuery {
|
|||
query: impl ToString,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
files_to_include: Vec<PathMatcher>,
|
||||
files_to_exclude: Vec<PathMatcher>,
|
||||
) -> Result<Self> {
|
||||
|
@ -74,6 +77,7 @@ impl SearchQuery {
|
|||
replacement: None,
|
||||
whole_word,
|
||||
case_sensitive,
|
||||
include_ignored,
|
||||
inner,
|
||||
})
|
||||
}
|
||||
|
@ -82,6 +86,7 @@ impl SearchQuery {
|
|||
query: impl ToString,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
files_to_include: Vec<PathMatcher>,
|
||||
files_to_exclude: Vec<PathMatcher>,
|
||||
) -> Result<Self> {
|
||||
|
@ -111,6 +116,7 @@ impl SearchQuery {
|
|||
multiline,
|
||||
whole_word,
|
||||
case_sensitive,
|
||||
include_ignored,
|
||||
inner,
|
||||
})
|
||||
}
|
||||
|
@ -121,6 +127,7 @@ impl SearchQuery {
|
|||
message.query,
|
||||
message.whole_word,
|
||||
message.case_sensitive,
|
||||
message.include_ignored,
|
||||
deserialize_path_matches(&message.files_to_include)?,
|
||||
deserialize_path_matches(&message.files_to_exclude)?,
|
||||
)
|
||||
|
@ -129,6 +136,7 @@ impl SearchQuery {
|
|||
message.query,
|
||||
message.whole_word,
|
||||
message.case_sensitive,
|
||||
message.include_ignored,
|
||||
deserialize_path_matches(&message.files_to_include)?,
|
||||
deserialize_path_matches(&message.files_to_exclude)?,
|
||||
)
|
||||
|
@ -156,6 +164,7 @@ impl SearchQuery {
|
|||
regex: self.is_regex(),
|
||||
whole_word: self.whole_word(),
|
||||
case_sensitive: self.case_sensitive(),
|
||||
include_ignored: self.include_ignored(),
|
||||
files_to_include: self
|
||||
.files_to_include()
|
||||
.iter()
|
||||
|
@ -336,6 +345,17 @@ impl SearchQuery {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn include_ignored(&self) -> bool {
|
||||
match self {
|
||||
Self::Text {
|
||||
include_ignored, ..
|
||||
} => *include_ignored,
|
||||
Self::Regex {
|
||||
include_ignored, ..
|
||||
} => *include_ignored,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_regex(&self) -> bool {
|
||||
matches!(self, Self::Regex { .. })
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use crate::{
|
||||
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
|
||||
copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
|
||||
ProjectEntryId, RemoveOptions,
|
||||
};
|
||||
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
|
@ -21,7 +22,10 @@ use futures::{
|
|||
};
|
||||
use fuzzy::CharBag;
|
||||
use git::{DOT_GIT, GITIGNORE};
|
||||
use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
|
||||
use gpui::{
|
||||
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
proto::{
|
||||
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
|
||||
|
@ -36,6 +40,7 @@ use postage::{
|
|||
prelude::{Sink as _, Stream as _},
|
||||
watch,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use smol::channel::{self, Sender};
|
||||
use std::{
|
||||
any::Any,
|
||||
|
@ -55,7 +60,10 @@ use std::{
|
|||
time::{Duration, SystemTime},
|
||||
};
|
||||
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
|
||||
use util::{paths::HOME, ResultExt};
|
||||
use util::{
|
||||
paths::{PathMatcher, HOME},
|
||||
ResultExt,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
|
||||
pub struct WorktreeId(usize);
|
||||
|
@ -70,7 +78,8 @@ pub struct LocalWorktree {
|
|||
scan_requests_tx: channel::Sender<ScanRequest>,
|
||||
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
|
||||
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
|
||||
_background_scanner_task: Task<()>,
|
||||
_settings_subscription: Subscription,
|
||||
_background_scanner_tasks: Vec<Task<()>>,
|
||||
share: Option<ShareState>,
|
||||
diagnostics: HashMap<
|
||||
Arc<Path>,
|
||||
|
@ -216,6 +225,7 @@ pub struct LocalSnapshot {
|
|||
/// All of the git repositories in the worktree, indexed by the project entry
|
||||
/// id of their parent directory.
|
||||
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
|
||||
file_scan_exclusions: Vec<PathMatcher>,
|
||||
}
|
||||
|
||||
struct BackgroundScannerState {
|
||||
|
@ -299,17 +309,54 @@ impl Worktree {
|
|||
.await
|
||||
.context("failed to stat worktree path")?;
|
||||
|
||||
let closure_fs = Arc::clone(&fs);
|
||||
let closure_next_entry_id = Arc::clone(&next_entry_id);
|
||||
let closure_abs_path = abs_path.to_path_buf();
|
||||
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
|
||||
let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
|
||||
if let Self::Local(this) = this {
|
||||
let new_file_scan_exclusions =
|
||||
file_scan_exclusions(settings::get::<ProjectSettings>(cx));
|
||||
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
|
||||
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
|
||||
log::info!(
|
||||
"Re-scanning directories, new scan exclude files: {:?}",
|
||||
this.snapshot
|
||||
.file_scan_exclusions
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
|
||||
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
|
||||
channel::unbounded();
|
||||
this.scan_requests_tx = scan_requests_tx;
|
||||
this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
|
||||
this._background_scanner_tasks = start_background_scan_tasks(
|
||||
&closure_abs_path,
|
||||
this.snapshot(),
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
Arc::clone(&closure_next_entry_id),
|
||||
Arc::clone(&closure_fs),
|
||||
cx,
|
||||
);
|
||||
this.is_scanning = watch::channel_with(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let root_name = abs_path
|
||||
.file_name()
|
||||
.map_or(String::new(), |f| f.to_string_lossy().to_string());
|
||||
|
||||
let mut snapshot = LocalSnapshot {
|
||||
file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
|
||||
ignores_by_parent_abs_path: Default::default(),
|
||||
git_repositories: Default::default(),
|
||||
snapshot: Snapshot {
|
||||
id: WorktreeId::from_usize(cx.model_id()),
|
||||
abs_path: abs_path.clone(),
|
||||
abs_path: abs_path.to_path_buf().into(),
|
||||
root_name: root_name.clone(),
|
||||
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
|
||||
entries_by_path: Default::default(),
|
||||
|
@ -334,60 +381,23 @@ impl Worktree {
|
|||
|
||||
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
|
||||
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
|
||||
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
|
||||
|
||||
cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
match state {
|
||||
ScanState::Started => {
|
||||
*this.is_scanning.0.borrow_mut() = true;
|
||||
}
|
||||
ScanState::Updated {
|
||||
snapshot,
|
||||
changes,
|
||||
barrier,
|
||||
scanning,
|
||||
} => {
|
||||
*this.is_scanning.0.borrow_mut() = scanning;
|
||||
this.set_snapshot(snapshot, changes, cx);
|
||||
drop(barrier);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let background_scanner_task = cx.background().spawn({
|
||||
let fs = fs.clone();
|
||||
let snapshot = snapshot.clone();
|
||||
let background = cx.background().clone();
|
||||
async move {
|
||||
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
|
||||
BackgroundScanner::new(
|
||||
snapshot,
|
||||
next_entry_id,
|
||||
fs,
|
||||
scan_states_tx,
|
||||
background,
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
)
|
||||
.run(events)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
|
||||
let task_snapshot = snapshot.clone();
|
||||
Worktree::Local(LocalWorktree {
|
||||
snapshot,
|
||||
is_scanning: watch::channel_with(true),
|
||||
share: None,
|
||||
scan_requests_tx,
|
||||
path_prefixes_to_scan_tx,
|
||||
_background_scanner_task: background_scanner_task,
|
||||
_settings_subscription: settings_subscription,
|
||||
_background_scanner_tasks: start_background_scan_tasks(
|
||||
&abs_path,
|
||||
task_snapshot,
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
Arc::clone(&next_entry_id),
|
||||
Arc::clone(&fs),
|
||||
cx,
|
||||
),
|
||||
diagnostics: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
client,
|
||||
|
@ -584,6 +594,76 @@ impl Worktree {
|
|||
}
|
||||
}
|
||||
|
||||
fn start_background_scan_tasks(
|
||||
abs_path: &Path,
|
||||
snapshot: LocalSnapshot,
|
||||
scan_requests_rx: channel::Receiver<ScanRequest>,
|
||||
path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
|
||||
next_entry_id: Arc<AtomicUsize>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut ModelContext<'_, Worktree>,
|
||||
) -> Vec<Task<()>> {
|
||||
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
|
||||
let background_scanner = cx.background().spawn({
|
||||
let abs_path = abs_path.to_path_buf();
|
||||
let background = cx.background().clone();
|
||||
async move {
|
||||
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
|
||||
BackgroundScanner::new(
|
||||
snapshot,
|
||||
next_entry_id,
|
||||
fs,
|
||||
scan_states_tx,
|
||||
background,
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
)
|
||||
.run(events)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
|
||||
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
match state {
|
||||
ScanState::Started => {
|
||||
*this.is_scanning.0.borrow_mut() = true;
|
||||
}
|
||||
ScanState::Updated {
|
||||
snapshot,
|
||||
changes,
|
||||
barrier,
|
||||
scanning,
|
||||
} => {
|
||||
*this.is_scanning.0.borrow_mut() = scanning;
|
||||
this.set_snapshot(snapshot, changes, cx);
|
||||
drop(barrier);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
});
|
||||
vec![background_scanner, scan_state_updater]
|
||||
}
|
||||
|
||||
fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
|
||||
project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
|
||||
.sorted()
|
||||
.filter_map(|pattern| {
|
||||
PathMatcher::new(pattern)
|
||||
.map(Some)
|
||||
.unwrap_or_else(|e| {
|
||||
log::error!(
|
||||
"Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
|
||||
);
|
||||
None
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
impl LocalWorktree {
|
||||
pub fn contains_abs_path(&self, path: &Path) -> bool {
|
||||
path.starts_with(&self.abs_path)
|
||||
|
@ -1481,7 +1561,7 @@ impl Snapshot {
|
|||
self.entries_by_id.get(&entry_id, &()).is_some()
|
||||
}
|
||||
|
||||
pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
|
||||
fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
|
||||
let entry = Entry::try_from((&self.root_char_bag, entry))?;
|
||||
let old_entry = self.entries_by_id.insert_or_replace(
|
||||
PathEntry {
|
||||
|
@ -2145,6 +2225,12 @@ impl LocalSnapshot {
|
|||
paths.sort_by(|a, b| a.0.cmp(b.0));
|
||||
paths
|
||||
}
|
||||
|
||||
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
|
||||
self.file_scan_exclusions
|
||||
.iter()
|
||||
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
|
||||
}
|
||||
}
|
||||
|
||||
impl BackgroundScannerState {
|
||||
|
@ -2167,7 +2253,7 @@ impl BackgroundScannerState {
|
|||
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
|
||||
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
|
||||
let mut containing_repository = None;
|
||||
if !ignore_stack.is_all() {
|
||||
if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
|
||||
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
|
||||
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
|
||||
containing_repository = Some((
|
||||
|
@ -2378,18 +2464,30 @@ impl BackgroundScannerState {
|
|||
|
||||
// Remove any git repositories whose .git entry no longer exists.
|
||||
let snapshot = &mut self.snapshot;
|
||||
let mut repositories = mem::take(&mut snapshot.git_repositories);
|
||||
let mut repository_entries = mem::take(&mut snapshot.repository_entries);
|
||||
repositories.retain(|work_directory_id, _| {
|
||||
snapshot
|
||||
.entry_for_id(*work_directory_id)
|
||||
let mut ids_to_preserve = HashSet::default();
|
||||
for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
|
||||
let exists_in_snapshot = snapshot
|
||||
.entry_for_id(work_directory_id)
|
||||
.map_or(false, |entry| {
|
||||
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
|
||||
})
|
||||
});
|
||||
repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
|
||||
snapshot.git_repositories = repositories;
|
||||
snapshot.repository_entries = repository_entries;
|
||||
});
|
||||
if exists_in_snapshot {
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
} else {
|
||||
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
|
||||
if snapshot.is_abs_path_excluded(&git_dir_abs_path)
|
||||
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
|
||||
{
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
snapshot
|
||||
.git_repositories
|
||||
.retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
|
||||
snapshot
|
||||
.repository_entries
|
||||
.retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
|
||||
}
|
||||
|
||||
fn build_git_repository(
|
||||
|
@ -3094,7 +3192,7 @@ impl BackgroundScanner {
|
|||
let ignore_stack = state
|
||||
.snapshot
|
||||
.ignore_stack_for_abs_path(&root_abs_path, true);
|
||||
if ignore_stack.is_all() {
|
||||
if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
|
||||
root_entry.is_ignored = true;
|
||||
state.insert_entry(root_entry.clone(), self.fs.as_ref());
|
||||
}
|
||||
|
@ -3231,14 +3329,22 @@ impl BackgroundScanner {
|
|||
return false;
|
||||
};
|
||||
|
||||
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
||||
snapshot
|
||||
.entry_for_path(parent)
|
||||
.map_or(false, |entry| entry.kind == EntryKind::Dir)
|
||||
});
|
||||
if !parent_dir_is_loaded {
|
||||
log::debug!("ignoring event {relative_path:?} within unloaded directory");
|
||||
return false;
|
||||
if !is_git_related(&abs_path) {
|
||||
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
||||
snapshot
|
||||
.entry_for_path(parent)
|
||||
.map_or(false, |entry| entry.kind == EntryKind::Dir)
|
||||
});
|
||||
if !parent_dir_is_loaded {
|
||||
log::debug!("ignoring event {relative_path:?} within unloaded directory");
|
||||
return false;
|
||||
}
|
||||
if snapshot.is_abs_path_excluded(abs_path) {
|
||||
log::debug!(
|
||||
"ignoring FS event for path {relative_path:?} within excluded directory"
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
relative_paths.push(relative_path);
|
||||
|
@ -3401,18 +3507,26 @@ impl BackgroundScanner {
|
|||
}
|
||||
|
||||
async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
|
||||
log::debug!("scan directory {:?}", job.path);
|
||||
|
||||
let mut ignore_stack = job.ignore_stack.clone();
|
||||
let mut new_ignore = None;
|
||||
let (root_abs_path, root_char_bag, next_entry_id) = {
|
||||
let snapshot = &self.state.lock().snapshot;
|
||||
(
|
||||
snapshot.abs_path().clone(),
|
||||
snapshot.root_char_bag,
|
||||
self.next_entry_id.clone(),
|
||||
)
|
||||
};
|
||||
let root_abs_path;
|
||||
let mut ignore_stack;
|
||||
let mut new_ignore;
|
||||
let root_char_bag;
|
||||
let next_entry_id;
|
||||
{
|
||||
let state = self.state.lock();
|
||||
let snapshot = &state.snapshot;
|
||||
root_abs_path = snapshot.abs_path().clone();
|
||||
if snapshot.is_abs_path_excluded(&job.abs_path) {
|
||||
log::error!("skipping excluded directory {:?}", job.path);
|
||||
return Ok(());
|
||||
}
|
||||
log::debug!("scanning directory {:?}", job.path);
|
||||
ignore_stack = job.ignore_stack.clone();
|
||||
new_ignore = None;
|
||||
root_char_bag = snapshot.root_char_bag;
|
||||
next_entry_id = self.next_entry_id.clone();
|
||||
drop(state);
|
||||
}
|
||||
|
||||
let mut dotgit_path = None;
|
||||
let mut root_canonical_path = None;
|
||||
|
@ -3427,18 +3541,8 @@ impl BackgroundScanner {
|
|||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let child_name = child_abs_path.file_name().unwrap();
|
||||
let child_path: Arc<Path> = job.path.join(child_name).into();
|
||||
let child_metadata = match self.fs.metadata(&child_abs_path).await {
|
||||
Ok(Some(metadata)) => metadata,
|
||||
Ok(None) => continue,
|
||||
Err(err) => {
|
||||
log::error!("error processing {:?}: {:?}", child_abs_path, err);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
|
||||
if child_name == *GITIGNORE {
|
||||
match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
|
||||
|
@ -3482,6 +3586,26 @@ impl BackgroundScanner {
|
|||
dotgit_path = Some(child_path.clone());
|
||||
}
|
||||
|
||||
{
|
||||
let mut state = self.state.lock();
|
||||
if state.snapshot.is_abs_path_excluded(&child_abs_path) {
|
||||
let relative_path = job.path.join(child_name);
|
||||
log::debug!("skipping excluded child entry {relative_path:?}");
|
||||
state.remove_path(&relative_path);
|
||||
continue;
|
||||
}
|
||||
drop(state);
|
||||
}
|
||||
|
||||
let child_metadata = match self.fs.metadata(&child_abs_path).await {
|
||||
Ok(Some(metadata)) => metadata,
|
||||
Ok(None) => continue,
|
||||
Err(err) => {
|
||||
log::error!("error processing {child_abs_path:?}: {err:?}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut child_entry = Entry::new(
|
||||
child_path.clone(),
|
||||
&child_metadata,
|
||||
|
@ -3662,19 +3786,16 @@ impl BackgroundScanner {
|
|||
self.next_entry_id.as_ref(),
|
||||
state.snapshot.root_char_bag,
|
||||
);
|
||||
fs_entry.is_ignored = ignore_stack.is_all();
|
||||
let is_dir = fs_entry.is_dir();
|
||||
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
|
||||
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
|
||||
|
||||
if !fs_entry.is_ignored {
|
||||
if !fs_entry.is_dir() {
|
||||
if let Some((work_dir, repo)) =
|
||||
state.snapshot.local_repo_for_path(&path)
|
||||
{
|
||||
if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
|
||||
let repo_path = RepoPath(repo_path.into());
|
||||
let repo = repo.repo_ptr.lock();
|
||||
fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
|
||||
}
|
||||
if !is_dir && !fs_entry.is_ignored {
|
||||
if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
|
||||
if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
|
||||
let repo_path = RepoPath(repo_path.into());
|
||||
let repo = repo.repo_ptr.lock();
|
||||
fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3833,8 +3954,7 @@ impl BackgroundScanner {
|
|||
ignore_stack.clone()
|
||||
};
|
||||
|
||||
// Scan any directories that were previously ignored and weren't
|
||||
// previously scanned.
|
||||
// Scan any directories that were previously ignored and weren't previously scanned.
|
||||
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
|
||||
let state = self.state.lock();
|
||||
if state.should_scan_directory(&entry) {
|
||||
|
@ -4010,6 +4130,12 @@ impl BackgroundScanner {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_git_related(abs_path: &Path) -> bool {
|
||||
abs_path
|
||||
.components()
|
||||
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
|
||||
}
|
||||
|
||||
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
|
||||
let mut result = root_char_bag;
|
||||
result.extend(
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use crate::{
|
||||
project_settings::ProjectSettings,
|
||||
worktree::{Event, Snapshot, WorktreeModelHandle},
|
||||
Entry, EntryKind, PathChange, Worktree,
|
||||
Entry, EntryKind, PathChange, Project, Worktree,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use client::Client;
|
||||
|
@ -12,6 +13,7 @@ use postage::stream::Stream;
|
|||
use pretty_assertions::assert_eq;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{
|
||||
env,
|
||||
fmt::Write,
|
||||
|
@ -23,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_traversal(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -78,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_descendent_entries(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -185,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -264,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -439,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -599,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -722,6 +730,14 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions = Some(Vec::new());
|
||||
});
|
||||
});
|
||||
});
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -827,6 +843,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_write_file(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let dir = temp_tree(json!({
|
||||
".git": {},
|
||||
".gitignore": "ignored-dir\n",
|
||||
|
@ -877,8 +894,105 @@ async fn test_write_file(cx: &mut TestAppContext) {
|
|||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
    // Verifies that `file_scan_exclusions` removes matching paths from the
    // worktree entirely (not merely marking them ignored), and that changing
    // the setting at runtime triggers a rescan that picks the new rules up.
    init_test(cx);
    // Real (temp-dir) tree: gitignored dirs `target` and `node_modules`
    // alongside `.DS_Store` files scattered at several depths.
    let dir = temp_tree(json!({
        ".gitignore": "**/target\n/node_modules\n",
        "target": {
            "index": "blah2"
        },
        "node_modules": {
            ".DS_Store": "",
            "prettier": {
                "package.json": "{}",
            },
        },
        "src": {
            ".DS_Store": "",
            "foo": {
                "foo.rs": "mod another;\n",
                "another.rs": "// another",
            },
            "bar": {
                "bar.rs": "// bar",
            },
            "lib.rs": "mod foo;\nmod bar;\n",
        },
        ".DS_Store": "",
    }));
    // Exclude everything under any `foo` directory plus all `.DS_Store`
    // files, *before* the worktree is created, so the initial scan applies
    // the exclusions.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions =
                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
            });
        });
    });

    let tree = Worktree::local(
        build_client(cx),
        dir.path(),
        true,
        Arc::new(RealFs),
        Default::default(),
        &mut cx.to_async(),
    )
    .await
    .unwrap();
    // Wait for the initial scan to finish and for pending FS events to drain
    // so the assertions below see a settled snapshot.
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            // Excluded: absent from the worktree entirely.
            &[
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "node_modules/.DS_Store",
                "src/.DS_Store",
                ".DS_Store",
            ],
            // Gitignored (via .gitignore), but still present as entries.
            &["target", "node_modules"],
            // Ordinary tracked files.
            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
        )
    });

    // Swap the exclusions at runtime: now only `node_modules` contents are
    // excluded, so previously-excluded `foo`/`.DS_Store` paths must reappear.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions =
                    Some(vec!["**/node_modules/**".to_string()]);
            });
        });
    });
    tree.flush_fs_events(cx).await;
    // Let the settings-observer-triggered rescan run to completion.
    cx.foreground().run_until_parked();
    tree.read_with(cx, |tree, _| {
        check_worktree_entries(
            tree,
            // NOTE(review): `node_modules` itself is listed as excluded here
            // even though the glob is `**/node_modules/**` — presumably the
            // directory entry vanishes once all its contents are excluded;
            // confirm against scanner semantics.
            &[
                "node_modules/prettier/package.json",
                "node_modules/.DS_Store",
                "node_modules",
            ],
            &["target"],
            &[
                ".gitignore",
                "src/lib.rs",
                "src/bar/bar.rs",
                "src/foo/foo.rs",
                "src/foo/another.rs",
                "src/.DS_Store",
                ".DS_Store",
            ],
        )
    });
}
|
||||
|
||||
#[gpui::test(iterations = 30)]
|
||||
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -938,6 +1052,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
|
||||
|
||||
let fs_fake = FakeFs::new(cx.background());
|
||||
|
@ -1054,6 +1169,7 @@ async fn test_random_worktree_operations_during_initial_scan(
|
|||
cx: &mut TestAppContext,
|
||||
mut rng: StdRng,
|
||||
) {
|
||||
init_test(cx);
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|o| o.parse().unwrap())
|
||||
.unwrap_or(5);
|
||||
|
@ -1143,6 +1259,7 @@ async fn test_random_worktree_operations_during_initial_scan(
|
|||
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
|
||||
init_test(cx);
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|o| o.parse().unwrap())
|
||||
.unwrap_or(40);
|
||||
|
@ -1557,6 +1674,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_rename_work_directory(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let root = temp_tree(json!({
|
||||
"projects": {
|
||||
"project1": {
|
||||
|
@ -1627,6 +1745,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let root = temp_tree(json!({
|
||||
"c.txt": "",
|
||||
"dir1": {
|
||||
|
@ -1747,6 +1866,15 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions =
|
||||
Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
|
||||
});
|
||||
});
|
||||
});
|
||||
const IGNORE_RULE: &'static str = "**/target";
|
||||
|
||||
let root = temp_tree(json!({
|
||||
|
@ -1935,6 +2063,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
|
@ -2139,3 +2268,44 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Sta
|
|||
.map(|status| (status.path().unwrap().to_string(), status.status()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn check_worktree_entries(
|
||||
tree: &Worktree,
|
||||
expected_excluded_paths: &[&str],
|
||||
expected_ignored_paths: &[&str],
|
||||
expected_tracked_paths: &[&str],
|
||||
) {
|
||||
for path in expected_excluded_paths {
|
||||
let entry = tree.entry_for_path(path);
|
||||
assert!(
|
||||
entry.is_none(),
|
||||
"expected path '{path}' to be excluded, but got entry: {entry:?}",
|
||||
);
|
||||
}
|
||||
for path in expected_ignored_paths {
|
||||
let entry = tree
|
||||
.entry_for_path(path)
|
||||
.unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
|
||||
assert!(
|
||||
entry.is_ignored,
|
||||
"expected path '{path}' to be ignored, but got entry: {entry:?}",
|
||||
);
|
||||
}
|
||||
for path in expected_tracked_paths {
|
||||
let entry = tree
|
||||
.entry_for_path(path)
|
||||
.unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
|
||||
assert!(
|
||||
!entry.is_ignored,
|
||||
"expected path '{path}' to be tracked, but got entry: {entry:?}",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -20,10 +20,6 @@ impl IgnoreStack {
|
|||
Arc::new(Self::All)
|
||||
}
|
||||
|
||||
pub fn is_all(&self) -> bool {
|
||||
matches!(self, IgnoreStack::All)
|
||||
}
|
||||
|
||||
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
|
||||
match self.as_ref() {
|
||||
IgnoreStack::All => self,
|
||||
|
|
|
@ -5618,7 +5618,16 @@ impl Project {
|
|||
.collect::<Vec<_>>();
|
||||
|
||||
let background = cx.background_executor().clone();
|
||||
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
|
||||
let path_count: usize = snapshots
|
||||
.iter()
|
||||
.map(|s| {
|
||||
if query.include_ignored() {
|
||||
s.file_count()
|
||||
} else {
|
||||
s.visible_file_count()
|
||||
}
|
||||
})
|
||||
.sum();
|
||||
if path_count == 0 {
|
||||
let (_, rx) = smol::channel::bounded(1024);
|
||||
return rx;
|
||||
|
@ -5631,8 +5640,16 @@ impl Project {
|
|||
.iter()
|
||||
.filter_map(|(_, b)| {
|
||||
let buffer = b.upgrade()?;
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
if let Some(path) = snapshot.file().map(|file| file.path()) {
|
||||
let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
|
||||
let is_ignored = buffer
|
||||
.project_path(cx)
|
||||
.and_then(|path| self.entry_for_path(&path, cx))
|
||||
.map_or(false, |entry| entry.is_ignored);
|
||||
(is_ignored, buffer.snapshot())
|
||||
});
|
||||
if is_ignored && !query.include_ignored() {
|
||||
return None;
|
||||
} else if let Some(path) = snapshot.file().map(|file| file.path()) {
|
||||
Some((path.clone(), (buffer, snapshot)))
|
||||
} else {
|
||||
unnamed_files.push(buffer);
|
||||
|
@ -5806,7 +5823,12 @@ impl Project {
|
|||
let mut snapshot_start_ix = 0;
|
||||
let mut abs_path = PathBuf::new();
|
||||
for snapshot in snapshots {
|
||||
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
|
||||
let snapshot_end_ix = snapshot_start_ix
|
||||
+ if query.include_ignored() {
|
||||
snapshot.file_count()
|
||||
} else {
|
||||
snapshot.visible_file_count()
|
||||
};
|
||||
if worker_end_ix <= snapshot_start_ix {
|
||||
break;
|
||||
} else if worker_start_ix > snapshot_end_ix {
|
||||
|
@ -5819,7 +5841,7 @@ impl Project {
|
|||
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
|
||||
|
||||
for entry in snapshot
|
||||
.files(false, start_in_snapshot)
|
||||
.files(query.include_ignored(), start_in_snapshot)
|
||||
.take(end_in_snapshot - start_in_snapshot)
|
||||
{
|
||||
if matching_paths_tx.is_closed() {
|
||||
|
|
|
@ -11,6 +11,8 @@ pub struct ProjectSettings {
|
|||
pub lsp: HashMap<Arc<str>, LspSettings>,
|
||||
#[serde(default)]
|
||||
pub git: GitSettings,
|
||||
#[serde(default)]
|
||||
pub file_scan_exclusions: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
|
||||
|
|
|
@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
|
@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
|
@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
Vec::new()
|
||||
)
|
||||
|
@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.rs").unwrap()],
|
||||
Vec::new()
|
||||
)
|
||||
|
@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap(),
|
||||
|
@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.rs").unwrap(),
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
|
@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
)
|
||||
|
@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![PathMatcher::new("*.rs").unwrap()],
|
||||
)
|
||||
|
@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
|
@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
Vec::new(),
|
||||
vec![
|
||||
PathMatcher::new("*.rs").unwrap(),
|
||||
|
@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
vec![PathMatcher::new("*.odd").unwrap()],
|
||||
)
|
||||
|
@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
).unwrap(),
|
||||
|
@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap()
|
||||
|
@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
|||
search_query,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
vec![
|
||||
PathMatcher::new("*.ts").unwrap(),
|
||||
PathMatcher::new("*.odd").unwrap()
|
||||
|
|
|
@ -39,6 +39,7 @@ pub enum SearchQuery {
|
|||
replacement: Option<String>,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
inner: SearchInputs,
|
||||
},
|
||||
|
||||
|
@ -48,6 +49,7 @@ pub enum SearchQuery {
|
|||
multiline: bool,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
inner: SearchInputs,
|
||||
},
|
||||
}
|
||||
|
@ -57,6 +59,7 @@ impl SearchQuery {
|
|||
query: impl ToString,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
files_to_include: Vec<PathMatcher>,
|
||||
files_to_exclude: Vec<PathMatcher>,
|
||||
) -> Result<Self> {
|
||||
|
@ -74,6 +77,7 @@ impl SearchQuery {
|
|||
replacement: None,
|
||||
whole_word,
|
||||
case_sensitive,
|
||||
include_ignored,
|
||||
inner,
|
||||
})
|
||||
}
|
||||
|
@ -82,6 +86,7 @@ impl SearchQuery {
|
|||
query: impl ToString,
|
||||
whole_word: bool,
|
||||
case_sensitive: bool,
|
||||
include_ignored: bool,
|
||||
files_to_include: Vec<PathMatcher>,
|
||||
files_to_exclude: Vec<PathMatcher>,
|
||||
) -> Result<Self> {
|
||||
|
@ -111,6 +116,7 @@ impl SearchQuery {
|
|||
multiline,
|
||||
whole_word,
|
||||
case_sensitive,
|
||||
include_ignored,
|
||||
inner,
|
||||
})
|
||||
}
|
||||
|
@ -121,6 +127,7 @@ impl SearchQuery {
|
|||
message.query,
|
||||
message.whole_word,
|
||||
message.case_sensitive,
|
||||
message.include_ignored,
|
||||
deserialize_path_matches(&message.files_to_include)?,
|
||||
deserialize_path_matches(&message.files_to_exclude)?,
|
||||
)
|
||||
|
@ -129,6 +136,7 @@ impl SearchQuery {
|
|||
message.query,
|
||||
message.whole_word,
|
||||
message.case_sensitive,
|
||||
message.include_ignored,
|
||||
deserialize_path_matches(&message.files_to_include)?,
|
||||
deserialize_path_matches(&message.files_to_exclude)?,
|
||||
)
|
||||
|
@ -156,6 +164,7 @@ impl SearchQuery {
|
|||
regex: self.is_regex(),
|
||||
whole_word: self.whole_word(),
|
||||
case_sensitive: self.case_sensitive(),
|
||||
include_ignored: self.include_ignored(),
|
||||
files_to_include: self
|
||||
.files_to_include()
|
||||
.iter()
|
||||
|
@ -336,6 +345,17 @@ impl SearchQuery {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn include_ignored(&self) -> bool {
|
||||
match self {
|
||||
Self::Text {
|
||||
include_ignored, ..
|
||||
} => *include_ignored,
|
||||
Self::Regex {
|
||||
include_ignored, ..
|
||||
} => *include_ignored,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_regex(&self) -> bool {
|
||||
matches!(self, Self::Regex { .. })
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use crate::{
|
||||
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
|
||||
copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
|
||||
ProjectEntryId, RemoveOptions,
|
||||
};
|
||||
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
|
@ -25,6 +26,7 @@ use gpui::{
|
|||
AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
|
||||
Task,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
proto::{
|
||||
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
|
||||
|
@ -39,6 +41,7 @@ use postage::{
|
|||
prelude::{Sink as _, Stream as _},
|
||||
watch,
|
||||
};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use smol::channel::{self, Sender};
|
||||
use std::{
|
||||
any::Any,
|
||||
|
@ -58,7 +61,10 @@ use std::{
|
|||
time::{Duration, SystemTime},
|
||||
};
|
||||
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
|
||||
use util::{paths::HOME, ResultExt};
|
||||
use util::{
|
||||
paths::{PathMatcher, HOME},
|
||||
ResultExt,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
|
||||
pub struct WorktreeId(usize);
|
||||
|
@ -73,7 +79,7 @@ pub struct LocalWorktree {
|
|||
scan_requests_tx: channel::Sender<ScanRequest>,
|
||||
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
|
||||
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
|
||||
_background_scanner_task: Task<()>,
|
||||
_background_scanner_tasks: Vec<Task<()>>,
|
||||
share: Option<ShareState>,
|
||||
diagnostics: HashMap<
|
||||
Arc<Path>,
|
||||
|
@ -219,6 +225,7 @@ pub struct LocalSnapshot {
|
|||
/// All of the git repositories in the worktree, indexed by the project entry
|
||||
/// id of their parent directory.
|
||||
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
|
||||
file_scan_exclusions: Vec<PathMatcher>,
|
||||
}
|
||||
|
||||
struct BackgroundScannerState {
|
||||
|
@ -302,17 +309,56 @@ impl Worktree {
|
|||
.await
|
||||
.context("failed to stat worktree path")?;
|
||||
|
||||
let closure_fs = Arc::clone(&fs);
|
||||
let closure_next_entry_id = Arc::clone(&next_entry_id);
|
||||
let closure_abs_path = abs_path.to_path_buf();
|
||||
cx.build_model(move |cx: &mut ModelContext<Worktree>| {
|
||||
cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||
if let Self::Local(this) = this {
|
||||
let new_file_scan_exclusions =
|
||||
file_scan_exclusions(ProjectSettings::get_global(cx));
|
||||
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
|
||||
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
|
||||
log::info!(
|
||||
"Re-scanning directories, new scan exclude files: {:?}",
|
||||
this.snapshot
|
||||
.file_scan_exclusions
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
|
||||
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
|
||||
channel::unbounded();
|
||||
this.scan_requests_tx = scan_requests_tx;
|
||||
this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
|
||||
this._background_scanner_tasks = start_background_scan_tasks(
|
||||
&closure_abs_path,
|
||||
this.snapshot(),
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
Arc::clone(&closure_next_entry_id),
|
||||
Arc::clone(&closure_fs),
|
||||
cx,
|
||||
);
|
||||
this.is_scanning = watch::channel_with(true);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let root_name = abs_path
|
||||
.file_name()
|
||||
.map_or(String::new(), |f| f.to_string_lossy().to_string());
|
||||
|
||||
let mut snapshot = LocalSnapshot {
|
||||
file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
|
||||
ignores_by_parent_abs_path: Default::default(),
|
||||
git_repositories: Default::default(),
|
||||
snapshot: Snapshot {
|
||||
id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
|
||||
abs_path: abs_path.clone(),
|
||||
abs_path: abs_path.to_path_buf().into(),
|
||||
root_name: root_name.clone(),
|
||||
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
|
||||
entries_by_path: Default::default(),
|
||||
|
@ -337,61 +383,22 @@ impl Worktree {
|
|||
|
||||
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
|
||||
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
|
||||
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let this = this.as_local_mut().unwrap();
|
||||
match state {
|
||||
ScanState::Started => {
|
||||
*this.is_scanning.0.borrow_mut() = true;
|
||||
}
|
||||
ScanState::Updated {
|
||||
snapshot,
|
||||
changes,
|
||||
barrier,
|
||||
scanning,
|
||||
} => {
|
||||
*this.is_scanning.0.borrow_mut() = scanning;
|
||||
this.set_snapshot(snapshot, changes, cx);
|
||||
drop(barrier);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let background_scanner_task = cx.background_executor().spawn({
|
||||
let fs = fs.clone();
|
||||
let snapshot = snapshot.clone();
|
||||
let background = cx.background_executor().clone();
|
||||
async move {
|
||||
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
|
||||
BackgroundScanner::new(
|
||||
snapshot,
|
||||
next_entry_id,
|
||||
fs,
|
||||
scan_states_tx,
|
||||
background,
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
)
|
||||
.run(events)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
|
||||
let task_snapshot = snapshot.clone();
|
||||
Worktree::Local(LocalWorktree {
|
||||
snapshot,
|
||||
is_scanning: watch::channel_with(true),
|
||||
share: None,
|
||||
scan_requests_tx,
|
||||
path_prefixes_to_scan_tx,
|
||||
_background_scanner_task: background_scanner_task,
|
||||
_background_scanner_tasks: start_background_scan_tasks(
|
||||
&abs_path,
|
||||
task_snapshot,
|
||||
scan_requests_rx,
|
||||
path_prefixes_to_scan_rx,
|
||||
Arc::clone(&next_entry_id),
|
||||
Arc::clone(&fs),
|
||||
cx,
|
||||
),
|
||||
diagnostics: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
client,
|
||||
|
@ -584,6 +591,77 @@ impl Worktree {
|
|||
}
|
||||
}
|
||||
|
||||
/// Spawns the two long-lived tasks that keep a local worktree in sync with
/// the filesystem: a `BackgroundScanner` watching `abs_path` that emits
/// `ScanState` messages, and an updater task that applies those messages to
/// the worktree model. Returns both task handles; the caller must keep them
/// alive, since dropping them cancels the scan (see
/// `LocalWorktree::_background_scanner_tasks`).
fn start_background_scan_tasks(
    abs_path: &Path,
    snapshot: LocalSnapshot,
    scan_requests_rx: channel::Receiver<ScanRequest>,
    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
    next_entry_id: Arc<AtomicUsize>,
    fs: Arc<dyn Fs>,
    cx: &mut ModelContext<'_, Worktree>,
) -> Vec<Task<()>> {
    // Channel connecting the scanner (producer) to the updater (consumer).
    let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
    let background_scanner = cx.background_executor().spawn({
        let abs_path = abs_path.to_path_buf();
        let background = cx.background_executor().clone();
        async move {
            // Watch the root with a 100ms latency window before scanning
            // (presumably coalescing bursts of FS events — see `Fs::watch`).
            let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
            BackgroundScanner::new(
                snapshot,
                next_entry_id,
                fs,
                scan_states_tx,
                background,
                scan_requests_rx,
                path_prefixes_to_scan_rx,
            )
            .run(events)
            .await;
        }
    });
    let scan_state_updater = cx.spawn(|this, mut cx| async move {
        // Runs until the scanner drops its sender or the worktree model is
        // released (`this.upgrade()` fails).
        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
            this.update(&mut cx, |this, cx| {
                let this = this.as_local_mut().unwrap();
                match state {
                    ScanState::Started => {
                        *this.is_scanning.0.borrow_mut() = true;
                    }
                    ScanState::Updated {
                        snapshot,
                        changes,
                        barrier,
                        scanning,
                    } => {
                        *this.is_scanning.0.borrow_mut() = scanning;
                        this.set_snapshot(snapshot, changes, cx);
                        // Releasing the barrier unblocks any caller awaiting
                        // this batch of changes.
                        drop(barrier);
                    }
                }
                cx.notify();
            })
            .ok();
        }
    });
    vec![background_scanner, scan_state_updater]
}
|
||||
|
||||
fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
|
||||
project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
|
||||
.sorted()
|
||||
.filter_map(|pattern| {
|
||||
PathMatcher::new(pattern)
|
||||
.map(Some)
|
||||
.unwrap_or_else(|e| {
|
||||
log::error!(
|
||||
"Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
|
||||
);
|
||||
None
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
impl LocalWorktree {
|
||||
pub fn contains_abs_path(&self, path: &Path) -> bool {
|
||||
path.starts_with(&self.abs_path)
|
||||
|
@ -1482,7 +1560,7 @@ impl Snapshot {
|
|||
self.entries_by_id.get(&entry_id, &()).is_some()
|
||||
}
|
||||
|
||||
pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
|
||||
fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
|
||||
let entry = Entry::try_from((&self.root_char_bag, entry))?;
|
||||
let old_entry = self.entries_by_id.insert_or_replace(
|
||||
PathEntry {
|
||||
|
@ -2143,6 +2221,12 @@ impl LocalSnapshot {
|
|||
paths.sort_by(|a, b| a.0.cmp(b.0));
|
||||
paths
|
||||
}
|
||||
|
||||
fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
|
||||
self.file_scan_exclusions
|
||||
.iter()
|
||||
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
|
||||
}
|
||||
}
|
||||
|
||||
impl BackgroundScannerState {
|
||||
|
@ -2165,7 +2249,7 @@ impl BackgroundScannerState {
|
|||
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
|
||||
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
|
||||
let mut containing_repository = None;
|
||||
if !ignore_stack.is_all() {
|
||||
if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
|
||||
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
|
||||
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
|
||||
containing_repository = Some((
|
||||
|
@ -2376,18 +2460,30 @@ impl BackgroundScannerState {
|
|||
|
||||
// Remove any git repositories whose .git entry no longer exists.
|
||||
let snapshot = &mut self.snapshot;
|
||||
let mut repositories = mem::take(&mut snapshot.git_repositories);
|
||||
let mut repository_entries = mem::take(&mut snapshot.repository_entries);
|
||||
repositories.retain(|work_directory_id, _| {
|
||||
snapshot
|
||||
.entry_for_id(*work_directory_id)
|
||||
let mut ids_to_preserve = HashSet::default();
|
||||
for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
|
||||
let exists_in_snapshot = snapshot
|
||||
.entry_for_id(work_directory_id)
|
||||
.map_or(false, |entry| {
|
||||
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
|
||||
})
|
||||
});
|
||||
repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
|
||||
snapshot.git_repositories = repositories;
|
||||
snapshot.repository_entries = repository_entries;
|
||||
});
|
||||
if exists_in_snapshot {
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
} else {
|
||||
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
|
||||
if snapshot.is_abs_path_excluded(&git_dir_abs_path)
|
||||
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
|
||||
{
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
snapshot
|
||||
.git_repositories
|
||||
.retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
|
||||
snapshot
|
||||
.repository_entries
|
||||
.retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
|
||||
}
|
||||
|
||||
fn build_git_repository(
|
||||
|
@ -3085,7 +3181,7 @@ impl BackgroundScanner {
|
|||
let ignore_stack = state
|
||||
.snapshot
|
||||
.ignore_stack_for_abs_path(&root_abs_path, true);
|
||||
if ignore_stack.is_all() {
|
||||
if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
|
||||
root_entry.is_ignored = true;
|
||||
state.insert_entry(root_entry.clone(), self.fs.as_ref());
|
||||
}
|
||||
|
@ -3222,14 +3318,22 @@ impl BackgroundScanner {
|
|||
return false;
|
||||
};
|
||||
|
||||
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
||||
snapshot
|
||||
.entry_for_path(parent)
|
||||
.map_or(false, |entry| entry.kind == EntryKind::Dir)
|
||||
});
|
||||
if !parent_dir_is_loaded {
|
||||
log::debug!("ignoring event {relative_path:?} within unloaded directory");
|
||||
return false;
|
||||
if !is_git_related(&abs_path) {
|
||||
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
||||
snapshot
|
||||
.entry_for_path(parent)
|
||||
.map_or(false, |entry| entry.kind == EntryKind::Dir)
|
||||
});
|
||||
if !parent_dir_is_loaded {
|
||||
log::debug!("ignoring event {relative_path:?} within unloaded directory");
|
||||
return false;
|
||||
}
|
||||
if snapshot.is_abs_path_excluded(abs_path) {
|
||||
log::debug!(
|
||||
"ignoring FS event for path {relative_path:?} within excluded directory"
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
relative_paths.push(relative_path);
|
||||
|
@ -3392,18 +3496,26 @@ impl BackgroundScanner {
|
|||
}
|
||||
|
||||
async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
|
||||
log::debug!("scan directory {:?}", job.path);
|
||||
|
||||
let mut ignore_stack = job.ignore_stack.clone();
|
||||
let mut new_ignore = None;
|
||||
let (root_abs_path, root_char_bag, next_entry_id) = {
|
||||
let snapshot = &self.state.lock().snapshot;
|
||||
(
|
||||
snapshot.abs_path().clone(),
|
||||
snapshot.root_char_bag,
|
||||
self.next_entry_id.clone(),
|
||||
)
|
||||
};
|
||||
let root_abs_path;
|
||||
let mut ignore_stack;
|
||||
let mut new_ignore;
|
||||
let root_char_bag;
|
||||
let next_entry_id;
|
||||
{
|
||||
let state = self.state.lock();
|
||||
let snapshot = &state.snapshot;
|
||||
root_abs_path = snapshot.abs_path().clone();
|
||||
if snapshot.is_abs_path_excluded(&job.abs_path) {
|
||||
log::error!("skipping excluded directory {:?}", job.path);
|
||||
return Ok(());
|
||||
}
|
||||
log::debug!("scanning directory {:?}", job.path);
|
||||
ignore_stack = job.ignore_stack.clone();
|
||||
new_ignore = None;
|
||||
root_char_bag = snapshot.root_char_bag;
|
||||
next_entry_id = self.next_entry_id.clone();
|
||||
drop(state);
|
||||
}
|
||||
|
||||
let mut dotgit_path = None;
|
||||
let mut root_canonical_path = None;
|
||||
|
@ -3418,18 +3530,8 @@ impl BackgroundScanner {
|
|||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let child_name = child_abs_path.file_name().unwrap();
|
||||
let child_path: Arc<Path> = job.path.join(child_name).into();
|
||||
let child_metadata = match self.fs.metadata(&child_abs_path).await {
|
||||
Ok(Some(metadata)) => metadata,
|
||||
Ok(None) => continue,
|
||||
Err(err) => {
|
||||
log::error!("error processing {:?}: {:?}", child_abs_path, err);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
|
||||
if child_name == *GITIGNORE {
|
||||
match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
|
||||
|
@ -3473,6 +3575,26 @@ impl BackgroundScanner {
|
|||
dotgit_path = Some(child_path.clone());
|
||||
}
|
||||
|
||||
{
|
||||
let mut state = self.state.lock();
|
||||
if state.snapshot.is_abs_path_excluded(&child_abs_path) {
|
||||
let relative_path = job.path.join(child_name);
|
||||
log::debug!("skipping excluded child entry {relative_path:?}");
|
||||
state.remove_path(&relative_path);
|
||||
continue;
|
||||
}
|
||||
drop(state);
|
||||
}
|
||||
|
||||
let child_metadata = match self.fs.metadata(&child_abs_path).await {
|
||||
Ok(Some(metadata)) => metadata,
|
||||
Ok(None) => continue,
|
||||
Err(err) => {
|
||||
log::error!("error processing {child_abs_path:?}: {err:?}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut child_entry = Entry::new(
|
||||
child_path.clone(),
|
||||
&child_metadata,
|
||||
|
@ -3653,19 +3775,16 @@ impl BackgroundScanner {
|
|||
self.next_entry_id.as_ref(),
|
||||
state.snapshot.root_char_bag,
|
||||
);
|
||||
fs_entry.is_ignored = ignore_stack.is_all();
|
||||
let is_dir = fs_entry.is_dir();
|
||||
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
|
||||
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
|
||||
|
||||
if !fs_entry.is_ignored {
|
||||
if !fs_entry.is_dir() {
|
||||
if let Some((work_dir, repo)) =
|
||||
state.snapshot.local_repo_for_path(&path)
|
||||
{
|
||||
if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
|
||||
let repo_path = RepoPath(repo_path.into());
|
||||
let repo = repo.repo_ptr.lock();
|
||||
fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
|
||||
}
|
||||
if !is_dir && !fs_entry.is_ignored {
|
||||
if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
|
||||
if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
|
||||
let repo_path = RepoPath(repo_path.into());
|
||||
let repo = repo.repo_ptr.lock();
|
||||
fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3824,8 +3943,7 @@ impl BackgroundScanner {
|
|||
ignore_stack.clone()
|
||||
};
|
||||
|
||||
// Scan any directories that were previously ignored and weren't
|
||||
// previously scanned.
|
||||
// Scan any directories that were previously ignored and weren't previously scanned.
|
||||
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
|
||||
let state = self.state.lock();
|
||||
if state.should_scan_directory(&entry) {
|
||||
|
@ -4001,6 +4119,12 @@ impl BackgroundScanner {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_git_related(abs_path: &Path) -> bool {
|
||||
abs_path
|
||||
.components()
|
||||
.any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
|
||||
}
|
||||
|
||||
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
|
||||
let mut result = root_char_bag;
|
||||
result.extend(
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1732,7 +1732,7 @@ mod tests {
|
|||
use super::*;
|
||||
use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle};
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::FakeFs;
|
||||
use project::{project_settings::ProjectSettings, FakeFs};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{
|
||||
|
@ -1832,6 +1832,123 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions =
|
||||
Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/root1",
|
||||
json!({
|
||||
".dockerignore": "",
|
||||
".git": {
|
||||
"HEAD": "",
|
||||
},
|
||||
"a": {
|
||||
"0": { "q": "", "r": "", "s": "" },
|
||||
"1": { "t": "", "u": "" },
|
||||
"2": { "v": "", "w": "", "x": "", "y": "" },
|
||||
},
|
||||
"b": {
|
||||
"3": { "Q": "" },
|
||||
"4": { "R": "", "S": "", "T": "", "U": "" },
|
||||
},
|
||||
"C": {
|
||||
"5": {},
|
||||
"6": { "V": "", "W": "" },
|
||||
"7": { "X": "" },
|
||||
"8": { "Y": {}, "Z": "" }
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
fs.insert_tree(
|
||||
"/root2",
|
||||
json!({
|
||||
"d": {
|
||||
"4": ""
|
||||
},
|
||||
"e": {}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
|
||||
let workspace = cx
|
||||
.add_window(|cx| Workspace::test_new(project.clone(), cx))
|
||||
.root(cx);
|
||||
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" > b",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" > d",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root1/b", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b <== selected",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" > d",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root2/d", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" v d <== selected",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root2/e", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" v d",
|
||||
" v e <== selected",
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 30)]
|
||||
async fn test_editing_files(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
@ -2929,6 +3046,12 @@ mod tests {
|
|||
workspace::init_settings(cx);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions = Some(Vec::new());
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -1571,7 +1571,7 @@ mod tests {
|
|||
use super::*;
|
||||
use gpui::{TestAppContext, View, VisualTestContext, WindowHandle};
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::FakeFs;
|
||||
use project::{project_settings::ProjectSettings, FakeFs};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{
|
||||
|
@ -1672,6 +1672,124 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions =
|
||||
Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/root1",
|
||||
json!({
|
||||
".dockerignore": "",
|
||||
".git": {
|
||||
"HEAD": "",
|
||||
},
|
||||
"a": {
|
||||
"0": { "q": "", "r": "", "s": "" },
|
||||
"1": { "t": "", "u": "" },
|
||||
"2": { "v": "", "w": "", "x": "", "y": "" },
|
||||
},
|
||||
"b": {
|
||||
"3": { "Q": "" },
|
||||
"4": { "R": "", "S": "", "T": "", "U": "" },
|
||||
},
|
||||
"C": {
|
||||
"5": {},
|
||||
"6": { "V": "", "W": "" },
|
||||
"7": { "X": "" },
|
||||
"8": { "Y": {}, "Z": "" }
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
fs.insert_tree(
|
||||
"/root2",
|
||||
json!({
|
||||
"d": {
|
||||
"4": ""
|
||||
},
|
||||
"e": {}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
|
||||
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
let panel = workspace
|
||||
.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" > b",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" > d",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root1/b", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b <== selected",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" > d",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root2/d", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" v d <== selected",
|
||||
" > e",
|
||||
]
|
||||
);
|
||||
|
||||
toggle_expand_dir(&panel, "root2/e", cx);
|
||||
assert_eq!(
|
||||
visible_entries_as_strings(&panel, 0..50, cx),
|
||||
&[
|
||||
"v root1",
|
||||
" > a",
|
||||
" v b",
|
||||
" > 3",
|
||||
" > C",
|
||||
" .dockerignore",
|
||||
"v root2",
|
||||
" v d",
|
||||
" v e <== selected",
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 30)]
|
||||
async fn test_editing_files(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
@ -2792,6 +2910,12 @@ mod tests {
|
|||
workspace::init_settings(cx);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||
project_settings.file_scan_exclusions = Some(Vec::new());
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -884,6 +884,7 @@ message SearchProject {
|
|||
bool case_sensitive = 5;
|
||||
string files_to_include = 6;
|
||||
string files_to_exclude = 7;
|
||||
bool include_ignored = 8;
|
||||
}
|
||||
|
||||
message SearchProjectResponse {
|
||||
|
|
|
@ -884,6 +884,7 @@ message SearchProject {
|
|||
bool case_sensitive = 5;
|
||||
string files_to_include = 6;
|
||||
string files_to_exclude = 7;
|
||||
bool include_ignored = 8;
|
||||
}
|
||||
|
||||
message SearchProjectResponse {
|
||||
|
|
|
@ -805,6 +805,7 @@ impl BufferSearchBar {
|
|||
query,
|
||||
self.search_options.contains(SearchOptions::WHOLE_WORD),
|
||||
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
|
||||
false,
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
) {
|
||||
|
@ -820,6 +821,7 @@ impl BufferSearchBar {
|
|||
query,
|
||||
self.search_options.contains(SearchOptions::WHOLE_WORD),
|
||||
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
|
||||
false,
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
) {
|
||||
|
|
|
@ -4,7 +4,7 @@ use crate::{
|
|||
search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
|
||||
ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery,
|
||||
PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch,
|
||||
ToggleCaseSensitive, ToggleReplace, ToggleWholeWord,
|
||||
ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord,
|
||||
};
|
||||
use anyhow::{Context, Result};
|
||||
use collections::HashMap;
|
||||
|
@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) {
|
|||
cx.capture_action(ProjectSearchView::replace_next);
|
||||
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
|
||||
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
|
||||
add_toggle_option_action::<ToggleIncludeIgnored>(SearchOptions::INCLUDE_IGNORED, cx);
|
||||
add_toggle_filters_action::<ToggleFilters>(cx);
|
||||
}
|
||||
|
||||
|
@ -1192,6 +1193,7 @@ impl ProjectSearchView {
|
|||
text,
|
||||
self.search_options.contains(SearchOptions::WHOLE_WORD),
|
||||
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
|
||||
self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
|
||||
included_files,
|
||||
excluded_files,
|
||||
) {
|
||||
|
@ -1210,6 +1212,7 @@ impl ProjectSearchView {
|
|||
text,
|
||||
self.search_options.contains(SearchOptions::WHOLE_WORD),
|
||||
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
|
||||
self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
|
||||
included_files,
|
||||
excluded_files,
|
||||
) {
|
||||
|
@ -1764,6 +1767,17 @@ impl View for ProjectSearchBar {
|
|||
render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx)
|
||||
});
|
||||
|
||||
let mut include_ignored = is_semantic_disabled.then(|| {
|
||||
render_option_button_icon(
|
||||
// TODO proper icon
|
||||
"icons/case_insensitive.svg",
|
||||
SearchOptions::INCLUDE_IGNORED,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
// TODO not implemented yet
|
||||
let _ = include_ignored.take();
|
||||
|
||||
let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
|
||||
let is_active = if let Some(search) = self.active_project_search.as_ref() {
|
||||
let search = search.read(cx);
|
||||
|
@ -1879,7 +1893,15 @@ impl View for ProjectSearchBar {
|
|||
.with_children(search.filters_enabled.then(|| {
|
||||
Flex::row()
|
||||
.with_child(
|
||||
ChildView::new(&search.included_files_editor, cx)
|
||||
Flex::row()
|
||||
.with_child(
|
||||
ChildView::new(&search.included_files_editor, cx)
|
||||
.contained()
|
||||
.constrained()
|
||||
.with_height(theme.search.search_bar_row_height)
|
||||
.flex(1., true),
|
||||
)
|
||||
.with_children(include_ignored)
|
||||
.contained()
|
||||
.with_style(include_container_style)
|
||||
.constrained()
|
||||
|
|
|
@ -29,6 +29,7 @@ actions!(
|
|||
CycleMode,
|
||||
ToggleWholeWord,
|
||||
ToggleCaseSensitive,
|
||||
ToggleIncludeIgnored,
|
||||
ToggleReplace,
|
||||
SelectNextMatch,
|
||||
SelectPrevMatch,
|
||||
|
@ -49,31 +50,35 @@ bitflags! {
|
|||
const NONE = 0b000;
|
||||
const WHOLE_WORD = 0b001;
|
||||
const CASE_SENSITIVE = 0b010;
|
||||
const INCLUDE_IGNORED = 0b100;
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchOptions {
|
||||
pub fn label(&self) -> &'static str {
|
||||
match *self {
|
||||
SearchOptions::WHOLE_WORD => "Match Whole Word",
|
||||
SearchOptions::CASE_SENSITIVE => "Match Case",
|
||||
_ => panic!("{:?} is not a named SearchOption", self),
|
||||
Self::WHOLE_WORD => "Match Whole Word",
|
||||
Self::CASE_SENSITIVE => "Match Case",
|
||||
Self::INCLUDE_IGNORED => "Include Ignored",
|
||||
_ => panic!("{self:?} is not a named SearchOption"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn icon(&self) -> &'static str {
|
||||
match *self {
|
||||
SearchOptions::WHOLE_WORD => "icons/word_search.svg",
|
||||
SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg",
|
||||
_ => panic!("{:?} is not a named SearchOption", self),
|
||||
Self::WHOLE_WORD => "icons/word_search.svg",
|
||||
Self::CASE_SENSITIVE => "icons/case_insensitive.svg",
|
||||
Self::INCLUDE_IGNORED => "icons/case_insensitive.svg",
|
||||
_ => panic!("{self:?} is not a named SearchOption"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_toggle_action(&self) -> Box<dyn Action> {
|
||||
match *self {
|
||||
SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord),
|
||||
SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
|
||||
_ => panic!("{:?} is not a named SearchOption", self),
|
||||
Self::WHOLE_WORD => Box::new(ToggleWholeWord),
|
||||
Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
|
||||
Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored),
|
||||
_ => panic!("{self:?} is not a named SearchOption"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -85,6 +90,7 @@ impl SearchOptions {
|
|||
let mut options = SearchOptions::NONE;
|
||||
options.set(SearchOptions::WHOLE_WORD, query.whole_word());
|
||||
options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive());
|
||||
options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored());
|
||||
options
|
||||
}
|
||||
|
||||
|
|
|
@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher {
|
|||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PathMatcher {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.maybe_path.eq(&other.maybe_path)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for PathMatcher {}
|
||||
|
||||
impl PathMatcher {
|
||||
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
|
||||
Ok(PathMatcher {
|
||||
|
@ -211,7 +219,19 @@ impl PathMatcher {
|
|||
}
|
||||
|
||||
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
|
||||
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
|
||||
other.as_ref().starts_with(&self.maybe_path)
|
||||
|| self.glob.is_match(&other)
|
||||
|| self.check_with_end_separator(other.as_ref())
|
||||
}
|
||||
|
||||
fn check_with_end_separator(&self, path: &Path) -> bool {
|
||||
let path_str = path.to_string_lossy();
|
||||
let separator = std::path::MAIN_SEPARATOR_STR;
|
||||
if path_str.ends_with(separator) {
|
||||
self.glob.is_match(path)
|
||||
} else {
|
||||
self.glob.is_match(path_str.to_string() + separator)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -388,4 +408,14 @@ mod tests {
|
|||
let path = Path::new("/a/b/c/.eslintrc.js");
|
||||
assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn edge_of_glob() {
|
||||
let path = Path::new("/work/node_modules");
|
||||
let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
|
||||
assert!(
|
||||
path_matcher.is_match(&path),
|
||||
"Path matcher {path_matcher} should match {path:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue