Add file_scan_inclusions setting to customize Zed file indexing (#16852)

Closes #4745

Release Notes:

- Added a new `file_scan_inclusions` setting to force Zed to index files
that match the provided globs, even if they're gitignored.
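
To make the intended behavior concrete, here is a minimal, self-contained sketch of the matching idea, using the `globset` crate as a stand-in for Zed's internal `PathMatcher`; the function names `build_inclusions` and `is_always_included` are illustrative only. A gitignored path is still indexed when it, or one of its ancestors, matches an inclusion glob (exclusions still take precedence).

```rust
// Illustrative sketch only; assumes `globset = "0.4"` in Cargo.toml as a
// stand-in for Zed's internal `PathMatcher`.
use globset::{Glob, GlobSet, GlobSetBuilder};
use std::path::Path;

// Compile the `file_scan_inclusions` globs into a single matcher.
fn build_inclusions(globs: &[&str]) -> Result<GlobSet, globset::Error> {
    let mut builder = GlobSetBuilder::new();
    for glob in globs {
        builder.add(Glob::new(glob)?);
    }
    builder.build()
}

// A gitignored path is force-indexed if it, or any ancestor, matches a glob.
fn is_always_included(inclusions: &GlobSet, path: &Path) -> bool {
    path.ancestors().any(|ancestor| inclusions.is_match(ancestor))
}

fn main() -> Result<(), globset::Error> {
    let inclusions = build_inclusions(&[".env*", "docker-compose.*.yml"])?;
    assert!(is_always_included(&inclusions, Path::new(".env.local")));
    assert!(is_always_included(&inclusions, Path::new("docker-compose.prod.yml")));
    assert!(!is_always_included(&inclusions, Path::new("target/debug/build.log")));
    Ok(())
}
```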

---------

Co-authored-by: Mikayla Maki <mikayla@zed.dev>
Ryan Hawkins 2024-11-20 18:00:21 -07:00 committed by GitHub
parent 95ace03706
commit 0e62b6dddd
6 changed files with 350 additions and 25 deletions


@ -668,7 +668,7 @@
},
// Add files or globs of files that will be excluded by Zed entirely:
// they will be skipped during FS scan(s), file tree and file search
// will lack the corresponding file entries.
// will lack the corresponding file entries. Overrides `file_scan_inclusions`.
"file_scan_exclusions": [
"**/.git",
"**/.svn",
@ -679,6 +679,14 @@
"**/.classpath",
"**/.settings"
],
// Add files or globs of files that will be included by Zed, even when
// ignored by git. This is useful for files that are not tracked by git,
// but are still important to your project. Note that globs that are
// overly broad can slow down Zed's file scanning. Overridden by `file_scan_exclusions`.
"file_scan_inclusions": [
".env*",
"docker-compose.*.yml"
],
// Git gutter behavior configuration.
"git": {
// Control whether the git gutter is shown. May take 2 values:
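
The comments in the default settings hunk above describe a fixed precedence between the two scan settings: `file_scan_exclusions` beats `file_scan_inclusions`, which in turn beats `.gitignore`. A tiny, hypothetical decision function capturing just that ordering (not Zed's actual scanner logic, which also accounts for external entries and rescans):

```rust
/// Hypothetical distillation of the documented precedence:
/// exclusions > inclusions > .gitignore.
fn should_index(excluded: bool, always_included: bool, gitignored: bool) -> bool {
    if excluded {
        false
    } else if always_included {
        true
    } else {
        !gitignored
    }
}

fn main() {
    assert!(!should_index(true, true, false)); // exclusions always win
    assert!(should_index(false, true, true)); // inclusions beat .gitignore
    assert!(!should_index(false, false, true)); // plain gitignored files stay hidden
}
```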


@ -2033,6 +2033,7 @@ impl ProjectPanel {
is_ignored: entry.is_ignored,
is_external: false,
is_private: false,
is_always_included: entry.is_always_included,
git_status: entry.git_status,
canonical_path: entry.canonical_path.clone(),
char_bag: entry.char_bag,


@ -37,7 +37,7 @@ log.workspace = true
parking_lot.workspace = true
paths.workspace = true
postage.workspace = true
rpc.workspace = true
rpc = { workspace = true, features = ["gpui"] }
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true


@ -65,7 +65,10 @@ use std::{
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use text::{LineEnding, Rope};
use util::{paths::home_dir, ResultExt};
use util::{
paths::{home_dir, PathMatcher},
ResultExt,
};
pub use worktree_settings::WorktreeSettings;
#[cfg(feature = "test-support")]
@ -134,6 +137,7 @@ pub struct RemoteWorktree {
background_snapshot: Arc<Mutex<(Snapshot, Vec<proto::UpdateWorktree>)>>,
project_id: u64,
client: AnyProtoClient,
file_scan_inclusions: PathMatcher,
updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
update_observer: Option<mpsc::UnboundedSender<proto::UpdateWorktree>>,
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
@ -150,6 +154,7 @@ pub struct Snapshot {
root_char_bag: CharBag,
entries_by_path: SumTree<Entry>,
entries_by_id: SumTree<PathEntry>,
always_included_entries: Vec<Arc<Path>>,
repository_entries: TreeMap<RepositoryWorkDirectory, RepositoryEntry>,
/// A number that increases every time the worktree begins scanning
@ -433,7 +438,7 @@ impl Worktree {
cx.observe_global::<SettingsStore>(move |this, cx| {
if let Self::Local(this) = this {
let settings = WorktreeSettings::get(settings_location, cx).clone();
if settings != this.settings {
if this.settings != settings {
this.settings = settings;
this.restart_background_scanners(cx);
}
@ -480,11 +485,19 @@ impl Worktree {
let (background_updates_tx, mut background_updates_rx) = mpsc::unbounded();
let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
let worktree_id = snapshot.id();
let settings_location = Some(SettingsLocation {
worktree_id,
path: Path::new(EMPTY_PATH),
});
let settings = WorktreeSettings::get(settings_location, cx).clone();
let worktree = RemoteWorktree {
client,
project_id,
replica_id,
snapshot,
file_scan_inclusions: settings.file_scan_inclusions.clone(),
background_snapshot: background_snapshot.clone(),
updates_tx: Some(background_updates_tx),
update_observer: None,
@ -500,7 +513,10 @@ impl Worktree {
while let Some(update) = background_updates_rx.next().await {
{
let mut lock = background_snapshot.lock();
if let Err(error) = lock.0.apply_remote_update(update.clone()) {
if let Err(error) = lock
.0
.apply_remote_update(update.clone(), &settings.file_scan_inclusions)
{
log::error!("error applying worktree update: {}", error);
}
lock.1.push(update);
@ -1022,7 +1038,17 @@ impl LocalWorktree {
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
self.scan_requests_tx = scan_requests_tx;
self.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
self.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
let always_included_entries = mem::take(&mut self.snapshot.always_included_entries);
log::debug!(
"refreshing entries for the following always included paths: {:?}",
always_included_entries
);
// Cleans up old always included entries to ensure they get updated properly. Otherwise,
// nested always included entries may not get updated and will result in out-of-date info.
self.refresh_entries_for_paths(always_included_entries);
}
fn start_background_scanner(
@ -1971,7 +1997,7 @@ impl RemoteWorktree {
this.update(&mut cx, |worktree, _| {
let worktree = worktree.as_remote_mut().unwrap();
let snapshot = &mut worktree.background_snapshot.lock().0;
let entry = snapshot.insert_entry(entry);
let entry = snapshot.insert_entry(entry, &worktree.file_scan_inclusions);
worktree.snapshot = snapshot.clone();
entry
})?
@ -2052,6 +2078,7 @@ impl Snapshot {
abs_path,
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
root_name,
always_included_entries: Default::default(),
entries_by_path: Default::default(),
entries_by_id: Default::default(),
repository_entries: Default::default(),
@ -2115,8 +2142,12 @@ impl Snapshot {
self.entries_by_id.get(&entry_id, &()).is_some()
}
fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, entry))?;
fn insert_entry(
&mut self,
entry: proto::Entry,
always_included_paths: &PathMatcher,
) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
let old_entry = self.entries_by_id.insert_or_replace(
PathEntry {
id: entry.id,
@ -2170,7 +2201,11 @@ impl Snapshot {
}
}
pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> {
pub(crate) fn apply_remote_update(
&mut self,
mut update: proto::UpdateWorktree,
always_included_paths: &PathMatcher,
) -> Result<()> {
log::trace!(
"applying remote worktree update. {} entries updated, {} removed",
update.updated_entries.len(),
@ -2193,7 +2228,7 @@ impl Snapshot {
}
for entry in update.updated_entries {
let entry = Entry::try_from((&self.root_char_bag, entry))?;
let entry = Entry::try_from((&self.root_char_bag, always_included_paths, entry))?;
if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
}
@ -2713,7 +2748,7 @@ impl LocalSnapshot {
for entry in self.entries_by_path.cursor::<()>(&()) {
if entry.is_file() {
assert_eq!(files.next().unwrap().inode, entry.inode);
if !entry.is_ignored && !entry.is_external {
if (!entry.is_ignored && !entry.is_external) || entry.is_always_included {
assert_eq!(visible_files.next().unwrap().inode, entry.inode);
}
}
@ -2796,7 +2831,7 @@ impl LocalSnapshot {
impl BackgroundScannerState {
fn should_scan_directory(&self, entry: &Entry) -> bool {
(!entry.is_external && !entry.is_ignored)
(!entry.is_external && (!entry.is_ignored || entry.is_always_included))
|| entry.path.file_name() == Some(*DOT_GIT)
|| entry.path.file_name() == Some(local_settings_folder_relative_path().as_os_str())
|| self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
@ -3369,6 +3404,12 @@ pub struct Entry {
/// exclude them from searches.
pub is_ignored: bool,
/// Whether this entry is always included in searches.
///
/// This is used for entries that are always included in searches, even
/// if they are ignored by git. Overridden by file_scan_exclusions.
pub is_always_included: bool,
/// Whether this entry's canonical path is outside of the worktree.
/// This means the entry is only accessible from the worktree root via a
/// symlink.
@ -3440,6 +3481,7 @@ impl Entry {
size: metadata.len,
canonical_path,
is_ignored: false,
is_always_included: false,
is_external: false,
is_private: false,
git_status: None,
@ -3486,7 +3528,8 @@ impl sum_tree::Item for Entry {
type Summary = EntrySummary;
fn summary(&self, _cx: &()) -> Self::Summary {
let non_ignored_count = if self.is_ignored || self.is_external {
let non_ignored_count = if (self.is_ignored || self.is_external) && !self.is_always_included
{
0
} else {
1
@ -4254,6 +4297,7 @@ impl BackgroundScanner {
if child_entry.is_dir() {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
// Avoid recursing until crash in the case of a recursive symlink
if job.ancestor_inodes.contains(&child_entry.inode) {
@ -4278,6 +4322,7 @@ impl BackgroundScanner {
}
} else {
child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
child_entry.is_always_included = self.settings.is_path_always_included(&child_path);
if !child_entry.is_ignored {
if let Some(repo) = &containing_repository {
if let Ok(repo_path) = child_entry.path.strip_prefix(&repo.work_directory) {
@ -4314,6 +4359,12 @@ impl BackgroundScanner {
new_jobs.remove(job_ix);
}
}
if entry.is_always_included {
state
.snapshot
.always_included_entries
.push(entry.path.clone());
}
}
state.populate_dir(&job.path, new_entries, new_ignore);
@ -4430,6 +4481,7 @@ impl BackgroundScanner {
fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = is_external;
fs_entry.is_private = self.is_path_private(path);
fs_entry.is_always_included = self.settings.is_path_always_included(path);
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) {
if state.should_scan_directory(&fs_entry)
@ -5317,7 +5369,7 @@ impl<'a> Traversal<'a> {
if let Some(entry) = self.cursor.item() {
if (self.include_files || !entry.is_file())
&& (self.include_dirs || !entry.is_dir())
&& (self.include_ignored || !entry.is_ignored)
&& (self.include_ignored || !entry.is_ignored || entry.is_always_included)
{
return true;
}
@ -5448,10 +5500,12 @@ impl<'a> From<&'a Entry> for proto::Entry {
}
}
impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry {
type Error = anyhow::Error;
fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
fn try_from(
(root_char_bag, always_included, entry): (&'a CharBag, &PathMatcher, proto::Entry),
) -> Result<Self> {
let kind = if entry.is_dir {
EntryKind::Dir
} else {
@ -5462,7 +5516,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
Ok(Entry {
id: ProjectEntryId::from_proto(entry.id),
kind,
path,
path: path.clone(),
inode: entry.inode,
mtime: entry.mtime.map(|time| time.into()),
size: entry.size.unwrap_or(0),
@ -5470,6 +5524,7 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
.canonical_path
.map(|path_string| Box::from(Path::new(&path_string))),
is_ignored: entry.is_ignored,
is_always_included: always_included.is_match(path.as_ref()),
is_external: entry.is_external,
git_status: git_status_from_proto(entry.git_status),
is_private: false,
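
The scattered `is_always_included` checks above all implement one visibility rule: an ignored or external entry still counts as visible when it is always included. A condensed sketch of that rule, with a simplified stand-in for `worktree::Entry`:

```rust
// Simplified stand-in for worktree::Entry, keeping only the visibility flags.
struct Entry {
    is_ignored: bool,
    is_external: bool,
    is_always_included: bool,
}

impl Entry {
    // Mirrors the updated summary and traversal checks in the hunks above.
    fn counts_as_visible(&self) -> bool {
        (!self.is_ignored && !self.is_external) || self.is_always_included
    }
}

fn main() {
    let forced_env = Entry { is_ignored: true, is_external: false, is_always_included: true };
    let plain_target = Entry { is_ignored: true, is_external: false, is_always_included: false };
    assert!(forced_env.counts_as_visible());
    assert!(!plain_target.counts_as_visible());
}
```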


@ -9,6 +9,7 @@ use util::paths::PathMatcher;
#[derive(Clone, PartialEq, Eq)]
pub struct WorktreeSettings {
pub file_scan_inclusions: PathMatcher,
pub file_scan_exclusions: PathMatcher,
pub private_files: PathMatcher,
}
@ -21,13 +22,19 @@ impl WorktreeSettings {
pub fn is_path_excluded(&self, path: &Path) -> bool {
path.ancestors()
.any(|ancestor| self.file_scan_exclusions.is_match(ancestor))
.any(|ancestor| self.file_scan_exclusions.is_match(&ancestor))
}
pub fn is_path_always_included(&self, path: &Path) -> bool {
path.ancestors()
.any(|ancestor| self.file_scan_inclusions.is_match(&ancestor))
}
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct WorktreeSettingsContent {
/// Completely ignore files matching globs from `file_scan_exclusions`
/// Completely ignore files matching globs from `file_scan_exclusions`. Overrides
/// `file_scan_inclusions`.
///
/// Default: [
/// "**/.git",
@ -42,6 +49,15 @@ pub struct WorktreeSettingsContent {
#[serde(default)]
pub file_scan_exclusions: Option<Vec<String>>,
/// Always include files that match these globs when scanning for files, even if they're
/// ignored by git. This setting is overridden by `file_scan_exclusions`.
/// Default: [
/// ".env*",
/// "docker-compose.*.yml",
/// ]
#[serde(default)]
pub file_scan_inclusions: Option<Vec<String>>,
/// Treat the files matching these globs as `.env` files.
/// Default: [ "**/.env*" ]
pub private_files: Option<Vec<String>>,
@ -59,11 +75,27 @@ impl Settings for WorktreeSettings {
let result: WorktreeSettingsContent = sources.json_merge()?;
let mut file_scan_exclusions = result.file_scan_exclusions.unwrap_or_default();
let mut private_files = result.private_files.unwrap_or_default();
let mut parsed_file_scan_inclusions: Vec<String> = result
.file_scan_inclusions
.unwrap_or_default()
.iter()
.flat_map(|glob| {
Path::new(glob)
.ancestors()
.map(|a| a.to_string_lossy().into())
})
.filter(|p| p != "")
.collect();
file_scan_exclusions.sort();
private_files.sort();
parsed_file_scan_inclusions.sort();
Ok(Self {
file_scan_exclusions: path_matchers(&file_scan_exclusions, "file_scan_exclusions")?,
private_files: path_matchers(&private_files, "private_files")?,
file_scan_inclusions: path_matchers(
&parsed_file_scan_inclusions,
"file_scan_inclusions",
)?,
})
}
}
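
The `load` hunk above expands every inclusion glob into itself plus its path ancestors before building the matcher, so the directories leading to an included file (for example `node_modules` for `node_modules/**/package.json`) also match and get scanned; `is_path_always_included` then checks a path and its ancestors against that matcher. A standalone sketch of the expansion step, using only the standard library:

```rust
use std::path::Path;

// Expand each inclusion glob into itself plus all of its non-empty ancestors,
// mirroring the expansion performed in `WorktreeSettings::load`.
fn expand_inclusion_globs(globs: &[&str]) -> Vec<String> {
    let mut expanded: Vec<String> = globs
        .iter()
        .flat_map(|glob| {
            Path::new(glob)
                .ancestors()
                .map(|ancestor| ancestor.to_string_lossy().into_owned())
        })
        .filter(|p| !p.is_empty())
        .collect();
    expanded.sort();
    expanded
}

fn main() {
    let expanded = expand_inclusion_globs(&["node_modules/**/package.json"]);
    // The parent patterns ensure the scanner descends into `node_modules`
    // even though `.gitignore` excludes it.
    assert_eq!(
        expanded,
        vec![
            "node_modules".to_string(),
            "node_modules/**".to_string(),
            "node_modules/**/package.json".to_string(),
        ]
    );
}
```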


@ -878,6 +878,211 @@ async fn test_write_file(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let dir = temp_tree(json!({
".gitignore": "**/target\n/node_modules\ntop_level.txt\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
"bar": {
"bar.rs": "// bar",
},
"lib.rs": "mod foo;\nmod bar;\n",
},
"top_level.txt": "top level file",
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec![]);
project_settings.file_scan_inclusions = Some(vec![
"node_modules/**/package.json".to_string(),
"**/.DS_Store".to_string(),
]);
});
});
});
let tree = Worktree::local(
dir.path(),
true,
Arc::new(RealFs::default()),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
// Assert that file_scan_inclusions overrides the gitignore.
check_worktree_entries(
tree,
&[],
&["target", "node_modules"],
&["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
&[
"node_modules/prettier/package.json",
".DS_Store",
"node_modules/.DS_Store",
"src/.DS_Store",
],
)
});
}
#[gpui::test]
async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let dir = temp_tree(json!({
".gitignore": "**/target\n/node_modules\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
},
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec!["**/.DS_Store".to_string()]);
project_settings.file_scan_inclusions = Some(vec!["**/.DS_Store".to_string()]);
});
});
});
let tree = Worktree::local(
dir.path(),
true,
Arc::new(RealFs::default()),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
// Assert that file_scan_exclusions overrides file_scan_inclusions.
check_worktree_entries(
tree,
&[".DS_Store, src/.DS_Store"],
&["target", "node_modules"],
&["src/foo/another.rs", "src/foo/foo.rs", ".gitignore"],
&[],
)
});
}
#[gpui::test]
async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppContext) {
init_test(cx);
cx.executor().allow_parking();
let dir = temp_tree(json!({
".gitignore": "**/target\n/node_modules/\n",
"target": {
"index": "blah2"
},
"node_modules": {
".DS_Store": "",
"prettier": {
"package.json": "{}",
},
},
"src": {
".DS_Store": "",
"foo": {
"foo.rs": "mod another;\n",
"another.rs": "// another",
},
},
".DS_Store": "",
}));
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec![]);
project_settings.file_scan_inclusions = Some(vec!["node_modules/**".to_string()]);
});
});
});
let tree = Worktree::local(
dir.path(),
true,
Arc::new(RealFs::default()),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
assert!(tree
.entry_for_path("node_modules")
.is_some_and(|f| f.is_always_included));
assert!(tree
.entry_for_path("node_modules/prettier/package.json")
.is_some_and(|f| f.is_always_included));
});
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec![]);
project_settings.file_scan_inclusions = Some(vec![]);
});
});
});
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
assert!(tree
.entry_for_path("node_modules")
.is_some_and(|f| !f.is_always_included));
assert!(tree
.entry_for_path("node_modules/prettier/package.json")
.is_some_and(|f| !f.is_always_included));
});
}
#[gpui::test]
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
init_test(cx);
@ -939,6 +1144,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
],
&["target", "node_modules"],
&["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
&[],
)
});
@ -970,6 +1176,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
"src/.DS_Store",
".DS_Store",
],
&[],
)
});
}
@ -1051,6 +1258,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"src/bar/bar.rs",
".gitignore",
],
&[],
)
});
@ -1111,6 +1319,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"src/new_file",
".gitignore",
],
&[],
)
});
}
@ -1140,14 +1349,14 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
.await;
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(tree, &[], &["HEAD", "foo"], &[])
check_worktree_entries(tree, &[], &["HEAD", "foo"], &[], &[])
});
std::fs::write(dot_git_worktree_dir.join("new_file"), "new file contents")
.unwrap_or_else(|e| panic!("Failed to create in {dot_git_worktree_dir:?} a new file: {e}"));
tree.flush_fs_events(cx).await;
tree.read_with(cx, |tree, _| {
check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[])
check_worktree_entries(tree, &[], &["HEAD", "foo", "new_file"], &[], &[])
});
}
@ -1180,8 +1389,12 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
let snapshot = Arc::new(Mutex::new(tree.snapshot()));
tree.observe_updates(0, cx, {
let snapshot = snapshot.clone();
let settings = tree.settings().clone();
move |update| {
snapshot.lock().apply_remote_update(update).unwrap();
snapshot
.lock()
.apply_remote_update(update, &settings.file_scan_inclusions)
.unwrap();
async { true }
}
});
@ -1474,12 +1687,14 @@ async fn test_random_worktree_operations_during_initial_scan(
snapshot
});
let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
let mut updated_snapshot = snapshot.clone();
for update in updates.lock().iter() {
if update.scan_id >= updated_snapshot.scan_id() as u64 {
updated_snapshot
.apply_remote_update(update.clone())
.apply_remote_update(update.clone(), &settings.file_scan_inclusions)
.unwrap();
}
}
@ -1610,10 +1825,14 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
);
}
let settings = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().settings());
for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
for update in updates.lock().iter() {
if update.scan_id >= prev_snapshot.scan_id() as u64 {
prev_snapshot.apply_remote_update(update.clone()).unwrap();
prev_snapshot
.apply_remote_update(update.clone(), &settings.file_scan_inclusions)
.unwrap();
}
}
@ -2588,6 +2807,7 @@ fn check_worktree_entries(
expected_excluded_paths: &[&str],
expected_ignored_paths: &[&str],
expected_tracked_paths: &[&str],
expected_included_paths: &[&str],
) {
for path in expected_excluded_paths {
let entry = tree.entry_for_path(path);
@ -2610,10 +2830,19 @@ fn check_worktree_entries(
.entry_for_path(path)
.unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
assert!(
!entry.is_ignored,
!entry.is_ignored || entry.is_always_included,
"expected path '{path}' to be tracked, but got entry: {entry:?}",
);
}
for path in expected_included_paths {
let entry = tree
.entry_for_path(path)
.unwrap_or_else(|| panic!("Missing entry for expected included path '{path}'"));
assert!(
entry.is_always_included,
"expected path '{path}' to always be included, but got entry: {entry:?}",
);
}
}
fn init_test(cx: &mut gpui::TestAppContext) {