project_panel: Add Alt/Opt+Click to expand/collapse a directory and all its contents (#22896)

Closes #15966 

This PR adds `Alt/Opt+Click` to expand or collapse a directory and all
its contents.

Context:

The existing `expand_entry` only scans the immediate child subdirectories if
they aren't loaded yet, while the new `expand_all_for_entry` scans the entire
subtree. The latter takes longer, so we wait for the scan to complete to
ensure accurate results.
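
A minimal sketch of that ordering, using a plain `std` thread and channel as a stand-in for the worktree's barrier channel and gpui tasks (only the sequencing idea is shown, not the real API):

```rust
use std::sync::mpsc;
use std::thread;

// Toy model of "wait for the full subtree scan before updating the panel".
fn main() {
    let (scan_done_tx, scan_done_rx) = mpsc::channel::<()>();

    let scanner = thread::spawn(move || {
        // ... scan the entire subtree here ...
        let _ = scan_done_tx.send(());
    });

    // Block until the deep scan finishes, so the expansion sees every
    // child entry instead of a partially loaded tree.
    let _ = scan_done_rx.recv();
    // ... only now emit ExpandedAllForEntry and expand the visible entries ...

    scanner.join().unwrap();
}
```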

For the full directory scan, instead of using
`refresh_entries_for_paths(vec![path])`, which requires explicitly listing
every path to refresh, we use `add_path_prefix_to_scan`, which only needs the
root of the subtree. Both methods internally call `reload_entries_for_paths`,
which invokes `should_scan_directory`. That function decides whether to scan
deeper based on a path-prefix match between the given directory and its
subdirectories, and it returns `true` for prefixes registered via
`add_path_prefix_to_scan`.
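
As a rough illustration of the prefix-match idea (a simplified model with a hypothetical `should_scan_deeper` helper, not Zed's actual `should_scan_directory`, which checks more conditions than this):

```rust
use std::collections::BTreeSet;
use std::path::{Path, PathBuf};

// A directory is scanned deeper when it falls under one of the
// registered path prefixes.
fn should_scan_deeper(dir: &Path, path_prefixes_to_scan: &BTreeSet<PathBuf>) -> bool {
    path_prefixes_to_scan
        .iter()
        .any(|prefix| dir.starts_with(prefix))
}

fn main() {
    let mut prefixes = BTreeSet::new();
    // Expanding all of `dir1` registers it as a prefix to scan.
    prefixes.insert(PathBuf::from("dir1"));

    assert!(should_scan_deeper(Path::new("dir1/subdir1/nested"), &prefixes));
    assert!(!should_scan_deeper(Path::new("dir2"), &prefixes));
}
```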

The existing code handles scanning, removing path prefixes after scans
complete, and managing ignored directories.

How it works (Expand):
1. Alt-clicking a non-ignored, closed directory expands it and all of its
subdirectories, except ignored ones (see the sketch after these examples).
This helps in monorepos, where you usually don't want directories like
`node_modules`, `dist`, or git submodules to expand every time you expand a
parent directory.

In the example below, the `draft` and `posts` directories are ignored.


[expand-1.webm](https://github.com/user-attachments/assets/07d3f724-0757-408f-b349-5beb4ee8440e)

2. Alt-clicking an ignored, closed directory expands it and all of its
subdirectories. This covers the case where you explicitly want to expand
directories like `node_modules`, `dist`, etc.

In the example below, the `dist` directory is ignored.


[expand-2.webm](https://github.com/user-attachments/assets/99e55883-ab1a-4a9c-a0f0-48026991a922)

3. For auto-folded subdirectories, the expand-all action takes precedence:
it unfolds every subdirectory inside the clicked directory. This is
intentional, since the user explicitly wants to reveal as much content as
possible. (This is my personal opinion on how it should work.)


[expand-3.webm](https://github.com/user-attachments/assets/f20b0311-e92a-4e34-b640-1469b0d6fa16)
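
The sketch below models cases 1 and 2 with a toy `Dir` type (a standalone simplification, not the panel's real worktree-entry traversal): ignored subdirectories are skipped unless the clicked directory is itself ignored.

```rust
// Toy directory tree; the real code walks worktree entries by id.
struct Dir {
    name: &'static str,
    is_ignored: bool,
    children: Vec<Dir>, // subdirectories only
}

// Collect the directories that expand-all would open, starting from the
// clicked directory. Ignored children are skipped unless the clicked
// directory is itself ignored (case 2 above).
fn dirs_to_expand(clicked: &Dir) -> Vec<&'static str> {
    let include_ignored = clicked.is_ignored;
    let mut expanded = Vec::new();
    let mut stack = vec![clicked];
    while let Some(dir) = stack.pop() {
        expanded.push(dir.name);
        for child in &dir.children {
            if child.is_ignored && !include_ignored {
                continue; // e.g. node_modules stays collapsed in case 1
            }
            stack.push(child);
        }
    }
    expanded
}

fn main() {
    let root = Dir {
        name: "root",
        is_ignored: false,
        children: vec![
            Dir { name: "src", is_ignored: false, children: vec![] },
            Dir { name: "node_modules", is_ignored: true, children: vec![] },
        ],
    };
    assert_eq!(dirs_to_expand(&root), vec!["root", "src"]);
}
```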

How it works (Collapse):
1. Alt-clicking any expanded directory collapses it and all of its children,
ignored or not, for when you want to start again from a fresh state (see the
sketch after the demo below).

2. When auto-fold is enabled in settings, the collapse action also folds
every subdirectory it can fold, bringing the tree back to the fresh state
mentioned above.


[collapse-1-2.webm](https://github.com/user-attachments/assets/74db6cee-0afa-406b-a9a2-7421083a2c2a)
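
A matching sketch of collapse-all, using `u64` ids and plain sets as stand-ins for `ProjectEntryId`, `expanded_dir_ids`, and `unfolded_dir_ids` (the `children_of` closure is a toy substitute for the worktree lookup):

```rust
use std::collections::BTreeSet;

// Remove every descendant directory from the expanded set and, when
// auto-fold is enabled, from the unfolded set as well, so it folds back
// into its parent chain.
fn collapse_all(
    clicked: u64,
    children_of: impl Fn(u64) -> Vec<u64>,
    expanded: &mut BTreeSet<u64>,
    unfolded: &mut BTreeSet<u64>,
    auto_fold_enabled: bool,
) {
    let mut stack = vec![clicked];
    while let Some(dir) = stack.pop() {
        expanded.remove(&dir);
        if auto_fold_enabled {
            unfolded.remove(&dir);
        }
        stack.extend(children_of(dir));
    }
}

fn main() {
    // 1 -> {2, 3}, 2 -> {4}; everything starts expanded and unfolded.
    let children = |id: u64| match id {
        1 => vec![2, 3],
        2 => vec![4],
        _ => vec![],
    };
    let mut expanded: BTreeSet<u64> = [1, 2, 3, 4].into_iter().collect();
    let mut unfolded: BTreeSet<u64> = [1, 2, 3, 4].into_iter().collect();

    collapse_all(1, children, &mut expanded, &mut unfolded, true);
    assert!(expanded.is_empty());
    assert!(unfolded.is_empty());
}
```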


Future:
- Add a keybinding to expand/collapse all for the selected entry
- Handle expand/collapse all for folded entries

Todos:
- [x] Expand entries logic
- [x] Handle remote worktree for expand
- [x] Figure out scan complete status
- [x] Move expansion logic to status update event
- [x] Collapse entries logic
- [x] Handle fold/unfold subdirs interaction
- [x] Do not expand git ignored sub-dirs
- [x] Tests
- [x] Test Remote

Release Notes:

- Added Alt/Opt+Click functionality to expand or collapse a directory
and all its contents.
tims committed on 2025-01-28 14:07:56 +05:30 (via GitHub)
parent 793873bdc9 · commit 5c650cdcb2
7 changed files with 640 additions and 13 deletions


@ -333,6 +333,9 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::CopyProjectEntry>)
.add_request_handler(forward_mutating_project_request::<proto::DeleteProjectEntry>)
.add_request_handler(forward_mutating_project_request::<proto::ExpandProjectEntry>)
.add_request_handler(
forward_mutating_project_request::<proto::ExpandAllForProjectEntry>,
)
.add_request_handler(forward_mutating_project_request::<proto::OnTypeFormatting>)
.add_request_handler(forward_mutating_project_request::<proto::SaveBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::BlameBuffer>)


@ -281,6 +281,7 @@ pub enum Event {
RefreshInlayHints,
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
ExpandedAllForEntry(WorktreeId, ProjectEntryId),
}
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
@ -1577,6 +1578,25 @@ impl Project {
worktree.update(cx, |worktree, cx| worktree.expand_entry(entry_id, cx))
}
pub fn expand_all_for_entry(
&mut self,
worktree_id: WorktreeId,
entry_id: ProjectEntryId,
cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
let worktree = self.worktree_for_id(worktree_id, cx)?;
let task = worktree.update(cx, |worktree, cx| {
worktree.expand_all_for_entry(entry_id, cx)
});
Some(cx.spawn(|this, mut cx| async move {
task.ok_or_else(|| anyhow!("no task"))?.await?;
this.update(&mut cx, |_, cx| {
cx.emit(Event::ExpandedAllForEntry(worktree_id, entry_id));
})?;
Ok(())
}))
}
pub fn shared(&mut self, project_id: u64, cx: &mut Context<Self>) -> Result<()> {
if !matches!(self.client_state, ProjectClientState::Local) {
return Err(anyhow!("project was already shared"));


@ -75,6 +75,7 @@ impl WorktreeStore {
client.add_model_request_handler(Self::handle_copy_project_entry);
client.add_model_request_handler(Self::handle_delete_project_entry);
client.add_model_request_handler(Self::handle_expand_project_entry);
client.add_model_request_handler(Self::handle_expand_all_for_project_entry);
client.add_model_request_handler(Self::handle_git_branches);
client.add_model_request_handler(Self::handle_update_branch);
}
@ -1089,6 +1090,18 @@ impl WorktreeStore {
Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
}
pub async fn handle_expand_all_for_project_entry(
this: Entity<Self>,
envelope: TypedEnvelope<proto::ExpandAllForProjectEntry>,
mut cx: AsyncAppContext,
) -> Result<proto::ExpandAllForProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
.ok_or_else(|| anyhow!("invalid request"))?;
Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await
}
pub async fn handle_git_branches(
this: Entity<Self>,
branches: TypedEnvelope<proto::GitBranches>,


@ -322,6 +322,41 @@ impl ProjectPanel {
this.update_visible_entries(None, cx);
cx.notify();
}
project::Event::ExpandedAllForEntry(worktree_id, entry_id) => {
if let Some((worktree, expanded_dir_ids)) = project
.read(cx)
.worktree_for_id(*worktree_id, cx)
.zip(this.expanded_dir_ids.get_mut(&worktree_id))
{
let worktree = worktree.read(cx);
let Some(entry) = worktree.entry_for_id(*entry_id) else {
return;
};
let include_ignored_dirs = !entry.is_ignored;
let mut dirs_to_expand = vec![*entry_id];
while let Some(current_id) = dirs_to_expand.pop() {
let Some(current_entry) = worktree.entry_for_id(current_id) else {
continue;
};
for child in worktree.child_entries(&current_entry.path) {
if !child.is_dir() || (include_ignored_dirs && child.is_ignored) {
continue;
}
dirs_to_expand.push(child.id);
if let Err(ix) = expanded_dir_ids.binary_search(&child.id) {
expanded_dir_ids.insert(ix, child.id);
}
this.unfolded_dir_ids.insert(child.id);
}
}
this.update_visible_entries(None, cx);
cx.notify();
}
}
_ => {}
})
.detach();
@ -487,6 +522,7 @@ impl ProjectPanel {
}
}
}
_ => {}
}
})
@ -883,6 +919,99 @@ impl ProjectPanel {
}
}
fn toggle_expand_all(
&mut self,
entry_id: ProjectEntryId,
window: &mut Window,
cx: &mut Context<Self>,
) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
match expanded_dir_ids.binary_search(&entry_id) {
Ok(_ix) => {
self.collapse_all_for_entry(worktree_id, entry_id, cx);
}
Err(_ix) => {
self.expand_all_for_entry(worktree_id, entry_id, cx);
}
}
self.update_visible_entries(Some((worktree_id, entry_id)), cx);
window.focus(&self.focus_handle);
cx.notify();
}
}
}
fn expand_all_for_entry(
&mut self,
worktree_id: WorktreeId,
entry_id: ProjectEntryId,
cx: &mut Context<Self>,
) {
self.project.update(cx, |project, cx| {
if let Some((worktree, expanded_dir_ids)) = project
.worktree_for_id(worktree_id, cx)
.zip(self.expanded_dir_ids.get_mut(&worktree_id))
{
if let Some(task) = project.expand_all_for_entry(worktree_id, entry_id, cx) {
task.detach();
}
let worktree = worktree.read(cx);
if let Some(mut entry) = worktree.entry_for_id(entry_id) {
loop {
if let Err(ix) = expanded_dir_ids.binary_search(&entry.id) {
expanded_dir_ids.insert(ix, entry.id);
}
if let Some(parent_entry) =
entry.path.parent().and_then(|p| worktree.entry_for_path(p))
{
entry = parent_entry;
} else {
break;
}
}
}
}
});
}
fn collapse_all_for_entry(
&mut self,
worktree_id: WorktreeId,
entry_id: ProjectEntryId,
cx: &mut Context<Self>,
) {
self.project.update(cx, |project, cx| {
if let Some((worktree, expanded_dir_ids)) = project
.worktree_for_id(worktree_id, cx)
.zip(self.expanded_dir_ids.get_mut(&worktree_id))
{
let worktree = worktree.read(cx);
let mut dirs_to_collapse = vec![entry_id];
let auto_fold_enabled = ProjectPanelSettings::get_global(cx).auto_fold_dirs;
while let Some(current_id) = dirs_to_collapse.pop() {
let Some(current_entry) = worktree.entry_for_id(current_id) else {
continue;
};
if let Ok(ix) = expanded_dir_ids.binary_search(&current_id) {
expanded_dir_ids.remove(ix);
}
if auto_fold_enabled {
self.unfolded_dir_ids.remove(&current_id);
}
for child in worktree.child_entries(&current_entry.path) {
if child.is_dir() {
dirs_to_collapse.push(child.id);
}
}
}
}
});
}
fn select_prev(&mut self, _: &SelectPrev, window: &mut Window, cx: &mut Context<Self>) {
if let Some(edit_state) = &self.edit_state {
if edit_state.processing_filename.is_none() {
@ -3585,7 +3714,11 @@ impl ProjectPanel {
}
} else if kind.is_dir() {
this.marked_entries.clear();
if event.down.modifiers.alt {
this.toggle_expand_all(entry_id, window, cx);
} else {
this.toggle_expanded(entry_id, window, cx);
}
} else {
let preview_tabs_enabled = PreviewTabsSettings::get_global(cx).enabled;
let click_count = event.up.click_count;
@ -8293,6 +8426,383 @@ mod tests {
});
}
#[gpui::test]
async fn test_expand_all_for_entry(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
".gitignore": "**/ignored_dir\n**/ignored_nested",
"dir1": {
"empty1": {
"empty2": {
"empty3": {
"file.txt": ""
}
}
},
"subdir1": {
"file1.txt": "",
"file2.txt": "",
"ignored_nested": {
"ignored_file.txt": ""
}
},
"ignored_dir": {
"subdir": {
"deep_file.txt": ""
}
}
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
// Test 1: When auto-fold is enabled
cx.update(|_, cx| {
let settings = *ProjectPanelSettings::get_global(cx);
ProjectPanelSettings::override_global(
ProjectPanelSettings {
auto_fold_dirs: true,
..settings
},
cx,
);
});
let panel = workspace.update(cx, ProjectPanel::new).unwrap();
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&["v root", " > dir1", " .gitignore",],
"Initial state should show collapsed root structure"
);
toggle_expand_dir(&panel, "root/dir1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" > empty1/empty2/empty3",
" > ignored_dir",
" > subdir1",
" .gitignore",
],
"Should show first level with auto-folded dirs and ignored dir visible"
);
let entry_id = find_project_entry(&panel, "root/dir1", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.expand_all_for_entry(worktree.id(), entry_id, cx);
panel.update_visible_entries(None, cx);
});
cx.run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" v empty1",
" v empty2",
" v empty3",
" file.txt",
" > ignored_dir",
" v subdir1",
" > ignored_nested",
" file1.txt",
" file2.txt",
" .gitignore",
],
"After expand_all with auto-fold: should not expand ignored_dir, should expand folded dirs, and should not expand ignored_nested"
);
// Test 2: When auto-fold is disabled
cx.update(|_, cx| {
let settings = *ProjectPanelSettings::get_global(cx);
ProjectPanelSettings::override_global(
ProjectPanelSettings {
auto_fold_dirs: false,
..settings
},
cx,
);
});
panel.update_in(cx, |panel, window, cx| {
panel.collapse_all_entries(&CollapseAllEntries, window, cx);
});
toggle_expand_dir(&panel, "root/dir1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" > empty1",
" > ignored_dir",
" > subdir1",
" .gitignore",
],
"With auto-fold disabled: should show all directories separately"
);
let entry_id = find_project_entry(&panel, "root/dir1", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.expand_all_for_entry(worktree.id(), entry_id, cx);
panel.update_visible_entries(None, cx);
});
cx.run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" v empty1",
" v empty2",
" v empty3",
" file.txt",
" > ignored_dir",
" v subdir1",
" > ignored_nested",
" file1.txt",
" file2.txt",
" .gitignore",
],
"After expand_all without auto-fold: should expand all dirs normally, \
expand ignored_dir itself but not its subdirs, and not expand ignored_nested"
);
// Test 3: When explicitly called on ignored directory
let ignored_dir_entry = find_project_entry(&panel, "root/dir1/ignored_dir", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.expand_all_for_entry(worktree.id(), ignored_dir_entry, cx);
panel.update_visible_entries(None, cx);
});
cx.run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" v empty1",
" v empty2",
" v empty3",
" file.txt",
" v ignored_dir",
" v subdir",
" deep_file.txt",
" v subdir1",
" > ignored_nested",
" file1.txt",
" file2.txt",
" .gitignore",
],
"After expand_all on ignored_dir: should expand all contents of the ignored directory"
);
}
#[gpui::test]
async fn test_collapse_all_for_entry(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"dir1": {
"subdir1": {
"nested1": {
"file1.txt": "",
"file2.txt": ""
},
},
"subdir2": {
"file4.txt": ""
}
},
"dir2": {
"single_file": {
"file5.txt": ""
}
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
let workspace =
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
// Test 1: Basic collapsing
{
let panel = workspace.update(cx, ProjectPanel::new).unwrap();
toggle_expand_dir(&panel, "root/dir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1/nested1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir2", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1",
" v subdir1",
" v nested1",
" file1.txt",
" file2.txt",
" v subdir2 <== selected",
" file4.txt",
" > dir2",
],
"Initial state with everything expanded"
);
let entry_id = find_project_entry(&panel, "root/dir1", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.collapse_all_for_entry(worktree.id(), entry_id, cx);
panel.update_visible_entries(None, cx);
});
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&["v root", " > dir1", " > dir2",],
"All subdirs under dir1 should be collapsed"
);
}
// Test 2: With auto-fold enabled
{
cx.update(|_, cx| {
let settings = *ProjectPanelSettings::get_global(cx);
ProjectPanelSettings::override_global(
ProjectPanelSettings {
auto_fold_dirs: true,
..settings
},
cx,
);
});
let panel = workspace.update(cx, ProjectPanel::new).unwrap();
toggle_expand_dir(&panel, "root/dir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1/nested1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1",
" v subdir1/nested1 <== selected",
" file1.txt",
" file2.txt",
" > subdir2",
" > dir2/single_file",
],
"Initial state with some dirs expanded"
);
let entry_id = find_project_entry(&panel, "root/dir1", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.collapse_all_for_entry(worktree.id(), entry_id, cx);
});
toggle_expand_dir(&panel, "root/dir1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" > subdir1/nested1",
" > subdir2",
" > dir2/single_file",
],
"Subdirs should be collapsed and folded with auto-fold enabled"
);
}
// Test 3: With auto-fold disabled
{
cx.update(|_, cx| {
let settings = *ProjectPanelSettings::get_global(cx);
ProjectPanelSettings::override_global(
ProjectPanelSettings {
auto_fold_dirs: false,
..settings
},
cx,
);
});
let panel = workspace.update(cx, ProjectPanel::new).unwrap();
toggle_expand_dir(&panel, "root/dir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1", cx);
toggle_expand_dir(&panel, "root/dir1/subdir1/nested1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1",
" v subdir1",
" v nested1 <== selected",
" file1.txt",
" file2.txt",
" > subdir2",
" > dir2",
],
"Initial state with some dirs expanded and auto-fold disabled"
);
let entry_id = find_project_entry(&panel, "root/dir1", cx).unwrap();
panel.update(cx, |panel, cx| {
let project = panel.project.read(cx);
let worktree = project.worktrees(cx).next().unwrap().read(cx);
panel.collapse_all_for_entry(worktree.id(), entry_id, cx);
});
toggle_expand_dir(&panel, "root/dir1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..20, cx),
&[
"v root",
" v dir1 <== selected",
" > subdir1",
" > subdir2",
" > dir2",
],
"Subdirs should be collapsed but not folded with auto-fold disabled"
);
}
}
fn select_path(
panel: &Entity<ProjectPanel>,
path: impl AsRef<Path>,


@ -73,7 +73,8 @@ message Envelope {
ProjectEntryResponse project_entry_response = 50;
ExpandProjectEntry expand_project_entry = 51;
ExpandProjectEntryResponse expand_project_entry_response = 52;
ExpandAllForProjectEntry expand_all_for_project_entry = 291;
ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292;
UpdateDiagnosticSummary update_diagnostic_summary = 53;
StartLanguageServer start_language_server = 54;
UpdateLanguageServer update_language_server = 55;
@ -630,6 +631,15 @@ message ExpandProjectEntryResponse {
uint64 worktree_scan_id = 1;
}
message ExpandAllForProjectEntry {
uint64 project_id = 1;
uint64 entry_id = 2;
}
message ExpandAllForProjectEntryResponse {
uint64 worktree_scan_id = 1;
}
message ProjectEntryResponse {
optional Entry entry = 1;
uint64 worktree_scan_id = 2;


@ -174,6 +174,8 @@ messages!(
(Error, Foreground),
(ExpandProjectEntry, Foreground),
(ExpandProjectEntryResponse, Foreground),
(ExpandAllForProjectEntry, Foreground),
(ExpandAllForProjectEntryResponse, Foreground),
(Follow, Foreground),
(FollowResponse, Foreground),
(FormatBuffers, Foreground),
@ -394,6 +396,7 @@ request_messages!(
(DeleteChannel, Ack),
(DeleteProjectEntry, ProjectEntryResponse),
(ExpandProjectEntry, ExpandProjectEntryResponse),
(ExpandAllForProjectEntry, ExpandAllForProjectEntryResponse),
(Follow, FollowResponse),
(FormatBuffers, FormatBuffersResponse),
(FuzzySearchUsers, UsersResponse),
@ -518,6 +521,7 @@ entity_messages!(
CreateProjectEntry,
DeleteProjectEntry,
ExpandProjectEntry,
ExpandAllForProjectEntry,
FindSearchCandidates,
FormatBuffers,
GetCodeActions,


@ -117,7 +117,7 @@ pub struct LoadedBinaryFile {
pub struct LocalWorktree {
snapshot: LocalSnapshot,
scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<PathPrefixScanRequest>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_tasks: Vec<Task<()>>,
update_observer: Option<UpdateObservationState>,
@ -129,6 +129,11 @@ pub struct LocalWorktree {
share_private_files: bool,
}
pub struct PathPrefixScanRequest {
path: Arc<Path>,
done: SmallVec<[barrier::Sender; 1]>,
}
struct ScanRequest {
relative_paths: Vec<Arc<Path>>,
done: SmallVec<[barrier::Sender; 1]>,
@ -1097,6 +1102,32 @@ impl Worktree {
}
}
pub fn expand_all_for_entry(
&mut self,
entry_id: ProjectEntryId,
cx: &Context<Worktree>,
) -> Option<Task<Result<()>>> {
match self {
Worktree::Local(this) => this.expand_all_for_entry(entry_id, cx),
Worktree::Remote(this) => {
let response = this.client.request(proto::ExpandAllForProjectEntry {
project_id: this.project_id,
entry_id: entry_id.to_proto(),
});
Some(cx.spawn(move |this, mut cx| async move {
let response = response.await?;
this.update(&mut cx, |this, _| {
this.as_remote_mut()
.unwrap()
.wait_for_snapshot(response.worktree_scan_id as usize)
})?
.await?;
Ok(())
}))
}
}
}
pub async fn handle_create_entry(
this: Entity<Self>,
request: proto::CreateProjectEntry,
@ -1154,6 +1185,21 @@ impl Worktree {
})
}
pub async fn handle_expand_all_for_entry(
this: Entity<Self>,
request: proto::ExpandAllForProjectEntry,
mut cx: AsyncAppContext,
) -> Result<proto::ExpandAllForProjectEntryResponse> {
let task = this.update(&mut cx, |this, cx| {
this.expand_all_for_entry(ProjectEntryId::from_proto(request.entry_id), cx)
})?;
task.ok_or_else(|| anyhow!("no such entry"))?.await?;
let scan_id = this.read_with(&cx, |this, _| this.scan_id())?;
Ok(proto::ExpandAllForProjectEntryResponse {
worktree_scan_id: scan_id as u64,
})
}
pub async fn handle_rename_entry(
this: Entity<Self>,
request: proto::RenameProjectEntry,
@ -1238,7 +1284,7 @@ impl LocalWorktree {
fn start_background_scanner(
&mut self,
scan_requests_rx: channel::Receiver<ScanRequest>,
path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
cx: &Context<Worktree>,
) {
let snapshot = self.snapshot();
@ -1961,6 +2007,19 @@ impl LocalWorktree {
}))
}
fn expand_all_for_entry(
&self,
entry_id: ProjectEntryId,
cx: &Context<Worktree>,
) -> Option<Task<Result<()>>> {
let path = self.entry_for_id(entry_id).unwrap().path.clone();
let mut rx = self.add_path_prefix_to_scan(path.clone());
Some(cx.background_executor().spawn(async move {
rx.next().await;
Ok(())
}))
}
fn refresh_entries_for_paths(&self, paths: Vec<Arc<Path>>) -> barrier::Receiver {
let (tx, rx) = barrier::channel();
self.scan_requests_tx
@ -1972,8 +2031,15 @@ impl LocalWorktree {
rx
}
pub fn add_path_prefix_to_scan(&self, path_prefix: Arc<Path>) -> barrier::Receiver {
let (tx, rx) = barrier::channel();
self.path_prefixes_to_scan_tx
.try_send(PathPrefixScanRequest {
path: path_prefix,
done: smallvec![tx],
})
.ok();
rx
}
fn refresh_entry(
@ -4007,7 +4073,7 @@ struct BackgroundScanner {
status_updates_tx: UnboundedSender<ScanState>,
executor: BackgroundExecutor,
scan_requests_rx: channel::Receiver<ScanRequest>,
path_prefixes_to_scan_rx: channel::Receiver<PathPrefixScanRequest>,
next_entry_id: Arc<AtomicUsize>,
phase: BackgroundScannerPhase,
watcher: Arc<dyn Watcher>,
@ -4132,23 +4198,24 @@ impl BackgroundScanner {
}
}
path_prefix_request = self.path_prefixes_to_scan_rx.recv().fuse() => {
let Ok(request) = path_prefix_request else { break };
log::trace!("adding path prefix {:?}", request.path);
let did_scan = self.forcibly_load_paths(&[request.path.clone()]).await;
if did_scan {
let abs_path =
{
let mut state = self.state.lock();
state.path_prefixes_to_scan.insert(request.path.clone());
state.snapshot.abs_path.as_path().join(&request.path)
};
if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() {
self.process_events(vec![abs_path]).await;
}
}
self.send_status_update(false, request.done);
}
paths = fs_events_rx.next().fuse() => {