From 01d9d53f4a66109532fe53fe5731f743dd05e57e Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 09:51:11 -0800 Subject: [PATCH 01/27] Adjust the type arrangement on ManagedViews --- .../command_palette2/src/command_palette.rs | 11 +++++--- crates/file_finder2/src/file_finder.rs | 15 ++++++----- crates/go_to_line2/src/go_to_line.rs | 16 ++++++----- crates/gpui2/src/window.rs | 13 +++------ crates/ui2/src/components/context_menu.rs | 27 ++++++++++--------- crates/workspace2/src/modal_layer.rs | 6 ++--- crates/workspace2/src/workspace2.rs | 12 ++++----- 7 files changed, 53 insertions(+), 47 deletions(-) diff --git a/crates/command_palette2/src/command_palette.rs b/crates/command_palette2/src/command_palette.rs index 9463cab68c..ddb54e1505 100644 --- a/crates/command_palette2/src/command_palette.rs +++ b/crates/command_palette2/src/command_palette.rs @@ -1,8 +1,9 @@ use collections::{CommandPaletteFilter, HashMap}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - actions, div, prelude::*, Action, AppContext, Component, Dismiss, Div, FocusHandle, Keystroke, - ManagedView, ParentComponent, Render, Styled, View, ViewContext, VisualContext, WeakView, + actions, div, prelude::*, Action, AppContext, Component, Div, EventEmitter, FocusHandle, + FocusableView, Keystroke, ManagedView, ParentComponent, Render, Styled, View, ViewContext, + VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; use std::{ @@ -68,7 +69,9 @@ impl CommandPalette { } } -impl ManagedView for CommandPalette { +impl EventEmitter for CommandPalette {} + +impl FocusableView for CommandPalette { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.picker.focus_handle(cx) } @@ -265,7 +268,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.command_palette - .update(cx, |_, cx| cx.emit(Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) .log_err(); } diff --git a/crates/file_finder2/src/file_finder.rs b/crates/file_finder2/src/file_finder.rs index 0fee5102e6..5312152f9f 100644 --- a/crates/file_finder2/src/file_finder.rs +++ b/crates/file_finder2/src/file_finder.rs @@ -2,9 +2,9 @@ use collections::HashMap; use editor::{scroll::autoscroll::Autoscroll, Bias, Editor}; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ - actions, div, AppContext, Component, Dismiss, Div, FocusHandle, InteractiveComponent, - ManagedView, Model, ParentComponent, Render, Styled, Task, View, ViewContext, VisualContext, - WeakView, + actions, div, AppContext, Component, Div, EventEmitter, FocusHandle, FocusableView, + InteractiveComponent, ManagedView, Model, ParentComponent, Render, Styled, Task, View, + ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId}; @@ -111,7 +111,8 @@ impl FileFinder { } } -impl ManagedView for FileFinder { +impl EventEmitter for FileFinder {} +impl FocusableView for FileFinder { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.picker.focus_handle(cx) } @@ -688,7 +689,9 @@ impl PickerDelegate for FileFinderDelegate { .log_err(); } } - finder.update(&mut cx, |_, cx| cx.emit(Dismiss)).ok()?; + finder + .update(&mut cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .ok()?; Some(()) }) @@ -699,7 +702,7 @@ impl PickerDelegate for FileFinderDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.file_finder - .update(cx, |_, cx| cx.emit(Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) 
.log_err(); } diff --git a/crates/go_to_line2/src/go_to_line.rs b/crates/go_to_line2/src/go_to_line.rs index 565afb5e93..04c3f74473 100644 --- a/crates/go_to_line2/src/go_to_line.rs +++ b/crates/go_to_line2/src/go_to_line.rs @@ -1,7 +1,8 @@ use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor}; use gpui::{ - actions, div, prelude::*, AppContext, Dismiss, Div, FocusHandle, ManagedView, ParentComponent, - Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext, + actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, + ManagedView, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, + VisualContext, WindowContext, }; use text::{Bias, Point}; use theme::ActiveTheme; @@ -23,11 +24,12 @@ pub struct GoToLine { _subscriptions: Vec, } -impl ManagedView for GoToLine { +impl FocusableView for GoToLine { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { - self.line_editor.focus_handle(cx) + self.active_editor.focus_handle(cx) } } +impl EventEmitter for GoToLine {} impl GoToLine { fn register(workspace: &mut Workspace, _: &mut ViewContext) { @@ -87,7 +89,7 @@ impl GoToLine { ) { match event { // todo!() this isn't working... - editor::Event::Blurred => cx.emit(Dismiss), + editor::Event::Blurred => cx.emit(ManagedView::Dismiss), editor::Event::BufferEdited { .. } => self.highlight_current_line(cx), _ => {} } @@ -122,7 +124,7 @@ impl GoToLine { } fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { @@ -139,7 +141,7 @@ impl GoToLine { self.prev_scroll_position.take(); } - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } } diff --git a/crates/gpui2/src/window.rs b/crates/gpui2/src/window.rs index 6d07f06d94..5234049943 100644 --- a/crates/gpui2/src/window.rs +++ b/crates/gpui2/src/window.rs @@ -193,17 +193,12 @@ pub trait FocusableView: Render { /// ManagedView is a view (like a Modal, Popover, Menu, etc.) /// where the lifecycle of the view is handled by another view. -pub trait ManagedView: Render { - fn focus_handle(&self, cx: &AppContext) -> FocusHandle; -} +pub trait Managed: FocusableView + EventEmitter {} -pub struct Dismiss; -impl EventEmitter for T {} +impl> Managed for M {} -impl FocusableView for T { - fn focus_handle(&self, cx: &AppContext) -> FocusHandle { - self.focus_handle(cx) - } +pub enum ManagedView { + Dismiss, } // Holds the state for a specific window. 
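
As a rough sketch of the pattern the window.rs hunk above introduces: the old `ManagedView` trait (which required a `focus_handle` method) becomes a `Managed` marker trait, blanket-implemented for any view that is both `FocusableView` and an `EventEmitter` of the new `ManagedView` event enum, so dismissal is an ordinary event (`ManagedView::Dismiss`) rather than a dedicated `Dismiss` type. The example below shows how a view would opt in. It is illustrative only: `ExampleModal` and `dismiss` are made-up names, and the `ManagedView` type parameter on `EventEmitter` is an assumption (the angle-bracketed generics do not survive in the hunks as rendered above).

    use gpui::{
        div, AppContext, Div, EventEmitter, FocusHandle, FocusableView, ManagedView, Render,
        ViewContext,
    };

    // Hypothetical modal view; not part of this patch series.
    struct ExampleModal {
        focus_handle: FocusHandle,
    }

    impl Render for ExampleModal {
        type Element = Div;

        fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
            div()
        }
    }

    // Tell the owning layer which focus handle to activate when this view is shown.
    impl FocusableView for ExampleModal {
        fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
            self.focus_handle.clone()
        }
    }

    // Opt into the dismiss protocol; the blanket impl in window.rs now treats
    // this type as `Managed`, so it can be handed to the workspace modal layer.
    impl EventEmitter<ManagedView> for ExampleModal {}

    impl ExampleModal {
        fn dismiss(&mut self, cx: &mut ViewContext<Self>) {
            // The owner subscribes to this event and tears the view down.
            cx.emit(ManagedView::Dismiss);
        }
    }

Modeling dismissal as an event, rather than a method the owner must call, lets the owning view simply subscribe to the child and drop it when `Dismiss` arrives, which is what the `MenuHandle` element and the workspace modal layer do in the hunks that follow.
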
diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index d3214cbff1..c6a0d9689a 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -4,8 +4,9 @@ use std::rc::Rc; use crate::prelude::*; use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader}; use gpui::{ - overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div, - FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, + overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div, + EventEmitter, FocusHandle, FocusableView, LayoutId, Managed, ManagedView, MouseButton, + MouseDownEvent, Pixels, Point, Render, View, }; pub struct ContextMenu { @@ -13,12 +14,14 @@ pub struct ContextMenu { focus_handle: FocusHandle, } -impl ManagedView for ContextMenu { - fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle { +impl FocusableView for ContextMenu { + fn focus_handle(&self, _cx: &AppContext) -> FocusHandle { self.focus_handle.clone() } } +impl EventEmitter for ContextMenu {} + impl ContextMenu { pub fn new(cx: &mut WindowContext) -> Self { Self { @@ -44,11 +47,11 @@ impl ContextMenu { pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } } @@ -76,7 +79,7 @@ impl Render for ContextMenu { } } -pub struct MenuHandle { +pub struct MenuHandle { id: Option, child_builder: Option AnyElement + 'static>>, menu_builder: Option) -> View + 'static>>, @@ -85,7 +88,7 @@ pub struct MenuHandle { attach: Option, } -impl MenuHandle { +impl MenuHandle { pub fn id(mut self, id: impl Into) -> Self { self.id = Some(id.into()); self @@ -115,7 +118,7 @@ impl MenuHandle { } } -pub fn menu_handle() -> MenuHandle { +pub fn menu_handle() -> MenuHandle { MenuHandle { id: None, child_builder: None, @@ -132,7 +135,7 @@ pub struct MenuHandleState { child_element: Option>, menu_element: Option>, } -impl Element for MenuHandle { +impl Element for MenuHandle { type ElementState = MenuHandleState; fn element_id(&self) -> Option { @@ -226,7 +229,7 @@ impl Element for MenuHandle { let new_menu = (builder)(view_state, cx); let menu2 = menu.clone(); cx.subscribe(&new_menu, move |this, modal, e, cx| match e { - &Dismiss => { + &ManagedView::Dismiss => { *menu2.borrow_mut() = None; cx.notify(); } @@ -247,7 +250,7 @@ impl Element for MenuHandle { } } -impl Component for MenuHandle { +impl Component for MenuHandle { fn render(self) -> AnyElement { AnyElement::new(self) } diff --git a/crates/workspace2/src/modal_layer.rs b/crates/workspace2/src/modal_layer.rs index 8afd8317f9..3ed50fefdf 100644 --- a/crates/workspace2/src/modal_layer.rs +++ b/crates/workspace2/src/modal_layer.rs @@ -1,5 +1,5 @@ use gpui::{ - div, prelude::*, px, AnyView, Div, FocusHandle, ManagedView, Render, Subscription, View, + div, prelude::*, px, AnyView, Div, FocusHandle, Managed, Render, Subscription, View, ViewContext, }; use ui::{h_stack, v_stack}; @@ -22,7 +22,7 @@ impl ModalLayer { pub fn toggle_modal(&mut self, cx: &mut ViewContext, build_view: B) where - V: ManagedView, + V: Managed, B: FnOnce(&mut ViewContext) -> V, { if let Some(active_modal) = &self.active_modal { @@ -38,7 +38,7 @@ impl ModalLayer { pub fn show_modal(&mut self, new_modal: View, cx: &mut ViewContext) where - V: 
ManagedView, + V: Managed, { self.active_modal = Some(ActiveModal { modal: new_modal.clone().into(), diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index 08d248f6f2..34ebdbd3e2 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -31,10 +31,10 @@ use futures::{ use gpui::{ actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, - FocusableView, GlobalPixels, InteractiveComponent, KeyContext, ManagedView, Model, - ModelContext, ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, - Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, - WindowHandle, WindowOptions, + FocusableView, GlobalPixels, InteractiveComponent, KeyContext, Managed, Model, ModelContext, + ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, Subscription, + Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, WindowHandle, + WindowOptions, }; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; use itertools::Itertools; @@ -3364,14 +3364,14 @@ impl Workspace { div } - pub fn active_modal( + pub fn active_modal( &mut self, cx: &ViewContext, ) -> Option> { self.modal_layer.read(cx).active_modal() } - pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) + pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) where B: FnOnce(&mut ViewContext) -> V, { From 17d53d0e38420ba34b324453fd5df96bbf48d472 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 10:06:41 -0800 Subject: [PATCH 02/27] Rename again, add fun cx APIs using new traits --- .../command_palette2/src/command_palette.rs | 6 ++--- crates/file_finder2/src/file_finder.rs | 8 +++---- crates/go_to_line2/src/go_to_line.rs | 10 ++++---- crates/gpui2/src/app/async_context.rs | 11 ++++++++- crates/gpui2/src/app/test_context.rs | 11 +++++++++ crates/gpui2/src/gpui2.rs | 4 ++++ crates/gpui2/src/window.rs | 24 ++++++++++++++++--- crates/ui2/src/components/context_menu.rs | 20 ++++++++-------- crates/workspace2/src/modal_layer.rs | 6 ++--- crates/workspace2/src/workspace2.rs | 12 +++++----- 10 files changed, 77 insertions(+), 35 deletions(-) diff --git a/crates/command_palette2/src/command_palette.rs b/crates/command_palette2/src/command_palette.rs index ddb54e1505..678fec7dac 100644 --- a/crates/command_palette2/src/command_palette.rs +++ b/crates/command_palette2/src/command_palette.rs @@ -2,7 +2,7 @@ use collections::{CommandPaletteFilter, HashMap}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ actions, div, prelude::*, Action, AppContext, Component, Div, EventEmitter, FocusHandle, - FocusableView, Keystroke, ManagedView, ParentComponent, Render, Styled, View, ViewContext, + FocusableView, Keystroke, ManagedEvent, ParentComponent, Render, Styled, View, ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; @@ -69,7 +69,7 @@ impl CommandPalette { } } -impl EventEmitter for CommandPalette {} +impl EventEmitter for CommandPalette {} impl FocusableView for CommandPalette { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { @@ -268,7 +268,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.command_palette - .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(cx, |_, cx| 
cx.emit(ManagedEvent::Dismiss)) .log_err(); } diff --git a/crates/file_finder2/src/file_finder.rs b/crates/file_finder2/src/file_finder.rs index 5312152f9f..d64a27e74a 100644 --- a/crates/file_finder2/src/file_finder.rs +++ b/crates/file_finder2/src/file_finder.rs @@ -3,7 +3,7 @@ use editor::{scroll::autoscroll::Autoscroll, Bias, Editor}; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ actions, div, AppContext, Component, Div, EventEmitter, FocusHandle, FocusableView, - InteractiveComponent, ManagedView, Model, ParentComponent, Render, Styled, Task, View, + InteractiveComponent, ManagedEvent, Model, ParentComponent, Render, Styled, Task, View, ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; @@ -111,7 +111,7 @@ impl FileFinder { } } -impl EventEmitter for FileFinder {} +impl EventEmitter for FileFinder {} impl FocusableView for FileFinder { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.picker.focus_handle(cx) @@ -690,7 +690,7 @@ impl PickerDelegate for FileFinderDelegate { } } finder - .update(&mut cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(&mut cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) .ok()?; Some(()) @@ -702,7 +702,7 @@ impl PickerDelegate for FileFinderDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.file_finder - .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) .log_err(); } diff --git a/crates/go_to_line2/src/go_to_line.rs b/crates/go_to_line2/src/go_to_line.rs index 04c3f74473..ec0d8eca5d 100644 --- a/crates/go_to_line2/src/go_to_line.rs +++ b/crates/go_to_line2/src/go_to_line.rs @@ -1,7 +1,7 @@ use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor}; use gpui::{ actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, - ManagedView, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, + ManagedEvent, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext, }; use text::{Bias, Point}; @@ -29,7 +29,7 @@ impl FocusableView for GoToLine { self.active_editor.focus_handle(cx) } } -impl EventEmitter for GoToLine {} +impl EventEmitter for GoToLine {} impl GoToLine { fn register(workspace: &mut Workspace, _: &mut ViewContext) { @@ -89,7 +89,7 @@ impl GoToLine { ) { match event { // todo!() this isn't working... - editor::Event::Blurred => cx.emit(ManagedView::Dismiss), + editor::Event::Blurred => cx.emit(ManagedEvent::Dismiss), editor::Event::BufferEdited { .. 
} => self.highlight_current_line(cx), _ => {} } @@ -124,7 +124,7 @@ impl GoToLine { } fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { @@ -141,7 +141,7 @@ impl GoToLine { self.prev_scroll_position.take(); } - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } } diff --git a/crates/gpui2/src/app/async_context.rs b/crates/gpui2/src/app/async_context.rs index 83b3ccebe7..04a26f663b 100644 --- a/crates/gpui2/src/app/async_context.rs +++ b/crates/gpui2/src/app/async_context.rs @@ -1,6 +1,6 @@ use crate::{ AnyView, AnyWindowHandle, AppCell, AppContext, BackgroundExecutor, Context, FocusableView, - ForegroundExecutor, Model, ModelContext, Render, Result, Task, View, ViewContext, + ForegroundExecutor, ManagedEvent, Model, ModelContext, Render, Result, Task, View, ViewContext, VisualContext, WindowContext, WindowHandle, }; use anyhow::{anyhow, Context as _}; @@ -320,4 +320,13 @@ impl VisualContext for AsyncWindowContext { view.read(cx).focus_handle(cx).clone().focus(cx); }) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: crate::ManagedView, + { + self.window.update(self, |_, cx| { + view.update(cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) + }) + } } diff --git a/crates/gpui2/src/app/test_context.rs b/crates/gpui2/src/app/test_context.rs index 940492573f..a3058aa080 100644 --- a/crates/gpui2/src/app/test_context.rs +++ b/crates/gpui2/src/app/test_context.rs @@ -579,6 +579,17 @@ impl<'a> VisualContext for VisualTestContext<'a> { }) .unwrap() } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: crate::ManagedView, + { + self.window + .update(self.cx, |_, cx| { + view.update(cx, |_, cx| cx.emit(crate::ManagedEvent::Dismiss)) + }) + .unwrap() + } } impl AnyWindowHandle { diff --git a/crates/gpui2/src/gpui2.rs b/crates/gpui2/src/gpui2.rs index 88ecd52c03..a24509386b 100644 --- a/crates/gpui2/src/gpui2.rs +++ b/crates/gpui2/src/gpui2.rs @@ -141,6 +141,10 @@ pub trait VisualContext: Context { fn focus_view(&mut self, view: &View) -> Self::Result<()> where V: FocusableView; + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: ManagedView; } pub trait Entity: Sealed { diff --git a/crates/gpui2/src/window.rs b/crates/gpui2/src/window.rs index 5234049943..e4ae3f9c03 100644 --- a/crates/gpui2/src/window.rs +++ b/crates/gpui2/src/window.rs @@ -193,11 +193,11 @@ pub trait FocusableView: Render { /// ManagedView is a view (like a Modal, Popover, Menu, etc.) /// where the lifecycle of the view is handled by another view. 
-pub trait Managed: FocusableView + EventEmitter {} +pub trait ManagedView: FocusableView + EventEmitter {} -impl> Managed for M {} +impl> ManagedView for M {} -pub enum ManagedView { +pub enum ManagedEvent { Dismiss, } @@ -1577,6 +1577,13 @@ impl VisualContext for WindowContext<'_> { view.focus_handle(cx).clone().focus(cx); }) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: ManagedView, + { + self.update_view(view, |_, cx| cx.emit(ManagedEvent::Dismiss)) + } } impl<'a> std::ops::Deref for WindowContext<'a> { @@ -2270,6 +2277,13 @@ impl<'a, V: 'static> ViewContext<'a, V> { { self.defer(|view, cx| view.focus_handle(cx).focus(cx)) } + + pub fn dismiss_self(&mut self) + where + V: ManagedView, + { + self.defer(|_, cx| cx.emit(ManagedEvent::Dismiss)) + } } impl Context for ViewContext<'_, V> { @@ -2349,6 +2363,10 @@ impl VisualContext for ViewContext<'_, V> { fn focus_view(&mut self, view: &View) -> Self::Result<()> { self.window_cx.focus_view(view) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> { + self.window_cx.dismiss_view(view) + } } impl<'a, V> std::ops::Deref for ViewContext<'a, V> { diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index c6a0d9689a..80c22cbbcf 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -5,7 +5,7 @@ use crate::prelude::*; use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader}; use gpui::{ overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div, - EventEmitter, FocusHandle, FocusableView, LayoutId, Managed, ManagedView, MouseButton, + EventEmitter, FocusHandle, FocusableView, LayoutId, ManagedEvent, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, }; @@ -20,7 +20,7 @@ impl FocusableView for ContextMenu { } } -impl EventEmitter for ContextMenu {} +impl EventEmitter for ContextMenu {} impl ContextMenu { pub fn new(cx: &mut WindowContext) -> Self { @@ -47,11 +47,11 @@ impl ContextMenu { pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } } @@ -79,7 +79,7 @@ impl Render for ContextMenu { } } -pub struct MenuHandle { +pub struct MenuHandle { id: Option, child_builder: Option AnyElement + 'static>>, menu_builder: Option) -> View + 'static>>, @@ -88,7 +88,7 @@ pub struct MenuHandle { attach: Option, } -impl MenuHandle { +impl MenuHandle { pub fn id(mut self, id: impl Into) -> Self { self.id = Some(id.into()); self @@ -118,7 +118,7 @@ impl MenuHandle { } } -pub fn menu_handle() -> MenuHandle { +pub fn menu_handle() -> MenuHandle { MenuHandle { id: None, child_builder: None, @@ -135,7 +135,7 @@ pub struct MenuHandleState { child_element: Option>, menu_element: Option>, } -impl Element for MenuHandle { +impl Element for MenuHandle { type ElementState = MenuHandleState; fn element_id(&self) -> Option { @@ -229,7 +229,7 @@ impl Element for MenuHandle { let new_menu = (builder)(view_state, cx); let menu2 = menu.clone(); cx.subscribe(&new_menu, move |this, modal, e, cx| match e { - &ManagedView::Dismiss => { + &ManagedEvent::Dismiss => { *menu2.borrow_mut() = None; cx.notify(); } @@ -250,7 +250,7 @@ impl Element for MenuHandle { } } -impl Component for MenuHandle { +impl Component for MenuHandle { fn render(self) -> 
AnyElement { AnyElement::new(self) } diff --git a/crates/workspace2/src/modal_layer.rs b/crates/workspace2/src/modal_layer.rs index 3ed50fefdf..8afd8317f9 100644 --- a/crates/workspace2/src/modal_layer.rs +++ b/crates/workspace2/src/modal_layer.rs @@ -1,5 +1,5 @@ use gpui::{ - div, prelude::*, px, AnyView, Div, FocusHandle, Managed, Render, Subscription, View, + div, prelude::*, px, AnyView, Div, FocusHandle, ManagedView, Render, Subscription, View, ViewContext, }; use ui::{h_stack, v_stack}; @@ -22,7 +22,7 @@ impl ModalLayer { pub fn toggle_modal(&mut self, cx: &mut ViewContext, build_view: B) where - V: Managed, + V: ManagedView, B: FnOnce(&mut ViewContext) -> V, { if let Some(active_modal) = &self.active_modal { @@ -38,7 +38,7 @@ impl ModalLayer { pub fn show_modal(&mut self, new_modal: View, cx: &mut ViewContext) where - V: Managed, + V: ManagedView, { self.active_modal = Some(ActiveModal { modal: new_modal.clone().into(), diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index 34ebdbd3e2..08d248f6f2 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -31,10 +31,10 @@ use futures::{ use gpui::{ actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, - FocusableView, GlobalPixels, InteractiveComponent, KeyContext, Managed, Model, ModelContext, - ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, Subscription, - Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, WindowHandle, - WindowOptions, + FocusableView, GlobalPixels, InteractiveComponent, KeyContext, ManagedView, Model, + ModelContext, ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, + Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, + WindowHandle, WindowOptions, }; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; use itertools::Itertools; @@ -3364,14 +3364,14 @@ impl Workspace { div } - pub fn active_modal( + pub fn active_modal( &mut self, cx: &ViewContext, ) -> Option> { self.modal_layer.read(cx).active_modal() } - pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) + pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) where B: FnOnce(&mut ViewContext) -> V, { From 9d742b90c386594d7962c4419c4d2e8505a9f355 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 11:57:51 -0700 Subject: [PATCH 03/27] Allow you to click on a context menu item --- crates/gpui2/src/app/entity_map.rs | 11 ++- crates/gpui2/src/elements/div.rs | 5 + crates/gpui2/src/platform/mac/window.rs | 5 +- crates/ui2/src/components/context_menu.rs | 109 +++++++++++++++++----- crates/ui2/src/components/list.rs | 74 ++++++++------- crates/ui2/src/static_data.rs | 8 +- 6 files changed, 139 insertions(+), 73 deletions(-) diff --git a/crates/gpui2/src/app/entity_map.rs b/crates/gpui2/src/app/entity_map.rs index 4a3cca040b..f1e7fad6a1 100644 --- a/crates/gpui2/src/app/entity_map.rs +++ b/crates/gpui2/src/app/entity_map.rs @@ -71,11 +71,12 @@ impl EntityMap { #[track_caller] pub fn lease<'a, T>(&mut self, model: &'a Model) -> Lease<'a, T> { self.assert_valid_context(model); - let entity = Some( - self.entities - .remove(model.entity_id) - .expect("Circular entity lease. 
Is the entity already being updated?"), - ); + let entity = Some(self.entities.remove(model.entity_id).unwrap_or_else(|| { + panic!( + "Circular entity lease of {}. Is it already being updated?", + std::any::type_name::() + ) + })); Lease { model, entity, diff --git a/crates/gpui2/src/elements/div.rs b/crates/gpui2/src/elements/div.rs index f9560f2c53..a37e3dee2a 100644 --- a/crates/gpui2/src/elements/div.rs +++ b/crates/gpui2/src/elements/div.rs @@ -1124,9 +1124,14 @@ where } } } + // if self.hover_style.is_some() { if bounds.contains_point(&mouse_position) { + // eprintln!("div hovered {bounds:?} {mouse_position:?}"); style.refine(&self.hover_style); + } else { + // eprintln!("div NOT hovered {bounds:?} {mouse_position:?}"); } + // } if let Some(drag) = cx.active_drag.take() { for (state_type, group_drag_style) in &self.group_drag_over_styles { diff --git a/crates/gpui2/src/platform/mac/window.rs b/crates/gpui2/src/platform/mac/window.rs index 03782d13a8..bb3a659a62 100644 --- a/crates/gpui2/src/platform/mac/window.rs +++ b/crates/gpui2/src/platform/mac/window.rs @@ -1205,10 +1205,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { InputEvent::MouseMove(_) if !(is_active || lock.kind == WindowKind::PopUp) => return, - InputEvent::MouseUp(MouseUpEvent { - button: MouseButton::Left, - .. - }) => { + InputEvent::MouseUp(MouseUpEvent { .. }) => { lock.synthetic_drag_counter += 1; } diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index d3214cbff1..b1e30dbe3b 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -6,39 +6,66 @@ use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHea use gpui::{ overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div, FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, + VisualContext, WeakView, }; -pub struct ContextMenu { - items: Vec, - focus_handle: FocusHandle, +pub enum ContextMenuItem { + Separator(ListSeparator), + Header(ListSubHeader), + Entry( + ListEntry>, + Rc)>, + ), } -impl ManagedView for ContextMenu { +pub struct ContextMenu { + items: Vec>, + focus_handle: FocusHandle, + handle: WeakView, +} + +impl ManagedView for ContextMenu { fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle { self.focus_handle.clone() } } -impl ContextMenu { - pub fn new(cx: &mut WindowContext) -> Self { - Self { - items: Default::default(), - focus_handle: cx.focus_handle(), - } +impl ContextMenu { + pub fn build( + cx: &mut ViewContext, + f: impl FnOnce(Self, &mut ViewContext) -> Self, + ) -> View { + let handle = cx.view().downgrade(); + cx.build_view(|cx| { + f( + Self { + handle, + items: Default::default(), + focus_handle: cx.focus_handle(), + }, + cx, + ) + }) } pub fn header(mut self, title: impl Into) -> Self { - self.items.push(ListItem::Header(ListSubHeader::new(title))); + self.items + .push(ContextMenuItem::Header(ListSubHeader::new(title))); self } pub fn separator(mut self) -> Self { - self.items.push(ListItem::Separator(ListSeparator)); + self.items.push(ContextMenuItem::Separator(ListSeparator)); self } - pub fn entry(mut self, label: Label, action: Box) -> Self { - self.items.push(ListEntry::new(label).action(action).into()); + pub fn entry( + mut self, + view: ListEntry, + on_click: impl Fn(&mut V, &mut ViewContext) + 'static, + ) -> Self { + self.items + .push(ContextMenuItem::Entry(view, Rc::new(on_click))); self } @@ 
-52,9 +79,9 @@ impl ContextMenu { } } -impl Render for ContextMenu { +impl Render for ContextMenu { type Element = Div; - // todo!() + fn render(&mut self, cx: &mut ViewContext) -> Self::Element { div().elevation_2(cx).flex().flex_row().child( v_stack() @@ -71,7 +98,25 @@ impl Render for ContextMenu { // .bg(cx.theme().colors().elevated_surface_background) // .border() // .border_color(cx.theme().colors().border) - .child(List::new(self.items.clone())), + .child(List::new( + self.items + .iter() + .map(|item| match item { + ContextMenuItem::Separator(separator) => { + ListItem::Separator(separator.clone()) + } + ContextMenuItem::Header(header) => ListItem::Header(header.clone()), + ContextMenuItem::Entry(entry, callback) => { + let callback = callback.clone(); + let handle = self.handle.clone(); + ListItem::Entry(entry.clone().on_click(move |this, cx| { + handle.update(cx, |view, cx| callback(view, cx)).ok(); + cx.emit(Dismiss); + })) + } + }) + .collect(), + )), ) } } @@ -232,6 +277,7 @@ impl Element for MenuHandle { } }) .detach(); + cx.focus_view(&new_menu); *menu.borrow_mut() = Some(new_menu); *position.borrow_mut() = if attach.is_some() && child_layout_id.is_some() { @@ -260,16 +306,25 @@ pub use stories::*; mod stories { use super::*; use crate::story::Story; - use gpui::{actions, Div, Render, VisualContext}; + use gpui::{actions, Div, Render}; - actions!(PrintCurrentDate); + actions!(PrintCurrentDate, PrintBestFood); - fn build_menu(cx: &mut WindowContext, header: impl Into) -> View { - cx.build_view(|cx| { - ContextMenu::new(cx).header(header).separator().entry( - Label::new("Print current time"), - PrintCurrentDate.boxed_clone(), - ) + fn build_menu( + cx: &mut ViewContext, + header: impl Into, + ) -> View> { + let handle = cx.view().clone(); + ContextMenu::build(cx, |menu, _| { + menu.header(header) + .separator() + .entry(ListEntry::new(Label::new("Print current time")), |v, cx| { + println!("dispatching PrintCurrentTime action"); + cx.dispatch_action(PrintCurrentDate.boxed_clone()) + }) + .entry(ListEntry::new(Label::new("Print best food")), |v, cx| { + cx.dispatch_action(PrintBestFood.boxed_clone()) + }) }) } @@ -281,10 +336,14 @@ mod stories { fn render(&mut self, cx: &mut ViewContext) -> Self::Element { Story::container(cx) .on_action(|_, _: &PrintCurrentDate, _| { + println!("printing unix time!"); if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() { println!("Current Unix time is {:?}", unix_time.as_secs()); } }) + .on_action(|_, _: &PrintBestFood, _| { + println!("burrito"); + }) .flex() .flex_row() .justify_between() diff --git a/crates/ui2/src/components/list.rs b/crates/ui2/src/components/list.rs index b9508c5413..07ff577ce0 100644 --- a/crates/ui2/src/components/list.rs +++ b/crates/ui2/src/components/list.rs @@ -1,4 +1,6 @@ -use gpui::{div, Action}; +use std::rc::Rc; + +use gpui::{div, Div, Stateful, StatefulInteractiveComponent}; use crate::settings::user_settings; use crate::{ @@ -172,35 +174,35 @@ pub enum ListEntrySize { Medium, } -#[derive(Component, Clone)] -pub enum ListItem { - Entry(ListEntry), +#[derive(Clone)] +pub enum ListItem { + Entry(ListEntry), Separator(ListSeparator), Header(ListSubHeader), } -impl From for ListItem { - fn from(entry: ListEntry) -> Self { +impl From> for ListItem { + fn from(entry: ListEntry) -> Self { Self::Entry(entry) } } -impl From for ListItem { +impl From for ListItem { fn from(entry: ListSeparator) -> Self { Self::Separator(entry) } } -impl From for ListItem { +impl From for ListItem { fn from(entry: ListSubHeader) -> 
Self { Self::Header(entry) } } -impl ListItem { - fn render(self, view: &mut V, cx: &mut ViewContext) -> impl Component { +impl ListItem { + fn render(self, view: &mut V, ix: usize, cx: &mut ViewContext) -> impl Component { match self { - ListItem::Entry(entry) => div().child(entry.render(view, cx)), + ListItem::Entry(entry) => div().child(entry.render(ix, cx)), ListItem::Separator(separator) => div().child(separator.render(view, cx)), ListItem::Header(header) => div().child(header.render(view, cx)), } @@ -210,7 +212,7 @@ impl ListItem { Self::Entry(ListEntry::new(label)) } - pub fn as_entry(&mut self) -> Option<&mut ListEntry> { + pub fn as_entry(&mut self) -> Option<&mut ListEntry> { if let Self::Entry(entry) = self { Some(entry) } else { @@ -219,8 +221,7 @@ impl ListItem { } } -#[derive(Component)] -pub struct ListEntry { +pub struct ListEntry { disabled: bool, // TODO: Reintroduce this // disclosure_control_style: DisclosureControlVisibility, @@ -231,15 +232,13 @@ pub struct ListEntry { size: ListEntrySize, toggle: Toggle, variant: ListItemVariant, - on_click: Option>, + on_click: Option) + 'static>>, } -impl Clone for ListEntry { +impl Clone for ListEntry { fn clone(&self) -> Self { Self { disabled: self.disabled, - // TODO: Reintroduce this - // disclosure_control_style: DisclosureControlVisibility, indent_level: self.indent_level, label: self.label.clone(), left_slot: self.left_slot.clone(), @@ -247,12 +246,12 @@ impl Clone for ListEntry { size: self.size, toggle: self.toggle, variant: self.variant, - on_click: self.on_click.as_ref().map(|opt| opt.boxed_clone()), + on_click: self.on_click.clone(), } } } -impl ListEntry { +impl ListEntry { pub fn new(label: Label) -> Self { Self { disabled: false, @@ -267,8 +266,8 @@ impl ListEntry { } } - pub fn action(mut self, action: impl Into>) -> Self { - self.on_click = Some(action.into()); + pub fn on_click(mut self, handler: impl Fn(&mut V, &mut ViewContext) + 'static) -> Self { + self.on_click = Some(Rc::new(handler)); self } @@ -307,7 +306,7 @@ impl ListEntry { self } - fn render(self, _view: &mut V, cx: &mut ViewContext) -> impl Component { + fn render(self, ix: usize, cx: &mut ViewContext) -> Stateful> { let settings = user_settings(cx); let left_content = match self.left_slot.clone() { @@ -328,21 +327,21 @@ impl ListEntry { ListEntrySize::Medium => div().h_7(), }; div() + .id(ix) .relative() .hover(|mut style| { style.background = Some(cx.theme().colors().editor_background.into()); style }) - .on_mouse_down(gpui::MouseButton::Left, { - let action = self.on_click.map(|action| action.boxed_clone()); + .on_click({ + let on_click = self.on_click.clone(); - move |entry: &mut V, event, cx| { - if let Some(action) = action.as_ref() { - cx.dispatch_action(action.boxed_clone()); + move |view: &mut V, event, cx| { + if let Some(on_click) = &on_click { + (on_click)(view, cx) } } }) - .group("") .bg(cx.theme().colors().surface_background) // TODO: Add focus state // .when(self.state == InteractionState::Focused, |this| { @@ -391,8 +390,8 @@ impl ListSeparator { } #[derive(Component)] -pub struct List { - items: Vec, +pub struct List { + items: Vec>, /// Message to display when the list is empty /// Defaults to "No items" empty_message: SharedString, @@ -400,8 +399,8 @@ pub struct List { toggle: Toggle, } -impl List { - pub fn new(items: Vec) -> Self { +impl List { + pub fn new(items: Vec>) -> Self { Self { items, empty_message: "No items".into(), @@ -425,9 +424,14 @@ impl List { self } - fn render(self, _view: &mut V, cx: &mut ViewContext) -> impl 
Component { + fn render(self, view: &mut V, cx: &mut ViewContext) -> impl Component { let list_content = match (self.items.is_empty(), self.toggle) { - (false, _) => div().children(self.items), + (false, _) => div().children( + self.items + .into_iter() + .enumerate() + .map(|(ix, item)| item.render(view, ix, cx)), + ), (true, Toggle::Toggled(false)) => div(), (true, _) => { div().child(Label::new(self.empty_message.clone()).color(TextColor::Muted)) diff --git a/crates/ui2/src/static_data.rs b/crates/ui2/src/static_data.rs index bb81d6230f..dd296cee5d 100644 --- a/crates/ui2/src/static_data.rs +++ b/crates/ui2/src/static_data.rs @@ -478,7 +478,7 @@ pub fn static_new_notification_items_2() -> Vec> { ] } -pub fn static_project_panel_project_items() -> Vec { +pub fn static_project_panel_project_items() -> Vec> { vec![ ListEntry::new(Label::new("zed")) .left_icon(Icon::FolderOpen.into()) @@ -605,7 +605,7 @@ pub fn static_project_panel_project_items() -> Vec { .collect() } -pub fn static_project_panel_single_items() -> Vec { +pub fn static_project_panel_single_items() -> Vec> { vec![ ListEntry::new(Label::new("todo.md")) .left_icon(Icon::FileDoc.into()) @@ -622,7 +622,7 @@ pub fn static_project_panel_single_items() -> Vec { .collect() } -pub fn static_collab_panel_current_call() -> Vec { +pub fn static_collab_panel_current_call() -> Vec> { vec![ ListEntry::new(Label::new("as-cii")).left_avatar("http://github.com/as-cii.png?s=50"), ListEntry::new(Label::new("nathansobo")) @@ -635,7 +635,7 @@ pub fn static_collab_panel_current_call() -> Vec { .collect() } -pub fn static_collab_panel_channels() -> Vec { +pub fn static_collab_panel_channels() -> Vec> { vec![ ListEntry::new(Label::new("zed")) .left_icon(Icon::Hash.into()) From eb04160d2d9877fedd8efd5219c0e96c1cad0bf7 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 12:14:06 -0700 Subject: [PATCH 04/27] Dock menu --- crates/terminal_view2/src/terminal_view.rs | 13 ++++---- crates/ui2/src/components/context_menu.rs | 5 +++ crates/workspace2/src/dock.rs | 39 ++++++++++++++++++---- 3 files changed, 44 insertions(+), 13 deletions(-) diff --git a/crates/terminal_view2/src/terminal_view.rs b/crates/terminal_view2/src/terminal_view.rs index b6ab7e86b9..56de1ee7ef 100644 --- a/crates/terminal_view2/src/terminal_view.rs +++ b/crates/terminal_view2/src/terminal_view.rs @@ -32,7 +32,7 @@ use workspace::{ notifications::NotifyResultExt, register_deserializable_item, searchable::{SearchEvent, SearchOptions, SearchableItem}, - ui::{ContextMenu, Label}, + ui::{ContextMenu, Label, ListEntry}, CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId, }; @@ -85,7 +85,7 @@ pub struct TerminalView { has_new_content: bool, //Currently using iTerm bell, show bell emoji in tab until input is received has_bell: bool, - context_menu: Option>, + context_menu: Option>>, blink_state: bool, blinking_on: bool, blinking_paused: bool, @@ -300,11 +300,10 @@ impl TerminalView { position: gpui::Point, cx: &mut ViewContext, ) { - self.context_menu = Some(cx.build_view(|cx| { - ContextMenu::new(cx) - .entry(Label::new("Clear"), Box::new(Clear)) - .entry( - Label::new("Close"), + self.context_menu = Some(ContextMenu::build(cx, |menu, _| { + menu.action(ListEntry::new(Label::new("Clear")), Box::new(Clear)) + .action( + ListEntry::new(Label::new("Close")), Box::new(CloseActiveItem { save_intent: None }), ) })); diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index b1e30dbe3b..8bbc0c1b0c 100644 
--- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -69,6 +69,11 @@ impl ContextMenu { self } + pub fn action(self, view: ListEntry, action: Box) -> Self { + // todo: add the keybindings to the list entry + self.entry(view, move |_, cx| cx.dispatch_action(action.boxed_clone())) + } + pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() cx.emit(Dismiss); diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index f62633e439..07237d6f62 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -8,7 +8,9 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::sync::Arc; use theme2::ActiveTheme; -use ui::{h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Tooltip}; +use ui::{ + h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Label, ListEntry, Tooltip, +}; pub enum PanelEvent { ChangePosition, @@ -672,6 +674,7 @@ impl Render for PanelButtons { let dock = self.dock.read(cx); let active_index = dock.active_panel_index; let is_open = dock.is_open; + let dock_position = dock.position; let (menu_anchor, menu_attach) = match dock.position { DockPosition::Left => (AnchorCorner::BottomLeft, AnchorCorner::TopLeft), @@ -684,9 +687,10 @@ impl Render for PanelButtons { .panel_entries .iter() .enumerate() - .filter_map(|(i, panel)| { - let icon = panel.panel.icon(cx)?; - let name = panel.panel.persistent_name(); + .filter_map(|(i, entry)| { + let icon = entry.panel.icon(cx)?; + let name = entry.panel.persistent_name(); + let panel = entry.panel.clone(); let mut button: IconButton = if i == active_index && is_open { let action = dock.toggle_action(); @@ -697,7 +701,7 @@ impl Render for PanelButtons { .action(action.boxed_clone()) .tooltip(move |_, cx| Tooltip::for_action(tooltip.clone(), &*action, cx)) } else { - let action = panel.panel.toggle_action(cx); + let action = entry.panel.toggle_action(cx); IconButton::new(name, icon) .action(action.boxed_clone()) @@ -708,7 +712,30 @@ impl Render for PanelButtons { menu_handle() .id(name) .menu(move |_, cx| { - cx.build_view(|cx| ContextMenu::new(cx).header("SECTION")) + const POSITIONS: [DockPosition; 3] = [ + DockPosition::Left, + DockPosition::Right, + DockPosition::Bottom, + ]; + ContextMenu::build(cx, |mut menu, cx| { + for position in POSITIONS { + if position != dock_position + && panel.position_is_valid(position, cx) + { + let panel = panel.clone(); + menu = menu.entry( + ListEntry::new(Label::new(format!( + "Dock {}", + position.to_label() + ))), + move |_, cx| { + panel.set_position(position, cx); + }, + ) + } + } + menu + }) }) .anchor(menu_anchor) .attach(menu_attach) From a5c615ceb4ad7d34a230db83684ebd6556ef9ae8 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 10:56:28 +0200 Subject: [PATCH 05/27] Pass a new query parameter into the search --- crates/collab/src/tests/integration_tests.rs | 2 +- .../random_project_collaboration_tests.rs | 3 ++- crates/collab2/src/tests/integration_tests.rs | 2 +- .../random_project_collaboration_tests.rs | 3 ++- .../src/chat_panel/message_editor.rs | 10 ++------ crates/project/src/project.rs | 20 +++++++++++++--- crates/project/src/project_tests.rs | 16 +++++++++++-- crates/project/src/search.rs | 20 ++++++++++++++++ crates/project2/src/project2.rs | 20 +++++++++++++--- crates/project2/src/project_tests.rs | 16 +++++++++++-- crates/project2/src/search.rs | 20 ++++++++++++++++ crates/rpc/proto/zed.proto | 1 + 
crates/rpc2/proto/zed.proto | 1 + crates/search/src/buffer_search.rs | 2 ++ crates/search/src/project_search.rs | 15 +++++++++++- crates/search/src/search.rs | 24 ++++++++++++------- 16 files changed, 143 insertions(+), 32 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 550c3a2bd8..fa7c4fe67d 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -5052,7 +5052,7 @@ async fn test_project_search( let mut results = HashMap::default(); let mut search_rx = project_b.update(cx_b, |project, cx| { project.search( - SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(), cx, ) }); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 6f9513c325..42a2b79275 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest { let mut search = project.update(cx, |project, cx| { project.search( - SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()) + .unwrap(), cx, ) }); diff --git a/crates/collab2/src/tests/integration_tests.rs b/crates/collab2/src/tests/integration_tests.rs index 121a98c1d2..f2a39f3511 100644 --- a/crates/collab2/src/tests/integration_tests.rs +++ b/crates/collab2/src/tests/integration_tests.rs @@ -4599,7 +4599,7 @@ async fn test_project_search( let mut results = HashMap::default(); let mut search_rx = project_b.update(cx_b, |project, cx| { project.search( - SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(), cx, ) }); diff --git a/crates/collab2/src/tests/random_project_collaboration_tests.rs b/crates/collab2/src/tests/random_project_collaboration_tests.rs index 361ca00c33..47b936a611 100644 --- a/crates/collab2/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab2/src/tests/random_project_collaboration_tests.rs @@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest { let mut search = project.update(cx, |project, cx| { project.search( - SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()) + .unwrap(), cx, ) }); diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 6dbe3aa204..9e6bfb553e 100644 --- a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration}; const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); lazy_static! 
{ - static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( - "@[-_\\w]+", - false, - false, - Default::default(), - Default::default() - ) - .unwrap(); + static ref MENTIONS_SEARCH: SearchQuery = + SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap(); } pub struct MessageEditor { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ab6cbd88c0..09d2c9a981 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5548,7 +5548,16 @@ impl Project { .collect::>(); let background = cx.background().clone(); - let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); + let path_count: usize = snapshots + .iter() + .map(|s| { + if query.include_ignored() { + s.file_count() + } else { + s.visible_file_count() + } + }) + .sum(); if path_count == 0 { let (_, rx) = smol::channel::bounded(1024); return rx; @@ -5735,7 +5744,12 @@ impl Project { let mut snapshot_start_ix = 0; let mut abs_path = PathBuf::new(); for snapshot in snapshots { - let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); + let snapshot_end_ix = snapshot_start_ix + + if query.include_ignored() { + snapshot.file_count() + } else { + snapshot.visible_file_count() + }; if worker_end_ix <= snapshot_start_ix { break; } else if worker_start_ix > snapshot_end_ix { @@ -5748,7 +5762,7 @@ impl Project { cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; for entry in snapshot - .files(false, start_in_snapshot) + .files(query.include_ignored(), start_in_snapshot) .take(end_in_snapshot - start_in_snapshot) { if matching_paths_tx.is_closed() { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 90d32643d5..264c1ff7b5 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], Vec::new() ) @@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.rs").unwrap()], Vec::new() ) @@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap(), @@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.ts").unwrap(), @@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.rs").unwrap()], ) @@ 
-3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.ts").unwrap(), @@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.rs").unwrap(), @@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()], ).unwrap(), @@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() @@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 7e360e22ee..c673440326 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -39,6 +39,7 @@ pub enum SearchQuery { replacement: Option, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, @@ -48,6 +49,7 @@ pub enum SearchQuery { multiline: bool, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, } @@ -57,6 +59,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -74,6 +77,7 @@ impl SearchQuery { replacement: None, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -82,6 +86,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -111,6 +116,7 @@ impl SearchQuery { multiline, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -121,6 +127,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -129,6 +136,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -156,6 +164,7 @@ impl SearchQuery { regex: self.is_regex(), whole_word: self.whole_word(), case_sensitive: self.case_sensitive(), + include_ignored: self.include_ignored(), files_to_include: self .files_to_include() .iter() @@ -336,6 +345,17 @@ impl SearchQuery { } } + pub fn include_ignored(&self) -> bool { + match self { + Self::Text { + include_ignored, .. + } => *include_ignored, + Self::Regex { + include_ignored, .. + } => *include_ignored, + } + } + pub fn is_regex(&self) -> bool { matches!(self, Self::Regex { .. 
}) } diff --git a/crates/project2/src/project2.rs b/crates/project2/src/project2.rs index f2e47b7184..3cc4e8a293 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -5618,7 +5618,16 @@ impl Project { .collect::>(); let background = cx.background_executor().clone(); - let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); + let path_count: usize = snapshots + .iter() + .map(|s| { + if query.include_ignored() { + s.file_count() + } else { + s.visible_file_count() + } + }) + .sum(); if path_count == 0 { let (_, rx) = smol::channel::bounded(1024); return rx; @@ -5806,7 +5815,12 @@ impl Project { let mut snapshot_start_ix = 0; let mut abs_path = PathBuf::new(); for snapshot in snapshots { - let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); + let snapshot_end_ix = snapshot_start_ix + + if query.include_ignored() { + snapshot.file_count() + } else { + snapshot.visible_file_count() + }; if worker_end_ix <= snapshot_start_ix { break; } else if worker_start_ix > snapshot_end_ix { @@ -5819,7 +5833,7 @@ impl Project { cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; for entry in snapshot - .files(false, start_in_snapshot) + .files(query.include_ignored(), start_in_snapshot) .take(end_in_snapshot - start_in_snapshot) { if matching_paths_tx.is_closed() { diff --git a/crates/project2/src/project_tests.rs b/crates/project2/src/project_tests.rs index 81a4def456..53b2f6ba1f 100644 --- a/crates/project2/src/project_tests.rs +++ b/crates/project2/src/project_tests.rs @@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], Vec::new() ) @@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.rs").unwrap()], Vec::new() ) @@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap(), @@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.ts").unwrap(), @@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.rs").unwrap()], ) @@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.ts").unwrap(), @@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, 
+ false, Vec::new(), vec![ PathMatcher::new("*.rs").unwrap(), @@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()], ) @@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()], ).unwrap(), @@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() @@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() diff --git a/crates/project2/src/search.rs b/crates/project2/src/search.rs index 7e360e22ee..c673440326 100644 --- a/crates/project2/src/search.rs +++ b/crates/project2/src/search.rs @@ -39,6 +39,7 @@ pub enum SearchQuery { replacement: Option, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, @@ -48,6 +49,7 @@ pub enum SearchQuery { multiline: bool, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, } @@ -57,6 +59,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -74,6 +77,7 @@ impl SearchQuery { replacement: None, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -82,6 +86,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -111,6 +116,7 @@ impl SearchQuery { multiline, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -121,6 +127,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -129,6 +136,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -156,6 +164,7 @@ impl SearchQuery { regex: self.is_regex(), whole_word: self.whole_word(), case_sensitive: self.case_sensitive(), + include_ignored: self.include_ignored(), files_to_include: self .files_to_include() .iter() @@ -336,6 +345,17 @@ impl SearchQuery { } } + pub fn include_ignored(&self) -> bool { + match self { + Self::Text { + include_ignored, .. + } => *include_ignored, + Self::Regex { + include_ignored, .. + } => *include_ignored, + } + } + pub fn is_regex(&self) -> bool { matches!(self, Self::Regex { .. 
}) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 206777879b..a6d27fa57d 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -884,6 +884,7 @@ message SearchProject { bool case_sensitive = 5; string files_to_include = 6; string files_to_exclude = 7; + bool include_ignored = 8; } message SearchProjectResponse { diff --git a/crates/rpc2/proto/zed.proto b/crates/rpc2/proto/zed.proto index 206777879b..a6d27fa57d 100644 --- a/crates/rpc2/proto/zed.proto +++ b/crates/rpc2/proto/zed.proto @@ -884,6 +884,7 @@ message SearchProject { bool case_sensitive = 5; string files_to_include = 6; string files_to_exclude = 7; + bool include_ignored = 8; } message SearchProjectResponse { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index ef8c56f2a7..29ffe7c021 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -805,6 +805,7 @@ impl BufferSearchBar { query, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, Vec::new(), Vec::new(), ) { @@ -820,6 +821,7 @@ impl BufferSearchBar { query, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, Vec::new(), Vec::new(), ) { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index f6e17bbee5..74e06b4e2a 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -4,7 +4,7 @@ use crate::{ search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button}, ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery, PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch, - ToggleCaseSensitive, ToggleReplace, ToggleWholeWord, + ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord, }; use anyhow::{Context, Result}; use collections::HashMap; @@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) { cx.capture_action(ProjectSearchView::replace_next); add_toggle_option_action::(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::(SearchOptions::WHOLE_WORD, cx); + add_toggle_option_action::(SearchOptions::INCLUDE_IGNORED, cx); add_toggle_filters_action::(cx); } @@ -1192,6 +1193,7 @@ impl ProjectSearchView { text, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + self.search_options.contains(SearchOptions::INCLUDE_IGNORED), included_files, excluded_files, ) { @@ -1210,6 +1212,7 @@ impl ProjectSearchView { text, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + self.search_options.contains(SearchOptions::INCLUDE_IGNORED), included_files, excluded_files, ) { @@ -1764,6 +1767,15 @@ impl View for ProjectSearchBar { render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx) }); + let include_ignored = is_semantic_disabled.then(|| { + render_option_button_icon( + // TODO kb icon + "icons/case_insensitive.svg", + SearchOptions::INCLUDE_IGNORED, + cx, + ) + }); + let search_button_for_mode = |mode, side, cx: &mut ViewContext| { let is_active = if let Some(search) = self.active_project_search.as_ref() { let search = search.read(cx); @@ -1863,6 +1875,7 @@ impl View for ProjectSearchBar { .with_child(filter_button) .with_children(case_sensitive) .with_children(whole_word) + 
.with_children(include_ignored) .flex(1., false) .constrained() .contained(), diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index ba06b3f9c7..db39455dca 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -29,6 +29,7 @@ actions!( CycleMode, ToggleWholeWord, ToggleCaseSensitive, + ToggleIncludeIgnored, ToggleReplace, SelectNextMatch, SelectPrevMatch, @@ -49,31 +50,35 @@ bitflags! { const NONE = 0b000; const WHOLE_WORD = 0b001; const CASE_SENSITIVE = 0b010; + const INCLUDE_IGNORED = 0b100; } } impl SearchOptions { pub fn label(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "Match Whole Word", - SearchOptions::CASE_SENSITIVE => "Match Case", - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => "Match Whole Word", + Self::CASE_SENSITIVE => "Match Case", + Self::INCLUDE_IGNORED => "Include Ignored", + _ => panic!("{self:?} is not a named SearchOption"), } } pub fn icon(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "icons/word_search.svg", - SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg", - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => "icons/word_search.svg", + Self::CASE_SENSITIVE => "icons/case_insensitive.svg", + Self::INCLUDE_IGNORED => "icons/case_insensitive.svg", + _ => panic!("{self:?} is not a named SearchOption"), } } pub fn to_toggle_action(&self) -> Box { match *self { - SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord), - SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive), - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => Box::new(ToggleWholeWord), + Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive), + Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored), + _ => panic!("{self:?} is not a named SearchOption"), } } @@ -85,6 +90,7 @@ impl SearchOptions { let mut options = SearchOptions::NONE; options.set(SearchOptions::WHOLE_WORD, query.whole_word()); options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive()); + options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored()); options } From 7d97dfa6beefc555a62df70a94981fd7b79d75f0 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 15:23:08 +0200 Subject: [PATCH 06/27] Test and filter data draft --- crates/project/src/ignore.rs | 22 +----- crates/project/src/project_settings.rs | 6 ++ crates/project/src/worktree.rs | 105 ++++++++++++++++++++++--- crates/project/src/worktree_tests.rs | 85 +++++++++++++++++++- 4 files changed, 186 insertions(+), 32 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index 8bac08b96c..4f28160e3a 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -1,5 +1,5 @@ use ignore::gitignore::Gitignore; -use std::{ffi::OsStr, path::Path, sync::Arc}; +use std::{path::Path, sync::Arc}; pub enum IgnoreStack { None, @@ -34,24 +34,4 @@ impl IgnoreStack { }), } } - - pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool { - if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { - return true; - } - - match self { - Self::None => false, - Self::All => true, - Self::Some { - abs_base_path, - ignore, - parent: prev, - } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { - ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir), - ignore::Match::Ignore(_) => true, - ignore::Match::Whitelist(_) => false, - }, - } - } } diff --git 
a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 607b284813..b2bafe228e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,6 +10,12 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, + // TODO kb better names and docs + // TODO kb how to react on their changes? + #[serde(default)] + pub scan_exclude_files: Vec, + #[serde(default)] + pub scan_include_files: Vec, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d59885225a..d1633b828d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1,5 +1,6 @@ use crate::{ - copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, + copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary, + ProjectEntryId, RemoveOptions, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; @@ -55,7 +56,10 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt}; +use util::{ + paths::{PathMatcher, HOME}, + ResultExt, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -216,6 +220,8 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. git_repositories: TreeMap, + scan_exclude_files: Vec, + scan_include_files: Vec, } struct BackgroundScannerState { @@ -303,8 +309,34 @@ impl Worktree { let root_name = abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); - + let project_settings = settings::get::(cx); + let scan_exclude_files = project_settings.scan_exclude_files.iter() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect::>(); + let scan_include_files = project_settings.scan_include_files.iter() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_include_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect::>(); let mut snapshot = LocalSnapshot { + scan_include_files, + scan_exclude_files, ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { @@ -2042,7 +2074,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if ignore_stack.is_abs_path_ignored(parent_abs_path, true) { + if self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2050,7 +2082,7 @@ impl LocalSnapshot { } } - if ignore_stack.is_abs_path_ignored(abs_path, is_dir) { + if self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2145,6 +2177,45 @@ impl LocalSnapshot { paths.sort_by(|a, b| a.0.cmp(b.0)); paths } + + fn is_abs_path_ignored( + &self, + abs_path: &Path, + ignore_stack: &IgnoreStack, + is_dir: bool, + ) -> bool { + dbg!(&abs_path); + if self + .scan_include_files 
+ .iter() + .any(|include_matcher| include_matcher.is_match(abs_path)) + { + dbg!("included!!"); + return false; + } else if self + .scan_exclude_files + .iter() + .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) + { + dbg!("excluded!!"); + return true; + } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { + return true; + } + match ignore_stack { + IgnoreStack::None => false, + IgnoreStack::All => true, + IgnoreStack::Some { + abs_base_path, + ignore, + parent: prev, + } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { + ignore::Match::None => self.is_abs_path_ignored(abs_path, &prev, is_dir), + ignore::Match::Ignore(_) => true, + ignore::Match::Whitelist(_) => false, + }, + } + } } impl BackgroundScannerState { @@ -2767,7 +2838,7 @@ pub struct Entry { pub mtime: SystemTime, pub is_symlink: bool, - /// Whether this entry is ignored by Git. + /// Whether this entry is ignored by Zed. /// /// We only scan ignored entries once the directory is expanded and /// exclude them from searches. @@ -3464,7 +3535,7 @@ impl BackgroundScanner { for entry in &mut new_entries { let entry_abs_path = root_abs_path.join(&entry.path); entry.is_ignored = - ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir()); + self.is_abs_path_ignored(&entry_abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { @@ -3523,7 +3594,8 @@ impl BackgroundScanner { } if child_entry.is_dir() { - child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); + child_entry.is_ignored = + self.is_abs_path_ignored(&child_abs_path, &ignore_stack, true); // Avoid recursing until crash in the case of a recursive symlink if !job.ancestor_inodes.contains(&child_entry.inode) { @@ -3547,7 +3619,8 @@ impl BackgroundScanner { new_jobs.push(None); } } else { - child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); + child_entry.is_ignored = + self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); if !child_entry.is_ignored { if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository @@ -3825,7 +3898,7 @@ impl BackgroundScanner { for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); - entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir()); + entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() @@ -4008,6 +4081,18 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } + + fn is_abs_path_ignored( + &self, + abs_path: &Path, + ignore_stack: &IgnoreStack, + is_dir: bool, + ) -> bool { + self.state + .lock() + .snapshot + .is_abs_path_ignored(abs_path, ignore_stack, is_dir) + } } fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 4253f45b0c..fff23a36b4 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -1,6 +1,7 @@ use crate::{ + project_settings::ProjectSettings, worktree::{Event, Snapshot, WorktreeModelHandle}, - Entry, EntryKind, PathChange, Worktree, + Entry, EntryKind, PathChange, Project, Worktree, }; use anyhow::Result; use client::Client; @@ -12,6 +13,7 @@ use 
postage::stream::Stream; use pretty_assertions::assert_eq; use rand::prelude::*; use serde_json::json; +use settings::SettingsStore; use std::{ env, fmt::Write, @@ -877,6 +879,87 @@ async fn test_write_file(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { + let dir = temp_tree(json!({ + ".git": {}, + ".gitignore": "**/target\n/node_modules\n", + "target": {}, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + "bar": { + "bar.rs": "// bar", + }, + "lib.rs": "mod foo;\nmod bar;\n", + }, + ".DS_Store": "", + })); + cx.update(|cx| { + cx.set_global(SettingsStore::test(cx)); + Project::init_settings(cx); + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.scan_exclude_files = + vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + project_settings.scan_include_files = vec!["**/node_modules".to_string()]; + }); + }); + }); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + // tree.update(cx, |tree, cx| { + // tree.as_local().unwrap().write_file( + // Path::new("tracked-dir/file.txt"), + // "hello".into(), + // Default::default(), + // cx, + // ) + // }) + // .await + // .unwrap(); + // tree.update(cx, |tree, cx| { + // tree.as_local().unwrap().write_file( + // Path::new("ignored-dir/file.txt"), + // "world".into(), + // Default::default(), + // cx, + // ) + // }) + // .await + // .unwrap(); + + // tree.read_with(cx, |tree, _| { + // let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); + // let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); + // assert!(!tracked.is_ignored); + // assert!(ignored.is_ignored); + // }); + dbg!("!!!!!!!!!!!!"); +} + #[gpui::test(iterations = 30)] async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background()); From 9072e5a50707ab2dc799803bf0b04dc825caff9d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 17:51:44 +0200 Subject: [PATCH 07/27] Properly set ignore stacks and is_ignored values --- crates/project/src/worktree.rs | 99 ++++++++++++++++++++++------------ 1 file changed, 65 insertions(+), 34 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d1633b828d..eb2612a735 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2072,9 +2072,12 @@ impl LocalSnapshot { } } + // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) 
= true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { + if !self.is_abs_path_included(parent_abs_path) + && self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) + { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2082,7 +2085,9 @@ impl LocalSnapshot { } } - if self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { + if !self.is_abs_path_included(abs_path) + && self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) + { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2178,30 +2183,28 @@ impl LocalSnapshot { paths } + fn is_abs_path_included(&self, abs_path: &Path) -> bool { + self.scan_include_files + .iter() + .any(|include_matcher| include_matcher.is_match(abs_path)) + } + fn is_abs_path_ignored( &self, abs_path: &Path, ignore_stack: &IgnoreStack, is_dir: bool, ) -> bool { - dbg!(&abs_path); if self - .scan_include_files - .iter() - .any(|include_matcher| include_matcher.is_match(abs_path)) - { - dbg!("included!!"); - return false; - } else if self .scan_exclude_files .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) { - dbg!("excluded!!"); return true; } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { return true; } + match ignore_stack { IgnoreStack::None => false, IgnoreStack::All => true, @@ -2219,8 +2222,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry) -> bool { - (!entry.is_external && !entry.is_ignored) + fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { + (!entry.is_external && (!entry.is_ignored || self.snapshot.is_abs_path_included(entry_abs_path))) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2325,6 +2328,16 @@ impl BackgroundScannerState { let mut entries_by_id_edits = Vec::new(); for entry in entries { + let abs_path = self.snapshot.abs_path.join(&entry.path); + let ignore_stack = self + .snapshot + .ignore_stack_for_abs_path(&abs_path, entry.is_dir()); + let actual_ignored = + self.snapshot + .is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); + if entry.path.to_string_lossy().contains("node_modules") { + dbg!("@@@@@@@@@", &entry, actual_ignored, ignore_stack.is_all()); + } entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, path: entry.path.clone(), @@ -3165,7 +3178,10 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if ignore_stack.is_all() { + if state + .snapshot + .is_abs_path_ignored(&root_abs_path, &ignore_stack, true) + { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3539,7 +3555,9 @@ impl BackgroundScanner { if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { - job.ignore_stack = if entry.is_ignored { + job.ignore_stack = if entry.is_ignored + && !self.is_abs_path_included(&entry_abs_path) + { IgnoreStack::all() } else { ignore_stack.clone() @@ -3603,15 +3621,17 @@ impl BackgroundScanner { ancestor_inodes.insert(child_entry.inode); new_jobs.push(Some(ScanJob { - abs_path: child_abs_path, path: child_path, is_external: child_entry.is_external, - ignore_stack: if child_entry.is_ignored { + ignore_stack: if child_entry.is_ignored + && !self.is_abs_path_included(&child_abs_path) + 
{ IgnoreStack::all() } else { ignore_stack.clone() }, ancestor_inodes, + abs_path: child_abs_path, scan_queue: job.scan_queue.clone(), containing_repository: job.containing_repository.clone(), })); @@ -3621,7 +3641,7 @@ impl BackgroundScanner { } else { child_entry.is_ignored = self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); - if !child_entry.is_ignored { + if !child_entry.is_ignored || self.is_abs_path_included(&child_abs_path) { if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository { @@ -3648,7 +3668,8 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry) { + let entry_abs_path = root_abs_path.join(&entry.path); + if state.should_scan_directory(&entry, &entry_abs_path) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3735,25 +3756,27 @@ impl BackgroundScanner { self.next_entry_id.as_ref(), state.snapshot.root_char_bag, ); - fs_entry.is_ignored = ignore_stack.is_all(); + let is_dir = fs_entry.is_dir(); + fs_entry.is_ignored = + state + .snapshot + .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !fs_entry.is_ignored { - if !fs_entry.is_dir() { - if let Some((work_dir, repo)) = - state.snapshot.local_repo_for_path(&path) - { - if let Ok(repo_path) = path.strip_prefix(work_dir.0) { - let repo_path = RepoPath(repo_path.into()); - let repo = repo.repo_ptr.lock(); - fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); - } + if !is_dir + && (!fs_entry.is_ignored || state.snapshot.is_abs_path_included(&abs_path)) + { + if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(work_dir.0) { + let repo_path = RepoPath(repo_path.into()); + let repo = repo.repo_ptr.lock(); + fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); } } } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry) { + if state.should_scan_directory(&fs_entry, &abs_path) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3900,7 +3923,8 @@ impl BackgroundScanner { let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { - let child_ignore_stack = if entry.is_ignored { + let child_ignore_stack = if entry.is_ignored && self.is_abs_path_included(&abs_path) + { IgnoreStack::all() } else { ignore_stack.clone() @@ -3908,9 +3932,12 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't // previously scanned. 
- if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { + if was_ignored + && (!entry.is_ignored || self.is_abs_path_included(&abs_path)) + && entry.kind.is_unloaded() + { let state = self.state.lock(); - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &abs_path) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4082,6 +4109,10 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } + fn is_abs_path_included(&self, abs_path: &Path) -> bool { + self.state.lock().snapshot.is_abs_path_included(abs_path) + } + fn is_abs_path_ignored( &self, abs_path: &Path, From 401f85bed219441c4c840078422c48f2a2e4cb18 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 18:59:09 +0200 Subject: [PATCH 08/27] Properly ignore elements from configured exceptions --- crates/project/src/project_settings.rs | 2 +- crates/project/src/worktree.rs | 34 ++++++-------------------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index b2bafe228e..2caac6191e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,7 +10,7 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb better names and docs + // TODO kb better names and docs and tests // TODO kb how to react on their changes? #[serde(default)] pub scan_exclude_files: Vec, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index eb2612a735..af5531f5d3 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1513,7 +1513,7 @@ impl Snapshot { self.entries_by_id.get(&entry_id, &()).is_some() } - pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result { + fn insert_entry(&mut self, entry: proto::Entry) -> Result { let entry = Entry::try_from((&self.root_char_bag, entry))?; let old_entry = self.entries_by_id.insert_or_replace( PathEntry { @@ -2075,9 +2075,7 @@ impl LocalSnapshot { // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) 
= true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !self.is_abs_path_included(parent_abs_path) - && self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) - { + if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2085,9 +2083,7 @@ impl LocalSnapshot { } } - if !self.is_abs_path_included(abs_path) - && self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) - { + if !self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2241,7 +2237,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() { + if !ignore_stack.is_all() || self.snapshot.is_abs_path_included(&abs_path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2326,18 +2322,7 @@ impl BackgroundScannerState { self.scanned_dirs.insert(parent_entry_id); let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)]; let mut entries_by_id_edits = Vec::new(); - for entry in entries { - let abs_path = self.snapshot.abs_path.join(&entry.path); - let ignore_stack = self - .snapshot - .ignore_stack_for_abs_path(&abs_path, entry.is_dir()); - let actual_ignored = - self.snapshot - .is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); - if entry.path.to_string_lossy().contains("node_modules") { - dbg!("@@@@@@@@@", &entry, actual_ignored, ignore_stack.is_all()); - } entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, path: entry.path.clone(), @@ -3555,9 +3540,7 @@ impl BackgroundScanner { if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { - job.ignore_stack = if entry.is_ignored - && !self.is_abs_path_included(&entry_abs_path) - { + job.ignore_stack = if entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() @@ -3623,9 +3606,7 @@ impl BackgroundScanner { new_jobs.push(Some(ScanJob { path: child_path, is_external: child_entry.is_external, - ignore_stack: if child_entry.is_ignored - && !self.is_abs_path_included(&child_abs_path) - { + ignore_stack: if child_entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() @@ -3923,8 +3904,7 @@ impl BackgroundScanner { let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { - let child_ignore_stack = if entry.is_ignored && self.is_abs_path_included(&abs_path) - { + let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() From 126e4cce8f7e8f7629eb2d354a731bb5143bb03a Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 22:40:21 +0200 Subject: [PATCH 09/27] Scan all ignored files by default now --- crates/project/src/ignore.rs | 4 -- crates/project/src/project_settings.rs | 2 - crates/project/src/worktree.rs | 88 ++++++++------------------ crates/project/src/worktree_tests.rs | 1 - 4 files changed, 26 insertions(+), 69 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index 4f28160e3a..e241f7fbfc 100644 --- a/crates/project/src/ignore.rs +++ 
b/crates/project/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, IgnoreStack::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 2caac6191e..17233219d7 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -14,8 +14,6 @@ pub struct ProjectSettings { // TODO kb how to react on their changes? #[serde(default)] pub scan_exclude_files: Vec, - #[serde(default)] - pub scan_include_files: Vec, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index af5531f5d3..4af8548a83 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -221,7 +221,6 @@ pub struct LocalSnapshot { /// id of their parent directory. git_repositories: TreeMap, scan_exclude_files: Vec, - scan_include_files: Vec, } struct BackgroundScannerState { @@ -322,20 +321,7 @@ impl Worktree { }) }) .collect::>(); - let scan_include_files = project_settings.scan_include_files.iter() - .filter_map(|pattern| { - PathMatcher::new(pattern) - .map(Some) - .unwrap_or_else(|e| { - log::error!( - "Skipping pattern {pattern} in `scan_include_files` project settings due to parsing error: {e:#}" - ); - None - }) - }) - .collect::>(); let mut snapshot = LocalSnapshot { - scan_include_files, scan_exclude_files, ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), @@ -2072,7 +2058,6 @@ impl LocalSnapshot { } } - // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) 
= true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { @@ -2179,12 +2164,6 @@ impl LocalSnapshot { paths } - fn is_abs_path_included(&self, abs_path: &Path) -> bool { - self.scan_include_files - .iter() - .any(|include_matcher| include_matcher.is_match(abs_path)) - } - fn is_abs_path_ignored( &self, abs_path: &Path, @@ -2218,8 +2197,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { - (!entry.is_external && (!entry.is_ignored || self.snapshot.is_abs_path_included(entry_abs_path))) + fn should_scan_directory(&self, entry: &Entry) -> bool { + !entry.is_external || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2237,15 +2216,13 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() || self.snapshot.is_abs_path_included(&abs_path) { - if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { - if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { - containing_repository = Some(( - workdir_path, - repo.repo_ptr.clone(), - repo.repo_ptr.lock().staged_statuses(repo_path), - )); - } + if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { + containing_repository = Some(( + workdir_path, + repo.repo_ptr.clone(), + repo.repo_ptr.lock().staged_statuses(repo_path), + )); } } if !ancestor_inodes.contains(&entry.inode) { @@ -3622,19 +3599,17 @@ impl BackgroundScanner { } else { child_entry.is_ignored = self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); - if !child_entry.is_ignored || self.is_abs_path_included(&child_abs_path) { - if let Some((repository_dir, repository, staged_statuses)) = - &job.containing_repository - { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = combine_git_statuses( - staged_statuses.get(&repo_path).copied(), - repository - .lock() - .unstaged_status(&repo_path, child_entry.mtime), - ); - } + if let Some((repository_dir, repository, staged_statuses)) = + &job.containing_repository + { + if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { + let repo_path = RepoPath(repo_path.into()); + child_entry.git_status = combine_git_statuses( + staged_statuses.get(&repo_path).copied(), + repository + .lock() + .unstaged_status(&repo_path, child_entry.mtime), + ); } } } @@ -3649,8 +3624,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - let entry_abs_path = root_abs_path.join(&entry.path); - if state.should_scan_directory(&entry, &entry_abs_path) { + if state.should_scan_directory(&entry) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3744,9 +3718,7 @@ impl BackgroundScanner { .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir - && (!fs_entry.is_ignored || state.snapshot.is_abs_path_included(&abs_path)) - { + if !is_dir { if let Some((work_dir, repo)) = 
state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3757,7 +3729,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry, &abs_path) { + if state.should_scan_directory(&fs_entry) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3910,14 +3882,10 @@ impl BackgroundScanner { ignore_stack.clone() }; - // Scan any directories that were previously ignored and weren't - // previously scanned. - if was_ignored - && (!entry.is_ignored || self.is_abs_path_included(&abs_path)) - && entry.kind.is_unloaded() - { + // Scan any directories that were previously ignored and weren't previously scanned. + if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry, &abs_path) { + if state.should_scan_directory(&entry) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4089,10 +4057,6 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } - fn is_abs_path_included(&self, abs_path: &Path) -> bool { - self.state.lock().snapshot.is_abs_path_included(abs_path) - } - fn is_abs_path_ignored( &self, abs_path: &Path, diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index fff23a36b4..1fb4aa9a34 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -911,7 +911,6 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; - project_settings.scan_include_files = vec!["**/node_modules".to_string()]; }); }); }); From b8be720490e9fe5662a14950d96620a88a0677e1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 23:18:24 +0200 Subject: [PATCH 10/27] Fix the bugs --- crates/project/src/ignore.rs | 21 +++++- crates/project/src/project_settings.rs | 1 + crates/project/src/worktree.rs | 97 ++++++++------------------ 3 files changed, 50 insertions(+), 69 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index e241f7fbfc..ed1ce4006d 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -1,5 +1,5 @@ use ignore::gitignore::Gitignore; -use std::{path::Path, sync::Arc}; +use std::{ffi::OsStr, path::Path, sync::Arc}; pub enum IgnoreStack { None, @@ -30,4 +30,23 @@ impl IgnoreStack { }), } } + + pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool { + if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { + return true; + } + match self { + Self::None => false, + Self::All => true, + Self::Some { + abs_base_path, + ignore, + parent: prev, + } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { + ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir), + ignore::Match::Ignore(_) => true, + ignore::Match::Whitelist(_) => false, + }, + } + } } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 17233219d7..8aebb380b6 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,6 +12,7 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests // 
TODO kb how to react on their changes? + // TODO kb /something/node_modules/ does not match `"**/node_modules/**"` glob!!! #[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4af8548a83..dbc18e086d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2060,7 +2060,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { + if !ignore_stack.is_abs_path_ignored(parent_abs_path, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2068,7 +2068,7 @@ impl LocalSnapshot { } } - if !self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { + if !ignore_stack.is_abs_path_ignored(abs_path, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2164,41 +2164,16 @@ impl LocalSnapshot { paths } - fn is_abs_path_ignored( - &self, - abs_path: &Path, - ignore_stack: &IgnoreStack, - is_dir: bool, - ) -> bool { - if self - .scan_exclude_files + fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { + self.scan_exclude_files .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) - { - return true; - } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { - return true; - } - - match ignore_stack { - IgnoreStack::None => false, - IgnoreStack::All => true, - IgnoreStack::Some { - abs_base_path, - ignore, - parent: prev, - } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { - ignore::Match::None => self.is_abs_path_ignored(abs_path, &prev, is_dir), - ignore::Match::Ignore(_) => true, - ignore::Match::Whitelist(_) => false, - }, - } } } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry) -> bool { - !entry.is_external + fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { + !entry.is_external && !self.snapshot.is_abs_path_excluded(entry_abs_path) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2216,13 +2191,17 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { - if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { - containing_repository = Some(( - workdir_path, - repo.repo_ptr.clone(), - repo.repo_ptr.lock().staged_statuses(repo_path), - )); + if !matches!(ignore_stack.as_ref(), &IgnoreStack::All) + && !self.snapshot.is_abs_path_excluded(&abs_path) + { + if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { + containing_repository = Some(( + workdir_path, + repo.repo_ptr.clone(), + repo.repo_ptr.lock().staged_statuses(repo_path), + )); + } } } if !ancestor_inodes.contains(&entry.inode) { @@ -3140,10 +3119,7 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if state - .snapshot - .is_abs_path_ignored(&root_abs_path, &ignore_stack, true) - { + if ignore_stack.is_abs_path_ignored(&root_abs_path, true) { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3513,7 
+3489,7 @@ impl BackgroundScanner { for entry in &mut new_entries { let entry_abs_path = root_abs_path.join(&entry.path); entry.is_ignored = - self.is_abs_path_ignored(&entry_abs_path, &ignore_stack, entry.is_dir()); + ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir()); if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { @@ -3572,8 +3548,7 @@ impl BackgroundScanner { } if child_entry.is_dir() { - child_entry.is_ignored = - self.is_abs_path_ignored(&child_abs_path, &ignore_stack, true); + child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); // Avoid recursing until crash in the case of a recursive symlink if !job.ancestor_inodes.contains(&child_entry.inode) { @@ -3597,8 +3572,7 @@ impl BackgroundScanner { new_jobs.push(None); } } else { - child_entry.is_ignored = - self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); + child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository { @@ -3624,7 +3598,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &job.path.join(&entry.path)) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3712,13 +3686,12 @@ impl BackgroundScanner { state.snapshot.root_char_bag, ); let is_dir = fs_entry.is_dir(); - fs_entry.is_ignored = - state - .snapshot - .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); + fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir { + if !is_dir + && !(fs_entry.is_ignored || state.snapshot.is_abs_path_excluded(&abs_path)) + { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3729,7 +3702,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry) { + if state.should_scan_directory(&fs_entry, &abs_path) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3874,7 +3847,7 @@ impl BackgroundScanner { for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); - entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); + entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir()); if entry.is_dir() { let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() @@ -3885,7 +3858,7 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't previously scanned. 
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &abs_path) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4056,18 +4029,6 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } - - fn is_abs_path_ignored( - &self, - abs_path: &Path, - ignore_stack: &IgnoreStack, - is_dir: bool, - ) -> bool { - self.state - .lock() - .snapshot - .is_abs_path_ignored(abs_path, ignore_stack, is_dir) - } } fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { From 1612c90052fd570fbee7856d5d53c45416d3dcf1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 00:19:51 +0200 Subject: [PATCH 11/27] More lenient file path matchers --- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 2 +- crates/util/src/paths.rs | 24 +++++++++++++++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 8aebb380b6..17233219d7 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,7 +12,6 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests // TODO kb how to react on their changes? - // TODO kb /something/node_modules/ does not match `"**/node_modules/**"` glob!!! #[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index dbc18e086d..3cc1ff6fef 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3598,7 +3598,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry, &job.path.join(&entry.path)) { + if state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index d54e0b1cd6..5999bd1d39 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -211,7 +211,19 @@ impl PathMatcher { } pub fn is_match>(&self, other: P) -> bool { - other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) + other.as_ref().starts_with(&self.maybe_path) + || self.glob.is_match(&other) + || self.check_with_end_separator(other.as_ref()) + } + + fn check_with_end_separator(&self, path: &Path) -> bool { + let path_str = path.to_string_lossy(); + let separator = std::path::MAIN_SEPARATOR_STR; + if path_str.ends_with(separator) { + self.glob.is_match(path) + } else { + self.glob.is_match(path_str.to_string() + separator) + } } } @@ -388,4 +400,14 @@ mod tests { let path = Path::new("/a/b/c/.eslintrc.js"); assert_eq!(path.extension_or_hidden_file_name(), Some("js")); } + + #[test] + fn edge_of_glob() { + let path = Path::new("/work/node_modules"); + let path_matcher = PathMatcher::new("**/node_modules/**").unwrap(); + assert!( + path_matcher.is_match(&path), + "Path matcher {path_matcher} should match {path:?}" + ); + } } From 9373d3843457b3e3acdc07480337de8a3e486345 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 16:35:49 +0200 Subject: [PATCH 12/27] Rescan worktree on scan exclusions settings change --- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 193 ++++++++++++++++--------- 
crates/util/src/paths.rs | 8 + 3 files changed, 136 insertions(+), 66 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 17233219d7..7cbcc32d4e 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -11,7 +11,6 @@ pub struct ProjectSettings { #[serde(default)] pub git: GitSettings, // TODO kb better names and docs and tests - // TODO kb how to react on their changes? #[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3cc1ff6fef..3168780305 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -22,7 +22,10 @@ use futures::{ }; use fuzzy::CharBag; use git::{DOT_GIT, GITIGNORE}; -use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{ + executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task, +}; +use itertools::Itertools; use language::{ proto::{ deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, @@ -37,6 +40,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; +use settings::SettingsStore; use smol::channel::{self, Sender}; use std::{ any::Any, @@ -74,7 +78,8 @@ pub struct LocalWorktree { scan_requests_tx: channel::Sender, path_prefixes_to_scan_tx: channel::Sender>, is_scanning: (watch::Sender, watch::Receiver), - _background_scanner_task: Task<()>, + _settings_subscription: Subscription, + _background_scanner_tasks: Vec>, share: Option, diagnostics: HashMap< Arc, @@ -304,30 +309,55 @@ impl Worktree { .await .context("failed to stat worktree path")?; + let closure_fs = Arc::clone(&fs); + let closure_next_entry_id = Arc::clone(&next_entry_id); + let closure_abs_path = abs_path.to_path_buf(); Ok(cx.add_model(move |cx: &mut ModelContext| { + let settings_subscription = cx.observe_global::(move |this, cx| { + if let Self::Local(this) = this { + let new_scan_exclude_files = + scan_exclude_files(settings::get::(cx)); + if new_scan_exclude_files != this.snapshot.scan_exclude_files { + this.snapshot.scan_exclude_files = new_scan_exclude_files; + log::info!( + "Re-scanning due to new scan exclude files: {:?}", + this.snapshot + .scan_exclude_files + .iter() + .map(ToString::to_string) + .collect::>() + ); + + let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = + channel::unbounded(); + this.scan_requests_tx = scan_requests_tx; + this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; + this._background_scanner_tasks = start_background_scan_tasks( + &closure_abs_path, + this.snapshot(), + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&closure_next_entry_id), + Arc::clone(&closure_fs), + cx, + ); + this.is_scanning = watch::channel_with(true); + // TODO kb change more state? will this even work now? 
+ } + } + }); + let root_name = abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); - let project_settings = settings::get::(cx); - let scan_exclude_files = project_settings.scan_exclude_files.iter() - .filter_map(|pattern| { - PathMatcher::new(pattern) - .map(Some) - .unwrap_or_else(|e| { - log::error!( - "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" - ); - None - }) - }) - .collect::>(); let mut snapshot = LocalSnapshot { - scan_exclude_files, + scan_exclude_files: scan_exclude_files(settings::get::(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { id: WorktreeId::from_usize(cx.model_id()), - abs_path: abs_path.clone(), + abs_path: abs_path.to_path_buf().into(), root_name: root_name.clone(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), entries_by_path: Default::default(), @@ -352,60 +382,23 @@ impl Worktree { let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); - let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); - - cx.spawn_weak(|this, mut cx| async move { - while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { - this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - match state { - ScanState::Started => { - *this.is_scanning.0.borrow_mut() = true; - } - ScanState::Updated { - snapshot, - changes, - barrier, - scanning, - } => { - *this.is_scanning.0.borrow_mut() = scanning; - this.set_snapshot(snapshot, changes, cx); - drop(barrier); - } - } - cx.notify(); - }); - } - }) - .detach(); - - let background_scanner_task = cx.background().spawn({ - let fs = fs.clone(); - let snapshot = snapshot.clone(); - let background = cx.background().clone(); - async move { - let events = fs.watch(&abs_path, Duration::from_millis(100)).await; - BackgroundScanner::new( - snapshot, - next_entry_id, - fs, - scan_states_tx, - background, - scan_requests_rx, - path_prefixes_to_scan_rx, - ) - .run(events) - .await; - } - }); - + let task_snapshot = snapshot.clone(); Worktree::Local(LocalWorktree { snapshot, is_scanning: watch::channel_with(true), share: None, scan_requests_tx, path_prefixes_to_scan_tx, - _background_scanner_task: background_scanner_task, + _settings_subscription: settings_subscription, + _background_scanner_tasks: start_background_scan_tasks( + &abs_path, + task_snapshot, + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&next_entry_id), + Arc::clone(&fs), + cx, + ), diagnostics: Default::default(), diagnostic_summaries: Default::default(), client, @@ -602,6 +595,76 @@ impl Worktree { } } +fn start_background_scan_tasks( + abs_path: &Path, + snapshot: LocalSnapshot, + scan_requests_rx: channel::Receiver, + path_prefixes_to_scan_rx: channel::Receiver>, + next_entry_id: Arc, + fs: Arc, + cx: &mut ModelContext<'_, Worktree>, +) -> Vec> { + let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); + let background_scanner = cx.background().spawn({ + let abs_path = abs_path.to_path_buf(); + let background = cx.background().clone(); + async move { + let events = fs.watch(&abs_path, Duration::from_millis(100)).await; + BackgroundScanner::new( + snapshot, + next_entry_id, + fs, + scan_states_tx, + background, + scan_requests_rx, + path_prefixes_to_scan_rx, + ) + .run(events) + .await; + } + }); + let scan_state_updater = cx.spawn_weak(|this, mut cx| async 
move { + while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + match state { + ScanState::Started => { + *this.is_scanning.0.borrow_mut() = true; + } + ScanState::Updated { + snapshot, + changes, + barrier, + scanning, + } => { + *this.is_scanning.0.borrow_mut() = scanning; + this.set_snapshot(snapshot, changes, cx); + drop(barrier); + } + } + cx.notify(); + }); + } + }); + vec![background_scanner, scan_state_updater] +} + +fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { + project_settings.scan_exclude_files.iter() + .sorted() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect() +} + impl LocalWorktree { pub fn contains_abs_path(&self, path: &Path) -> bool { path.starts_with(&self.abs_path) diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 5999bd1d39..d0ba7957ec 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher { } } +impl PartialEq for PathMatcher { + fn eq(&self, other: &Self) -> bool { + self.maybe_path.eq(&other.maybe_path) + } +} + +impl Eq for PathMatcher {} + impl PathMatcher { pub fn new(maybe_glob: &str) -> Result { Ok(PathMatcher { From c52fe2f536a92999b014e526c97c1942863fcc6f Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 22:43:33 +0200 Subject: [PATCH 13/27] Move toggle ignored button into include directories editor --- crates/search/src/project_search.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 74e06b4e2a..8f19e74802 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1875,7 +1875,6 @@ impl View for ProjectSearchBar { .with_child(filter_button) .with_children(case_sensitive) .with_children(whole_word) - .with_children(include_ignored) .flex(1., false) .constrained() .contained(), @@ -1892,7 +1891,15 @@ impl View for ProjectSearchBar { .with_children(search.filters_enabled.then(|| { Flex::row() .with_child( - ChildView::new(&search.included_files_editor, cx) + Flex::row() + .with_child( + ChildView::new(&search.included_files_editor, cx) + .contained() + .constrained() + .with_height(theme.search.search_bar_row_height) + .flex(1., true), + ) + .with_children(include_ignored) .contained() .with_style(include_container_style) .constrained() From 26f7e66b492c4c9860705e997b40a5208d0ecb0e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 22:55:06 +0200 Subject: [PATCH 14/27] Add default scan excluded files settings --- assets/settings/default.json | 12 ++++++++++++ crates/project/src/project_settings.rs | 2 +- crates/project/src/worktree.rs | 2 +- crates/project/src/worktree_tests.rs | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 08d85dd723..4a21b708ee 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -268,6 +268,18 @@ // Whether to show warnings or not by default. 
"include_warnings": true }, + // TODO kb docs + "scan_exclude_files": [ + "**/.git", + "**/.svn", + "**/.hg", + "**/CVS", + "**/.DS_Store", + "**/Thumbs.db", + "**/.classpath", + "**/.settings", + "**/target" + ], // Git gutter behavior configuration. "git": { // Control whether the git gutter is shown. May take 2 values: diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 7cbcc32d4e..511241bc22 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,7 +12,7 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests #[serde(default)] - pub scan_exclude_files: Vec, + pub scan_exclude_files: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3168780305..41fd647c9c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -650,7 +650,7 @@ fn start_background_scan_tasks( } fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { - project_settings.scan_exclude_files.iter() + project_settings.scan_exclude_files.as_deref().unwrap_or(&[]).iter() .sorted() .filter_map(|pattern| { PathMatcher::new(pattern) diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 1fb4aa9a34..79ac25a147 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -910,7 +910,7 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = - vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); }); From ce2cfc60356c4d76af8d109e8f78138b2b1c5414 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 10:15:50 +0200 Subject: [PATCH 15/27] Fix the ! 
bug, better test draft --- crates/project/src/worktree.rs | 4 +- crates/project/src/worktree_tests.rs | 115 +++++++++++++++++++-------- 2 files changed, 86 insertions(+), 33 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 41fd647c9c..49ba8c93c4 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2123,7 +2123,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !ignore_stack.is_abs_path_ignored(parent_abs_path, true) { + if ignore_stack.is_abs_path_ignored(parent_abs_path, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2131,7 +2131,7 @@ impl LocalSnapshot { } } - if !ignore_stack.is_abs_path_ignored(abs_path, is_dir) { + if ignore_stack.is_abs_path_ignored(abs_path, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 79ac25a147..74268d2ed5 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -7,7 +7,7 @@ use anyhow::Result; use client::Client; use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; use git::GITIGNORE; -use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; +use gpui::{executor::Deterministic, ModelContext, ModelHandle, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; use pretty_assertions::assert_eq; @@ -882,9 +882,13 @@ async fn test_write_file(cx: &mut TestAppContext) { #[gpui::test] async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { let dir = temp_tree(json!({ - ".git": {}, + ".git": { + "index": "blah" + }, ".gitignore": "**/target\n/node_modules\n", - "target": {}, + "target": { + "index2": "blah2" + }, "node_modules": { ".DS_Store": "", "prettier": { @@ -928,35 +932,52 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; tree.flush_fs_events(cx).await; + check_worktree_entries( + &tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + // TODO kb + // "node_modules/.DS_Store", + // "src/.DS_Store", + // ".DS_Store", + ], + &[ + ".git/index", + "target/index2", + "node_modules/prettier/package.json", + ], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + cx, + ); - // tree.update(cx, |tree, cx| { - // tree.as_local().unwrap().write_file( - // Path::new("tracked-dir/file.txt"), - // "hello".into(), - // Default::default(), - // cx, - // ) - // }) - // .await - // .unwrap(); - // tree.update(cx, |tree, cx| { - // tree.as_local().unwrap().write_file( - // Path::new("ignored-dir/file.txt"), - // "world".into(), - // Default::default(), - // cx, - // ) - // }) - // .await - // .unwrap(); - - // tree.read_with(cx, |tree, _| { - // let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); - // let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); - // assert!(!tracked.is_ignored); - // assert!(ignored.is_ignored); - // }); - dbg!("!!!!!!!!!!!!"); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]); + }); + }); + }); + tree.flush_fs_events(cx).await; + cx.foreground().run_until_parked(); + check_worktree_entries( + &tree, + &[ + "node_modules/prettier/package.json", + 
"node_modules/.DS_Store", + ], + &[".git/index", "target/index2"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + cx, + ); } #[gpui::test(iterations = 30)] @@ -2221,3 +2242,35 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap, + expected_excluded_paths: &[&str], + expected_ignored_paths: &[&str], + expected_tracked_paths: &[&str], + cx: &mut TestAppContext, +) { + tree.read_with(cx, |tree, _| { + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } + }); +} From cafeba103bc157058868a25a06d4331beddc0844 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 16:36:00 +0200 Subject: [PATCH 16/27] Exclude ignored opened buffers from search --- crates/project/src/project.rs | 12 ++- crates/project/src/worktree_tests.rs | 127 +++++++++++++-------------- 2 files changed, 70 insertions(+), 69 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 09d2c9a981..c24fb5eea1 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5570,8 +5570,16 @@ impl Project { .iter() .filter_map(|(_, b)| { let buffer = b.upgrade(cx)?; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - if let Some(path) = snapshot.file().map(|file| file.path()) { + let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| { + let is_ignored = buffer + .project_path(cx) + .and_then(|path| self.entry_for_path(&path, cx)) + .map_or(false, |entry| entry.is_ignored); + (is_ignored, buffer.snapshot()) + }); + if is_ignored && !query.include_ignored() { + return None; + } else if let Some(path) = snapshot.file().map(|file| file.path()) { Some((path.clone(), (buffer, snapshot))) } else { unnamed_files.push(buffer); diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 74268d2ed5..4c0df4dd7d 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -7,7 +7,7 @@ use anyhow::Result; use client::Client; use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; use git::GITIGNORE; -use gpui::{executor::Deterministic, ModelContext, ModelHandle, Task, TestAppContext}; +use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; use pretty_assertions::assert_eq; @@ -880,14 +880,11 @@ async fn test_write_file(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { +async fn test_ignore_exclusions(cx: &mut TestAppContext) { let dir = temp_tree(json!({ - ".git": { - "index": "blah" - }, ".gitignore": "**/target\n/node_modules\n", "target": { - "index2": "blah2" + "index": "blah2" }, "node_modules": { ".DS_Store": "", @@ -932,24 +929,21 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; 
tree.flush_fs_events(cx).await; - check_worktree_entries( - &tree, - &[ - "src/foo/foo.rs", - "src/foo/another.rs", - // TODO kb - // "node_modules/.DS_Store", - // "src/.DS_Store", - // ".DS_Store", - ], - &[ - ".git/index", - "target/index2", - "node_modules/prettier/package.json", - ], - &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], - cx, - ); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + // TODO kb + // "node_modules/.DS_Store", + // "src/.DS_Store", + // ".DS_Store", + ], + &["target/index", "node_modules/prettier/package.json"], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + ) + }); cx.update(|cx| { cx.update_global::(|store, cx| { @@ -960,24 +954,25 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { }); tree.flush_fs_events(cx).await; cx.foreground().run_until_parked(); - check_worktree_entries( - &tree, - &[ - "node_modules/prettier/package.json", - "node_modules/.DS_Store", - ], - &[".git/index", "target/index2"], - &[ - ".gitignore", - "src/lib.rs", - "src/bar/bar.rs", - "src/foo/foo.rs", - "src/foo/another.rs", - "src/.DS_Store", - ".DS_Store", - ], - cx, - ); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "node_modules/prettier/package.json", + "node_modules/.DS_Store", + ], + &["target/index"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + ) + }); } #[gpui::test(iterations = 30)] @@ -2243,34 +2238,32 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap, + tree: &Worktree, expected_excluded_paths: &[&str], expected_ignored_paths: &[&str], expected_tracked_paths: &[&str], - cx: &mut TestAppContext, ) { - tree.read_with(cx, |tree, _| { - for path in expected_excluded_paths { - let entry = tree.entry_for_path(path); - assert!( - entry.is_none(), - "expected path '{path}' to be excluded, but got entry: {entry:?}", - ); - } - for path in expected_ignored_paths { - let entry = tree.entry_for_path(path).unwrap(); - assert!( - entry.is_ignored, - "expected path '{path}' to be ignored, but got entry: {entry:?}", - ); - } - for path in expected_tracked_paths { - let entry = tree.entry_for_path(path).unwrap(); - assert!( - !entry.is_ignored, - "expected path '{path}' to be tracked, but got entry: {entry:?}", - ); - } - }); + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } } From d3ce82e82cae8935220ba6bed4478a951fed44ff Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 18:37:15 +0200 Subject: [PATCH 17/27] Fix the new test --- crates/project/src/worktree.rs | 47 +++++++++++++++++++--------- crates/project/src/worktree_tests.rs | 7 ++--- 2 files changed, 35 insertions(+), 19 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 49ba8c93c4..6508a8635c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -320,7 +320,7 @@ impl Worktree { if 
new_scan_exclude_files != this.snapshot.scan_exclude_files { this.snapshot.scan_exclude_files = new_scan_exclude_files; log::info!( - "Re-scanning due to new scan exclude files: {:?}", + "Re-scanning directories, new scan exclude files: {:?}", this.snapshot .scan_exclude_files .iter() @@ -343,7 +343,6 @@ impl Worktree { cx, ); this.is_scanning = watch::channel_with(true); - // TODO kb change more state? will this even work now? } } }); @@ -3489,18 +3488,26 @@ impl BackgroundScanner { } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { - log::debug!("scan directory {:?}", job.path); - - let mut ignore_stack = job.ignore_stack.clone(); - let mut new_ignore = None; - let (root_abs_path, root_char_bag, next_entry_id) = { - let snapshot = &self.state.lock().snapshot; - ( - snapshot.abs_path().clone(), - snapshot.root_char_bag, - self.next_entry_id.clone(), - ) - }; + let root_abs_path; + let mut ignore_stack; + let mut new_ignore; + let root_char_bag; + let next_entry_id; + { + let state = self.state.lock(); + let snapshot = &state.snapshot; + root_abs_path = snapshot.abs_path().clone(); + if snapshot.is_abs_path_excluded(&job.abs_path) { + log::error!("skipping excluded directory {:?}", job.path); + return Ok(()); + } + log::debug!("scanning directory {:?}", job.path); + ignore_stack = job.ignore_stack.clone(); + new_ignore = None; + root_char_bag = snapshot.root_char_bag; + next_entry_id = self.next_entry_id.clone(); + drop(state); + } let mut dotgit_path = None; let mut root_canonical_path = None; @@ -3515,8 +3522,18 @@ impl BackgroundScanner { continue; } }; - let child_name = child_abs_path.file_name().unwrap(); + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + let child_path: Arc = job.path.join(child_name).into(); let child_metadata = match self.fs.metadata(&child_abs_path).await { Ok(Some(metadata)) => metadata, diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 4c0df4dd7d..bd548bb9cb 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -935,10 +935,9 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { &[ "src/foo/foo.rs", "src/foo/another.rs", - // TODO kb - // "node_modules/.DS_Store", - // "src/.DS_Store", - // ".DS_Store", + "node_modules/.DS_Store", + "src/.DS_Store", + ".DS_Store", ], &["target/index", "node_modules/prettier/package.json"], &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], From 5f468970f039db8e7ba609d0bf53c7035ebed238 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 19:07:16 +0200 Subject: [PATCH 18/27] Fix some of the old tests --- crates/collab/src/tests/integration_tests.rs | 2 ++ crates/project/src/worktree_tests.rs | 26 ++++++++++++++++++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index fa7c4fe67d..61bbabccdd 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1381,6 +1381,8 @@ async fn test_share_project( Path::new("a.txt"), Path::new("b.txt"), Path::new("ignored-dir"), + Path::new("ignored-dir/c.txt"), + Path::new("ignored-dir/d.txt"), ] ); }); diff --git a/crates/project/src/worktree_tests.rs 
b/crates/project/src/worktree_tests.rs index bd548bb9cb..256bc6477a 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -25,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; #[gpui::test] async fn test_traversal(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -80,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) { #[gpui::test] async fn test_descendent_entries(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -187,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -266,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppCo #[gpui::test] async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -441,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { #[gpui::test] async fn test_open_gitignored_files(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -601,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) { #[gpui::test] async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -724,6 +730,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -829,6 +836,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { + init_test(cx); let dir = temp_tree(json!({ ".git": {}, ".gitignore": "ignored-dir\n", @@ -881,6 +889,7 @@ async fn test_write_file(cx: &mut TestAppContext) { #[gpui::test] async fn test_ignore_exclusions(cx: &mut TestAppContext) { + init_test(cx); let dir = temp_tree(json!({ ".gitignore": "**/target\n/node_modules\n", "target": { @@ -906,8 +915,6 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { ".DS_Store": "", })); cx.update(|cx| { - cx.set_global(SettingsStore::test(cx)); - Project::init_settings(cx); cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = @@ -976,6 +983,7 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { #[gpui::test(iterations = 30)] async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -1035,6 +1043,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { #[gpui::test] async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { + init_test(cx); let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let fs_fake = FakeFs::new(cx.background()); @@ -1151,6 +1160,7 @@ async fn test_random_worktree_operations_during_initial_scan( cx: &mut TestAppContext, mut rng: StdRng, ) { + init_test(cx); let operations = env::var("OPERATIONS") .map(|o| o.parse().unwrap()) .unwrap_or(5); @@ -1240,6 +1250,7 @@ async fn 
test_random_worktree_operations_during_initial_scan( #[gpui::test(iterations = 100)] async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { + init_test(cx); let operations = env::var("OPERATIONS") .map(|o| o.parse().unwrap()) .unwrap_or(40); @@ -1654,6 +1665,7 @@ fn random_filename(rng: &mut impl Rng) -> String { #[gpui::test] async fn test_rename_work_directory(cx: &mut TestAppContext) { + init_test(cx); let root = temp_tree(json!({ "projects": { "project1": { @@ -1724,6 +1736,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_repository_for_path(cx: &mut TestAppContext) { + init_test(cx); let root = temp_tree(json!({ "c.txt": "", "dir1": { @@ -1844,6 +1857,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_status(deterministic: Arc, cx: &mut TestAppContext) { + init_test(cx); const IGNORE_RULE: &'static str = "**/target"; let root = temp_tree(json!({ @@ -2032,6 +2046,7 @@ async fn test_git_status(deterministic: Arc, cx: &mut TestAppCont #[gpui::test] async fn test_propagate_git_statuses(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -2266,3 +2281,10 @@ fn check_worktree_entries( ); } } + +fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + cx.set_global(SettingsStore::test(cx)); + Project::init_settings(cx); + }); +} From 30fefa0ef8a05f9d8a337065b667511a3d33ce5d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 19:09:58 +0200 Subject: [PATCH 19/27] Use a better name --- assets/settings/default.json | 5 ++--- crates/project/src/project_settings.rs | 4 ++-- crates/project/src/worktree.rs | 22 +++++++++++----------- crates/project/src/worktree_tests.rs | 7 ++++--- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 4a21b708ee..b47f0dc2e7 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -269,7 +269,7 @@ "include_warnings": true }, // TODO kb docs - "scan_exclude_files": [ + "file_scan_exclusions": [ "**/.git", "**/.svn", "**/.hg", @@ -277,8 +277,7 @@ "**/.DS_Store", "**/Thumbs.db", "**/.classpath", - "**/.settings", - "**/target" + "**/.settings" ], // Git gutter behavior configuration. "git": { diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 511241bc22..cda37be601 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,9 +10,9 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb better names and docs and tests + // TODO kb docs and project_search test #[serde(default)] - pub scan_exclude_files: Option>, + pub file_scan_exclusions: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6508a8635c..b05593cd4d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -225,7 +225,7 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. 
git_repositories: TreeMap, - scan_exclude_files: Vec, + file_scan_exclusions: Vec, } struct BackgroundScannerState { @@ -315,14 +315,14 @@ impl Worktree { Ok(cx.add_model(move |cx: &mut ModelContext| { let settings_subscription = cx.observe_global::(move |this, cx| { if let Self::Local(this) = this { - let new_scan_exclude_files = - scan_exclude_files(settings::get::(cx)); - if new_scan_exclude_files != this.snapshot.scan_exclude_files { - this.snapshot.scan_exclude_files = new_scan_exclude_files; + let new_file_scan_exclusions = + file_scan_exclusions(settings::get::(cx)); + if new_file_scan_exclusions != this.snapshot.file_scan_exclusions { + this.snapshot.file_scan_exclusions = new_file_scan_exclusions; log::info!( "Re-scanning directories, new scan exclude files: {:?}", this.snapshot - .scan_exclude_files + .file_scan_exclusions .iter() .map(ToString::to_string) .collect::>() @@ -351,7 +351,7 @@ impl Worktree { .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); let mut snapshot = LocalSnapshot { - scan_exclude_files: scan_exclude_files(settings::get::(cx)), + file_scan_exclusions: file_scan_exclusions(settings::get::(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { @@ -648,15 +648,15 @@ fn start_background_scan_tasks( vec![background_scanner, scan_state_updater] } -fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { - project_settings.scan_exclude_files.as_deref().unwrap_or(&[]).iter() +fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec { + project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter() .sorted() .filter_map(|pattern| { PathMatcher::new(pattern) .map(Some) .unwrap_or_else(|e| { log::error!( - "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}" ); None }) @@ -2227,7 +2227,7 @@ impl LocalSnapshot { } fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { - self.scan_exclude_files + self.file_scan_exclusions .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) } diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 256bc6477a..f66a71ee7d 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -888,7 +888,7 @@ async fn test_write_file(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_ignore_exclusions(cx: &mut TestAppContext) { +async fn test_file_scan_exclusions(cx: &mut TestAppContext) { init_test(cx); let dir = temp_tree(json!({ ".gitignore": "**/target\n/node_modules\n", @@ -917,7 +917,7 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.scan_exclude_files = + project_settings.file_scan_exclusions = Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); @@ -954,7 +954,8 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]); + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); }); }); }); From 906db58188fccbc14638b296c45170de0fc7bcea Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 22:53:02 +0200 
Subject: [PATCH 20/27] Defer ignored dirs scanning --- crates/project/src/ignore.rs | 4 ++++ crates/project/src/worktree.rs | 12 ++++++++---- crates/project/src/worktree_tests.rs | 8 ++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index ed1ce4006d..a29e005019 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -20,6 +20,10 @@ impl IgnoreStack { Arc::new(Self::All) } + pub fn is_all(&self) -> bool { + matches!(self, Self::All) + } + pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index b05593cd4d..e8114832c2 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2253,9 +2253,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !matches!(ignore_stack.as_ref(), &IgnoreStack::All) - && !self.snapshot.is_abs_path_excluded(&abs_path) - { + if !ignore_stack.is_all() && !self.snapshot.is_abs_path_excluded(&abs_path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -3327,6 +3325,10 @@ impl BackgroundScanner { log::debug!("ignoring event {relative_path:?} within unloaded directory"); return false; } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!("ignoring event {relative_path:?} within excluded directory"); + return false; + } relative_paths.push(relative_path); true @@ -3678,7 +3680,9 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) { + if !ignore_stack.is_all() + && state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) + { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index f66a71ee7d..cd7f3cff81 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -2268,14 +2268,18 @@ fn check_worktree_entries( ); } for path in expected_ignored_paths { - let entry = tree.entry_for_path(path).unwrap(); + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'")); assert!( entry.is_ignored, "expected path '{path}' to be ignored, but got entry: {entry:?}", ); } for path in expected_tracked_paths { - let entry = tree.entry_for_path(path).unwrap(); + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'")); assert!( !entry.is_ignored, "expected path '{path}' to be tracked, but got entry: {entry:?}", From 6028cd90d4ae36578ea8451cf14ce59abe378846 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 16 Nov 2023 22:09:50 +0200 Subject: [PATCH 21/27] Retract back to original scanning strategy Do not descend into ignored directories, to avoid tracking their state. 
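A minimal sketch of the gating predicate this amounts to (names follow the diff below; the
checks for explicitly requested paths are elided):

    // Descend into a directory only when it is neither external nor git-ignored,
    // unless it is a `.git` directory or we have already scanned it before.
    fn should_scan_directory(&self, entry: &Entry) -> bool {
        (!entry.is_external && !entry.is_ignored)
            || entry.path.file_name() == Some(&*DOT_GIT)
            || self.scanned_dirs.contains(&entry.id)
    }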
--- crates/project/src/ignore.rs | 4 --- crates/project/src/worktree.rs | 51 ++++++++++++++-------------- crates/project/src/worktree_tests.rs | 5 +-- 3 files changed, 29 insertions(+), 31 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index a29e005019..ed1ce4006d 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, Self::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index e8114832c2..1dd0771cfe 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2234,8 +2234,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { - !entry.is_external && !self.snapshot.is_abs_path_excluded(entry_abs_path) + fn should_scan_directory(&self, entry: &Entry) -> bool { + (!entry.is_external && !entry.is_ignored) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2253,7 +2253,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() && !self.snapshot.is_abs_path_excluded(&abs_path) { + if !ignore_stack.is_abs_path_ignored(&abs_path, true) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2338,6 +2338,7 @@ impl BackgroundScannerState { self.scanned_dirs.insert(parent_entry_id); let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)]; let mut entries_by_id_edits = Vec::new(); + for entry in entries { entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, @@ -2852,7 +2853,7 @@ pub struct Entry { pub mtime: SystemTime, pub is_symlink: bool, - /// Whether this entry is ignored by Zed. + /// Whether this entry is ignored by Git. /// /// We only scan ignored entries once the directory is expanded and /// exclude them from searches. 
@@ -3326,7 +3327,9 @@ impl BackgroundScanner { return false; } if snapshot.is_abs_path_excluded(abs_path) { - log::debug!("ignoring event {relative_path:?} within excluded directory"); + log::debug!( + "ignoring FS event for path {relative_path:?} within excluded directory" + ); return false; } @@ -3638,6 +3641,7 @@ impl BackgroundScanner { ancestor_inodes.insert(child_entry.inode); new_jobs.push(Some(ScanJob { + abs_path: child_abs_path, path: child_path, is_external: child_entry.is_external, ignore_stack: if child_entry.is_ignored { @@ -3646,7 +3650,6 @@ impl BackgroundScanner { ignore_stack.clone() }, ancestor_inodes, - abs_path: child_abs_path, scan_queue: job.scan_queue.clone(), containing_repository: job.containing_repository.clone(), })); @@ -3655,17 +3658,19 @@ impl BackgroundScanner { } } else { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); - if let Some((repository_dir, repository, staged_statuses)) = - &job.containing_repository - { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = combine_git_statuses( - staged_statuses.get(&repo_path).copied(), - repository - .lock() - .unstaged_status(&repo_path, child_entry.mtime), - ); + if !child_entry.is_ignored { + if let Some((repository_dir, repository, staged_statuses)) = + &job.containing_repository + { + if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { + let repo_path = RepoPath(repo_path.into()); + child_entry.git_status = combine_git_statuses( + staged_statuses.get(&repo_path).copied(), + repository + .lock() + .unstaged_status(&repo_path, child_entry.mtime), + ); + } } } } @@ -3680,9 +3685,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if !ignore_stack.is_all() - && state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) - { + if state.should_scan_directory(&entry) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3773,9 +3776,7 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir - && !(fs_entry.is_ignored || state.snapshot.is_abs_path_excluded(&abs_path)) - { + if !is_dir && !fs_entry.is_ignored { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3786,7 +3787,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry, &abs_path) { + if state.should_scan_directory(&fs_entry) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3942,7 +3943,7 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't previously scanned. 
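        // Note: as of this change, `file_scan_exclusions` is applied before entries are
        // created (excluded paths are dropped in `scan_dir` and filtered out of FS events),
        // so the entry's own flags are enough here and `should_scan_directory` no longer
        // needs the absolute path.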
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry, &abs_path) { + if state.should_scan_directory(&entry) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index cd7f3cff81..f2b519021c 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -946,7 +946,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { "src/.DS_Store", ".DS_Store", ], - &["target/index", "node_modules/prettier/package.json"], + &["target", "node_modules"], &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], ) }); @@ -967,8 +967,9 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { &[ "node_modules/prettier/package.json", "node_modules/.DS_Store", + "node_modules", ], - &["target/index"], + &["target"], &[ ".gitignore", "src/lib.rs", From 81809384016b846db4a137e698280d189e638a32 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 16 Nov 2023 22:48:26 +0200 Subject: [PATCH 22/27] Fix most of the TODOs --- assets/settings/default.json | 4 +++- crates/collab/src/tests/integration_tests.rs | 2 -- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 1 + crates/search/src/project_search.rs | 6 ++++-- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b47f0dc2e7..bf2acc708e 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -268,7 +268,9 @@ // Whether to show warnings or not by default. "include_warnings": true }, - // TODO kb docs + // Add files or globs of files that will be excluded by Zed entirely: + // they will be skipped during FS scan(s), file tree and file search + // will lack the corresponding file entries. "file_scan_exclusions": [ "**/.git", "**/.svn", diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 61bbabccdd..fa7c4fe67d 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1381,8 +1381,6 @@ async fn test_share_project( Path::new("a.txt"), Path::new("b.txt"), Path::new("ignored-dir"), - Path::new("ignored-dir/c.txt"), - Path::new("ignored-dir/d.txt"), ] ); }); diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index cda37be601..a7acc7bba8 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,7 +10,6 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb docs and project_search test #[serde(default)] pub file_scan_exclusions: Option>, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 1dd0771cfe..aa6341f330 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3502,6 +3502,7 @@ impl BackgroundScanner { let state = self.state.lock(); let snapshot = &state.snapshot; root_abs_path = snapshot.abs_path().clone(); + // TODO kb we need `DOT_GIT` and `GITIGNORE` entries always processed. 
if snapshot.is_abs_path_excluded(&job.abs_path) { log::error!("skipping excluded directory {:?}", job.path); return Ok(()); diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 8f19e74802..5f3a6db6d4 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1767,14 +1767,16 @@ impl View for ProjectSearchBar { render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx) }); - let include_ignored = is_semantic_disabled.then(|| { + let mut include_ignored = is_semantic_disabled.then(|| { render_option_button_icon( - // TODO kb icon + // TODO proper icon "icons/case_insensitive.svg", SearchOptions::INCLUDE_IGNORED, cx, ) }); + // TODO not implemented yet + let _ = include_ignored.take(); let search_button_for_mode = |mode, side, cx: &mut ViewContext| { let is_active = if let Some(search) = self.active_project_search.as_ref() { From 2759ed4d006bb46c50ce1df00f5b5a3fd3acdefb Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 14:30:07 +0200 Subject: [PATCH 23/27] An attempt to ignore git regularly --- crates/project/src/worktree.rs | 71 ++++++++++++++++------------ crates/project/src/worktree_tests.rs | 15 ++++++ 2 files changed, 57 insertions(+), 29 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index aa6341f330..a169e062f3 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2464,14 +2464,17 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. let snapshot = &mut self.snapshot; + // TODO kb stop cleaning those up here? let mut repositories = mem::take(&mut snapshot.git_repositories); let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|work_directory_id, _| { - snapshot - .entry_for_id(*work_directory_id) - .map_or(false, |entry| { - snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - }) + repositories.retain(|_, entry| { + // TODO kb use fs + snapshot.abs_path().join(&entry.git_dir_path).exists() + // snapshot + // .entry_for_id(*work_directory_id) + // .map_or(false, |entry| { + // snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() + // }) }); repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); snapshot.git_repositories = repositories; @@ -3322,11 +3325,11 @@ impl BackgroundScanner { .entry_for_path(parent) .map_or(false, |entry| entry.kind == EntryKind::Dir) }); - if !parent_dir_is_loaded { + if !parent_dir_is_loaded && !is_git_related(&abs_path) { log::debug!("ignoring event {relative_path:?} within unloaded directory"); return false; } - if snapshot.is_abs_path_excluded(abs_path) { + if snapshot.is_abs_path_excluded(abs_path) && !is_git_related(&abs_path) { log::debug!( "ignoring FS event for path {relative_path:?} within excluded directory" ); @@ -3502,7 +3505,6 @@ impl BackgroundScanner { let state = self.state.lock(); let snapshot = &state.snapshot; root_abs_path = snapshot.abs_path().clone(); - // TODO kb we need `DOT_GIT` and `GITIGNORE` entries always processed. 
if snapshot.is_abs_path_excluded(&job.abs_path) { log::error!("skipping excluded directory {:?}", job.path); return Ok(()); @@ -3529,27 +3531,7 @@ impl BackgroundScanner { } }; let child_name = child_abs_path.file_name().unwrap(); - { - let mut state = self.state.lock(); - if state.snapshot.is_abs_path_excluded(&child_abs_path) { - let relative_path = job.path.join(child_name); - log::debug!("skipping excluded child entry {relative_path:?}"); - state.remove_path(&relative_path); - continue; - } - drop(state); - } - let child_path: Arc = job.path.join(child_name).into(); - let child_metadata = match self.fs.metadata(&child_abs_path).await { - Ok(Some(metadata)) => metadata, - Ok(None) => continue, - Err(err) => { - log::error!("error processing {:?}: {:?}", child_abs_path, err); - continue; - } - }; - // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -3591,8 +3573,33 @@ impl BackgroundScanner { // If we find a .git, we'll need to load the repository. else if child_name == *DOT_GIT { dotgit_path = Some(child_path.clone()); + { + let mut state = self.state.lock(); + state.build_git_repository(child_path.clone(), self.fs.as_ref()); + drop(state); + } } + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + + let child_metadata = match self.fs.metadata(&child_abs_path).await { + Ok(Some(metadata)) => metadata, + Ok(None) => continue, + Err(err) => { + log::error!("error processing {:?}: {:?}", child_abs_path, err); + continue; + } + }; + let mut child_entry = Entry::new( child_path.clone(), &child_metadata, @@ -4117,6 +4124,12 @@ impl BackgroundScanner { } } +fn is_git_related(abs_path: &&PathBuf) -> bool { + abs_path + .components() + .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) +} + fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { let mut result = root_char_bag; result.extend( diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index f2b519021c..22a5cc1e01 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -731,6 +731,13 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -1860,6 +1867,14 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_status(deterministic: Arc, cx: &mut TestAppContext) { init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]); + }); + }); + }); const IGNORE_RULE: &'static str = "**/target"; let root = temp_tree(json!({ From 616bda85e9405c8e20cfb9e37ac18d197b96803b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 19:21:25 +0200 Subject: [PATCH 24/27] Fix the 
tests --- crates/project/src/worktree.rs | 74 +++++++------ crates/project_panel/src/project_panel.rs | 126 +++++++++++++++++++++- 2 files changed, 165 insertions(+), 35 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index a169e062f3..82fa5d6020 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2464,21 +2464,30 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. let snapshot = &mut self.snapshot; - // TODO kb stop cleaning those up here? - let mut repositories = mem::take(&mut snapshot.git_repositories); - let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|_, entry| { - // TODO kb use fs - snapshot.abs_path().join(&entry.git_dir_path).exists() - // snapshot - // .entry_for_id(*work_directory_id) - // .map_or(false, |entry| { - // snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - // }) - }); - repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); - snapshot.git_repositories = repositories; - snapshot.repository_entries = repository_entries; + let mut ids_to_preserve = HashSet::default(); + for (&work_directory_id, entry) in snapshot.git_repositories.iter() { + let exists_in_snapshot = snapshot + .entry_for_id(work_directory_id) + .map_or(false, |entry| { + snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() + }); + if exists_in_snapshot { + ids_to_preserve.insert(work_directory_id); + } else { + let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); + if snapshot.is_abs_path_excluded(&git_dir_abs_path) + && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) + { + ids_to_preserve.insert(work_directory_id); + } + } + } + snapshot + .git_repositories + .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); + snapshot + .repository_entries + .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); } fn build_git_repository( @@ -3320,20 +3329,22 @@ impl BackgroundScanner { return false; }; - let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { - snapshot - .entry_for_path(parent) - .map_or(false, |entry| entry.kind == EntryKind::Dir) - }); - if !parent_dir_is_loaded && !is_git_related(&abs_path) { - log::debug!("ignoring event {relative_path:?} within unloaded directory"); - return false; - } - if snapshot.is_abs_path_excluded(abs_path) && !is_git_related(&abs_path) { - log::debug!( + if !is_git_related(&abs_path) { + let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { + snapshot + .entry_for_path(parent) + .map_or(false, |entry| entry.kind == EntryKind::Dir) + }); + if !parent_dir_is_loaded { + log::debug!("ignoring event {relative_path:?} within unloaded directory"); + return false; + } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!( "ignoring FS event for path {relative_path:?} within excluded directory" ); - return false; + return false; + } } relative_paths.push(relative_path); @@ -3573,11 +3584,6 @@ impl BackgroundScanner { // If we find a .git, we'll need to load the repository. 
else if child_name == *DOT_GIT { dotgit_path = Some(child_path.clone()); - { - let mut state = self.state.lock(); - state.build_git_repository(child_path.clone(), self.fs.as_ref()); - drop(state); - } } { @@ -3595,7 +3601,7 @@ impl BackgroundScanner { Ok(Some(metadata)) => metadata, Ok(None) => continue, Err(err) => { - log::error!("error processing {:?}: {:?}", child_abs_path, err); + log::error!("error processing {child_abs_path:?}: {err:?}"); continue; } }; @@ -4124,7 +4130,7 @@ impl BackgroundScanner { } } -fn is_git_related(abs_path: &&PathBuf) -> bool { +fn is_git_related(abs_path: &Path) -> bool { abs_path .components() .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index d66de1ad2e..e43423073c 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1732,7 +1732,7 @@ mod tests { use super::*; use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle}; use pretty_assertions::assert_eq; - use project::FakeFs; + use project::{project_settings::ProjectSettings, FakeFs}; use serde_json::json; use settings::SettingsStore; use std::{ @@ -1832,6 +1832,123 @@ mod tests { ); } + #[gpui::test] + async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + }); + }); + }); + + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root1", + json!({ + ".dockerignore": "", + ".git": { + "HEAD": "", + }, + "a": { + "0": { "q": "", "r": "", "s": "" }, + "1": { "t": "", "u": "" }, + "2": { "v": "", "w": "", "x": "", "y": "" }, + }, + "b": { + "3": { "Q": "" }, + "4": { "R": "", "S": "", "T": "", "U": "" }, + }, + "C": { + "5": {}, + "6": { "V": "", "W": "" }, + "7": { "X": "" }, + "8": { "Y": {}, "Z": "" } + } + }), + ) + .await; + fs.insert_tree( + "/root2", + json!({ + "d": { + "4": "" + }, + "e": {} + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let workspace = cx + .add_window(|cx| Workspace::test_new(project.clone(), cx)) + .root(cx); + let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx)); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " > b", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root1/b", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b <== selected", + " > 3", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/d", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d <== selected", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/e", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d", + " v e <== selected", + ] + ); + } + #[gpui::test(iterations = 30)] async fn test_editing_files(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -2930,6 +3047,13 @@ mod tests { client::init_settings(cx); Project::init_settings(cx); }); 
+ cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); } fn init_test_with_editor(cx: &mut TestAppContext) { From ceb20dea969bbfbe498ff24d165081bb540757a2 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 13:23:12 -0700 Subject: [PATCH 25/27] Refactorings --- crates/gpui2/src/view.rs | 4 + crates/project_panel2/src/project_panel.rs | 28 ++-- crates/settings2/src/settings_file.rs | 1 + crates/workspace2/src/dock.rs | 146 ++++++++++++++------- crates/workspace2/src/workspace2.rs | 4 +- 5 files changed, 119 insertions(+), 64 deletions(-) diff --git a/crates/gpui2/src/view.rs b/crates/gpui2/src/view.rs index 6b8c8a3eef..a26812c7ae 100644 --- a/crates/gpui2/src/view.rs +++ b/crates/gpui2/src/view.rs @@ -191,6 +191,10 @@ impl AnyView { self.model.entity_type } + pub fn entity_id(&self) -> EntityId { + self.model.entity_id() + } + pub(crate) fn draw( &self, origin: Point, diff --git a/crates/project_panel2/src/project_panel.rs b/crates/project_panel2/src/project_panel.rs index 87edabab52..7f36cffd60 100644 --- a/crates/project_panel2/src/project_panel.rs +++ b/crates/project_panel2/src/project_panel.rs @@ -1,6 +1,6 @@ pub mod file_associations; mod project_panel_settings; -use settings::Settings; +use settings::{Settings, SettingsStore}; use db::kvp::KEY_VALUE_STORE; use editor::{scroll::autoscroll::Autoscroll, Cancel, Editor}; @@ -34,7 +34,7 @@ use ui::{h_stack, v_stack, IconElement, Label}; use unicase::UniCase; use util::{maybe, ResultExt, TryFutureExt}; use workspace::{ - dock::{DockPosition, PanelEvent}, + dock::{DockPosition, Panel, PanelEvent}, Workspace, }; @@ -148,7 +148,6 @@ pub enum Event { SplitEntry { entry_id: ProjectEntryId, }, - DockPositionChanged, Focus, NewSearchInDirectory { dir_entry: Entry, @@ -244,16 +243,17 @@ impl ProjectPanel { this.update_visible_entries(None, cx); // Update the dock position when the setting changes. 
- // todo!() - // let mut old_dock_position = this.position(cx); - // cx.observe_global::(move |this, cx| { - // let new_dock_position = this.position(cx); - // if new_dock_position != old_dock_position { - // old_dock_position = new_dock_position; - // cx.emit(Event::DockPositionChanged); - // } - // }) - // .detach(); + let mut old_dock_position = this.position(cx); + ProjectPanelSettings::register(cx); + cx.observe_global::(move |this, cx| { + dbg!("OLA!"); + let new_dock_position = this.position(cx); + if new_dock_position != old_dock_position { + old_dock_position = new_dock_position; + cx.emit(PanelEvent::ChangePosition); + } + }) + .detach(); this }); @@ -1485,7 +1485,7 @@ impl EventEmitter for ProjectPanel {} impl EventEmitter for ProjectPanel {} -impl workspace::dock::Panel for ProjectPanel { +impl Panel for ProjectPanel { fn position(&self, cx: &WindowContext) -> DockPosition { match ProjectPanelSettings::get_global(cx).dock { ProjectPanelDockPosition::Left => DockPosition::Left, diff --git a/crates/settings2/src/settings_file.rs b/crates/settings2/src/settings_file.rs index fc4ad5882e..c28e281895 100644 --- a/crates/settings2/src/settings_file.rs +++ b/crates/settings2/src/settings_file.rs @@ -77,6 +77,7 @@ pub fn handle_settings_file_changes( }); cx.spawn(move |mut cx| async move { while let Some(user_settings_content) = user_settings_file_rx.next().await { + eprintln!("settings file changed"); let result = cx.update_global(|store: &mut SettingsStore, cx| { store .set_user_settings(&user_settings_content, cx) diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index 07237d6f62..1d6b8b3a2a 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -42,7 +42,7 @@ pub trait Panel: FocusableView + EventEmitter { } pub trait PanelHandle: Send + Sync { - fn id(&self) -> EntityId; + fn entity_id(&self) -> EntityId; fn persistent_name(&self) -> &'static str; fn position(&self, cx: &WindowContext) -> DockPosition; fn position_is_valid(&self, position: DockPosition, cx: &WindowContext) -> bool; @@ -64,8 +64,8 @@ impl PanelHandle for View where T: Panel, { - fn id(&self) -> EntityId { - self.entity_id() + fn entity_id(&self) -> EntityId { + Entity::entity_id(self) } fn persistent_name(&self) -> &'static str { @@ -256,20 +256,19 @@ impl Dock { } } - // todo!() - // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { - // for entry in &mut self.panel_entries { - // if entry.panel.as_any() == panel { - // if zoomed != entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(zoomed, cx); - // } - // } else if entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(false, cx); - // } - // } + pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { + for entry in &mut self.panel_entries { + if entry.panel.entity_id() == panel.entity_id() { + if zoomed != entry.panel.is_zoomed(cx) { + entry.panel.set_zoomed(zoomed, cx); + } + } else if entry.panel.is_zoomed(cx) { + entry.panel.set_zoomed(false, cx); + } + } - // cx.notify(); - // } + cx.notify(); + } pub fn zoom_out(&mut self, cx: &mut ViewContext) { for entry in &mut self.panel_entries { @@ -279,42 +278,91 @@ impl Dock { } } - pub(crate) fn add_panel(&mut self, panel: View, cx: &mut ViewContext) { + pub(crate) fn add_panel( + &mut self, + panel: View, + workspace: WeakView, + cx: &mut ViewContext, + ) { let subscriptions = [ cx.observe(&panel, |_, _, cx| cx.notify()), - cx.subscribe(&panel, |this, panel, event, cx| { - match 
event { - PanelEvent::ChangePosition => { - //todo!() - // see: Workspace::add_panel_with_extra_event_handler - } - PanelEvent::ZoomIn => { - //todo!() - // see: Workspace::add_panel_with_extra_event_handler - } - PanelEvent::ZoomOut => { - // todo!() - // // see: Workspace::add_panel_with_extra_event_handler - } - PanelEvent::Activate => { - if let Some(ix) = this - .panel_entries - .iter() - .position(|entry| entry.panel.id() == panel.id()) - { - this.set_open(true, cx); - this.activate_panel(ix, cx); - //` todo!() - // cx.focus(&panel); + cx.subscribe(&panel, move |this, panel, event, cx| match event { + PanelEvent::ChangePosition => { + let new_position = panel.read(cx).position(cx); + + let Ok(new_dock) = workspace.update(cx, |workspace, cx| { + if panel.is_zoomed(cx) { + workspace.zoomed_position = Some(new_position); } - } - PanelEvent::Close => { - if this.visible_panel().map_or(false, |p| p.id() == panel.id()) { - this.set_open(false, cx); + match new_position { + DockPosition::Left => &workspace.left_dock, + DockPosition::Bottom => &workspace.bottom_dock, + DockPosition::Right => &workspace.right_dock, } - } - PanelEvent::Focus => todo!(), + .clone() + }) else { + return; + }; + + let was_visible = this.is_open() + && this.visible_panel().map_or(false, |active_panel| { + active_panel.entity_id() == Entity::entity_id(&panel) + }); + + this.remove_panel(&panel, cx); + + new_dock.update(cx, |new_dock, cx| { + new_dock.add_panel(panel.clone(), workspace.clone(), cx); + if was_visible { + new_dock.set_open(true, cx); + new_dock.activate_panel(this.panels_len() - 1, cx); + } + }); } + PanelEvent::ZoomIn => { + this.set_panel_zoomed(&panel.to_any(), true, cx); + if !panel.has_focus(cx) { + cx.focus_view(&panel); + } + workspace + .update(cx, |workspace, cx| { + workspace.zoomed = Some(panel.downgrade().into()); + workspace.zoomed_position = Some(panel.read(cx).position(cx)); + }) + .ok(); + } + PanelEvent::ZoomOut => { + this.set_panel_zoomed(&panel.to_any(), false, cx); + workspace + .update(cx, |workspace, cx| { + if workspace.zoomed_position == Some(this.position) { + workspace.zoomed = None; + workspace.zoomed_position = None; + } + cx.notify(); + }) + .ok(); + } + PanelEvent::Activate => { + if let Some(ix) = this + .panel_entries + .iter() + .position(|entry| entry.panel.entity_id() == Entity::entity_id(&panel)) + { + this.set_open(true, cx); + this.activate_panel(ix, cx); + cx.focus_view(&panel); + } + } + PanelEvent::Close => { + if this + .visible_panel() + .map_or(false, |p| p.entity_id() == Entity::entity_id(&panel)) + { + this.set_open(false, cx); + } + } + PanelEvent::Focus => todo!(), }), ]; @@ -337,7 +385,7 @@ impl Dock { if let Some(panel_ix) = self .panel_entries .iter() - .position(|entry| entry.panel.id() == panel.id()) + .position(|entry| entry.panel.entity_id() == Entity::entity_id(panel)) { if panel_ix == self.active_panel_index { self.active_panel_index = 0; @@ -398,7 +446,7 @@ impl Dock { pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option { self.panel_entries .iter() - .find(|entry| entry.panel.id() == panel.id()) + .find(|entry| entry.panel.entity_id() == panel.entity_id()) .map(|entry| entry.panel.size(cx)) } diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index dc69280c1e..ae85f0a868 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -831,7 +831,9 @@ impl Workspace { DockPosition::Right => &self.right_dock, }; - dock.update(cx, |dock, cx| 
dock.add_panel(panel, cx)); + dock.update(cx, |dock, cx| { + dock.add_panel(panel, self.weak_self.clone(), cx) + }); } pub fn status_bar(&self) -> &View { From 6bfe6fa0e10b651a9d1e1b8743ba4399e8643d79 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 13:45:05 -0700 Subject: [PATCH 26/27] Fix image errors * Firstly only log one error per image load, not per frame * Secondly use an Icon not an image for rendering Icons --- crates/gpui2/src/elements/img.rs | 4 ++-- crates/gpui2/src/image_cache.rs | 10 +++++++++- crates/terminal_view2/src/terminal_view.rs | 13 ++++++------- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/crates/gpui2/src/elements/img.rs b/crates/gpui2/src/elements/img.rs index 1080135fe1..5376c40012 100644 --- a/crates/gpui2/src/elements/img.rs +++ b/crates/gpui2/src/elements/img.rs @@ -81,7 +81,7 @@ impl Element for Img { if let Some(data) = image_future .clone() .now_or_never() - .and_then(ResultExt::log_err) + .and_then(|result| result.ok()) { let corner_radii = corner_radii.to_pixels(bounds.size, cx.rem_size()); cx.with_z_index(1, |cx| { @@ -90,7 +90,7 @@ impl Element for Img { }); } else { cx.spawn(|_, mut cx| async move { - if image_future.await.log_err().is_some() { + if image_future.await.ok().is_some() { cx.on_next_frame(|cx| cx.notify()); } }) diff --git a/crates/gpui2/src/image_cache.rs b/crates/gpui2/src/image_cache.rs index 6417f7d5e1..f80b0f0c2f 100644 --- a/crates/gpui2/src/image_cache.rs +++ b/crates/gpui2/src/image_cache.rs @@ -2,7 +2,7 @@ use crate::{ImageData, ImageId, SharedString}; use collections::HashMap; use futures::{ future::{BoxFuture, Shared}, - AsyncReadExt, FutureExt, + AsyncReadExt, FutureExt, TryFutureExt, }; use image::ImageError; use parking_lot::Mutex; @@ -88,6 +88,14 @@ impl ImageCache { Ok(Arc::new(ImageData::new(image))) } } + .map_err({ + let uri = uri.clone(); + + move |error| { + log::log!(log::Level::Error, "{:?} {:?}", &uri, &error); + error + } + }) .boxed() .shared(); diff --git a/crates/terminal_view2/src/terminal_view.rs b/crates/terminal_view2/src/terminal_view.rs index 56de1ee7ef..27e55602fb 100644 --- a/crates/terminal_view2/src/terminal_view.rs +++ b/crates/terminal_view2/src/terminal_view.rs @@ -9,11 +9,10 @@ pub mod terminal_panel; // use crate::terminal_element::TerminalElement; use editor::{scroll::autoscroll::Autoscroll, Editor}; use gpui::{ - actions, div, img, red, Action, AnyElement, AppContext, Component, DispatchPhase, Div, - EventEmitter, FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, - InputHandler, InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton, - ParentComponent, Pixels, Render, SharedString, Styled, Task, View, ViewContext, VisualContext, - WeakView, + actions, div, Action, AnyElement, AppContext, Component, DispatchPhase, Div, EventEmitter, + FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, InputHandler, + InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton, ParentComponent, Pixels, + Render, SharedString, Styled, Task, View, ViewContext, VisualContext, WeakView, }; use language::Bias; use persistence::TERMINAL_DB; @@ -32,7 +31,7 @@ use workspace::{ notifications::NotifyResultExt, register_deserializable_item, searchable::{SearchEvent, SearchOptions, SearchableItem}, - ui::{ContextMenu, Label, ListEntry}, + ui::{ContextMenu, Icon, IconElement, Label, ListEntry}, CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId, }; @@ -755,7 +754,7 @@ impl Item for TerminalView { 
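// The image_cache.rs hunk above attaches the error log to the load future *before*
// it is memoized with `.shared()`, so a failed image load is reported once, when the
// underlying future resolves, instead of on every frame that re-polls the cached
// handle. A minimal sketch of that behaviour, assuming only the `futures` crate
// (the URI and the executor choice are illustrative):
use futures::{FutureExt, TryFutureExt};

async fn load(uri: String) -> Result<Vec<u8>, String> {
    Err(format!("failed to fetch {uri}"))
}

fn main() {
    futures::executor::block_on(async {
        let uri = "https://example.com/icon.png".to_string();
        let image = load(uri.clone())
            .map_err(move |error| {
                eprintln!("{uri}: {error}"); // runs once, when the load completes
                error
            })
            .boxed()
            .shared();

        // Both awaits reuse the memoized Result; the log line above fires only once.
        assert!(image.clone().await.is_err());
        assert!(image.clone().await.is_err());
    });
}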
let title = self.terminal().read(cx).title(); div() - .child(img().uri("icons/terminal.svg").bg(red())) + .child(IconElement::new(Icon::Terminal)) .child(title) .render() } From d352a63d9d5d9b0eba4ece72c5eb0cee7cc3ae5b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 22:54:19 +0200 Subject: [PATCH 27/27] Port new workspace logic to gpui2, uncomment most of the workspace2 tests --- .../src/chat_panel/message_editor.rs | 10 +- crates/project/src/ignore.rs | 1 + crates/project2/src/ignore.rs | 4 - crates/project2/src/project2.rs | 12 +- crates/project2/src/project_settings.rs | 2 + crates/project2/src/worktree.rs | 342 +- crates/project2/src/worktree_tests.rs | 4451 +++++++++-------- crates/project_panel/src/project_panel.rs | 3 +- crates/project_panel2/src/project_panel.rs | 126 +- 9 files changed, 2684 insertions(+), 2267 deletions(-) diff --git a/crates/collab_ui2/src/chat_panel/message_editor.rs b/crates/collab_ui2/src/chat_panel/message_editor.rs index 6dbe3aa204..9e6bfb553e 100644 --- a/crates/collab_ui2/src/chat_panel/message_editor.rs +++ b/crates/collab_ui2/src/chat_panel/message_editor.rs @@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration}; const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); lazy_static! { - static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( - "@[-_\\w]+", - false, - false, - Default::default(), - Default::default() - ) - .unwrap(); + static ref MENTIONS_SEARCH: SearchQuery = + SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap(); } pub struct MessageEditor { diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index ed1ce4006d..41e5746f13 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -35,6 +35,7 @@ impl IgnoreStack { if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { return true; } + match self { Self::None => false, Self::All => true, diff --git a/crates/project2/src/ignore.rs b/crates/project2/src/ignore.rs index 8bac08b96c..41e5746f13 100644 --- a/crates/project2/src/ignore.rs +++ b/crates/project2/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, IgnoreStack::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project2/src/project2.rs b/crates/project2/src/project2.rs index 3cc4e8a293..3f7c9b7188 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -5640,8 +5640,16 @@ impl Project { .iter() .filter_map(|(_, b)| { let buffer = b.upgrade()?; - let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); - if let Some(path) = snapshot.file().map(|file| file.path()) { + let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| { + let is_ignored = buffer + .project_path(cx) + .and_then(|path| self.entry_for_path(&path, cx)) + .map_or(false, |entry| entry.is_ignored); + (is_ignored, buffer.snapshot()) + }); + if is_ignored && !query.include_ignored() { + return None; + } else if let Some(path) = snapshot.file().map(|file| file.path()) { Some((path.clone(), (buffer, snapshot))) } else { unnamed_files.push(buffer); diff --git a/crates/project2/src/project_settings.rs b/crates/project2/src/project_settings.rs index 028a564b9c..2a8df47e67 100644 --- a/crates/project2/src/project_settings.rs +++ b/crates/project2/src/project_settings.rs @@ -11,6 +11,8 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, 
#[serde(default)] pub git: GitSettings, + #[serde(default)] + pub file_scan_exclusions: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project2/src/worktree.rs b/crates/project2/src/worktree.rs index a020e8db4c..fcb64c40b4 100644 --- a/crates/project2/src/worktree.rs +++ b/crates/project2/src/worktree.rs @@ -1,5 +1,6 @@ use crate::{ - copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, + copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary, + ProjectEntryId, RemoveOptions, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context as _, Result}; @@ -25,6 +26,7 @@ use gpui::{ AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext, Task, }; +use itertools::Itertools; use language::{ proto::{ deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, @@ -39,6 +41,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; +use settings::{Settings, SettingsStore}; use smol::channel::{self, Sender}; use std::{ any::Any, @@ -58,7 +61,10 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt}; +use util::{ + paths::{PathMatcher, HOME}, + ResultExt, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -73,7 +79,7 @@ pub struct LocalWorktree { scan_requests_tx: channel::Sender, path_prefixes_to_scan_tx: channel::Sender>, is_scanning: (watch::Sender, watch::Receiver), - _background_scanner_task: Task<()>, + _background_scanner_tasks: Vec>, share: Option, diagnostics: HashMap< Arc, @@ -219,6 +225,7 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. 
git_repositories: TreeMap, + file_scan_exclusions: Vec, } struct BackgroundScannerState { @@ -302,17 +309,56 @@ impl Worktree { .await .context("failed to stat worktree path")?; + let closure_fs = Arc::clone(&fs); + let closure_next_entry_id = Arc::clone(&next_entry_id); + let closure_abs_path = abs_path.to_path_buf(); cx.build_model(move |cx: &mut ModelContext| { + cx.observe_global::(move |this, cx| { + if let Self::Local(this) = this { + let new_file_scan_exclusions = + file_scan_exclusions(ProjectSettings::get_global(cx)); + if new_file_scan_exclusions != this.snapshot.file_scan_exclusions { + this.snapshot.file_scan_exclusions = new_file_scan_exclusions; + log::info!( + "Re-scanning directories, new scan exclude files: {:?}", + this.snapshot + .file_scan_exclusions + .iter() + .map(ToString::to_string) + .collect::>() + ); + + let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = + channel::unbounded(); + this.scan_requests_tx = scan_requests_tx; + this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; + this._background_scanner_tasks = start_background_scan_tasks( + &closure_abs_path, + this.snapshot(), + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&closure_next_entry_id), + Arc::clone(&closure_fs), + cx, + ); + this.is_scanning = watch::channel_with(true); + } + } + }) + .detach(); + let root_name = abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); let mut snapshot = LocalSnapshot { + file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize), - abs_path: abs_path.clone(), + abs_path: abs_path.to_path_buf().into(), root_name: root_name.clone(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), entries_by_path: Default::default(), @@ -337,61 +383,22 @@ impl Worktree { let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); - let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); - - cx.spawn(|this, mut cx| async move { - while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) { - this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - match state { - ScanState::Started => { - *this.is_scanning.0.borrow_mut() = true; - } - ScanState::Updated { - snapshot, - changes, - barrier, - scanning, - } => { - *this.is_scanning.0.borrow_mut() = scanning; - this.set_snapshot(snapshot, changes, cx); - drop(barrier); - } - } - cx.notify(); - }) - .ok(); - } - }) - .detach(); - - let background_scanner_task = cx.background_executor().spawn({ - let fs = fs.clone(); - let snapshot = snapshot.clone(); - let background = cx.background_executor().clone(); - async move { - let events = fs.watch(&abs_path, Duration::from_millis(100)).await; - BackgroundScanner::new( - snapshot, - next_entry_id, - fs, - scan_states_tx, - background, - scan_requests_rx, - path_prefixes_to_scan_rx, - ) - .run(events) - .await; - } - }); - + let task_snapshot = snapshot.clone(); Worktree::Local(LocalWorktree { snapshot, is_scanning: watch::channel_with(true), share: None, scan_requests_tx, path_prefixes_to_scan_tx, - _background_scanner_task: background_scanner_task, + _background_scanner_tasks: start_background_scan_tasks( + &abs_path, + task_snapshot, + 
scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&next_entry_id), + Arc::clone(&fs), + cx, + ), diagnostics: Default::default(), diagnostic_summaries: Default::default(), client, @@ -584,6 +591,77 @@ impl Worktree { } } +fn start_background_scan_tasks( + abs_path: &Path, + snapshot: LocalSnapshot, + scan_requests_rx: channel::Receiver, + path_prefixes_to_scan_rx: channel::Receiver>, + next_entry_id: Arc, + fs: Arc, + cx: &mut ModelContext<'_, Worktree>, +) -> Vec> { + let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); + let background_scanner = cx.background_executor().spawn({ + let abs_path = abs_path.to_path_buf(); + let background = cx.background_executor().clone(); + async move { + let events = fs.watch(&abs_path, Duration::from_millis(100)).await; + BackgroundScanner::new( + snapshot, + next_entry_id, + fs, + scan_states_tx, + background, + scan_requests_rx, + path_prefixes_to_scan_rx, + ) + .run(events) + .await; + } + }); + let scan_state_updater = cx.spawn(|this, mut cx| async move { + while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + match state { + ScanState::Started => { + *this.is_scanning.0.borrow_mut() = true; + } + ScanState::Updated { + snapshot, + changes, + barrier, + scanning, + } => { + *this.is_scanning.0.borrow_mut() = scanning; + this.set_snapshot(snapshot, changes, cx); + drop(barrier); + } + } + cx.notify(); + }) + .ok(); + } + }); + vec![background_scanner, scan_state_updater] +} + +fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec { + project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter() + .sorted() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect() +} + impl LocalWorktree { pub fn contains_abs_path(&self, path: &Path) -> bool { path.starts_with(&self.abs_path) @@ -1482,7 +1560,7 @@ impl Snapshot { self.entries_by_id.get(&entry_id, &()).is_some() } - pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result { + fn insert_entry(&mut self, entry: proto::Entry) -> Result { let entry = Entry::try_from((&self.root_char_bag, entry))?; let old_entry = self.entries_by_id.insert_or_replace( PathEntry { @@ -2143,6 +2221,12 @@ impl LocalSnapshot { paths.sort_by(|a, b| a.0.cmp(b.0)); paths } + + fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { + self.file_scan_exclusions + .iter() + .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) + } } impl BackgroundScannerState { @@ -2165,7 +2249,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() { + if !ignore_stack.is_abs_path_ignored(&abs_path, true) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2376,18 +2460,30 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. 
let snapshot = &mut self.snapshot; - let mut repositories = mem::take(&mut snapshot.git_repositories); - let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|work_directory_id, _| { - snapshot - .entry_for_id(*work_directory_id) + let mut ids_to_preserve = HashSet::default(); + for (&work_directory_id, entry) in snapshot.git_repositories.iter() { + let exists_in_snapshot = snapshot + .entry_for_id(work_directory_id) .map_or(false, |entry| { snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - }) - }); - repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); - snapshot.git_repositories = repositories; - snapshot.repository_entries = repository_entries; + }); + if exists_in_snapshot { + ids_to_preserve.insert(work_directory_id); + } else { + let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); + if snapshot.is_abs_path_excluded(&git_dir_abs_path) + && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) + { + ids_to_preserve.insert(work_directory_id); + } + } + } + snapshot + .git_repositories + .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); + snapshot + .repository_entries + .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); } fn build_git_repository( @@ -3085,7 +3181,7 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if ignore_stack.is_all() { + if ignore_stack.is_abs_path_ignored(&root_abs_path, true) { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3222,14 +3318,22 @@ impl BackgroundScanner { return false; }; - let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { - snapshot - .entry_for_path(parent) - .map_or(false, |entry| entry.kind == EntryKind::Dir) - }); - if !parent_dir_is_loaded { - log::debug!("ignoring event {relative_path:?} within unloaded directory"); - return false; + if !is_git_related(&abs_path) { + let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { + snapshot + .entry_for_path(parent) + .map_or(false, |entry| entry.kind == EntryKind::Dir) + }); + if !parent_dir_is_loaded { + log::debug!("ignoring event {relative_path:?} within unloaded directory"); + return false; + } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!( + "ignoring FS event for path {relative_path:?} within excluded directory" + ); + return false; + } } relative_paths.push(relative_path); @@ -3392,18 +3496,26 @@ impl BackgroundScanner { } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { - log::debug!("scan directory {:?}", job.path); - - let mut ignore_stack = job.ignore_stack.clone(); - let mut new_ignore = None; - let (root_abs_path, root_char_bag, next_entry_id) = { - let snapshot = &self.state.lock().snapshot; - ( - snapshot.abs_path().clone(), - snapshot.root_char_bag, - self.next_entry_id.clone(), - ) - }; + let root_abs_path; + let mut ignore_stack; + let mut new_ignore; + let root_char_bag; + let next_entry_id; + { + let state = self.state.lock(); + let snapshot = &state.snapshot; + root_abs_path = snapshot.abs_path().clone(); + if snapshot.is_abs_path_excluded(&job.abs_path) { + log::error!("skipping excluded directory {:?}", job.path); + return Ok(()); + } + log::debug!("scanning directory {:?}", job.path); + ignore_stack = job.ignore_stack.clone(); + new_ignore = None; + root_char_bag = snapshot.root_char_bag; + next_entry_id = self.next_entry_id.clone(); 
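// The scoped lock that the explicit `drop(state)` just below closes out keeps the
// scanner-state guard from being held across the `.await` points later in this
// function; holding a synchronous mutex guard across an await would stall other
// tasks and can make the future non-Send. A small std-only sketch of the same
// scope-the-lock pattern (names are illustrative):
use std::sync::Mutex;

async fn scan(shared: &Mutex<Vec<String>>) {
    let root = {
        let state = shared.lock().unwrap();
        let first = state.first().cloned();
        drop(state); // release the guard before any await below
        first
    };
    // ...awaiting other work here is now safe; no guard is held...
    let _ = root;
}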
+ drop(state); + } let mut dotgit_path = None; let mut root_canonical_path = None; @@ -3418,18 +3530,8 @@ impl BackgroundScanner { continue; } }; - let child_name = child_abs_path.file_name().unwrap(); let child_path: Arc = job.path.join(child_name).into(); - let child_metadata = match self.fs.metadata(&child_abs_path).await { - Ok(Some(metadata)) => metadata, - Ok(None) => continue, - Err(err) => { - log::error!("error processing {:?}: {:?}", child_abs_path, err); - continue; - } - }; - // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -3473,6 +3575,26 @@ impl BackgroundScanner { dotgit_path = Some(child_path.clone()); } + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + + let child_metadata = match self.fs.metadata(&child_abs_path).await { + Ok(Some(metadata)) => metadata, + Ok(None) => continue, + Err(err) => { + log::error!("error processing {child_abs_path:?}: {err:?}"); + continue; + } + }; + let mut child_entry = Entry::new( child_path.clone(), &child_metadata, @@ -3653,19 +3775,16 @@ impl BackgroundScanner { self.next_entry_id.as_ref(), state.snapshot.root_char_bag, ); - fs_entry.is_ignored = ignore_stack.is_all(); + let is_dir = fs_entry.is_dir(); + fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !fs_entry.is_ignored { - if !fs_entry.is_dir() { - if let Some((work_dir, repo)) = - state.snapshot.local_repo_for_path(&path) - { - if let Ok(repo_path) = path.strip_prefix(work_dir.0) { - let repo_path = RepoPath(repo_path.into()); - let repo = repo.repo_ptr.lock(); - fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); - } + if !is_dir && !fs_entry.is_ignored { + if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(work_dir.0) { + let repo_path = RepoPath(repo_path.into()); + let repo = repo.repo_ptr.lock(); + fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); } } } @@ -3824,8 +3943,7 @@ impl BackgroundScanner { ignore_stack.clone() }; - // Scan any directories that were previously ignored and weren't - // previously scanned. + // Scan any directories that were previously ignored and weren't previously scanned. 
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); if state.should_scan_directory(&entry) { @@ -4001,6 +4119,12 @@ impl BackgroundScanner { } } +fn is_git_related(abs_path: &Path) -> bool { + abs_path + .components() + .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) +} + fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { let mut result = root_char_bag; result.extend( diff --git a/crates/project2/src/worktree_tests.rs b/crates/project2/src/worktree_tests.rs index bf195f24c4..df7307f694 100644 --- a/crates/project2/src/worktree_tests.rs +++ b/crates/project2/src/worktree_tests.rs @@ -1,2141 +1,2310 @@ -// use crate::{ -// worktree::{Event, Snapshot, WorktreeModelHandle}, -// Entry, EntryKind, PathChange, Worktree, -// }; -// use anyhow::Result; -// use client2::Client; -// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; -// use git::GITIGNORE; -// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; -// use parking_lot::Mutex; -// use postage::stream::Stream; -// use pretty_assertions::assert_eq; -// use rand::prelude::*; -// use serde_json::json; -// use std::{ -// env, -// fmt::Write, -// mem, -// path::{Path, PathBuf}, -// sync::Arc, -// }; -// use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; - -// #[gpui::test] -// async fn test_traversal(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "a/b\n", -// "a": { -// "b": "", -// "c": "", -// } -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new(".gitignore"), -// Path::new("a"), -// Path::new("a/c"), -// ] -// ); -// assert_eq!( -// tree.entries(true) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new(".gitignore"), -// Path::new("a"), -// Path::new("a/b"), -// Path::new("a/c"), -// ] -// ); -// }) -// } - -// #[gpui::test] -// async fn test_descendent_entries(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "a": "", -// "b": { -// "c": { -// "d": "" -// }, -// "e": {} -// }, -// "f": "", -// "g": { -// "h": {} -// }, -// "i": { -// "j": { -// "k": "" -// }, -// "l": { - -// } -// }, -// ".gitignore": "i/j\n", -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("b")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("b/c/d"),] -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("b")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new("b"), -// Path::new("b/c"), -// Path::new("b/c/d"), -// Path::new("b/e"), -// ] -// ); - -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("g")) -// 
.map(|entry| entry.path.as_ref()) -// .collect::>(), -// Vec::::new() -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("g")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("g"), Path::new("g/h"),] -// ); -// }); - -// // Expand gitignored directory. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("i/j").into()]) -// }) -// .recv() -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// Vec::::new() -// ); -// assert_eq!( -// tree.descendent_entries(false, true, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("i/j/k")] -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("i"), Path::new("i/l"),] -// ); -// }) -// } - -// #[gpui::test(iterations = 10)] -// async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "lib": { -// "a": { -// "a.txt": "" -// }, -// "b": { -// "b.txt": "" -// } -// } -// }), -// ) -// .await; -// fs.insert_symlink("/root/lib/a/lib", "..".into()).await; -// fs.insert_symlink("/root/lib/b/lib", "..".into()).await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new("lib"), -// Path::new("lib/a"), -// Path::new("lib/a/a.txt"), -// Path::new("lib/a/lib"), -// Path::new("lib/b"), -// Path::new("lib/b/b.txt"), -// Path::new("lib/b/lib"), -// ] -// ); -// }); - -// fs.rename( -// Path::new("/root/lib/a/lib"), -// Path::new("/root/lib/a/lib-2"), -// Default::default(), -// ) -// .await -// .unwrap(); -// executor.run_until_parked(); -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new("lib"), -// Path::new("lib/a"), -// Path::new("lib/a/a.txt"), -// Path::new("lib/a/lib-2"), -// Path::new("lib/b"), -// Path::new("lib/b/b.txt"), -// Path::new("lib/b/lib"), -// ] -// ); -// }); -// } - -// #[gpui::test] -// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "dir1": { -// "deps": { -// // symlinks here -// }, -// "src": { -// "a.rs": "", -// "b.rs": "", -// }, -// }, -// "dir2": { -// "src": { -// "c.rs": "", -// "d.rs": "", -// } -// }, -// "dir3": { -// "deps": {}, -// "src": { -// "e.rs": "", -// "f.rs": "", -// }, -// } -// }), -// ) -// .await; - -// // These symlinks point to directories outside of the worktree's root, dir1. 
-// fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into()) -// .await; -// fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into()) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root/dir1"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// let tree_updates = Arc::new(Mutex::new(Vec::new())); -// tree.update(cx, |_, cx| { -// let tree_updates = tree_updates.clone(); -// cx.subscribe(&tree, move |_, _, event, _| { -// if let Event::UpdatedEntries(update) = event { -// tree_updates.lock().extend( -// update -// .iter() -// .map(|(path, _, change)| (path.clone(), *change)), -// ); -// } -// }) -// .detach(); -// }); - -// // The symlinked directories are not scanned by default. -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); - -// assert_eq!( -// tree.entry_for_path("deps/dep-dir2").unwrap().kind, -// EntryKind::UnloadedDir -// ); -// }); - -// // Expand one of the symlinked directories. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()]) -// }) -// .recv() -// .await; - -// // The expanded directory's contents are loaded. Subdirectories are -// // not scanned yet. -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("deps/dep-dir3/deps"), true), -// (Path::new("deps/dep-dir3/src"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); -// }); -// assert_eq!( -// mem::take(&mut *tree_updates.lock()), -// &[ -// (Path::new("deps/dep-dir3").into(), PathChange::Loaded), -// (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded), -// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded) -// ] -// ); - -// // Expand a subdirectory of one of the symlinked directories. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()]) -// }) -// .recv() -// .await; - -// // The expanded subdirectory's contents are loaded. 
-// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("deps/dep-dir3/deps"), true), -// (Path::new("deps/dep-dir3/src"), true), -// (Path::new("deps/dep-dir3/src/e.rs"), true), -// (Path::new("deps/dep-dir3/src/f.rs"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); -// }); - -// assert_eq!( -// mem::take(&mut *tree_updates.lock()), -// &[ -// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded), -// ( -// Path::new("deps/dep-dir3/src/e.rs").into(), -// PathChange::Loaded -// ), -// ( -// Path::new("deps/dep-dir3/src/f.rs").into(), -// PathChange::Loaded -// ) -// ] -// ); -// } - -// #[gpui::test] -// async fn test_open_gitignored_files(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "node_modules\n", -// "one": { -// "node_modules": { -// "a": { -// "a1.js": "a1", -// "a2.js": "a2", -// }, -// "b": { -// "b1.js": "b1", -// "b2.js": "b2", -// }, -// "c": { -// "c1.js": "c1", -// "c2.js": "c2", -// } -// }, -// }, -// "two": { -// "x.js": "", -// "y.js": "", -// }, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); -// }); - -// // Open a file that is nested inside of a gitignored directory that -// // has not yet been expanded. -// let prev_read_dir_count = fs.read_dir_call_count(); -// let buffer = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, cx| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), true), -// (Path::new("one/node_modules/a"), true), -// (Path::new("one/node_modules/b"), true), -// (Path::new("one/node_modules/b/b1.js"), true), -// (Path::new("one/node_modules/b/b2.js"), true), -// (Path::new("one/node_modules/c"), true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); - -// assert_eq!( -// buffer.read(cx).file().unwrap().path().as_ref(), -// Path::new("one/node_modules/b/b1.js") -// ); - -// // Only the newly-expanded directories are scanned. -// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2); -// }); - -// // Open another file in a different subdirectory of the same -// // gitignored directory. 
-// let prev_read_dir_count = fs.read_dir_call_count(); -// let buffer = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, cx| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), true), -// (Path::new("one/node_modules/a"), true), -// (Path::new("one/node_modules/a/a1.js"), true), -// (Path::new("one/node_modules/a/a2.js"), true), -// (Path::new("one/node_modules/b"), true), -// (Path::new("one/node_modules/b/b1.js"), true), -// (Path::new("one/node_modules/b/b2.js"), true), -// (Path::new("one/node_modules/c"), true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); - -// assert_eq!( -// buffer.read(cx).file().unwrap().path().as_ref(), -// Path::new("one/node_modules/a/a2.js") -// ); - -// // Only the newly-expanded directory is scanned. -// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1); -// }); - -// // No work happens when files and directories change within an unloaded directory. -// let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count(); -// fs.create_dir("/root/one/node_modules/c/lib".as_ref()) -// .await -// .unwrap(); -// cx.foreground().run_until_parked(); -// assert_eq!( -// fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count, -// 0 -// ); -// } - -// #[gpui::test] -// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "node_modules\n", -// "a": { -// "a.js": "", -// }, -// "b": { -// "b.js": "", -// }, -// "node_modules": { -// "c": { -// "c.js": "", -// }, -// "d": { -// "d.js": "", -// "e": { -// "e1.js": "", -// "e2.js": "", -// }, -// "f": { -// "f1.js": "", -// "f2.js": "", -// } -// }, -// }, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// // Open a file within the gitignored directory, forcing some of its -// // subdirectories to be read, but not all. -// let read_dir_count_1 = fs.read_dir_call_count(); -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()]) -// }) -// .recv() -// .await; - -// // Those subdirectories are now loaded. 
-// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|e| (e.path.as_ref(), e.is_ignored)) -// .collect::>(), -// &[ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("a"), false), -// (Path::new("a/a.js"), false), -// (Path::new("b"), false), -// (Path::new("b/b.js"), false), -// (Path::new("node_modules"), true), -// (Path::new("node_modules/c"), true), -// (Path::new("node_modules/d"), true), -// (Path::new("node_modules/d/d.js"), true), -// (Path::new("node_modules/d/e"), true), -// (Path::new("node_modules/d/f"), true), -// ] -// ); -// }); -// let read_dir_count_2 = fs.read_dir_call_count(); -// assert_eq!(read_dir_count_2 - read_dir_count_1, 2); - -// // Update the gitignore so that node_modules is no longer ignored, -// // but a subdirectory is ignored -// fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default()) -// .await -// .unwrap(); -// cx.foreground().run_until_parked(); - -// // All of the directories that are no longer ignored are now loaded. -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|e| (e.path.as_ref(), e.is_ignored)) -// .collect::>(), -// &[ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("a"), false), -// (Path::new("a/a.js"), false), -// (Path::new("b"), false), -// (Path::new("b/b.js"), false), -// // This directory is no longer ignored -// (Path::new("node_modules"), false), -// (Path::new("node_modules/c"), false), -// (Path::new("node_modules/c/c.js"), false), -// (Path::new("node_modules/d"), false), -// (Path::new("node_modules/d/d.js"), false), -// // This subdirectory is now ignored -// (Path::new("node_modules/d/e"), true), -// (Path::new("node_modules/d/f"), false), -// (Path::new("node_modules/d/f/f1.js"), false), -// (Path::new("node_modules/d/f/f2.js"), false), -// ] -// ); -// }); - -// // Each of the newly-loaded directories is scanned only once. 
-// let read_dir_count_3 = fs.read_dir_call_count(); -// assert_eq!(read_dir_count_3 - read_dir_count_2, 2); -// } - -// #[gpui::test(iterations = 10)] -// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", -// "tree": { -// ".git": {}, -// ".gitignore": "ignored-dir\n", -// "tracked-dir": { -// "tracked-file1": "", -// "ancestor-ignored-file1": "", -// }, -// "ignored-dir": { -// "ignored-file1": "" -// } -// } -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// "/root/tree".as_ref(), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) -// }) -// .recv() -// .await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// assert!( -// !tree -// .entry_for_path("tracked-dir/tracked-file1") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("tracked-dir/ancestor-ignored-file1") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("ignored-dir/ignored-file1") -// .unwrap() -// .is_ignored -// ); -// }); - -// fs.create_file( -// "/root/tree/tracked-dir/tracked-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); -// fs.create_file( -// "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); -// fs.create_file( -// "/root/tree/ignored-dir/ignored-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); - -// cx.foreground().run_until_parked(); -// cx.read(|cx| { -// let tree = tree.read(cx); -// assert!( -// !tree -// .entry_for_path("tracked-dir/tracked-file2") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("tracked-dir/ancestor-ignored-file2") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("ignored-dir/ignored-file2") -// .unwrap() -// .is_ignored -// ); -// assert!(tree.entry_for_path(".git").unwrap().is_ignored); -// }); -// } - -// #[gpui::test] -// async fn test_write_file(cx: &mut TestAppContext) { -// let dir = temp_tree(json!({ -// ".git": {}, -// ".gitignore": "ignored-dir\n", -// "tracked-dir": {}, -// "ignored-dir": {} -// })); - -// let tree = Worktree::local( -// build_client(cx), -// dir.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// tree.flush_fs_events(cx).await; - -// tree.update(cx, |tree, cx| { -// tree.as_local().unwrap().write_file( -// Path::new("tracked-dir/file.txt"), -// "hello".into(), -// Default::default(), -// cx, -// ) -// }) -// .await -// .unwrap(); -// tree.update(cx, |tree, cx| { -// tree.as_local().unwrap().write_file( -// Path::new("ignored-dir/file.txt"), -// "world".into(), -// Default::default(), -// cx, -// ) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, _| { -// let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); -// let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); -// assert!(!tracked.is_ignored); -// assert!(ignored.is_ignored); -// }); -// } - -// #[gpui::test(iterations = 30)] -// 
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "b": {}, -// "c": {}, -// "d": {}, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// "/root".as_ref(), -// true, -// fs, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let snapshot1 = tree.update(cx, |tree, cx| { -// let tree = tree.as_local_mut().unwrap(); -// let snapshot = Arc::new(Mutex::new(tree.snapshot())); -// let _ = tree.observe_updates(0, cx, { -// let snapshot = snapshot.clone(); -// move |update| { -// snapshot.lock().apply_remote_update(update).unwrap(); -// async { true } -// } -// }); -// snapshot -// }); - -// let entry = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/e".as_ref(), true, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_dir()); - -// cx.foreground().run_until_parked(); -// tree.read_with(cx, |tree, _| { -// assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir); -// }); - -// let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// assert_eq!( -// snapshot1.lock().entries(true).collect::>(), -// snapshot2.entries(true).collect::>() -// ); -// } - -// #[gpui::test] -// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { -// let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); - -// let fs_fake = FakeFs::new(cx.background()); -// fs_fake -// .insert_tree( -// "/root", -// json!({ -// "a": {}, -// }), -// ) -// .await; - -// let tree_fake = Worktree::local( -// client_fake, -// "/root".as_ref(), -// true, -// fs_fake, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let entry = tree_fake -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/d.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_fake.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); -// assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); -// }); - -// let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); - -// let fs_real = Arc::new(RealFs); -// let temp_root = temp_tree(json!({ -// "a": {} -// })); - -// let tree_real = Worktree::local( -// client_real, -// temp_root.path(), -// true, -// fs_real, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/d.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); -// assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); -// }); - -// // Test smallest change -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/e.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// 
assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file()); -// }); - -// // Test largest change -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("d/e/f/g.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("d/e/f").unwrap().is_dir()); -// assert!(tree.entry_for_path("d/e/").unwrap().is_dir()); -// assert!(tree.entry_for_path("d/").unwrap().is_dir()); -// }); -// } - -// #[gpui::test(iterations = 100)] -// async fn test_random_worktree_operations_during_initial_scan( -// cx: &mut TestAppContext, -// mut rng: StdRng, -// ) { -// let operations = env::var("OPERATIONS") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(5); -// let initial_entries = env::var("INITIAL_ENTRIES") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(20); - -// let root_dir = Path::new("/test"); -// let fs = FakeFs::new(cx.background()) as Arc; -// fs.as_fake().insert_tree(root_dir, json!({})).await; -// for _ in 0..initial_entries { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } -// log::info!("generated initial tree"); - -// let worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())]; -// let updates = Arc::new(Mutex::new(Vec::new())); -// worktree.update(cx, |tree, cx| { -// check_worktree_change_events(tree, cx); - -// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { -// let updates = updates.clone(); -// move |update| { -// updates.lock().push(update); -// async { true } -// } -// }); -// }); - -// for _ in 0..operations { -// worktree -// .update(cx, |worktree, cx| { -// randomly_mutate_worktree(worktree, &mut rng, cx) -// }) -// .await -// .log_err(); -// worktree.read_with(cx, |tree, _| { -// tree.as_local().unwrap().snapshot().check_invariants(true) -// }); - -// if rng.gen_bool(0.6) { -// snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())); -// } -// } - -// worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; - -// cx.foreground().run_until_parked(); - -// let final_snapshot = worktree.read_with(cx, |tree, _| { -// let tree = tree.as_local().unwrap(); -// let snapshot = tree.snapshot(); -// snapshot.check_invariants(true); -// snapshot -// }); - -// for (i, snapshot) in snapshots.into_iter().enumerate().rev() { -// let mut updated_snapshot = snapshot.clone(); -// for update in updates.lock().iter() { -// if update.scan_id >= updated_snapshot.scan_id() as u64 { -// updated_snapshot -// .apply_remote_update(update.clone()) -// .unwrap(); -// } -// } - -// assert_eq!( -// updated_snapshot.entries(true).collect::>(), -// final_snapshot.entries(true).collect::>(), -// "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}", -// ); -// } -// } - -// #[gpui::test(iterations = 100)] -// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { -// let operations = env::var("OPERATIONS") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(40); -// let initial_entries = env::var("INITIAL_ENTRIES") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(20); - -// let root_dir = Path::new("/test"); -// let 
fs = FakeFs::new(cx.background()) as Arc; -// fs.as_fake().insert_tree(root_dir, json!({})).await; -// for _ in 0..initial_entries { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } -// log::info!("generated initial tree"); - -// let worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let updates = Arc::new(Mutex::new(Vec::new())); -// worktree.update(cx, |tree, cx| { -// check_worktree_change_events(tree, cx); - -// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { -// let updates = updates.clone(); -// move |update| { -// updates.lock().push(update); -// async { true } -// } -// }); -// }); - -// worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; - -// fs.as_fake().pause_events(); -// let mut snapshots = Vec::new(); -// let mut mutations_len = operations; -// while mutations_len > 1 { -// if rng.gen_bool(0.2) { -// worktree -// .update(cx, |worktree, cx| { -// randomly_mutate_worktree(worktree, &mut rng, cx) -// }) -// .await -// .log_err(); -// } else { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } - -// let buffered_event_count = fs.as_fake().buffered_event_count(); -// if buffered_event_count > 0 && rng.gen_bool(0.3) { -// let len = rng.gen_range(0..=buffered_event_count); -// log::info!("flushing {} events", len); -// fs.as_fake().flush_events(len); -// } else { -// randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; -// mutations_len -= 1; -// } - -// cx.foreground().run_until_parked(); -// if rng.gen_bool(0.2) { -// log::info!("storing snapshot {}", snapshots.len()); -// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// snapshots.push(snapshot); -// } -// } - -// log::info!("quiescing"); -// fs.as_fake().flush_events(usize::MAX); -// cx.foreground().run_until_parked(); - -// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// snapshot.check_invariants(true); -// let expanded_paths = snapshot -// .expanded_entries() -// .map(|e| e.path.clone()) -// .collect::>(); - -// { -// let new_worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// new_worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; -// new_worktree -// .update(cx, |tree, _| { -// tree.as_local_mut() -// .unwrap() -// .refresh_entries_for_paths(expanded_paths) -// }) -// .recv() -// .await; -// let new_snapshot = -// new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// assert_eq!( -// snapshot.entries_without_ids(true), -// new_snapshot.entries_without_ids(true) -// ); -// } - -// for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() { -// for update in updates.lock().iter() { -// if update.scan_id >= prev_snapshot.scan_id() as u64 { -// prev_snapshot.apply_remote_update(update.clone()).unwrap(); -// } -// } - -// assert_eq!( -// prev_snapshot -// .entries(true) -// .map(ignore_pending_dir) -// .collect::>(), -// snapshot -// .entries(true) -// .map(ignore_pending_dir) -// .collect::>(), -// "wrong updates after snapshot {i}: {updates:#?}", -// ); -// } - -// fn ignore_pending_dir(entry: &Entry) -> Entry { -// let mut entry = entry.clone(); -// if entry.kind.is_dir() { -// entry.kind = EntryKind::Dir -// } -// entry -// } -// } - -// 
// The worktree's `UpdatedEntries` event can be used to follow along with -// // all changes to the worktree's snapshot. -// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext) { -// let mut entries = tree.entries(true).cloned().collect::>(); -// cx.subscribe(&cx.handle(), move |tree, _, event, _| { -// if let Event::UpdatedEntries(changes) = event { -// for (path, _, change_type) in changes.iter() { -// let entry = tree.entry_for_path(&path).cloned(); -// let ix = match entries.binary_search_by_key(&path, |e| &e.path) { -// Ok(ix) | Err(ix) => ix, -// }; -// match change_type { -// PathChange::Added => entries.insert(ix, entry.unwrap()), -// PathChange::Removed => drop(entries.remove(ix)), -// PathChange::Updated => { -// let entry = entry.unwrap(); -// let existing_entry = entries.get_mut(ix).unwrap(); -// assert_eq!(existing_entry.path, entry.path); -// *existing_entry = entry; -// } -// PathChange::AddedOrUpdated | PathChange::Loaded => { -// let entry = entry.unwrap(); -// if entries.get(ix).map(|e| &e.path) == Some(&entry.path) { -// *entries.get_mut(ix).unwrap() = entry; -// } else { -// entries.insert(ix, entry); -// } -// } -// } -// } - -// let new_entries = tree.entries(true).cloned().collect::>(); -// assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes); -// } -// }) -// .detach(); -// } - -// fn randomly_mutate_worktree( -// worktree: &mut Worktree, -// rng: &mut impl Rng, -// cx: &mut ModelContext, -// ) -> Task> { -// log::info!("mutating worktree"); -// let worktree = worktree.as_local_mut().unwrap(); -// let snapshot = worktree.snapshot(); -// let entry = snapshot.entries(false).choose(rng).unwrap(); - -// match rng.gen_range(0_u32..100) { -// 0..=33 if entry.path.as_ref() != Path::new("") => { -// log::info!("deleting entry {:?} ({})", entry.path, entry.id.0); -// worktree.delete_entry(entry.id, cx).unwrap() -// } -// ..=66 if entry.path.as_ref() != Path::new("") => { -// let other_entry = snapshot.entries(false).choose(rng).unwrap(); -// let new_parent_path = if other_entry.is_dir() { -// other_entry.path.clone() -// } else { -// other_entry.path.parent().unwrap().into() -// }; -// let mut new_path = new_parent_path.join(random_filename(rng)); -// if new_path.starts_with(&entry.path) { -// new_path = random_filename(rng).into(); -// } - -// log::info!( -// "renaming entry {:?} ({}) to {:?}", -// entry.path, -// entry.id.0, -// new_path -// ); -// let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); -// cx.foreground().spawn(async move { -// task.await?; -// Ok(()) -// }) -// } -// _ => { -// let task = if entry.is_dir() { -// let child_path = entry.path.join(random_filename(rng)); -// let is_dir = rng.gen_bool(0.3); -// log::info!( -// "creating {} at {:?}", -// if is_dir { "dir" } else { "file" }, -// child_path, -// ); -// worktree.create_entry(child_path, is_dir, cx) -// } else { -// log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); -// worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) -// }; -// cx.foreground().spawn(async move { -// task.await?; -// Ok(()) -// }) -// } -// } -// } - -// async fn randomly_mutate_fs( -// fs: &Arc, -// root_path: &Path, -// insertion_probability: f64, -// rng: &mut impl Rng, -// ) { -// log::info!("mutating fs"); -// let mut files = Vec::new(); -// let mut dirs = Vec::new(); -// for path in fs.as_fake().paths(false) { -// if path.starts_with(root_path) { -// if fs.is_file(&path).await { -// files.push(path); -// } else { -// dirs.push(path); 
-// } -// } -// } - -// if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) { -// let path = dirs.choose(rng).unwrap(); -// let new_path = path.join(random_filename(rng)); - -// if rng.gen() { -// log::info!( -// "creating dir {:?}", -// new_path.strip_prefix(root_path).unwrap() -// ); -// fs.create_dir(&new_path).await.unwrap(); -// } else { -// log::info!( -// "creating file {:?}", -// new_path.strip_prefix(root_path).unwrap() -// ); -// fs.create_file(&new_path, Default::default()).await.unwrap(); -// } -// } else if rng.gen_bool(0.05) { -// let ignore_dir_path = dirs.choose(rng).unwrap(); -// let ignore_path = ignore_dir_path.join(&*GITIGNORE); - -// let subdirs = dirs -// .iter() -// .filter(|d| d.starts_with(&ignore_dir_path)) -// .cloned() -// .collect::>(); -// let subfiles = files -// .iter() -// .filter(|d| d.starts_with(&ignore_dir_path)) -// .cloned() -// .collect::>(); -// let files_to_ignore = { -// let len = rng.gen_range(0..=subfiles.len()); -// subfiles.choose_multiple(rng, len) -// }; -// let dirs_to_ignore = { -// let len = rng.gen_range(0..subdirs.len()); -// subdirs.choose_multiple(rng, len) -// }; - -// let mut ignore_contents = String::new(); -// for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) { -// writeln!( -// ignore_contents, -// "{}", -// path_to_ignore -// .strip_prefix(&ignore_dir_path) -// .unwrap() -// .to_str() -// .unwrap() -// ) -// .unwrap(); -// } -// log::info!( -// "creating gitignore {:?} with contents:\n{}", -// ignore_path.strip_prefix(&root_path).unwrap(), -// ignore_contents -// ); -// fs.save( -// &ignore_path, -// &ignore_contents.as_str().into(), -// Default::default(), -// ) -// .await -// .unwrap(); -// } else { -// let old_path = { -// let file_path = files.choose(rng); -// let dir_path = dirs[1..].choose(rng); -// file_path.into_iter().chain(dir_path).choose(rng).unwrap() -// }; - -// let is_rename = rng.gen(); -// if is_rename { -// let new_path_parent = dirs -// .iter() -// .filter(|d| !d.starts_with(old_path)) -// .choose(rng) -// .unwrap(); - -// let overwrite_existing_dir = -// !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3); -// let new_path = if overwrite_existing_dir { -// fs.remove_dir( -// &new_path_parent, -// RemoveOptions { -// recursive: true, -// ignore_if_not_exists: true, -// }, -// ) -// .await -// .unwrap(); -// new_path_parent.to_path_buf() -// } else { -// new_path_parent.join(random_filename(rng)) -// }; - -// log::info!( -// "renaming {:?} to {}{:?}", -// old_path.strip_prefix(&root_path).unwrap(), -// if overwrite_existing_dir { -// "overwrite " -// } else { -// "" -// }, -// new_path.strip_prefix(&root_path).unwrap() -// ); -// fs.rename( -// &old_path, -// &new_path, -// fs::RenameOptions { -// overwrite: true, -// ignore_if_exists: true, -// }, -// ) -// .await -// .unwrap(); -// } else if fs.is_file(&old_path).await { -// log::info!( -// "deleting file {:?}", -// old_path.strip_prefix(&root_path).unwrap() -// ); -// fs.remove_file(old_path, Default::default()).await.unwrap(); -// } else { -// log::info!( -// "deleting dir {:?}", -// old_path.strip_prefix(&root_path).unwrap() -// ); -// fs.remove_dir( -// &old_path, -// RemoveOptions { -// recursive: true, -// ignore_if_not_exists: true, -// }, -// ) -// .await -// .unwrap(); -// } -// } -// } - -// fn random_filename(rng: &mut impl Rng) -> String { -// (0..6) -// .map(|_| rng.sample(rand::distributions::Alphanumeric)) -// .map(char::from) -// .collect() -// } - -// #[gpui::test] -// async fn 
test_rename_work_directory(cx: &mut TestAppContext) { -// let root = temp_tree(json!({ -// "projects": { -// "project1": { -// "a": "", -// "b": "", -// } -// }, - -// })); -// let root_path = root.path(); - -// let tree = Worktree::local( -// build_client(cx), -// root_path, -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let repo = git_init(&root_path.join("projects/project1")); -// git_add("a", &repo); -// git_commit("init", &repo); -// std::fs::write(root_path.join("projects/project1/a"), "aa").ok(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.flush_fs_events(cx).await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// let (work_dir, _) = tree.repositories().next().unwrap(); -// assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project1/a")), -// Some(GitFileStatus::Modified) -// ); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project1/b")), -// Some(GitFileStatus::Added) -// ); -// }); - -// std::fs::rename( -// root_path.join("projects/project1"), -// root_path.join("projects/project2"), -// ) -// .ok(); -// tree.flush_fs_events(cx).await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// let (work_dir, _) = tree.repositories().next().unwrap(); -// assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project2/a")), -// Some(GitFileStatus::Modified) -// ); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project2/b")), -// Some(GitFileStatus::Added) -// ); -// }); -// } - -// #[gpui::test] -// async fn test_git_repository_for_path(cx: &mut TestAppContext) { -// let root = temp_tree(json!({ -// "c.txt": "", -// "dir1": { -// ".git": {}, -// "deps": { -// "dep1": { -// ".git": {}, -// "src": { -// "a.txt": "" -// } -// } -// }, -// "src": { -// "b.txt": "" -// } -// }, -// })); - -// let tree = Worktree::local( -// build_client(cx), -// root.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// tree.flush_fs_events(cx).await; - -// tree.read_with(cx, |tree, _cx| { -// let tree = tree.as_local().unwrap(); - -// assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); - -// let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); -// assert_eq!( -// entry -// .work_directory(tree) -// .map(|directory| directory.as_ref().to_owned()), -// Some(Path::new("dir1").to_owned()) -// ); - -// let entry = tree -// .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) -// .unwrap(); -// assert_eq!( -// entry -// .work_directory(tree) -// .map(|directory| directory.as_ref().to_owned()), -// Some(Path::new("dir1/deps/dep1").to_owned()) -// ); - -// let entries = tree.files(false, 0); - -// let paths_with_repos = tree -// .entries_with_repositories(entries) -// .map(|(entry, repo)| { -// ( -// entry.path.as_ref(), -// repo.and_then(|repo| { -// repo.work_directory(&tree) -// .map(|work_directory| work_directory.0.to_path_buf()) -// }), -// ) -// }) -// .collect::>(); - -// assert_eq!( -// paths_with_repos, -// &[ -// (Path::new("c.txt"), None), -// ( -// Path::new("dir1/deps/dep1/src/a.txt"), -// Some(Path::new("dir1/deps/dep1").into()) -// ), -// (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())), -// ] -// ); -// }); 
- -// let repo_update_events = Arc::new(Mutex::new(vec![])); -// tree.update(cx, |_, cx| { -// let repo_update_events = repo_update_events.clone(); -// cx.subscribe(&tree, move |_, _, event, _| { -// if let Event::UpdatedGitRepositories(update) = event { -// repo_update_events.lock().push(update.clone()); -// } -// }) -// .detach(); -// }); - -// std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); -// tree.flush_fs_events(cx).await; - -// assert_eq!( -// repo_update_events.lock()[0] -// .iter() -// .map(|e| e.0.clone()) -// .collect::>>(), -// vec![Path::new("dir1").into()] -// ); - -// std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); -// tree.flush_fs_events(cx).await; - -// tree.read_with(cx, |tree, _cx| { -// let tree = tree.as_local().unwrap(); - -// assert!(tree -// .repository_for_path("dir1/src/b.txt".as_ref()) -// .is_none()); -// }); -// } - -// #[gpui::test] -// async fn test_git_status(deterministic: Arc, cx: &mut TestAppContext) { -// const IGNORE_RULE: &'static str = "**/target"; - -// let root = temp_tree(json!({ -// "project": { -// "a.txt": "a", -// "b.txt": "bb", -// "c": { -// "d": { -// "e.txt": "eee" -// } -// }, -// "f.txt": "ffff", -// "target": { -// "build_file": "???" -// }, -// ".gitignore": IGNORE_RULE -// }, - -// })); - -// const A_TXT: &'static str = "a.txt"; -// const B_TXT: &'static str = "b.txt"; -// const E_TXT: &'static str = "c/d/e.txt"; -// const F_TXT: &'static str = "f.txt"; -// const DOTGITIGNORE: &'static str = ".gitignore"; -// const BUILD_FILE: &'static str = "target/build_file"; -// let project_path = Path::new("project"); - -// // Set up git repository before creating the worktree. -// let work_dir = root.path().join("project"); -// let mut repo = git_init(work_dir.as_path()); -// repo.add_ignore_rule(IGNORE_RULE).unwrap(); -// git_add(A_TXT, &repo); -// git_add(E_TXT, &repo); -// git_add(DOTGITIGNORE, &repo); -// git_commit("Initial commit", &repo); - -// let tree = Worktree::local( -// build_client(cx), -// root.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// deterministic.run_until_parked(); - -// // Check that the right git state is observed on startup -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!(snapshot.repositories().count(), 1); -// let (dir, _) = snapshot.repositories().next().unwrap(); -// assert_eq!(dir.as_ref(), Path::new("project")); - -// assert_eq!( -// snapshot.status_for_file(project_path.join(B_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!( -// snapshot.status_for_file(project_path.join(F_TXT)), -// Some(GitFileStatus::Added) -// ); -// }); - -// // Modify a file in the working copy. -// std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // The worktree detects that the file's git status has changed. -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(project_path.join(A_TXT)), -// Some(GitFileStatus::Modified) -// ); -// }); - -// // Create a commit in the git repository. 
-// git_add(A_TXT, &repo); -// git_add(B_TXT, &repo); -// git_commit("Committing modified and added", &repo); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // The worktree detects that the files' git status have changed. -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(project_path.join(F_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); -// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); -// }); - -// // Modify files in the working copy and perform git operations on other files. -// git_reset(0, &repo); -// git_remove_index(Path::new(B_TXT), &repo); -// git_stash(&mut repo); -// std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); -// std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // Check that more complex repo changes are tracked -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); - -// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); -// assert_eq!( -// snapshot.status_for_file(project_path.join(B_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!( -// snapshot.status_for_file(project_path.join(E_TXT)), -// Some(GitFileStatus::Modified) -// ); -// }); - -// std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); -// std::fs::remove_dir_all(work_dir.join("c")).unwrap(); -// std::fs::write( -// work_dir.join(DOTGITIGNORE), -// [IGNORE_RULE, "f.txt"].join("\n"), -// ) -// .unwrap(); - -// git_add(Path::new(DOTGITIGNORE), &repo); -// git_commit("Committing modified git ignore", &repo); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// let mut renamed_dir_name = "first_directory/second_directory"; -// const RENAMED_FILE: &'static str = "rf.txt"; - -// std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); -// std::fs::write( -// work_dir.join(renamed_dir_name).join(RENAMED_FILE), -// "new-contents", -// ) -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)), -// Some(GitFileStatus::Added) -// ); -// }); - -// renamed_dir_name = "new_first_directory/second_directory"; - -// std::fs::rename( -// work_dir.join("first_directory"), -// work_dir.join("new_first_directory"), -// ) -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); - -// assert_eq!( -// snapshot.status_for_file( -// project_path -// .join(Path::new(renamed_dir_name)) -// .join(RENAMED_FILE) -// ), -// Some(GitFileStatus::Added) -// ); -// }); -// } - -// #[gpui::test] -// async fn test_propagate_git_statuses(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".git": {}, -// "a": { -// "b": { -// "c1.txt": "", -// "c2.txt": "", -// }, -// "d": { -// "e1.txt": "", -// "e2.txt": "", -// "e3.txt": "", -// } -// }, -// "f": { -// "no-status.txt": "" -// }, -// "g": { -// "h1.txt": "", -// "h2.txt": "" -// }, - -// }), -// ) -// .await; - -// fs.set_status_for_repo_via_git_operation( -// &Path::new("/root/.git"), -// &[ -// (Path::new("a/b/c1.txt"), 
GitFileStatus::Added), -// (Path::new("a/d/e2.txt"), GitFileStatus::Modified), -// (Path::new("g/h2.txt"), GitFileStatus::Conflict), -// ], -// ); - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// cx.foreground().run_until_parked(); -// let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new(""), Some(GitFileStatus::Conflict)), -// (Path::new("a"), Some(GitFileStatus::Modified)), -// (Path::new("a/b"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d"), Some(GitFileStatus::Modified)), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f"), None), -// (Path::new("f/no-status.txt"), None), -// (Path::new("g"), Some(GitFileStatus::Conflict)), -// (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), -// ], -// ); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new("a/b"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d"), Some(GitFileStatus::Modified)), -// (Path::new("a/d/e1.txt"), None), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f"), None), -// (Path::new("f/no-status.txt"), None), -// (Path::new("g"), Some(GitFileStatus::Conflict)), -// ], -// ); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d/e1.txt"), None), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f/no-status.txt"), None), -// ], -// ); - -// #[track_caller] -// fn check_propagated_statuses( -// snapshot: &Snapshot, -// expected_statuses: &[(&Path, Option)], -// ) { -// let mut entries = expected_statuses -// .iter() -// .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) -// .collect::>(); -// snapshot.propagate_git_statuses(&mut entries); -// assert_eq!( -// entries -// .iter() -// .map(|e| (e.path.as_ref(), e.git_status)) -// .collect::>(), -// expected_statuses -// ); -// } -// } - -// fn build_client(cx: &mut TestAppContext) -> Arc { -// let http_client = FakeHttpClient::with_404_response(); -// cx.read(|cx| Client::new(http_client, cx)) -// } - -// #[track_caller] -// fn git_init(path: &Path) -> git2::Repository { -// git2::Repository::init(path).expect("Failed to initialize git repository") -// } - -// #[track_caller] -// fn git_add>(path: P, repo: &git2::Repository) { -// let path = path.as_ref(); -// let mut index = repo.index().expect("Failed to get index"); -// index.add_path(path).expect("Failed to add a.txt"); -// index.write().expect("Failed to write index"); -// } - -// #[track_caller] -// fn git_remove_index(path: &Path, repo: &git2::Repository) { -// let mut index = repo.index().expect("Failed to get index"); -// index.remove_path(path).expect("Failed to add a.txt"); -// index.write().expect("Failed to write index"); -// } - -// #[track_caller] -// fn git_commit(msg: &'static str, repo: &git2::Repository) { -// use git2::Signature; - -// let signature = Signature::now("test", "test@zed.dev").unwrap(); -// let oid = repo.index().unwrap().write_tree().unwrap(); -// let tree = 
repo.find_tree(oid).unwrap(); -// if let Some(head) = repo.head().ok() { -// let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); - -// let parent_commit = parent_obj.as_commit().unwrap(); - -// repo.commit( -// Some("HEAD"), -// &signature, -// &signature, -// msg, -// &tree, -// &[parent_commit], -// ) -// .expect("Failed to commit with parent"); -// } else { -// repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) -// .expect("Failed to commit"); -// } -// } - -// #[track_caller] -// fn git_stash(repo: &mut git2::Repository) { -// use git2::Signature; - -// let signature = Signature::now("test", "test@zed.dev").unwrap(); -// repo.stash_save(&signature, "N/A", None) -// .expect("Failed to stash"); -// } - -// #[track_caller] -// fn git_reset(offset: usize, repo: &git2::Repository) { -// let head = repo.head().expect("Couldn't get repo head"); -// let object = head.peel(git2::ObjectType::Commit).unwrap(); -// let commit = object.as_commit().unwrap(); -// let new_head = commit -// .parents() -// .inspect(|parnet| { -// parnet.message(); -// }) -// .skip(offset) -// .next() -// .expect("Not enough history"); -// repo.reset(&new_head.as_object(), git2::ResetType::Soft, None) -// .expect("Could not reset"); -// } - -// #[allow(dead_code)] -// #[track_caller] -// fn git_status(repo: &git2::Repository) -> collections::HashMap { -// repo.statuses(None) -// .unwrap() -// .iter() -// .map(|status| (status.path().unwrap().to_string(), status.status())) -// .collect() -// } +use crate::{ + project_settings::ProjectSettings, + worktree::{Event, Snapshot, WorktreeModelHandle}, + Entry, EntryKind, PathChange, Project, Worktree, +}; +use anyhow::Result; +use client::Client; +use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; +use git::GITIGNORE; +use gpui::{ModelContext, Task, TestAppContext}; +use parking_lot::Mutex; +use postage::stream::Stream; +use pretty_assertions::assert_eq; +use rand::prelude::*; +use serde_json::json; +use settings::SettingsStore; +use std::{ + env, + fmt::Write, + mem, + path::{Path, PathBuf}, + sync::Arc, +}; +use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; + +#[gpui::test] +async fn test_traversal(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "a/b\n", + "a": { + "b": "", + "c": "", + } + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new(".gitignore"), + Path::new("a"), + Path::new("a/c"), + ] + ); + assert_eq!( + tree.entries(true) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new(".gitignore"), + Path::new("a"), + Path::new("a/b"), + Path::new("a/c"), + ] + ); + }) +} + +#[gpui::test] +async fn test_descendent_entries(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "a": "", + "b": { + "c": { + "d": "" + }, + "e": {} + }, + "f": "", + "g": { + "h": {} + }, + "i": { + "j": { + "k": "" + }, + "l": { + + } + }, + ".gitignore": "i/j\n", + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + 
Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("b/c/d"),] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new("b"), + Path::new("b/c"), + Path::new("b/c/d"), + Path::new("b/e"), + ] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("g"), Path::new("g/h"),] + ); + }); + + // Expand gitignored directory. + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("i/j").into()]) + }) + .recv() + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(false, true, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i/j/k")] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i"), Path::new("i/l"),] + ); + }) +} + +#[gpui::test(iterations = 10)] +async fn test_circular_symlinks(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "lib": { + "a": { + "a.txt": "" + }, + "b": { + "b.txt": "" + } + } + }), + ) + .await; + fs.insert_symlink("/root/lib/a/lib", "..".into()).await; + fs.insert_symlink("/root/lib/b/lib", "..".into()).await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new("lib"), + Path::new("lib/a"), + Path::new("lib/a/a.txt"), + Path::new("lib/a/lib"), + Path::new("lib/b"), + Path::new("lib/b/b.txt"), + Path::new("lib/b/lib"), + ] + ); + }); + + fs.rename( + Path::new("/root/lib/a/lib"), + Path::new("/root/lib/a/lib-2"), + Default::default(), + ) + .await + .unwrap(); + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new("lib"), + Path::new("lib/a"), + Path::new("lib/a/a.txt"), + Path::new("lib/a/lib-2"), + Path::new("lib/b"), + Path::new("lib/b/b.txt"), + Path::new("lib/b/lib"), + ] + ); + }); +} + +#[gpui::test] +async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "deps": { + // symlinks here + }, + "src": { + "a.rs": "", + "b.rs": "", + }, + }, + "dir2": { + "src": { + "c.rs": "", + "d.rs": "", + } + }, + "dir3": { + "deps": {}, + "src": { + "e.rs": "", + "f.rs": "", + }, + } + }), + ) + .await; + + 
// These symlinks point to directories outside of the worktree's root, dir1. + fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into()) + .await; + fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into()) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root/dir1"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + let tree_updates = Arc::new(Mutex::new(Vec::new())); + tree.update(cx, |_, cx| { + let tree_updates = tree_updates.clone(); + cx.subscribe(&tree, move |_, _, event, _| { + if let Event::UpdatedEntries(update) = event { + tree_updates.lock().extend( + update + .iter() + .map(|(path, _, change)| (path.clone(), *change)), + ); + } + }) + .detach(); + }); + + // The symlinked directories are not scanned by default. + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + + assert_eq!( + tree.entry_for_path("deps/dep-dir2").unwrap().kind, + EntryKind::UnloadedDir + ); + }); + + // Expand one of the symlinked directories. + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()]) + }) + .recv() + .await; + + // The expanded directory's contents are loaded. Subdirectories are + // not scanned yet. + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("deps/dep-dir3/deps"), true), + (Path::new("deps/dep-dir3/src"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + }); + assert_eq!( + mem::take(&mut *tree_updates.lock()), + &[ + (Path::new("deps/dep-dir3").into(), PathChange::Loaded), + (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded), + (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded) + ] + ); + + // Expand a subdirectory of one of the symlinked directories. + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()]) + }) + .recv() + .await; + + // The expanded subdirectory's contents are loaded. 
+ tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("deps/dep-dir3/deps"), true), + (Path::new("deps/dep-dir3/src"), true), + (Path::new("deps/dep-dir3/src/e.rs"), true), + (Path::new("deps/dep-dir3/src/f.rs"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + }); + + assert_eq!( + mem::take(&mut *tree_updates.lock()), + &[ + (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded), + ( + Path::new("deps/dep-dir3/src/e.rs").into(), + PathChange::Loaded + ), + ( + Path::new("deps/dep-dir3/src/f.rs").into(), + PathChange::Loaded + ) + ] + ); +} + +#[gpui::test] +async fn test_open_gitignored_files(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "node_modules\n", + "one": { + "node_modules": { + "a": { + "a1.js": "a1", + "a2.js": "a2", + }, + "b": { + "b1.js": "b1", + "b2.js": "b2", + }, + "c": { + "c1.js": "c1", + "c2.js": "c2", + } + }, + }, + "two": { + "x.js": "", + "y.js": "", + }, + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_ignored)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("one"), false), + (Path::new("one/node_modules"), true), + (Path::new("two"), false), + (Path::new("two/x.js"), false), + (Path::new("two/y.js"), false), + ] + ); + }); + + // Open a file that is nested inside of a gitignored directory that + // has not yet been expanded. + let prev_read_dir_count = fs.read_dir_call_count(); + let buffer = tree + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, cx| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_ignored)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("one"), false), + (Path::new("one/node_modules"), true), + (Path::new("one/node_modules/a"), true), + (Path::new("one/node_modules/b"), true), + (Path::new("one/node_modules/b/b1.js"), true), + (Path::new("one/node_modules/b/b2.js"), true), + (Path::new("one/node_modules/c"), true), + (Path::new("two"), false), + (Path::new("two/x.js"), false), + (Path::new("two/y.js"), false), + ] + ); + + assert_eq!( + buffer.read(cx).file().unwrap().path().as_ref(), + Path::new("one/node_modules/b/b1.js") + ); + + // Only the newly-expanded directories are scanned. + assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2); + }); + + // Open another file in a different subdirectory of the same + // gitignored directory. 
+    let prev_read_dir_count = fs.read_dir_call_count();
+    let buffer = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
+        })
+        .await
+        .unwrap();
+
+    tree.read_with(cx, |tree, cx| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("one/node_modules/a"), true),
+                (Path::new("one/node_modules/a/a1.js"), true),
+                (Path::new("one/node_modules/a/a2.js"), true),
+                (Path::new("one/node_modules/b"), true),
+                (Path::new("one/node_modules/b/b1.js"), true),
+                (Path::new("one/node_modules/b/b2.js"), true),
+                (Path::new("one/node_modules/c"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+
+        assert_eq!(
+            buffer.read(cx).file().unwrap().path().as_ref(),
+            Path::new("one/node_modules/a/a2.js")
+        );
+
+        // Only the newly-expanded directory is scanned.
+        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
+    });
+
+    // No work happens when files and directories change within an unloaded directory.
+    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
+    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+    assert_eq!(
+        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
+        0
+    );
+}
+
+#[gpui::test]
+async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "node_modules\n",
+            "a": {
+                "a.js": "",
+            },
+            "b": {
+                "b.js": "",
+            },
+            "node_modules": {
+                "c": {
+                    "c.js": "",
+                },
+                "d": {
+                    "d.js": "",
+                    "e": {
+                        "e1.js": "",
+                        "e2.js": "",
+                    },
+                    "f": {
+                        "f1.js": "",
+                        "f2.js": "",
+                    }
+                },
+            },
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    // Open a file within the gitignored directory, forcing some of its
+    // subdirectories to be read, but not all.
+    let read_dir_count_1 = fs.read_dir_call_count();
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
+    })
+    .recv()
+    .await;
+
+    // Those subdirectories are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                (Path::new("node_modules"), true),
+                (Path::new("node_modules/c"), true),
+                (Path::new("node_modules/d"), true),
+                (Path::new("node_modules/d/d.js"), true),
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), true),
+            ]
+        );
+    });
+    let read_dir_count_2 = fs.read_dir_call_count();
+    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
+
+    // Update the gitignore so that node_modules is no longer ignored,
+    // but a subdirectory is ignored
+    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+
+    // All of the directories that are no longer ignored are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                // This directory is no longer ignored
+                (Path::new("node_modules"), false),
+                (Path::new("node_modules/c"), false),
+                (Path::new("node_modules/c/c.js"), false),
+                (Path::new("node_modules/d"), false),
+                (Path::new("node_modules/d/d.js"), false),
+                // This subdirectory is now ignored
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), false),
+                (Path::new("node_modules/d/f/f1.js"), false),
+                (Path::new("node_modules/d/f/f2.js"), false),
+            ]
+        );
+    });
+
+    // Each of the newly-loaded directories is scanned only once.
+ let read_dir_count_3 = fs.read_dir_call_count(); + assert_eq!(read_dir_count_3 - read_dir_count_2, 2); +} + +#[gpui::test(iterations = 10)] +async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", + "tree": { + ".git": {}, + ".gitignore": "ignored-dir\n", + "tracked-dir": { + "tracked-file1": "", + "ancestor-ignored-file1": "", + }, + "ignored-dir": { + "ignored-file1": "" + } + } + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + "/root/tree".as_ref(), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) + }) + .recv() + .await; + + cx.read(|cx| { + let tree = tree.read(cx); + assert!( + !tree + .entry_for_path("tracked-dir/tracked-file1") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("tracked-dir/ancestor-ignored-file1") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("ignored-dir/ignored-file1") + .unwrap() + .is_ignored + ); + }); + + fs.create_file( + "/root/tree/tracked-dir/tracked-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.create_file( + "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.create_file( + "/root/tree/ignored-dir/ignored-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + + cx.executor().run_until_parked(); + cx.read(|cx| { + let tree = tree.read(cx); + assert!( + !tree + .entry_for_path("tracked-dir/tracked-file2") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("tracked-dir/ancestor-ignored-file2") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("ignored-dir/ignored-file2") + .unwrap() + .is_ignored + ); + assert!(tree.entry_for_path(".git").unwrap().is_ignored); + }); +} + +#[gpui::test] +async fn test_write_file(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".git": {}, + ".gitignore": "ignored-dir\n", + "tracked-dir": {}, + "ignored-dir": {} + })); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.update(cx, |tree, cx| { + tree.as_local().unwrap().write_file( + Path::new("tracked-dir/file.txt"), + "hello".into(), + Default::default(), + cx, + ) + }) + .await + .unwrap(); + tree.update(cx, |tree, cx| { + tree.as_local().unwrap().write_file( + Path::new("ignored-dir/file.txt"), + "world".into(), + Default::default(), + cx, + ) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, _| { + let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); + let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); + assert!(!tracked.is_ignored); + assert!(ignored.is_ignored); + }); +} + +#[gpui::test] +async fn test_file_scan_exclusions(cx: &mut TestAppContext) { + 
init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".gitignore": "**/target\n/node_modules\n", + "target": { + "index": "blah2" + }, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + "bar": { + "bar.rs": "// bar", + }, + "lib.rs": "mod foo;\nmod bar;\n", + }, + ".DS_Store": "", + })); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); + }); + }); + }); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + "node_modules/.DS_Store", + "src/.DS_Store", + ".DS_Store", + ], + &["target", "node_modules"], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + ) + }); + + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); + }); + }); + }); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "node_modules/prettier/package.json", + "node_modules/.DS_Store", + "node_modules", + ], + &["target"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + ) + }); +} + +#[gpui::test(iterations = 30)] +async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "b": {}, + "c": {}, + "d": {}, + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + "/root".as_ref(), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let snapshot1 = tree.update(cx, |tree, cx| { + let tree = tree.as_local_mut().unwrap(); + let snapshot = Arc::new(Mutex::new(tree.snapshot())); + let _ = tree.observe_updates(0, cx, { + let snapshot = snapshot.clone(); + move |update| { + snapshot.lock().apply_remote_update(update).unwrap(); + async { true } + } + }); + snapshot + }); + + let entry = tree + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/e".as_ref(), true, cx) + }) + .await + .unwrap(); + assert!(entry.is_dir()); + + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir); + }); + + let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot()); + assert_eq!( + snapshot1.lock().entries(true).collect::>(), + snapshot2.entries(true).collect::>() + ); +} + +#[gpui::test] +async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + + let fs_fake = FakeFs::new(cx.background_executor.clone()); + fs_fake + .insert_tree( + "/root", + json!({ + "a": {}, + }), + ) + .await; + + let tree_fake = 
Worktree::local( + client_fake, + "/root".as_ref(), + true, + fs_fake, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let entry = tree_fake + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/d.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_fake.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); + assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); + assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); + }); + + let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + + let fs_real = Arc::new(RealFs); + let temp_root = temp_tree(json!({ + "a": {} + })); + + let tree_real = Worktree::local( + client_real, + temp_root.path(), + true, + fs_real, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/d.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); + assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); + assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); + }); + + // Test smallest change + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/e.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file()); + }); + + // Test largest change + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("d/e/f/g.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file()); + assert!(tree.entry_for_path("d/e/f").unwrap().is_dir()); + assert!(tree.entry_for_path("d/e/").unwrap().is_dir()); + assert!(tree.entry_for_path("d/").unwrap().is_dir()); + }); +} + +#[gpui::test(iterations = 100)] +async fn test_random_worktree_operations_during_initial_scan( + cx: &mut TestAppContext, + mut rng: StdRng, +) { + init_test(cx); + let operations = env::var("OPERATIONS") + .map(|o| o.parse().unwrap()) + .unwrap_or(5); + let initial_entries = env::var("INITIAL_ENTRIES") + .map(|o| o.parse().unwrap()) + .unwrap_or(20); + + let root_dir = Path::new("/test"); + let fs = FakeFs::new(cx.background_executor.clone()) as Arc; + fs.as_fake().insert_tree(root_dir, json!({})).await; + for _ in 0..initial_entries { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + log::info!("generated initial tree"); + + let worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())]; + let updates = Arc::new(Mutex::new(Vec::new())); + worktree.update(cx, |tree, cx| { + check_worktree_change_events(tree, cx); + + let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { + let updates = updates.clone(); + move |update| { + updates.lock().push(update); + async { true } + } + }); + }); + + for _ in 0..operations { 
+ worktree + .update(cx, |worktree, cx| { + randomly_mutate_worktree(worktree, &mut rng, cx) + }) + .await + .log_err(); + worktree.read_with(cx, |tree, _| { + tree.as_local().unwrap().snapshot().check_invariants(true) + }); + + if rng.gen_bool(0.6) { + snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())); + } + } + + worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + + cx.executor().run_until_parked(); + + let final_snapshot = worktree.read_with(cx, |tree, _| { + let tree = tree.as_local().unwrap(); + let snapshot = tree.snapshot(); + snapshot.check_invariants(true); + snapshot + }); + + for (i, snapshot) in snapshots.into_iter().enumerate().rev() { + let mut updated_snapshot = snapshot.clone(); + for update in updates.lock().iter() { + if update.scan_id >= updated_snapshot.scan_id() as u64 { + updated_snapshot + .apply_remote_update(update.clone()) + .unwrap(); + } + } + + assert_eq!( + updated_snapshot.entries(true).collect::>(), + final_snapshot.entries(true).collect::>(), + "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}", + ); + } +} + +#[gpui::test(iterations = 100)] +async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { + init_test(cx); + let operations = env::var("OPERATIONS") + .map(|o| o.parse().unwrap()) + .unwrap_or(40); + let initial_entries = env::var("INITIAL_ENTRIES") + .map(|o| o.parse().unwrap()) + .unwrap_or(20); + + let root_dir = Path::new("/test"); + let fs = FakeFs::new(cx.background_executor.clone()) as Arc; + fs.as_fake().insert_tree(root_dir, json!({})).await; + for _ in 0..initial_entries { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + log::info!("generated initial tree"); + + let worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let updates = Arc::new(Mutex::new(Vec::new())); + worktree.update(cx, |tree, cx| { + check_worktree_change_events(tree, cx); + + let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { + let updates = updates.clone(); + move |update| { + updates.lock().push(update); + async { true } + } + }); + }); + + worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + + fs.as_fake().pause_events(); + let mut snapshots = Vec::new(); + let mut mutations_len = operations; + while mutations_len > 1 { + if rng.gen_bool(0.2) { + worktree + .update(cx, |worktree, cx| { + randomly_mutate_worktree(worktree, &mut rng, cx) + }) + .await + .log_err(); + } else { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + + let buffered_event_count = fs.as_fake().buffered_event_count(); + if buffered_event_count > 0 && rng.gen_bool(0.3) { + let len = rng.gen_range(0..=buffered_event_count); + log::info!("flushing {} events", len); + fs.as_fake().flush_events(len); + } else { + randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; + mutations_len -= 1; + } + + cx.executor().run_until_parked(); + if rng.gen_bool(0.2) { + log::info!("storing snapshot {}", snapshots.len()); + let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + snapshots.push(snapshot); + } + } + + log::info!("quiescing"); + fs.as_fake().flush_events(usize::MAX); + cx.executor().run_until_parked(); + + let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + snapshot.check_invariants(true); + let expanded_paths = snapshot + .expanded_entries() + 
.map(|e| e.path.clone()) + .collect::>(); + + { + let new_worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + new_worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + new_worktree + .update(cx, |tree, _| { + tree.as_local_mut() + .unwrap() + .refresh_entries_for_paths(expanded_paths) + }) + .recv() + .await; + let new_snapshot = + new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + assert_eq!( + snapshot.entries_without_ids(true), + new_snapshot.entries_without_ids(true) + ); + } + + for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() { + for update in updates.lock().iter() { + if update.scan_id >= prev_snapshot.scan_id() as u64 { + prev_snapshot.apply_remote_update(update.clone()).unwrap(); + } + } + + assert_eq!( + prev_snapshot + .entries(true) + .map(ignore_pending_dir) + .collect::>(), + snapshot + .entries(true) + .map(ignore_pending_dir) + .collect::>(), + "wrong updates after snapshot {i}: {updates:#?}", + ); + } + + fn ignore_pending_dir(entry: &Entry) -> Entry { + let mut entry = entry.clone(); + if entry.kind.is_dir() { + entry.kind = EntryKind::Dir + } + entry + } +} + +// The worktree's `UpdatedEntries` event can be used to follow along with +// all changes to the worktree's snapshot. +fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext) { + let mut entries = tree.entries(true).cloned().collect::>(); + cx.subscribe(&cx.handle(), move |tree, _, event, _| { + if let Event::UpdatedEntries(changes) = event { + for (path, _, change_type) in changes.iter() { + let entry = tree.entry_for_path(&path).cloned(); + let ix = match entries.binary_search_by_key(&path, |e| &e.path) { + Ok(ix) | Err(ix) => ix, + }; + match change_type { + PathChange::Added => entries.insert(ix, entry.unwrap()), + PathChange::Removed => drop(entries.remove(ix)), + PathChange::Updated => { + let entry = entry.unwrap(); + let existing_entry = entries.get_mut(ix).unwrap(); + assert_eq!(existing_entry.path, entry.path); + *existing_entry = entry; + } + PathChange::AddedOrUpdated | PathChange::Loaded => { + let entry = entry.unwrap(); + if entries.get(ix).map(|e| &e.path) == Some(&entry.path) { + *entries.get_mut(ix).unwrap() = entry; + } else { + entries.insert(ix, entry); + } + } + } + } + + let new_entries = tree.entries(true).cloned().collect::>(); + assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes); + } + }) + .detach(); +} + +fn randomly_mutate_worktree( + worktree: &mut Worktree, + rng: &mut impl Rng, + cx: &mut ModelContext, +) -> Task> { + log::info!("mutating worktree"); + let worktree = worktree.as_local_mut().unwrap(); + let snapshot = worktree.snapshot(); + let entry = snapshot.entries(false).choose(rng).unwrap(); + + match rng.gen_range(0_u32..100) { + 0..=33 if entry.path.as_ref() != Path::new("") => { + log::info!("deleting entry {:?} ({})", entry.path, entry.id.0); + worktree.delete_entry(entry.id, cx).unwrap() + } + ..=66 if entry.path.as_ref() != Path::new("") => { + let other_entry = snapshot.entries(false).choose(rng).unwrap(); + let new_parent_path = if other_entry.is_dir() { + other_entry.path.clone() + } else { + other_entry.path.parent().unwrap().into() + }; + let mut new_path = new_parent_path.join(random_filename(rng)); + if new_path.starts_with(&entry.path) { + new_path = random_filename(rng).into(); + } + + log::info!( + "renaming entry {:?} ({}) to {:?}", + 
entry.path, + entry.id.0, + new_path + ); + let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); + cx.background_executor().spawn(async move { + task.await?; + Ok(()) + }) + } + _ => { + let task = if entry.is_dir() { + let child_path = entry.path.join(random_filename(rng)); + let is_dir = rng.gen_bool(0.3); + log::info!( + "creating {} at {:?}", + if is_dir { "dir" } else { "file" }, + child_path, + ); + worktree.create_entry(child_path, is_dir, cx) + } else { + log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); + worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) + }; + cx.background_executor().spawn(async move { + task.await?; + Ok(()) + }) + } + } +} + +async fn randomly_mutate_fs( + fs: &Arc, + root_path: &Path, + insertion_probability: f64, + rng: &mut impl Rng, +) { + log::info!("mutating fs"); + let mut files = Vec::new(); + let mut dirs = Vec::new(); + for path in fs.as_fake().paths(false) { + if path.starts_with(root_path) { + if fs.is_file(&path).await { + files.push(path); + } else { + dirs.push(path); + } + } + } + + if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) { + let path = dirs.choose(rng).unwrap(); + let new_path = path.join(random_filename(rng)); + + if rng.gen() { + log::info!( + "creating dir {:?}", + new_path.strip_prefix(root_path).unwrap() + ); + fs.create_dir(&new_path).await.unwrap(); + } else { + log::info!( + "creating file {:?}", + new_path.strip_prefix(root_path).unwrap() + ); + fs.create_file(&new_path, Default::default()).await.unwrap(); + } + } else if rng.gen_bool(0.05) { + let ignore_dir_path = dirs.choose(rng).unwrap(); + let ignore_path = ignore_dir_path.join(&*GITIGNORE); + + let subdirs = dirs + .iter() + .filter(|d| d.starts_with(&ignore_dir_path)) + .cloned() + .collect::>(); + let subfiles = files + .iter() + .filter(|d| d.starts_with(&ignore_dir_path)) + .cloned() + .collect::>(); + let files_to_ignore = { + let len = rng.gen_range(0..=subfiles.len()); + subfiles.choose_multiple(rng, len) + }; + let dirs_to_ignore = { + let len = rng.gen_range(0..subdirs.len()); + subdirs.choose_multiple(rng, len) + }; + + let mut ignore_contents = String::new(); + for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) { + writeln!( + ignore_contents, + "{}", + path_to_ignore + .strip_prefix(&ignore_dir_path) + .unwrap() + .to_str() + .unwrap() + ) + .unwrap(); + } + log::info!( + "creating gitignore {:?} with contents:\n{}", + ignore_path.strip_prefix(&root_path).unwrap(), + ignore_contents + ); + fs.save( + &ignore_path, + &ignore_contents.as_str().into(), + Default::default(), + ) + .await + .unwrap(); + } else { + let old_path = { + let file_path = files.choose(rng); + let dir_path = dirs[1..].choose(rng); + file_path.into_iter().chain(dir_path).choose(rng).unwrap() + }; + + let is_rename = rng.gen(); + if is_rename { + let new_path_parent = dirs + .iter() + .filter(|d| !d.starts_with(old_path)) + .choose(rng) + .unwrap(); + + let overwrite_existing_dir = + !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3); + let new_path = if overwrite_existing_dir { + fs.remove_dir( + &new_path_parent, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + new_path_parent.to_path_buf() + } else { + new_path_parent.join(random_filename(rng)) + }; + + log::info!( + "renaming {:?} to {}{:?}", + old_path.strip_prefix(&root_path).unwrap(), + if overwrite_existing_dir { + "overwrite " + } else { + "" + }, + 
new_path.strip_prefix(&root_path).unwrap() + ); + fs.rename( + &old_path, + &new_path, + fs::RenameOptions { + overwrite: true, + ignore_if_exists: true, + }, + ) + .await + .unwrap(); + } else if fs.is_file(&old_path).await { + log::info!( + "deleting file {:?}", + old_path.strip_prefix(&root_path).unwrap() + ); + fs.remove_file(old_path, Default::default()).await.unwrap(); + } else { + log::info!( + "deleting dir {:?}", + old_path.strip_prefix(&root_path).unwrap() + ); + fs.remove_dir( + &old_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + } + } +} + +fn random_filename(rng: &mut impl Rng) -> String { + (0..6) + .map(|_| rng.sample(rand::distributions::Alphanumeric)) + .map(char::from) + .collect() +} + +#[gpui::test] +async fn test_rename_work_directory(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let root = temp_tree(json!({ + "projects": { + "project1": { + "a": "", + "b": "", + } + }, + + })); + let root_path = root.path(); + + let tree = Worktree::local( + build_client(cx), + root_path, + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let repo = git_init(&root_path.join("projects/project1")); + git_add("a", &repo); + git_commit("init", &repo); + std::fs::write(root_path.join("projects/project1/a"), "aa").ok(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.flush_fs_events(cx).await; + + cx.read(|cx| { + let tree = tree.read(cx); + let (work_dir, _) = tree.repositories().next().unwrap(); + assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); + assert_eq!( + tree.status_for_file(Path::new("projects/project1/a")), + Some(GitFileStatus::Modified) + ); + assert_eq!( + tree.status_for_file(Path::new("projects/project1/b")), + Some(GitFileStatus::Added) + ); + }); + + std::fs::rename( + root_path.join("projects/project1"), + root_path.join("projects/project2"), + ) + .ok(); + tree.flush_fs_events(cx).await; + + cx.read(|cx| { + let tree = tree.read(cx); + let (work_dir, _) = tree.repositories().next().unwrap(); + assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); + assert_eq!( + tree.status_for_file(Path::new("projects/project2/a")), + Some(GitFileStatus::Modified) + ); + assert_eq!( + tree.status_for_file(Path::new("projects/project2/b")), + Some(GitFileStatus::Added) + ); + }); +} + +#[gpui::test] +async fn test_git_repository_for_path(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let root = temp_tree(json!({ + "c.txt": "", + "dir1": { + ".git": {}, + "deps": { + "dep1": { + ".git": {}, + "src": { + "a.txt": "" + } + } + }, + "src": { + "b.txt": "" + } + }, + })); + + let tree = Worktree::local( + build_client(cx), + root.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); + + let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); + assert_eq!( + entry + .work_directory(tree) + .map(|directory| directory.as_ref().to_owned()), + Some(Path::new("dir1").to_owned()) + ); + + let entry = tree + .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) + .unwrap(); + assert_eq!( + entry + .work_directory(tree) + .map(|directory| 
directory.as_ref().to_owned()), + Some(Path::new("dir1/deps/dep1").to_owned()) + ); + + let entries = tree.files(false, 0); + + let paths_with_repos = tree + .entries_with_repositories(entries) + .map(|(entry, repo)| { + ( + entry.path.as_ref(), + repo.and_then(|repo| { + repo.work_directory(&tree) + .map(|work_directory| work_directory.0.to_path_buf()) + }), + ) + }) + .collect::>(); + + assert_eq!( + paths_with_repos, + &[ + (Path::new("c.txt"), None), + ( + Path::new("dir1/deps/dep1/src/a.txt"), + Some(Path::new("dir1/deps/dep1").into()) + ), + (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())), + ] + ); + }); + + let repo_update_events = Arc::new(Mutex::new(vec![])); + tree.update(cx, |_, cx| { + let repo_update_events = repo_update_events.clone(); + cx.subscribe(&tree, move |_, _, event, _| { + if let Event::UpdatedGitRepositories(update) = event { + repo_update_events.lock().push(update.clone()); + } + }) + .detach(); + }); + + std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); + tree.flush_fs_events(cx).await; + + assert_eq!( + repo_update_events.lock()[0] + .iter() + .map(|e| e.0.clone()) + .collect::>>(), + vec![Path::new("dir1").into()] + ); + + std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree + .repository_for_path("dir1/src/b.txt".as_ref()) + .is_none()); + }); +} + +#[gpui::test] +async fn test_git_status(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + const IGNORE_RULE: &'static str = "**/target"; + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", + "b.txt": "bb", + "c": { + "d": { + "e.txt": "eee" + } + }, + "f.txt": "ffff", + "target": { + "build_file": "???" + }, + ".gitignore": IGNORE_RULE + }, + + })); + + const A_TXT: &'static str = "a.txt"; + const B_TXT: &'static str = "b.txt"; + const E_TXT: &'static str = "c/d/e.txt"; + const F_TXT: &'static str = "f.txt"; + const DOTGITIGNORE: &'static str = ".gitignore"; + const BUILD_FILE: &'static str = "target/build_file"; + let project_path = Path::new("project"); + + // Set up git repository before creating the worktree. + let work_dir = root.path().join("project"); + let mut repo = git_init(work_dir.as_path()); + repo.add_ignore_rule(IGNORE_RULE).unwrap(); + git_add(A_TXT, &repo); + git_add(E_TXT, &repo); + git_add(DOTGITIGNORE, &repo); + git_commit("Initial commit", &repo); + + let tree = Worktree::local( + build_client(cx), + root.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!(snapshot.repositories().count(), 1); + let (dir, _) = snapshot.repositories().next().unwrap(); + assert_eq!(dir.as_ref(), Path::new("project")); + + assert_eq!( + snapshot.status_for_file(project_path.join(B_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!( + snapshot.status_for_file(project_path.join(F_TXT)), + Some(GitFileStatus::Added) + ); + }); + + // Modify a file in the working copy. 
+ std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // The worktree detects that the file's git status has changed. + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + snapshot.status_for_file(project_path.join(A_TXT)), + Some(GitFileStatus::Modified) + ); + }); + + // Create a commit in the git repository. + git_add(A_TXT, &repo); + git_add(B_TXT, &repo); + git_commit("Committing modified and added", &repo); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // The worktree detects that the files' git status have changed. + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + snapshot.status_for_file(project_path.join(F_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); + assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); + }); + + // Modify files in the working copy and perform git operations on other files. + git_reset(0, &repo); + git_remove_index(Path::new(B_TXT), &repo); + git_stash(&mut repo); + std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); + std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // Check that more complex repo changes are tracked + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + + assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); + assert_eq!( + snapshot.status_for_file(project_path.join(B_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!( + snapshot.status_for_file(project_path.join(E_TXT)), + Some(GitFileStatus::Modified) + ); + }); + + std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); + std::fs::remove_dir_all(work_dir.join("c")).unwrap(); + std::fs::write( + work_dir.join(DOTGITIGNORE), + [IGNORE_RULE, "f.txt"].join("\n"), + ) + .unwrap(); + + git_add(Path::new(DOTGITIGNORE), &repo); + git_commit("Committing modified git ignore", &repo); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + let mut renamed_dir_name = "first_directory/second_directory"; + const RENAMED_FILE: &'static str = "rf.txt"; + + std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); + std::fs::write( + work_dir.join(renamed_dir_name).join(RENAMED_FILE), + "new-contents", + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)), + Some(GitFileStatus::Added) + ); + }); + + renamed_dir_name = "new_first_directory/second_directory"; + + std::fs::rename( + work_dir.join("first_directory"), + work_dir.join("new_first_directory"), + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + + assert_eq!( + snapshot.status_for_file( + project_path + .join(Path::new(renamed_dir_name)) + .join(RENAMED_FILE) + ), + Some(GitFileStatus::Added) + ); + }); +} + +#[gpui::test] +async fn test_propagate_git_statuses(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".git": {}, + "a": { + "b": { + "c1.txt": "", + "c2.txt": "", + }, + "d": { + "e1.txt": "", + "e2.txt": "", + "e3.txt": "", + } + }, 
+ "f": { + "no-status.txt": "" + }, + "g": { + "h1.txt": "", + "h2.txt": "" + }, + + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + &Path::new("/root/.git"), + &[ + (Path::new("a/b/c1.txt"), GitFileStatus::Added), + (Path::new("a/d/e2.txt"), GitFileStatus::Modified), + (Path::new("g/h2.txt"), GitFileStatus::Conflict), + ], + ); + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + cx.executor().run_until_parked(); + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), + (Path::new("a"), Some(GitFileStatus::Modified)), + (Path::new("a/b"), Some(GitFileStatus::Added)), + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d"), Some(GitFileStatus::Modified)), + (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f"), None), + (Path::new("f/no-status.txt"), None), + (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("a/b"), Some(GitFileStatus::Added)), + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d"), Some(GitFileStatus::Modified)), + (Path::new("a/d/e1.txt"), None), + (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f"), None), + (Path::new("f/no-status.txt"), None), + (Path::new("g"), Some(GitFileStatus::Conflict)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d/e1.txt"), None), + (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f/no-status.txt"), None), + ], + ); + + #[track_caller] + fn check_propagated_statuses( + snapshot: &Snapshot, + expected_statuses: &[(&Path, Option)], + ) { + let mut entries = expected_statuses + .iter() + .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) + .collect::>(); + snapshot.propagate_git_statuses(&mut entries); + assert_eq!( + entries + .iter() + .map(|e| (e.path.as_ref(), e.git_status)) + .collect::>(), + expected_statuses + ); + } +} + +fn build_client(cx: &mut TestAppContext) -> Arc { + let http_client = FakeHttpClient::with_404_response(); + cx.read(|cx| Client::new(http_client, cx)) +} + +#[track_caller] +fn git_init(path: &Path) -> git2::Repository { + git2::Repository::init(path).expect("Failed to initialize git repository") +} + +#[track_caller] +fn git_add>(path: P, repo: &git2::Repository) { + let path = path.as_ref(); + let mut index = repo.index().expect("Failed to get index"); + index.add_path(path).expect("Failed to add a.txt"); + index.write().expect("Failed to write index"); +} + +#[track_caller] +fn git_remove_index(path: &Path, repo: &git2::Repository) { + let mut index = repo.index().expect("Failed to get index"); + index.remove_path(path).expect("Failed to add a.txt"); + index.write().expect("Failed to write index"); +} + +#[track_caller] +fn git_commit(msg: &'static str, repo: &git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + let oid = repo.index().unwrap().write_tree().unwrap(); + let tree = 
repo.find_tree(oid).unwrap(); + if let Some(head) = repo.head().ok() { + let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); + + let parent_commit = parent_obj.as_commit().unwrap(); + + repo.commit( + Some("HEAD"), + &signature, + &signature, + msg, + &tree, + &[parent_commit], + ) + .expect("Failed to commit with parent"); + } else { + repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) + .expect("Failed to commit"); + } +} + +#[track_caller] +fn git_stash(repo: &mut git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + repo.stash_save(&signature, "N/A", None) + .expect("Failed to stash"); +} + +#[track_caller] +fn git_reset(offset: usize, repo: &git2::Repository) { + let head = repo.head().expect("Couldn't get repo head"); + let object = head.peel(git2::ObjectType::Commit).unwrap(); + let commit = object.as_commit().unwrap(); + let new_head = commit + .parents() + .inspect(|parnet| { + parnet.message(); + }) + .skip(offset) + .next() + .expect("Not enough history"); + repo.reset(&new_head.as_object(), git2::ResetType::Soft, None) + .expect("Could not reset"); +} + +#[allow(dead_code)] +#[track_caller] +fn git_status(repo: &git2::Repository) -> collections::HashMap { + repo.statuses(None) + .unwrap() + .iter() + .map(|status| (status.path().unwrap().to_string(), status.status())) + .collect() +} + +#[track_caller] +fn check_worktree_entries( + tree: &Worktree, + expected_excluded_paths: &[&str], + expected_ignored_paths: &[&str], + expected_tracked_paths: &[&str], +) { + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'")); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'")); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } +} + +fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + Project::init_settings(cx); + }); +} diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index e43423073c..eb124bfca2 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3046,8 +3046,7 @@ mod tests { workspace::init_settings(cx); client::init_settings(cx); Project::init_settings(cx); - }); - cx.update(|cx| { + cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = Some(Vec::new()); diff --git a/crates/project_panel2/src/project_panel.rs b/crates/project_panel2/src/project_panel.rs index 7a455fe8ce..6702a17957 100644 --- a/crates/project_panel2/src/project_panel.rs +++ b/crates/project_panel2/src/project_panel.rs @@ -1571,7 +1571,7 @@ mod tests { use super::*; use gpui::{TestAppContext, View, VisualTestContext, WindowHandle}; use pretty_assertions::assert_eq; - use project::FakeFs; + use project::{project_settings::ProjectSettings, FakeFs}; use serde_json::json; use settings::SettingsStore; use 
std::{ @@ -1672,6 +1672,124 @@ mod tests { ); } + #[gpui::test] + async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + }); + }); + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root1", + json!({ + ".dockerignore": "", + ".git": { + "HEAD": "", + }, + "a": { + "0": { "q": "", "r": "", "s": "" }, + "1": { "t": "", "u": "" }, + "2": { "v": "", "w": "", "x": "", "y": "" }, + }, + "b": { + "3": { "Q": "" }, + "4": { "R": "", "S": "", "T": "", "U": "" }, + }, + "C": { + "5": {}, + "6": { "V": "", "W": "" }, + "7": { "X": "" }, + "8": { "Y": {}, "Z": "" } + } + }), + ) + .await; + fs.insert_tree( + "/root2", + json!({ + "d": { + "4": "" + }, + "e": {} + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace + .update(cx, |workspace, cx| ProjectPanel::new(workspace, cx)) + .unwrap(); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " > b", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root1/b", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b <== selected", + " > 3", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/d", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d <== selected", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/e", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d", + " v e <== selected", + ] + ); + } + #[gpui::test(iterations = 30)] async fn test_editing_files(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -2792,6 +2910,12 @@ mod tests { workspace::init_settings(cx); client::init_settings(cx); Project::init_settings(cx); + + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); }); }
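
The status table asserted by `test_propagate_git_statuses` above implies a severity ordering when a directory summarizes the git statuses of its descendants: Conflict outranks Modified, which outranks Added. The short Rust sketch below restates that rule in isolation so the expectations are easier to read; the enum, the `propagated_status` helper, and the `main` example are illustrative assumptions inferred from the test's expected values, not the worktree's actual propagation code.

// Hypothetical sketch: a directory reports the most severe status found among its
// descendants, assuming the ordering Added < Modified < Conflict.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum GitFileStatus {
    Added,
    Modified,
    Conflict,
}

fn propagated_status(descendant_statuses: &[Option<GitFileStatus>]) -> Option<GitFileStatus> {
    // `flatten` drops clean (None) entries; `max` picks the most severe remaining status.
    descendant_statuses.iter().flatten().copied().max()
}

fn main() {
    // Mirrors the expectation for "a" in the test: one Added child and one Modified
    // child propagate as Modified on the directory itself.
    assert_eq!(
        propagated_status(&[
            Some(GitFileStatus::Added),
            Some(GitFileStatus::Modified),
            None,
        ]),
        Some(GitFileStatus::Modified)
    );
}
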