Allow opening files without project entries (#3497)

Addresses
https://zed-industries.slack.com/archives/C04S5TU0RSN/p1701428432093989
by allowing a buffer to be opened without a corresponding project
`Entry`. This means we can now remove the "fake worktree" requirement
and open files without creating one (this PR does not take that step
yet, but it prepares the code for it).
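
Concretely, a buffer's backing `File` may now have no worktree entry at all
(`entry_id: Option<ProjectEntryId>`), and entry-producing operations such as
`Project::create_entry` now return `Task<Result<Option<Entry>>>` instead of
`Option<Task<Result<Entry>>>`. A minimal sketch of that shape change, using
hypothetical stand-in types rather than Zed's real ones:

```rust
use std::path::PathBuf;

// Hypothetical stand-ins for Zed's `ProjectEntryId` and `File`, for illustration only.
#[derive(Debug, Clone, Copy, PartialEq)]
struct ProjectEntryId(u64);

#[derive(Debug)]
struct File {
    path: PathBuf,
    // Previously a plain `ProjectEntryId`; an excluded file now simply has no entry.
    entry_id: Option<ProjectEntryId>,
}

fn main() {
    // An excluded file such as `.git/COMMIT_EDITMSG` can be opened without an entry.
    let excluded = File {
        path: PathBuf::from(".git/COMMIT_EDITMSG"),
        entry_id: None,
    };
    // A regular, tracked file still carries its entry id.
    let tracked = File {
        path: PathBuf::from("1.txt"),
        entry_id: Some(ProjectEntryId(1)),
    };
    assert!(excluded.entry_id.is_none());
    assert!(tracked.entry_id.is_some());
    println!("{excluded:?}\n{tracked:?}");
}
```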

In addition, the PR fixes the glob matching code and adds more tests
for excluded-file queries.
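
The matching fix makes both `SearchQuery::file_matches` and the worktree's
`is_path_excluded` walk up a path's ancestors, so a pattern like `**/.git` also
covers files nested inside the matched directory. A standalone sketch of that
idea, using the `globset` crate as a stand-in for Zed's `PathMatcher` (an
assumption made for illustration):

```rust
use globset::Glob;
use std::path::{Path, PathBuf};

// Return true if the glob matches the path itself or any of its ancestors.
fn matches_path_or_ancestor(matcher: &globset::GlobMatcher, file_path: &Path) -> bool {
    let mut path: PathBuf = file_path.to_path_buf();
    loop {
        if matcher.is_match(&path) {
            return true;
        }
        // Pop one component and retry, so `a/.git/COMMIT_EDITMSG` is caught by `**/.git`.
        if !path.pop() {
            return false;
        }
    }
}

fn main() {
    let exclude = Glob::new("**/.git").unwrap().compile_matcher();
    // Files nested under an excluded directory are now excluded as well...
    assert!(matches_path_or_ancestor(&exclude, Path::new("a/.git/COMMIT_EDITMSG")));
    // ...while ordinary files remain unaffected.
    assert!(!matches_path_or_ancestor(&exclude, Path::new("a/1.txt")));
    println!("ancestor-aware glob matching behaves as sketched");
}
```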

Release Notes:

- Fixed search query inclusions/exclusions not being applied correctly
- Fixed excluded files opened via the CLI either failing to load or cluttering
the project tree view
Kirill Bulatov 2023-12-05 16:33:30 +02:00 committed by GitHub
commit c2d3e74ffb
31 changed files with 3253 additions and 585 deletions


@ -4,8 +4,10 @@ use collab_ui::notifications::project_shared_notification::ProjectSharedNotifica
use editor::{Editor, ExcerptRange, MultiBuffer};
use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle};
use live_kit_client::MacOSDisplay;
use project::project_settings::ProjectSettings;
use rpc::proto::PeerId;
use serde_json::json;
use settings::SettingsStore;
use std::{borrow::Cow, sync::Arc};
use workspace::{
dock::{test::TestPanel, DockPosition},
@ -1602,6 +1604,141 @@ async fn test_following_across_workspaces(
});
}
#[gpui::test]
async fn test_following_into_excluded_file(
deterministic: Arc<Deterministic>,
mut cx_a: &mut TestAppContext,
mut cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
for cx in [&mut cx_a, &mut cx_b] {
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
});
});
});
}
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a
.fs()
.insert_tree(
"/a",
json!({
".git": {
"COMMIT_EDITMSG": "write your commit message here",
},
"1.txt": "one\none\none",
"2.txt": "two\ntwo\ntwo",
"3.txt": "three\nthree\nthree",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
.unwrap();
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
.unwrap();
let window_a = client_a.build_workspace(&project_a, cx_a);
let workspace_a = window_a.root(cx_a);
let peer_id_a = client_a.peer_id().unwrap();
let window_b = client_b.build_workspace(&project_b, cx_b);
let workspace_b = window_b.root(cx_b);
// Client A opens editors for a regular file and an excluded file.
let editor_for_regular = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "1.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let editor_for_excluded_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// Client A updates their selections in those editors
editor_for_regular.update(cx_a, |editor, cx| {
editor.handle_input("a", cx);
editor.handle_input("b", cx);
editor.handle_input("c", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![3..2]);
});
editor_for_excluded_a.update(cx_a, |editor, cx| {
editor.select_all(&Default::default(), cx);
editor.handle_input("new commit message", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![18..17]);
});
// When client B starts following client A, currently visible file is replicated
workspace_b
.update(cx_b, |workspace, cx| {
workspace.follow(peer_id_a, cx).unwrap()
})
.await
.unwrap();
let editor_for_excluded_b = workspace_b.read_with(cx_b, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
.downcast::<Editor>()
.unwrap()
});
assert_eq!(
cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
Some((worktree_id, ".git/COMMIT_EDITMSG").into())
);
assert_eq!(
editor_for_excluded_b.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
vec![18..17]
);
// Changes from B to the excluded file are replicated in A's editor
editor_for_excluded_b.update(cx_b, |editor, cx| {
editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
});
deterministic.run_until_parked();
editor_for_excluded_a.update(cx_a, |editor, cx| {
assert_eq!(
editor.text(cx),
"new commit messag\nCo-Authored-By: B <b@b.b>"
);
});
}
fn visible_push_notifications(
cx: &mut TestAppContext,
) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {


@ -2981,11 +2981,10 @@ async fn test_fs_operations(
let entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@ -3010,7 +3009,6 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
.unwrap()
.await
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -3034,11 +3032,10 @@ async fn test_fs_operations(
let dir_entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR"), true, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@ -3061,25 +3058,19 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
})
.await
.unwrap();
@ -3120,9 +3111,7 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
project
.copy_entry(entry.id, Path::new("f.txt"), cx)
.unwrap()
project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
.unwrap();


@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await;
project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
.unwrap()
.await?;
}


@ -4,10 +4,12 @@
// use call::ActiveCall;
// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
// use editor::{Editor, ExcerptRange, MultiBuffer};
// use gpui::{BackgroundExecutor, TestAppContext, View};
// use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
// use live_kit_client::MacOSDisplay;
// use project::project_settings::ProjectSettings;
// use rpc::proto::PeerId;
// use serde_json::json;
// use settings::SettingsStore;
// use std::borrow::Cow;
// use workspace::{
// dock::{test::TestPanel, DockPosition},
@ -24,7 +26,7 @@
// cx_c: &mut TestAppContext,
// cx_d: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// let client_c = server.create_client(cx_c, "user_c").await;
@ -71,12 +73,22 @@
// .unwrap();
// let window_a = client_a.build_workspace(&project_a, cx_a);
// let workspace_a = window_a.root(cx_a);
// let workspace_a = window_a.root(cx_a).unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b);
// let workspace_b = window_b.root(cx_b);
// let workspace_b = window_b.root(cx_b).unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
// let cx_c = &mut cx_c;
// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
// let cx_d = &mut cx_d;
// // Client A opens some editors.
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -132,8 +144,8 @@
// .await
// .unwrap();
// cx_c.foreground().run_until_parked();
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
// cx_c.executor().run_until_parked();
// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -145,19 +157,19 @@
// Some((worktree_id, "2.txt").into())
// );
// assert_eq!(
// editor_b2.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
// editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![2..1]
// );
// assert_eq!(
// editor_b1.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
// editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![3..2]
// );
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// let active_call_c = cx_c.read(ActiveCall::global);
// let project_c = client_c.build_remote_project(project_id, cx_c).await;
// let window_c = client_c.build_workspace(&project_c, cx_c);
// let workspace_c = window_c.root(cx_c);
// let workspace_c = window_c.root(cx_c).unwrap();
// active_call_c
// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
// .await
@ -172,10 +184,13 @@
// .await
// .unwrap();
// cx_d.foreground().run_until_parked();
// cx_d.executor().run_until_parked();
// let active_call_d = cx_d.read(ActiveCall::global);
// let project_d = client_d.build_remote_project(project_id, cx_d).await;
// let workspace_d = client_d.build_workspace(&project_d, cx_d).root(cx_d);
// let workspace_d = client_d
// .build_workspace(&project_d, cx_d)
// .root(cx_d)
// .unwrap();
// active_call_d
// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
// .await
@ -183,7 +198,7 @@
// drop(project_d);
// // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -198,7 +213,7 @@
// });
// // All clients see that clients B is following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -216,7 +231,7 @@
// .unwrap();
// // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -240,7 +255,7 @@
// .unwrap();
// // All clients see that D is following C
// cx_d.foreground().run_until_parked();
// cx_d.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -257,7 +272,7 @@
// cx_c.drop_last(workspace_c);
// // Clients A and B see that client B is following A, and client C is not present in the followers.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -271,12 +286,15 @@
// workspace.activate_item(&editor_a1, cx)
// });
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });
// // When client A opens a multibuffer, client B does so as well.
// let multibuffer_a = cx_a.add_model(|cx| {
// let multibuffer_a = cx_a.build_model(|cx| {
// let buffer_a1 = project_a.update(cx, |project, cx| {
// project
// .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
@ -308,12 +326,12 @@
// });
// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
// let editor =
// cx.add_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// workspace.add_item(Box::new(editor.clone()), cx);
// editor
// });
// executor.run_until_parked();
// let multibuffer_editor_b = workspace_b.read_with(cx_b, |workspace, cx| {
// let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -321,8 +339,8 @@
// .unwrap()
// });
// assert_eq!(
// multibuffer_editor_a.read_with(cx_a, |editor, cx| editor.text(cx)),
// multibuffer_editor_b.read_with(cx_b, |editor, cx| editor.text(cx)),
// multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
// multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
// );
// // When client A navigates back and forth, client B does so as well.
@ -333,8 +351,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });
// workspace_a
@ -344,8 +365,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b2.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b2.item_id()
// );
// });
// workspace_a
@ -355,8 +379,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });
// // Changes to client A's editor are reflected on client B.
@ -364,20 +391,20 @@
// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
// });
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| {
// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
// });
// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
// editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
// editor_a1.update(cx_a, |editor, cx| {
// editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
// editor.set_scroll_position(vec2f(0., 100.), cx);
// editor.set_scroll_position(point(0., 100.), cx);
// });
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| {
// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[3..3]);
// });
@ -390,11 +417,11 @@
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, cx| workspace
// workspace_b.update(cx_b, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// editor_b1.id()
// .item_id()),
// editor_b1.item_id()
// );
// // Client A starts following client B.
@ -405,15 +432,15 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// Some(peer_id_b)
// );
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// editor_a1.id()
// .item_id()),
// editor_a1.item_id()
// );
// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
@ -432,7 +459,7 @@
// .await
// .unwrap();
// executor.run_until_parked();
// let shared_screen = workspace_a.read_with(cx_a, |workspace, cx| {
// let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
// workspace
// .active_item(cx)
// .expect("no active item")
@ -446,8 +473,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_a1.id())
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_a1.item_id()
// )
// });
// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
@ -455,26 +485,26 @@
// workspace.activate_item(&multibuffer_editor_b, cx)
// });
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().id(),
// multibuffer_editor_a.id()
// workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.item_id()
// )
// });
// // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
// let panel = window_b.add_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.add_panel(panel, cx);
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// shared_screen.id()
// .item_id()),
// shared_screen.item_id()
// );
// // Toggling the focus back to the pane causes client A to return to the multibuffer.
@ -482,16 +512,16 @@
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().id(),
// multibuffer_editor_a.id()
// workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.item_id()
// )
// });
// // Client B activates an item that doesn't implement following,
// // so the previously-opened screen-sharing item gets activated.
// let unfollowable_item = window_b.add_view(cx_b, |_| TestItem::new());
// let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.active_pane().update(cx, |pane, cx| {
// pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
@ -499,18 +529,18 @@
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// shared_screen.id()
// .item_id()),
// shared_screen.item_id()
// );
// // Following interrupts when client B disconnects.
// client_b.disconnect(&cx_b.to_async());
// executor.advance_clock(RECONNECT_TIMEOUT);
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// None
// );
// }
@ -521,7 +551,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -560,13 +590,19 @@
// .await
// .unwrap();
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
// let client_b_id = project_a.read_with(cx_a, |project, _| {
// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
@ -584,7 +620,7 @@
// .await
// .unwrap();
// let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
// let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
// pane.update(cx, |pane, cx| {
// pane.items()
// .map(|item| {
@ -642,7 +678,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -685,7 +721,10 @@
// .unwrap();
// // Client A opens a file.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -696,7 +735,10 @@
// .unwrap();
// // Client B opens a different file.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// workspace_b
// .update(cx_b, |workspace, cx| {
// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
@ -1167,7 +1209,7 @@
// cx_b: &mut TestAppContext,
// ) {
// // 2 clients connect to a server.
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -1207,8 +1249,17 @@
// .await
// .unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// // Client A opens some editors.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let _editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -1219,9 +1270,12 @@
// .unwrap();
// // Client B starts following client A.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
// let leader_id = project_b.read_with(cx_b, |project, _| {
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
// let leader_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
// workspace_b
@ -1231,10 +1285,10 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -1245,7 +1299,7 @@
// // When client B moves, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@ -1256,14 +1310,14 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// // When client B edits, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@ -1274,16 +1328,16 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// // When client B scrolls, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| {
// editor.set_scroll_position(vec2f(0., 3.), cx)
// editor.set_scroll_position(point(0., 3.), cx)
// });
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@ -1294,7 +1348,7 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
@ -1303,13 +1357,13 @@
// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
// });
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
@ -1321,7 +1375,7 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
// }
@ -1332,7 +1386,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -1345,20 +1399,26 @@
// client_a.fs().insert_tree("/a", json!({})).await;
// let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await
// .unwrap();
// let project_b = client_b.build_remote_project(project_id, cx_b).await;
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// executor.run_until_parked();
// let client_a_id = project_b.read_with(cx_b, |project, _| {
// let client_a_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
// let client_b_id = project_a.read_with(cx_a, |project, _| {
// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
@ -1370,13 +1430,13 @@
// });
// futures::try_join!(a_follow_b, b_follow_a).unwrap();
// workspace_a.read_with(cx_a, |workspace, _| {
// workspace_a.update(cx_a, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_b_id)
// );
// });
// workspace_b.read_with(cx_b, |workspace, _| {
// workspace_b.update(cx_b, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_a_id)
@ -1398,7 +1458,7 @@
// // b opens a different file in project 2, a follows b
// // b opens a different file in project 1, a cannot follow b
// // b shares the project, a joins the project and follows b
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// cx_a.update(editor::init);
@ -1435,8 +1495,14 @@
// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
@ -1455,6 +1521,12 @@
// .await
// .unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
@ -1476,11 +1548,12 @@
// let workspace_b_project_a = cx_b
// .windows()
// .iter()
// .max_by_key(|window| window.id())
// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
// .root(cx_b);
// .root(cx_b)
// .unwrap();
// // assert that b is following a in project a in w.rs
// workspace_b_project_a.update(cx_b, |workspace, cx| {
@ -1534,7 +1607,7 @@
// workspace.leader_for_pane(workspace.active_pane())
// );
// let item = workspace.active_pane().read(cx).active_item().unwrap();
// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
// assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
// });
// // b moves to y.rs in b's project, a is still following but can't yet see
@ -1578,11 +1651,12 @@
// let workspace_a_project_b = cx_a
// .windows()
// .iter()
// .max_by_key(|window| window.id())
// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
// .root(cx_a);
// .root(cx_a)
// .unwrap();
// workspace_a_project_b.update(cx_a, |workspace, cx| {
// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
@ -1596,12 +1670,151 @@
// });
// }
// #[gpui::test]
// async fn test_following_into_excluded_file(
// executor: BackgroundExecutor,
// mut cx_a: &mut TestAppContext,
// mut cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// for cx in [&mut cx_a, &mut cx_b] {
// cx.update(|cx| {
// cx.update_global::<SettingsStore, _>(|store, cx| {
// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
// });
// });
// });
// }
// server
// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
// .await;
// let active_call_a = cx_a.read(ActiveCall::global);
// let active_call_b = cx_b.read(ActiveCall::global);
// cx_a.update(editor::init);
// cx_b.update(editor::init);
// client_a
// .fs()
// .insert_tree(
// "/a",
// json!({
// ".git": {
// "COMMIT_EDITMSG": "write your commit message here",
// },
// "1.txt": "one\none\none",
// "2.txt": "two\ntwo\ntwo",
// "3.txt": "three\nthree\nthree",
// }),
// )
// .await;
// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
// active_call_a
// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
// .await
// .unwrap();
// let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await
// .unwrap();
// let project_b = client_b.build_remote_project(project_id, cx_b).await;
// active_call_b
// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
// .await
// .unwrap();
// let window_a = client_a.build_workspace(&project_a, cx_a);
// let workspace_a = window_a.root(cx_a).unwrap();
// let peer_id_a = client_a.peer_id().unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b);
// let workspace_b = window_b.root(cx_b).unwrap();
// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// // Client A opens editors for a regular file and an excluded file.
// let editor_for_regular = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
// })
// .await
// .unwrap()
// .downcast::<Editor>()
// .unwrap();
// let editor_for_excluded_a = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
// })
// .await
// .unwrap()
// .downcast::<Editor>()
// .unwrap();
// // Client A updates their selections in those editors
// editor_for_regular.update(cx_a, |editor, cx| {
// editor.handle_input("a", cx);
// editor.handle_input("b", cx);
// editor.handle_input("c", cx);
// editor.select_left(&Default::default(), cx);
// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
// });
// editor_for_excluded_a.update(cx_a, |editor, cx| {
// editor.select_all(&Default::default(), cx);
// editor.handle_input("new commit message", cx);
// editor.select_left(&Default::default(), cx);
// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
// });
// // When client B starts following client A, currently visible file is replicated
// workspace_b
// .update(cx_b, |workspace, cx| {
// workspace.follow(peer_id_a, cx).unwrap()
// })
// .await
// .unwrap();
// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
// .downcast::<Editor>()
// .unwrap()
// });
// assert_eq!(
// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
// );
// assert_eq!(
// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![18..17]
// );
// // Changes from B to the excluded file are replicated in A's editor
// editor_for_excluded_b.update(cx_b, |editor, cx| {
// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
// });
// executor.run_until_parked();
// editor_for_excluded_a.update(cx_a, |editor, cx| {
// assert_eq!(
// editor.text(cx),
// "new commit messag\nCo-Authored-By: B <b@b.b>"
// );
// });
// }
// fn visible_push_notifications(
// cx: &mut TestAppContext,
// ) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {
// ) -> Vec<gpui::View<ProjectSharedNotification>> {
// let mut ret = Vec::new();
// for window in cx.windows() {
// window.read_with(cx, |window| {
// window.update(cx, |window| {
// if let Some(handle) = window
// .root_view()
// .clone()
@ -1645,8 +1858,8 @@
// })
// }
// fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> {
// workspace.read_with(cx, |workspace, cx| {
// fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
// workspace.update(cx, |workspace, cx| {
// let active_pane = workspace.active_pane();
// workspace
// .panes()


@ -2781,11 +2781,10 @@ async fn test_fs_operations(
let entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -2812,8 +2811,8 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
.unwrap()
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -2838,11 +2837,10 @@ async fn test_fs_operations(
let dir_entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR"), true, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -2867,27 +2865,24 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
})
.await
.unwrap()
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
})
.await
.unwrap()
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -2928,11 +2923,10 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
project
.copy_entry(entry.id, Path::new("f.txt"), cx)
.unwrap()
project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {


@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await;
project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
.unwrap()
.await?;
}


@ -1121,20 +1121,22 @@ impl Project {
project_path: impl Into<ProjectPath>,
is_directory: bool,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into();
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(Ok(None));
};
if self.is_local() {
Some(worktree.update(cx, |worktree, cx| {
worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.unwrap()
.create_entry(project_path.path, is_directory, cx)
}))
})
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@ -1143,19 +1145,20 @@ impl Project {
is_directory,
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1164,8 +1167,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
) -> Task<Result<Option<Entry>>> {
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@ -1178,7 +1183,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::CopyProjectEntry {
project_id,
@ -1186,19 +1191,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1207,8 +1213,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
) -> Task<Result<Option<Entry>>> {
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@ -1221,7 +1229,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::RenameProjectEntry {
project_id,
@ -1229,19 +1237,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1658,19 +1667,15 @@ impl Project {
pub fn open_path(
&mut self,
path: impl Into<ProjectPath>,
path: ProjectPath,
cx: &mut ModelContext<Self>,
) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
let project_path = path.into();
let task = self.open_buffer(project_path.clone(), cx);
) -> Task<Result<(Option<ProjectEntryId>, AnyModelHandle)>> {
let task = self.open_buffer(path.clone(), cx);
cx.spawn_weak(|_, cx| async move {
let buffer = task.await?;
let project_entry_id = buffer
.read_with(&cx, |buffer, cx| {
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
})
.with_context(|| format!("no project entry for {project_path:?}"))?;
let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
});
let buffer: &AnyModelHandle = &buffer;
Ok((project_entry_id, buffer.clone()))
})
@ -1985,8 +1990,10 @@ impl Project {
remote_id,
);
self.local_buffer_ids_by_entry_id
.insert(file.entry_id, remote_id);
if let Some(entry_id) = file.entry_id {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
}
}
@ -2441,24 +2448,25 @@ impl Project {
return None;
};
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
Some(_) => {
return None;
let remote_id = buffer.read(cx).remote_id();
if let Some(entry_id) = file.entry_id {
match self.local_buffer_ids_by_entry_id.get(&entry_id) {
Some(_) => {
return None;
}
None => {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
}
None => {
let remote_id = buffer.read(cx).remote_id();
self.local_buffer_ids_by_entry_id
.insert(file.entry_id, remote_id);
self.local_buffer_ids_by_path.insert(
ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path.clone(),
},
remote_id,
);
}
}
};
self.local_buffer_ids_by_path.insert(
ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path.clone(),
},
remote_id,
);
}
_ => {}
}
@ -5776,11 +5784,6 @@ impl Project {
while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front()
{
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(&ignored_abs_path)
{
continue;
}
if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path)
.await
@ -5808,6 +5811,13 @@ impl Project {
}
}
} else if !fs_metadata.is_symlink {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(
ignored_entry.path.to_path_buf(),
)
{
continue;
}
let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path)
.await
@ -6208,10 +6218,13 @@ impl Project {
return;
}
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
let new_file = if let Some(entry) = old_file
.entry_id
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
{
File {
is_local: true,
entry_id: entry.id,
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@ -6220,7 +6233,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File {
is_local: true,
entry_id: entry.id,
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@ -6250,10 +6263,12 @@ impl Project {
);
}
if new_file.entry_id != *entry_id {
if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id);
self.local_buffer_ids_by_entry_id
.insert(new_file.entry_id, buffer_id);
if let Some(entry_id) = new_file.entry_id {
self.local_buffer_ids_by_entry_id
.insert(entry_id, buffer_id);
}
}
if new_file != *old_file {
@ -6816,7 +6831,7 @@ impl Project {
})
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@ -6840,11 +6855,10 @@ impl Project {
.as_local_mut()
.unwrap()
.rename_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})?
})
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@ -6868,11 +6882,10 @@ impl Project {
.as_local_mut()
.unwrap()
.copy_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})?
})
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}


@ -4050,6 +4050,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
);
}
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
".git": {},
".gitignore": "**/target\n/node_modules\n",
"target": {
"index.txt": "index_key:index_value"
},
"node_modules": {
"eslint": {
"index.ts": "const eslint_key = 'eslint value'",
"package.json": r#"{ "some_key": "some value" }"#,
},
"prettier": {
"index.ts": "const prettier_key = 'prettier value'",
"package.json": r#"{ "other_key": "other value" }"#,
},
},
"package.json": r#"{ "main_key": "main value" }"#,
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let query = "key";
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
"Only one non-ignored file should have the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([
("package.json".to_string(), vec![8..11]),
("target/index.txt".to_string(), vec![6..9]),
(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
),
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
("node_modules/eslint/package.json".to_string(), vec![8..11]),
]),
"Unrestricted search with ignored directories should find every file with the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(
query,
false,
false,
true,
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
)
.unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
)]),
"With search including ignored prettier directory and excluding TS files, only one file should be found"
);
}
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");


@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path {
Some(file_path) => {
!self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(file_path))
&& (self.files_to_include().is_empty()
let mut path = file_path.to_path_buf();
loop {
if self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(&path))
{
return false;
} else if self.files_to_include().is_empty()
|| self
.files_to_include()
.iter()
.any(|include_glob| include_glob.is_match(file_path)))
.any(|include_glob| include_glob.is_match(&path))
{
return true;
} else if !path.pop() {
return false;
}
}
}
None => self.files_to_include().is_empty(),
}


@ -960,8 +960,6 @@ impl LocalWorktree {
cx.spawn(|this, cx| async move {
let text = fs.load(&abs_path).await?;
let entry = entry.await?;
let mut index_task = None;
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
if let Some(repo) = snapshot.repository_for_path(&path) {
@ -981,18 +979,43 @@ impl LocalWorktree {
None
};
Ok((
File {
entry_id: entry.id,
worktree: this,
path: entry.path,
mtime: entry.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
match entry.await? {
Some(entry) => Ok((
File {
entry_id: Some(entry.id),
worktree: this,
path: entry.path,
mtime: entry.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
)),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?
.with_context(|| {
format!("Excluded file {abs_path:?} got removed during loading")
})?;
Ok((
File {
entry_id: None,
worktree: this,
path,
mtime: metadata.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
}
}
})
}
@ -1013,17 +1036,37 @@ impl LocalWorktree {
let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint();
let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx);
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
cx.as_mut().spawn(|mut cx| async move {
let entry = save.await?;
let (entry_id, mtime, path) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, metadata.mtime, path)
}
};
if has_changed_file {
let new_file = Arc::new(File {
entry_id: entry.id,
entry_id,
worktree: handle,
path: entry.path,
mtime: entry.mtime,
path,
mtime,
is_local: true,
is_deleted: false,
});
@ -1049,13 +1092,13 @@ impl LocalWorktree {
project_id,
buffer_id,
version: serialize_version(&version),
mtime: Some(entry.mtime.into()),
mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
buffer.did_save(version.clone(), fingerprint, mtime, cx);
});
Ok(())
@ -1080,7 +1123,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>,
is_dir: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
) -> Task<Result<Option<Entry>>> {
let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
@ -1097,7 +1140,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| {
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") {
@ -1124,14 +1167,14 @@ impl LocalWorktree {
})
}
pub fn write_file(
pub(crate) fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
line_ending: LineEnding,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
let path = path.into();
) -> Task<Result<Option<Entry>>> {
let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx
@ -1190,8 +1233,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone();
) -> Task<Result<Option<Entry>>> {
let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@ -1201,7 +1247,7 @@ impl LocalWorktree {
.await
});
Some(cx.spawn(|this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
rename.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@ -1209,7 +1255,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx)
})
.await
}))
})
}
pub fn copy_entry(
@ -1217,8 +1263,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone();
) -> Task<Result<Option<Entry>>> {
let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@ -1233,7 +1282,7 @@ impl LocalWorktree {
.await
});
Some(cx.spawn(|this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
copy.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@ -1241,7 +1290,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx)
})
.await
}))
})
}
pub fn expand_entry(
@ -1277,7 +1326,10 @@ impl LocalWorktree {
path: Arc<Path>,
old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
) -> Task<Result<Option<Entry>>> {
if self.is_path_excluded(path.to_path_buf()) {
return Task::ready(Ok(None));
}
let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()]
} else {
@ -1286,13 +1338,15 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn_weak(move |this, mut cx| async move {
refresh.recv().await;
this.upgrade(&cx)
let new_entry = this
.upgrade(&cx)
.ok_or_else(|| anyhow!("worktree was dropped"))?
.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
})
})?;
Ok(Some(new_entry))
})
}
@ -2226,10 +2280,19 @@ impl LocalSnapshot {
paths
}
pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
loop {
if self
.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(&path))
{
return true;
}
if !path.pop() {
return false;
}
}
}
}
@ -2458,8 +2521,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id);
} else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|| snapshot.is_path_excluded(&git_dir_abs_path);
let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
@ -2666,7 +2728,7 @@ pub struct File {
pub worktree: ModelHandle<Worktree>,
pub path: Arc<Path>,
pub mtime: SystemTime,
pub(crate) entry_id: ProjectEntryId,
pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool,
pub(crate) is_deleted: bool,
}
@ -2735,7 +2797,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File {
worktree_id: self.worktree.id() as u64,
entry_id: self.entry_id.to_proto(),
entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted,
@ -2793,7 +2855,7 @@ impl File {
worktree,
path: entry.path.clone(),
mtime: entry.mtime,
entry_id: entry.id,
entry_id: Some(entry.id),
is_local: true,
is_deleted: false,
})
@ -2818,7 +2880,7 @@ impl File {
worktree,
path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
entry_id: ProjectEntryId::from_proto(proto.entry_id),
entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false,
is_deleted: proto.is_deleted,
})
@ -2836,7 +2898,7 @@ impl File {
if self.is_deleted {
None
} else {
Some(self.entry_id)
self.entry_id
}
}
}
@ -3338,16 +3400,7 @@ impl BackgroundScanner {
return false;
}
// FS events may come for files whose parent directory is excluded; we need to check for and ignore those.
let mut path_to_test = abs_path.clone();
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|| snapshot.is_path_excluded(&relative_path);
while !excluded_file_event && path_to_test.pop() {
if snapshot.is_path_excluded(&path_to_test) {
excluded_file_event = true;
}
}
if excluded_file_event {
if snapshot.is_path_excluded(relative_path.to_path_buf()) {
if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}");
}
@ -3531,7 +3584,7 @@ impl BackgroundScanner {
let state = self.state.lock();
let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone();
if snapshot.is_path_excluded(&job.abs_path) {
if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path);
return Ok(());
}
@ -3603,8 +3656,8 @@ impl BackgroundScanner {
{
let mut state = self.state.lock();
if state.snapshot.is_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name);
let relative_path = job.path.join(child_name);
if state.snapshot.is_path_excluded(relative_path.clone()) {
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
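
The hunks above turn `is_path_excluded` into an ancestor-walking check over a worktree-relative `PathBuf`: a file counts as excluded when it, or any directory above it, matches one of the `file_scan_exclusions` globs, and `refresh_entry` uses the same check to return `Ok(None)` instead of creating an entry for excluded paths. A minimal standalone sketch of that loop, with a plain predicate standing in for the real glob matchers (`is_excluded_by_glob` is an assumption for illustration):

    use std::path::{Path, PathBuf};

    // A path is excluded if it, or any of its ancestors, matches the predicate.
    // This mirrors the pop-until-empty loop in `LocalSnapshot::is_path_excluded`.
    fn is_path_excluded(mut path: PathBuf, is_excluded_by_glob: &dyn Fn(&Path) -> bool) -> bool {
        loop {
            if is_excluded_by_glob(&path) {
                return true;
            }
            // Strip the last component; once nothing is left, the path is not excluded.
            if !path.pop() {
                return false;
            }
        }
    }

    fn main() {
        // Hypothetical exclusion: any path whose last component is `.git`.
        let excluded = |p: &Path| p.ends_with(".git");
        assert!(is_path_excluded(PathBuf::from(".git/COMMIT_EDITMSG"), &excluded));
        assert!(!is_path_excluded(PathBuf::from("src/main.rs"), &excluded));
    }

Taking an owned `PathBuf` lets the loop pop components in place instead of juggling separate absolute and relative checks, which is why the callers above now pass worktree-relative paths.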

View file

@ -1052,11 +1052,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[
".git/HEAD",
".git/foo",
"node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
&["target", "node_modules"],
&["target"],
&[
".DS_Store",
"src/.DS_Store",
@ -1106,6 +1107,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD",
".git/foo",
".git/new_file",
"node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
@ -1114,7 +1116,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file",
"test_output/new_file",
],
&["target", "node_modules", "test_output"],
&["target", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
@ -1174,6 +1176,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_dir());
@ -1222,6 +1225,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1257,6 +1261,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1275,6 +1280,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1291,6 +1297,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1616,14 +1623,14 @@ fn randomly_mutate_worktree(
entry.id.0,
new_path
);
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
let task = worktree.rename_entry(entry.id, new_path, cx);
cx.foreground().spawn(async move {
task.await?;
task.await?.unwrap();
Ok(())
})
}
_ => {
let task = if entry.is_dir() {
if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3);
log::info!(
@ -1631,15 +1638,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" },
child_path,
);
worktree.create_entry(child_path, is_dir, cx)
let task = worktree.create_entry(child_path, is_dir, cx);
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
} else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
};
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
let task =
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
cx.foreground().spawn(async move {
task.await?;
Ok(())
})
}
}
}
}
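
With `create_entry`, `rename_entry`, and `copy_entry` now resolving to `Result<Option<Entry>>`, the tests above unwrap twice when they know the path is not excluded, and the random mutation helper simply discards the optional entry. A small standalone illustration of that calling pattern (the `create_entry` function here is a simplified stand-in, not the worktree method):

    #[derive(Debug)]
    struct Entry {
        id: u64,
    }

    // `Err` is a real failure; `Ok(None)` means the operation produced no project
    // entry (for example because the target path is excluded from scanning).
    fn create_entry(path: &str, is_excluded: bool) -> Result<Option<Entry>, String> {
        if path.is_empty() {
            return Err("empty path".to_string());
        }
        if is_excluded {
            return Ok(None);
        }
        Ok(Some(Entry { id: path.len() as u64 }))
    }

    fn main() {
        // Tests that know the path is not excluded unwrap both layers.
        let entry = create_entry("a/e", false).unwrap().unwrap();
        assert_eq!(entry.id, 3);
        // Callers that may hit excluded paths inspect the inner Option instead.
        assert!(create_entry(".git/HEAD", true).unwrap().is_none());
    }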

View file

@ -1151,20 +1151,22 @@ impl Project {
project_path: impl Into<ProjectPath>,
is_directory: bool,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into();
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(Ok(None));
};
if self.is_local() {
Some(worktree.update(cx, |worktree, cx| {
worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.unwrap()
.create_entry(project_path.path, is_directory, cx)
}))
})
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move {
cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@ -1173,19 +1175,20 @@ impl Project {
is_directory,
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1194,8 +1197,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
) -> Task<Result<Option<Entry>>> {
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@ -1208,7 +1213,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move {
cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::CopyProjectEntry {
project_id,
@ -1216,19 +1221,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1237,8 +1243,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
) -> Task<Result<Option<Entry>>> {
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
return Task::ready(Ok(None));
};
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@ -1251,7 +1259,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move {
cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::RenameProjectEntry {
project_id,
@ -1259,19 +1267,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
.map(Some),
None => Ok(None),
}
})
}
}
@ -1688,18 +1697,15 @@ impl Project {
pub fn open_path(
&mut self,
path: impl Into<ProjectPath>,
path: ProjectPath,
cx: &mut ModelContext<Self>,
) -> Task<Result<(ProjectEntryId, AnyModel)>> {
let project_path = path.into();
let task = self.open_buffer(project_path.clone(), cx);
cx.spawn(move |_, mut cx| async move {
) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
let task = self.open_buffer(path.clone(), cx);
cx.spawn(move |_, cx| async move {
let buffer = task.await?;
let project_entry_id = buffer
.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
})?
.with_context(|| format!("no project entry for {project_path:?}"))?;
let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
})?;
let buffer: &AnyModel = &buffer;
Ok((project_entry_id, buffer.clone()))
@ -2018,8 +2024,10 @@ impl Project {
remote_id,
);
self.local_buffer_ids_by_entry_id
.insert(file.entry_id, remote_id);
if let Some(entry_id) = file.entry_id {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
}
}
@ -2474,24 +2482,25 @@ impl Project {
return None;
};
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
Some(_) => {
return None;
let remote_id = buffer.read(cx).remote_id();
if let Some(entry_id) = file.entry_id {
match self.local_buffer_ids_by_entry_id.get(&entry_id) {
Some(_) => {
return None;
}
None => {
self.local_buffer_ids_by_entry_id
.insert(entry_id, remote_id);
}
}
None => {
let remote_id = buffer.read(cx).remote_id();
self.local_buffer_ids_by_entry_id
.insert(file.entry_id, remote_id);
self.local_buffer_ids_by_path.insert(
ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path.clone(),
},
remote_id,
);
}
}
};
self.local_buffer_ids_by_path.insert(
ProjectPath {
worktree_id: file.worktree_id(cx),
path: file.path.clone(),
},
remote_id,
);
}
_ => {}
}
@ -5845,11 +5854,6 @@ impl Project {
while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front()
{
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(&ignored_abs_path)
{
continue;
}
if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path)
.await
@ -5877,6 +5881,13 @@ impl Project {
}
}
} else if !fs_metadata.is_symlink {
if !query.file_matches(Some(&ignored_abs_path))
|| snapshot.is_path_excluded(
ignored_entry.path.to_path_buf(),
)
{
continue;
}
let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path)
.await
@ -6278,10 +6289,13 @@ impl Project {
return;
}
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
let new_file = if let Some(entry) = old_file
.entry_id
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
{
File {
is_local: true,
entry_id: entry.id,
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@ -6290,7 +6304,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File {
is_local: true,
entry_id: entry.id,
entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@ -6320,10 +6334,12 @@ impl Project {
);
}
if new_file.entry_id != *entry_id {
if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id);
self.local_buffer_ids_by_entry_id
.insert(new_file.entry_id, buffer_id);
if let Some(entry_id) = new_file.entry_id {
self.local_buffer_ids_by_entry_id
.insert(entry_id, buffer_id);
}
}
if new_file != *old_file {
@ -6890,7 +6906,7 @@ impl Project {
})?
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@ -6914,11 +6930,10 @@ impl Project {
.as_local_mut()
.unwrap()
.rename_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})??
})?
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@ -6942,11 +6957,10 @@ impl Project {
.as_local_mut()
.unwrap()
.copy_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})??
})?
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
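
Because `ProjectEntryResponse::entry` is optional now, the RPC handlers above map the worktree result instead of erroring when there is no entry, and remote clients only insert an entry into their snapshot when the host actually produced one. A hedged sketch of that round trip with stand-in types (these are simplified placeholders, not the prost-generated messages or the real `Entry`):

    struct Entry {
        id: u64,
        path: String,
    }

    // Stand-in for the message with `optional Entry entry = 1;`.
    struct ProjectEntryResponse {
        entry: Option<ProtoEntry>,
        worktree_scan_id: u64,
    }

    struct ProtoEntry {
        id: u64,
        path: String,
    }

    impl From<&Entry> for ProtoEntry {
        fn from(entry: &Entry) -> Self {
            ProtoEntry { id: entry.id, path: entry.path.clone() }
        }
    }

    // Host side: the worktree operation may legitimately return no entry.
    fn handle_rename(result: Option<Entry>, scan_id: u64) -> ProjectEntryResponse {
        ProjectEntryResponse {
            entry: result.as_ref().map(|entry| entry.into()),
            worktree_scan_id: scan_id,
        }
    }

    // Guest side: only materialize an entry in the remote snapshot when one was sent.
    fn apply_response(response: ProjectEntryResponse) -> Option<Entry> {
        response.entry.map(|proto| Entry { id: proto.id, path: proto.path })
    }

    fn main() {
        let renamed = handle_rename(Some(Entry { id: 7, path: "b.txt".into() }), 42);
        assert_eq!(renamed.worktree_scan_id, 42);
        assert!(apply_response(renamed).is_some());

        let excluded = handle_rename(None, 42);
        assert!(apply_response(excluded).is_none());
    }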

View file

@ -4182,6 +4182,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
);
}
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
".gitignore": "**/target\n/node_modules\n",
"target": {
"index.txt": "index_key:index_value"
},
"node_modules": {
"eslint": {
"index.ts": "const eslint_key = 'eslint value'",
"package.json": r#"{ "some_key": "some value" }"#,
},
"prettier": {
"index.ts": "const prettier_key = 'prettier value'",
"package.json": r#"{ "other_key": "other value" }"#,
},
},
"package.json": r#"{ "main_key": "main value" }"#,
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let query = "key";
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
"Only one non-ignored file should have the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([
("package.json".to_string(), vec![8..11]),
("target/index.txt".to_string(), vec![6..9]),
(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
),
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
("node_modules/eslint/package.json".to_string(), vec![8..11]),
]),
"Unrestricted search with ignored directories should find every file with the query"
);
assert_eq!(
search(
&project,
SearchQuery::text(
query,
false,
false,
true,
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
)
.unwrap(),
cx
)
.await
.unwrap(),
HashMap::from_iter([(
"node_modules/prettier/package.json".to_string(),
vec![9..12]
)]),
"With search including ignored prettier directory and excluding TS files, only one file should be found"
);
}
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");

View file

@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path {
Some(file_path) => {
!self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(file_path))
&& (self.files_to_include().is_empty()
let mut path = file_path.to_path_buf();
loop {
if self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(&path))
{
return false;
} else if self.files_to_include().is_empty()
|| self
.files_to_include()
.iter()
.any(|include_glob| include_glob.is_match(file_path)))
.any(|include_glob| include_glob.is_match(&path))
{
return true;
} else if !path.pop() {
return false;
}
}
}
None => self.files_to_include().is_empty(),
}
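
The rewritten `file_matches` walks from the candidate path up through its parents: an exclusion match at any level rejects the file, an inclusion match (or an empty inclusion list) accepts it, and running out of components rejects it. A standalone sketch of that precedence, using suffix checks in place of the real `PathMatcher` globs (the string predicates are illustrative assumptions):

    use std::path::{Path, PathBuf};

    // Exclusions are checked before inclusions at every level, and an empty
    // inclusion list means "include everything".
    fn file_matches(file_path: Option<&Path>, include: &[&str], exclude: &[&str]) -> bool {
        let Some(file_path) = file_path else {
            return include.is_empty();
        };
        let mut path: PathBuf = file_path.to_path_buf();
        loop {
            let as_str = path.to_string_lossy();
            if exclude.iter().any(|pattern| as_str.ends_with(*pattern)) {
                return false;
            } else if include.is_empty() || include.iter().any(|pattern| as_str.ends_with(*pattern)) {
                return true;
            } else if !path.pop() {
                return false;
            }
        }
    }

    fn main() {
        // Included because an ancestor (`node_modules/prettier`) matches an inclusion.
        assert!(file_matches(
            Some(Path::new("node_modules/prettier/package.json")),
            &["prettier"],
            &[],
        ));
        // No inclusion matches the path or any of its ancestors.
        assert!(!file_matches(
            Some(Path::new("node_modules/eslint/index.ts")),
            &["prettier"],
            &[],
        ));
        // Exclusions win when they match.
        assert!(!file_matches(Some(Path::new("src/secret.rs")), &[], &["secret.rs"]));
        // With no path, only the inclusion list matters.
        assert!(file_matches(None, &[], &["secret.rs"]));
    }

Walking up the ancestors lets an inclusion that names a directory admit files nested inside it, even when the pattern does not match the file path itself.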

View file

@ -958,8 +958,6 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?;
let entry = entry.await?;
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
@ -982,18 +980,43 @@ impl LocalWorktree {
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
Ok((
File {
entry_id: entry.id,
worktree,
path: entry.path,
mtime: entry.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
match entry.await? {
Some(entry) => Ok((
File {
entry_id: Some(entry.id),
worktree,
path: entry.path,
mtime: entry.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
)),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?
.with_context(|| {
format!("Excluded file {abs_path:?} got removed during loading")
})?;
Ok((
File {
entry_id: None,
worktree,
path,
mtime: metadata.mtime,
is_local: true,
is_deleted: false,
},
text,
diff_base,
))
}
}
})
}
@ -1013,18 +1036,38 @@ impl LocalWorktree {
let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint();
let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx);
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
let fs = Arc::clone(&self.fs);
let abs_path = self.absolutize(&path);
cx.spawn(move |this, mut cx| async move {
let entry = save.await?;
let this = this.upgrade().context("worktree dropped")?;
let (entry_id, mtime, path) = match entry {
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!(
"Fetching metadata after saving the excluded buffer {abs_path:?}"
)
})?
.with_context(|| {
format!("Excluded buffer {path:?} got removed during saving")
})?;
(None, metadata.mtime, path)
}
};
if has_changed_file {
let new_file = Arc::new(File {
entry_id: entry.id,
entry_id,
worktree: this,
path: entry.path,
mtime: entry.mtime,
path,
mtime,
is_local: true,
is_deleted: false,
});
@ -1050,13 +1093,13 @@ impl LocalWorktree {
project_id,
buffer_id,
version: serialize_version(&version),
mtime: Some(entry.mtime.into()),
mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
buffer.did_save(version.clone(), fingerprint, mtime, cx);
})?;
Ok(())
@ -1081,7 +1124,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>,
is_dir: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
) -> Task<Result<Option<Entry>>> {
let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
@ -1098,7 +1141,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| {
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") {
@ -1125,14 +1168,14 @@ impl LocalWorktree {
})
}
pub fn write_file(
pub(crate) fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
line_ending: LineEnding,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
let path = path.into();
) -> Task<Result<Option<Entry>>> {
let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx
@ -1191,8 +1234,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone();
) -> Task<Result<Option<Entry>>> {
let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@ -1202,7 +1248,7 @@ impl LocalWorktree {
.await
});
Some(cx.spawn(|this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
rename.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@ -1210,7 +1256,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx)
})?
.await
}))
})
}
pub fn copy_entry(
@ -1218,8 +1264,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Option<Task<Result<Entry>>> {
let old_path = self.entry_for_id(entry_id)?.path.clone();
) -> Task<Result<Option<Entry>>> {
let old_path = match self.entry_for_id(entry_id) {
Some(entry) => entry.path.clone(),
None => return Task::ready(Ok(None)),
};
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@ -1234,7 +1283,7 @@ impl LocalWorktree {
.await
});
Some(cx.spawn(|this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
copy.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@ -1242,7 +1291,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx)
})?
.await
}))
})
}
pub fn expand_entry(
@ -1278,7 +1327,10 @@ impl LocalWorktree {
path: Arc<Path>,
old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
) -> Task<Result<Option<Entry>>> {
if self.is_path_excluded(path.to_path_buf()) {
return Task::ready(Ok(None));
}
let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()]
} else {
@ -1287,11 +1339,12 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn(move |this, mut cx| async move {
refresh.recv().await;
this.update(&mut cx, |this, _| {
let new_entry = this.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
})?
})??;
Ok(Some(new_entry))
})
}
@ -2222,10 +2275,19 @@ impl LocalSnapshot {
paths
}
pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
loop {
if self
.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(&path))
{
return true;
}
if !path.pop() {
return false;
}
}
}
}
@ -2455,8 +2517,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id);
} else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|| snapshot.is_path_excluded(&git_dir_abs_path);
let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
@ -2663,7 +2724,7 @@ pub struct File {
pub worktree: Model<Worktree>,
pub path: Arc<Path>,
pub mtime: SystemTime,
pub(crate) entry_id: ProjectEntryId,
pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool,
pub(crate) is_deleted: bool,
}
@ -2732,7 +2793,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File {
worktree_id: self.worktree.entity_id().as_u64(),
entry_id: self.entry_id.to_proto(),
entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted,
@ -2790,7 +2851,7 @@ impl File {
worktree,
path: entry.path.clone(),
mtime: entry.mtime,
entry_id: entry.id,
entry_id: Some(entry.id),
is_local: true,
is_deleted: false,
})
@ -2815,7 +2876,7 @@ impl File {
worktree,
path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
entry_id: ProjectEntryId::from_proto(proto.entry_id),
entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false,
is_deleted: proto.is_deleted,
})
@ -2833,7 +2894,7 @@ impl File {
if self.is_deleted {
None
} else {
Some(self.entry_id)
self.entry_id
}
}
}
@ -3329,16 +3390,7 @@ impl BackgroundScanner {
return false;
}
// FS events may come for files whose parent directory is excluded; we need to check for and ignore those.
let mut path_to_test = abs_path.clone();
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|| snapshot.is_path_excluded(&relative_path);
while !excluded_file_event && path_to_test.pop() {
if snapshot.is_path_excluded(&path_to_test) {
excluded_file_event = true;
}
}
if excluded_file_event {
if snapshot.is_path_excluded(relative_path.to_path_buf()) {
if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}");
}
@ -3522,7 +3574,7 @@ impl BackgroundScanner {
let state = self.state.lock();
let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone();
if snapshot.is_path_excluded(&job.abs_path) {
if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path);
return Ok(());
}
@ -3593,9 +3645,9 @@ impl BackgroundScanner {
}
{
let relative_path = job.path.join(child_name);
let mut state = self.state.lock();
if state.snapshot.is_path_excluded(&child_abs_path) {
let relative_path = job.path.join(child_name);
if state.snapshot.is_path_excluded(relative_path.clone()) {
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
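
When an opened or saved buffer turns out to have no worktree entry (its path is excluded), the hunks above fall back to `fs.metadata` for the mtime and build a `File` with `entry_id: None`. A condensed sketch of that decision, with hypothetical helpers standing in for the worktree lookup and `fs.metadata`:

    use std::time::SystemTime;

    struct Entry {
        id: u64,
        mtime: SystemTime,
    }

    struct FileInfo {
        entry_id: Option<u64>,
        mtime: SystemTime,
    }

    // Hypothetical stand-ins for the worktree lookup and the filesystem call.
    fn lookup_entry(is_excluded: bool) -> Option<Entry> {
        if is_excluded {
            None
        } else {
            Some(Entry { id: 1, mtime: SystemTime::now() })
        }
    }

    fn fs_mtime() -> Result<SystemTime, String> {
        Ok(SystemTime::now())
    }

    // Tracked files reuse their entry's id and mtime; excluded files have no
    // entry, so the mtime comes from the filesystem and the id stays `None`.
    fn file_for_buffer(is_excluded: bool) -> Result<FileInfo, String> {
        match lookup_entry(is_excluded) {
            Some(entry) => Ok(FileInfo { entry_id: Some(entry.id), mtime: entry.mtime }),
            None => Ok(FileInfo { entry_id: None, mtime: fs_mtime()? }),
        }
    }

    fn main() {
        assert!(file_for_buffer(false).unwrap().entry_id.is_some());
        assert!(file_for_buffer(true).unwrap().entry_id.is_none());
    }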

View file

@ -1055,11 +1055,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[
".git/HEAD",
".git/foo",
"node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
&["target", "node_modules"],
&["target"],
&[
".DS_Store",
"src/.DS_Store",
@ -1109,6 +1110,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD",
".git/foo",
".git/new_file",
"node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
@ -1117,7 +1119,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file",
"test_output/new_file",
],
&["target", "node_modules", "test_output"],
&["target", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
@ -1177,6 +1179,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_dir());
@ -1226,6 +1229,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1261,6 +1265,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1279,6 +1284,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1295,6 +1301,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
})
.await
.unwrap()
.unwrap();
assert!(entry.is_file());
@ -1620,14 +1627,14 @@ fn randomly_mutate_worktree(
entry.id.0,
new_path
);
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
let task = worktree.rename_entry(entry.id, new_path, cx);
cx.background_executor().spawn(async move {
task.await?;
task.await?.unwrap();
Ok(())
})
}
_ => {
let task = if entry.is_dir() {
if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3);
log::info!(
@ -1635,15 +1642,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" },
child_path,
);
worktree.create_entry(child_path, is_dir, cx)
let task = worktree.create_entry(child_path, is_dir, cx);
cx.background_executor().spawn(async move {
task.await?;
Ok(())
})
} else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
};
cx.background_executor().spawn(async move {
task.await?;
Ok(())
})
let task =
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
cx.background_executor().spawn(async move {
task.await?;
Ok(())
})
}
}
}
}

View file

@ -621,7 +621,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx)
})?;
});
} else {
let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename)
@ -635,7 +635,7 @@ impl ProjectPanel {
edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx)
})?;
});
};
edit_state.processing_filename = Some(filename);
@ -648,21 +648,22 @@ impl ProjectPanel {
cx.notify();
})?;
let new_entry = new_entry?;
this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id;
this.expand_to_selection(cx);
if let Some(new_entry) = new_entry? {
this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id;
this.expand_to_selection(cx);
}
}
}
this.update_visible_entries(None, cx);
if is_new_entry && !is_dir {
this.open_entry(new_entry.id, true, cx);
}
cx.notify();
})?;
this.update_visible_entries(None, cx);
if is_new_entry && !is_dir {
this.open_entry(new_entry.id, true, cx);
}
cx.notify();
})?;
}
Ok(())
}))
}
@ -935,15 +936,17 @@ impl ProjectPanel {
}
if clipboard_entry.is_cut() {
if let Some(task) = self.project.update(cx, |project, cx| {
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
}) {
task.detach_and_log_err(cx)
}
} else if let Some(task) = self.project.update(cx, |project, cx| {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
}) {
task.detach_and_log_err(cx)
self.project
.update(cx, |project, cx| {
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
} else {
self.project
.update(cx, |project, cx| {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
}
}
None
@ -1026,7 +1029,7 @@ impl ProjectPanel {
let mut new_path = destination_path.to_path_buf();
new_path.push(entry_path.path.file_name()?);
if new_path != entry_path.path.as_ref() {
let task = project.rename_entry(entry_to_move, new_path, cx)?;
let task = project.rename_entry(entry_to_move, new_path, cx);
cx.foreground().spawn(task).detach_and_log_err(cx);
}
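
In the panel, the create/rename task now resolves to `Option<Entry>`: the selection is updated and the new file is opened only when an entry actually exists, and the cut/copy paste paths detach the returned task directly instead of matching on an `Option<Task>`. A hedged sketch of the selection update with simplified placeholder types:

    #[derive(Debug, PartialEq)]
    struct Entry {
        id: u64,
    }

    #[derive(Debug, PartialEq)]
    struct Selection {
        entry_id: u64,
    }

    // Apply the result of a create/rename task: an excluded target yields `None`,
    // in which case the selection is left untouched and nothing is opened.
    fn apply_edit(
        selection: &mut Option<Selection>,
        edited_entry_id: u64,
        new_entry: Option<Entry>,
    ) -> Option<u64> {
        let new_entry = new_entry?;
        if let Some(selection) = selection {
            if selection.entry_id == edited_entry_id {
                selection.entry_id = new_entry.id;
            }
        }
        Some(new_entry.id) // the caller opens this entry
    }

    fn main() {
        let mut selection = Some(Selection { entry_id: 10 });
        assert_eq!(apply_edit(&mut selection, 10, Some(Entry { id: 42 })), Some(42));
        assert_eq!(selection, Some(Selection { entry_id: 42 }));
        // An excluded target produces no entry and no selection change.
        assert_eq!(apply_edit(&mut selection, 42, None), None);
        assert_eq!(selection, Some(Selection { entry_id: 42 }));
    }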

View file

@ -610,7 +610,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx)
})?;
});
} else {
let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename)
@ -624,7 +624,7 @@ impl ProjectPanel {
edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx)
})?;
});
};
edit_state.processing_filename = Some(filename);
@ -637,21 +637,22 @@ impl ProjectPanel {
cx.notify();
})?;
let new_entry = new_entry?;
this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id;
this.expand_to_selection(cx);
if let Some(new_entry) = new_entry? {
this.update(&mut cx, |this, cx| {
if let Some(selection) = &mut this.selection {
if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id;
this.expand_to_selection(cx);
}
}
}
this.update_visible_entries(None, cx);
if is_new_entry && !is_dir {
this.open_entry(new_entry.id, true, cx);
}
cx.notify();
})?;
this.update_visible_entries(None, cx);
if is_new_entry && !is_dir {
this.open_entry(new_entry.id, true, cx);
}
cx.notify();
})?;
}
Ok(())
}))
}
@ -931,15 +932,17 @@ impl ProjectPanel {
}
if clipboard_entry.is_cut() {
if let Some(task) = self.project.update(cx, |project, cx| {
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
}) {
task.detach_and_log_err(cx);
}
} else if let Some(task) = self.project.update(cx, |project, cx| {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
}) {
task.detach_and_log_err(cx);
self.project
.update(cx, |project, cx| {
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
} else {
self.project
.update(cx, |project, cx| {
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
})
.detach_and_log_err(cx)
}
Some(())
@ -1025,7 +1028,7 @@ impl ProjectPanel {
// let mut new_path = destination_path.to_path_buf();
// new_path.push(entry_path.path.file_name()?);
// if new_path != entry_path.path.as_ref() {
// let task = project.rename_entry(entry_to_move, new_path, cx)?;
// let task = project.rename_entry(entry_to_move, new_path, cx);
// cx.foreground_executor().spawn(task).detach_and_log_err(cx);
// }

View file

@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}
message ProjectEntryResponse {
Entry entry = 1;
optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}
@ -1357,7 +1357,7 @@ message User {
message File {
uint64 worktree_id = 1;
uint64 entry_id = 2;
optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;
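
Marking `entry_id` as optional means the generated Rust field becomes an `Option`, which is what lets `File::to_proto` and `File::from_proto` round-trip `entry_id: Option<ProjectEntryId>` with a plain `map` in each direction. A stand-in round trip (simplified types, not the actual generated code):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct ProjectEntryId(u64);

    impl ProjectEntryId {
        fn to_proto(self) -> u64 {
            self.0
        }
        fn from_proto(id: u64) -> Self {
            ProjectEntryId(id)
        }
    }

    // Stand-in for the generated `proto::File` with `optional uint64 entry_id = 2;`.
    struct ProtoFile {
        entry_id: Option<u64>,
    }

    fn to_proto(entry_id: Option<ProjectEntryId>) -> ProtoFile {
        ProtoFile { entry_id: entry_id.map(|id| id.to_proto()) }
    }

    fn from_proto(file: ProtoFile) -> Option<ProjectEntryId> {
        file.entry_id.map(ProjectEntryId::from_proto)
    }

    fn main() {
        assert_eq!(from_proto(to_proto(Some(ProjectEntryId(3)))), Some(ProjectEntryId(3)));
        assert_eq!(from_proto(to_proto(None)), None);
    }

Since the wire format changes, the protocol version is bumped from 66 to 67 in the next hunk.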

View file

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;
pub const PROTOCOL_VERSION: u32 = 66;
pub const PROTOCOL_VERSION: u32 = 67;

View file

@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}
message ProjectEntryResponse {
Entry entry = 1;
optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}
@ -1357,7 +1357,7 @@ message User {
message File {
uint64 worktree_id = 1;
uint64 entry_id = 2;
optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;

View file

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;
pub const PROTOCOL_VERSION: u32 = 66;
pub const PROTOCOL_VERSION: u32 = 67;

View file

@ -1132,6 +1132,7 @@ mod tests {
})
})
.await
.unwrap()
.unwrap();
(wt, entry)

View file

@ -1170,6 +1170,7 @@ mod tests {
})
})
.await
.unwrap()
.unwrap();
(wt, entry)

View file

@ -219,9 +219,11 @@ impl PathMatcher {
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path)
|| self.glob.is_match(&other)
|| self.check_with_end_separator(other.as_ref())
let other_path = other.as_ref();
other_path.starts_with(&self.maybe_path)
|| other_path.ends_with(&self.maybe_path)
|| self.glob.is_match(other_path)
|| self.check_with_end_separator(other_path)
}
fn check_with_end_separator(&self, path: &Path) -> bool {
@ -418,4 +420,14 @@ mod tests {
"Path matcher {path_matcher} should match {path:?}"
);
}
#[test]
fn project_search() {
let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules");
let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
assert!(
path_matcher.is_match(&path),
"Path matcher {path_matcher} should match {path:?}"
);
}
}
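
The extra `ends_with` check makes a matcher whose literal portion names a directory also hit paths that end with that directory, complementing the existing `starts_with` and glob checks. A simplified, std-only sketch of combining component-wise prefix and suffix checks (not the real `PathMatcher`, which also consults a compiled glob):

    use std::path::Path;

    // A candidate matches the literal portion of the pattern if it starts with
    // or ends with that literal path, compared component-wise.
    fn literal_matches(maybe_path: &Path, candidate: &Path) -> bool {
        candidate.starts_with(maybe_path) || candidate.ends_with(maybe_path)
    }

    fn main() {
        let literal = Path::new("node_modules");
        // Relative worktree paths start with the literal...
        assert!(literal_matches(literal, Path::new("node_modules/prettier/index.ts")));
        // ...while an absolute project path can end with it.
        assert!(literal_matches(
            literal,
            Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules"),
        ));
        assert!(!literal_matches(literal, Path::new("src/main.rs")));
    }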

View file

@ -481,18 +481,21 @@ impl Pane {
pub(crate) fn open_item(
&mut self,
project_entry_id: ProjectEntryId,
project_entry_id: Option<ProjectEntryId>,
focus_item: bool,
cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> {
let mut existing_item = None;
for (index, item) in self.items.iter().enumerate() {
if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
{
let item = item.boxed_clone();
existing_item = Some((index, item));
break;
if let Some(project_entry_id) = project_entry_id {
for (index, item) in self.items.iter().enumerate() {
if item.is_singleton(cx)
&& item.project_entry_ids(cx).as_slice() == [project_entry_id]
{
let item = item.boxed_clone();
existing_item = Some((index, item));
break;
}
}
}

View file

@ -2129,13 +2129,13 @@ impl Workspace {
})
}
pub(crate) fn load_path(
fn load_path(
&mut self,
path: ProjectPath,
cx: &mut ViewContext<Self>,
) -> Task<
Result<(
ProjectEntryId,
Option<ProjectEntryId>,
impl 'static + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
)>,
> {

View file

@ -537,18 +537,21 @@ impl Pane {
pub(crate) fn open_item(
&mut self,
project_entry_id: ProjectEntryId,
project_entry_id: Option<ProjectEntryId>,
focus_item: bool,
cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> {
let mut existing_item = None;
for (index, item) in self.items.iter().enumerate() {
if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
{
let item = item.boxed_clone();
existing_item = Some((index, item));
break;
if let Some(project_entry_id) = project_entry_id {
for (index, item) in self.items.iter().enumerate() {
if item.is_singleton(cx)
&& item.project_entry_ids(cx).as_slice() == [project_entry_id]
{
let item = item.boxed_clone();
existing_item = Some((index, item));
break;
}
}
}

View file

@ -10,7 +10,7 @@ mod persistence;
pub mod searchable;
// todo!()
mod modal_layer;
mod shared_screen;
pub mod shared_screen;
mod status_bar;
mod toolbar;
mod workspace_settings;
@ -1853,13 +1853,13 @@ impl Workspace {
})
}
pub(crate) fn load_path(
fn load_path(
&mut self,
path: ProjectPath,
cx: &mut ViewContext<Self>,
) -> Task<
Result<(
ProjectEntryId,
Option<ProjectEntryId>,
impl 'static + Send + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
)>,
> {

View file

@ -615,8 +615,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, dir_path), true, cx)
})
.ok_or_else(|| anyhow!("worktree was removed"))?
.await?;
.await
.context("worktree was removed")?;
}
}
@ -625,8 +625,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, file_path), false, cx)
})
.ok_or_else(|| anyhow!("worktree was removed"))?
.await?;
.await
.context("worktree was removed")?;
}
let editor = workspace
@ -763,7 +763,7 @@ mod tests {
AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
};
use language::LanguageRegistry;
use project::{Project, ProjectPath};
use project::{project_settings::ProjectSettings, Project, ProjectPath};
use serde_json::json;
use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
use std::{
@ -1308,6 +1308,122 @@ mod tests {
});
}
#[gpui::test]
async fn test_opening_excluded_paths(cx: &mut TestAppContext) {
let app_state = init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions =
Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]);
});
});
});
app_state
.fs
.as_fake()
.insert_tree(
"/root",
json!({
".gitignore": "ignored_dir\n",
".git": {
"HEAD": "ref: refs/heads/main",
},
"regular_dir": {
"file": "regular file contents",
},
"ignored_dir": {
"ignored_subdir": {
"file": "ignored subfile contents",
},
"file": "ignored file contents",
},
"excluded_dir": {
"file": "excluded file contents",
},
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
let window = cx.add_window(|cx| Workspace::test_new(project, cx));
let workspace = window.root(cx);
let initial_entries = cx.read(|cx| workspace.file_project_paths(cx));
let paths_to_open = [
Path::new("/root/excluded_dir/file").to_path_buf(),
Path::new("/root/.git/HEAD").to_path_buf(),
Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(),
];
let (opened_workspace, new_items) = cx
.update(|cx| workspace::open_paths(&paths_to_open, &app_state, None, cx))
.await
.unwrap();
assert_eq!(
opened_workspace.id(),
workspace.id(),
"Excluded files in subfolders of a workspace root should be opened in the workspace"
);
let mut opened_paths = cx.read(|cx| {
assert_eq!(
new_items.len(),
paths_to_open.len(),
"Expect to get the same number of opened items as submitted paths to open"
);
new_items
.iter()
.zip(paths_to_open.iter())
.map(|(i, path)| {
match i {
Some(Ok(i)) => {
Some(i.project_path(cx).map(|p| p.path.display().to_string()))
}
Some(Err(e)) => panic!("Excluded file {path:?} failed to open: {e:?}"),
None => None,
}
.flatten()
})
.collect::<Vec<_>>()
});
opened_paths.sort();
assert_eq!(
opened_paths,
vec![
None,
Some(".git/HEAD".to_string()),
Some("excluded_dir/file".to_string()),
],
"Excluded files should get opened, excluded dir should not get opened"
);
let entries = cx.read(|cx| workspace.file_project_paths(cx));
assert_eq!(
initial_entries, entries,
"Workspace entries should not change after opening excluded files and directories paths"
);
cx.read(|cx| {
let pane = workspace.read(cx).active_pane().read(cx);
let mut opened_buffer_paths = pane
.items()
.map(|i| {
i.project_path(cx)
.expect("all excluded files that got open should have a path")
.path
.display()
.to_string()
})
.collect::<Vec<_>>();
opened_buffer_paths.sort();
assert_eq!(
opened_buffer_paths,
vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()],
"Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane"
);
});
}
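
The test above pins down the end-to-end behavior of opening excluded paths: excluded files open as buffers without worktree entries, directory paths are not opened as items, and the project tree is left untouched. A compact sketch of that per-path outcome (the `is_dir`/`is_excluded` flags are placeholders for the real fs and settings checks):

    #[derive(Debug, PartialEq)]
    enum Opened {
        Buffer { has_entry: bool },
        Skipped,
    }

    // Directories are never opened as items; excluded files open as entry-less
    // buffers; other files open as regular project entries.
    fn open_one(is_dir: bool, is_excluded: bool) -> Opened {
        if is_dir {
            Opened::Skipped
        } else {
            Opened::Buffer { has_entry: !is_excluded }
        }
    }

    fn main() {
        // An excluded file opens, but without a project entry.
        assert_eq!(open_one(false, true), Opened::Buffer { has_entry: false });
        // A directory path is not opened as an item.
        assert_eq!(open_one(true, true), Opened::Skipped);
        // A regular tracked file opens with its entry.
        assert_eq!(open_one(false, false), Opened::Buffer { has_entry: true });
    }
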
#[gpui::test]
async fn test_save_conflicting_item(cx: &mut TestAppContext) {
let app_state = init_test(cx);

File diff suppressed because it is too large