From 9613084f5933d5ac0d6da28d04f4340fbe2cf425 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 3 Jan 2025 17:00:16 -0800 Subject: [PATCH] Move git status out of Entry (#22224) - [x] Rewrite worktree git handling - [x] Fix tests - [x] Fix `test_propagate_statuses_for_repos_under_project` - [x] Replace `WorkDirectoryEntry` with `WorkDirectory` in `RepositoryEntry` - [x] Add a worktree event for capturing git status changes - [x] Confirm that the local repositories are correctly updating the new WorkDirectory field - [x] Implement the git statuses query as a join when pulling entries out of worktree - [x] Use this new join to implement the project panel and outline panel. - [x] Synchronize git statuses over the wire for collab and remote dev (use the existing `worktree_repository_statuses` table, adjust as needed) - [x] Only send changed statuses to collab Release Notes: - N/A --------- Co-authored-by: Cole Miller Co-authored-by: Mikayla Co-authored-by: Nathan --- Cargo.lock | 4 +- Cargo.toml | 5 +- crates/assistant/src/assistant_panel.rs | 2 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/assistant2/src/inline_assistant.rs | 2 +- crates/cli/src/main.rs | 2 +- .../20221109000000_test_schema.sql | 16 + crates/collab/src/db/queries/projects.rs | 112 +- crates/collab/src/db/queries/rooms.rs | 52 +- crates/collab/src/tests/integration_tests.rs | 12 +- .../remote_editing_collaboration_tests.rs | 10 +- crates/collections/Cargo.toml | 3 +- crates/collections/src/collections.rs | 12 + crates/editor/src/display_map/block_map.rs | 4 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/git/project_diff.rs | 26 +- crates/editor/src/items.rs | 21 +- .../src/test/editor_lsp_test_context.rs | 3 +- crates/editor/src/test/editor_test_context.rs | 1 + crates/git/src/git.rs | 1 + crates/git/src/repository.rs | 49 +- crates/git/src/status.rs | 18 +- crates/git_ui/Cargo.toml | 5 +- crates/git_ui/src/git_panel.rs | 187 ++- crates/git_ui/src/git_ui.rs | 5 +- 
crates/gpui/src/platform/linux/x11/client.rs | 2 +- crates/gpui/src/platform/mac/events.rs | 2 +- crates/gpui/src/window.rs | 9 + crates/image_viewer/src/image_viewer.rs | 8 +- crates/outline_panel/Cargo.toml | 4 +- crates/outline_panel/src/outline_panel.rs | 1148 +++++++------ crates/project/src/buffer_store.rs | 16 +- crates/project/src/lsp_store.rs | 6 +- crates/project/src/project.rs | 36 +- crates/project/src/task_inventory.rs | 2 +- crates/project/src/worktree_store.rs | 17 +- crates/project_panel/src/project_panel.rs | 103 +- crates/proto/proto/zed.proto | 5 +- .../remote_server/src/remote_editing_tests.rs | 11 +- crates/repl/src/kernels/remote_kernels.rs | 2 +- crates/settings/src/key_equivalents.rs | 2 +- crates/sum_tree/src/cursor.rs | 25 + crates/sum_tree/src/sum_tree.rs | 64 + crates/tab_switcher/src/tab_switcher.rs | 15 +- crates/task/src/lib.rs | 2 +- crates/task/src/task_template.rs | 4 +- crates/terminal_view/src/terminal_panel.rs | 2 +- crates/theme/src/schema.rs | 2 +- crates/theme/src/styles/colors.rs | 2 +- crates/theme/src/theme.rs | 2 +- crates/title_bar/src/title_bar.rs | 4 +- crates/workspace/src/persistence.rs | 2 +- crates/worktree/src/worktree.rs | 1457 +++++++++++------ crates/worktree/src/worktree_tests.rs | 554 ++++++- crates/zed/src/main.rs | 4 +- extensions/perplexity/README.md | 2 +- script/bundle-mac | 6 +- 57 files changed, 2824 insertions(+), 1254 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1b28ac7362..d4e3b53e5c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2784,7 +2784,8 @@ dependencies = [ name = "collections" version = "0.1.0" dependencies = [ - "rustc-hash 1.1.0", + "indexmap", + "rustc-hash 2.1.0", ] [[package]] @@ -5193,6 +5194,7 @@ dependencies = [ "util", "windows 0.58.0", "workspace", + "worktree", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 9ee3bcb4bc..aaa4dc48a7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -389,7 +389,7 @@ hyper = "0.14" http = "1.1" ignore = "0.4.22" image = "0.25.1" -indexmap 
= { version = "2", features = ["serde"] } +indexmap = { version = "2.7.0", features = ["serde"] } indoc = "2" itertools = "0.13.0" jsonwebtoken = "9.3" @@ -440,9 +440,10 @@ runtimelib = { version = "0.24.0", default-features = false, features = [ ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustc-hash = "2.1.0" rustls = "0.21.12" rustls-native-certs = "0.8.0" -schemars = { version = "0.8", features = ["impl_json_schema"] } +schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } serde_derive = { version = "1.0", features = ["deserialize_in_place"] } diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index e75a9d88e0..0eaa0f5ca8 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -122,7 +122,7 @@ pub fn init(cx: &mut AppContext) { cx.observe_new_views( |terminal_panel: &mut TerminalPanel, cx: &mut ViewContext| { let settings = AssistantSettings::get_global(cx); - terminal_panel.asssistant_enabled(settings.enabled, cx); + terminal_panel.set_assistant_enabled(settings.enabled, cx); }, ) .detach(); diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 27cd20dcb9..04bf7982ef 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -133,7 +133,7 @@ impl InlineAssistant { }; let enabled = AssistantSettings::get_global(cx).enabled; terminal_panel.update(cx, |terminal_panel, cx| { - terminal_panel.asssistant_enabled(enabled, cx) + terminal_panel.set_assistant_enabled(enabled, cx) }); }) .detach(); diff --git a/crates/assistant2/src/inline_assistant.rs b/crates/assistant2/src/inline_assistant.rs index 8db73315b2..ef1896440c 100644 --- a/crates/assistant2/src/inline_assistant.rs +++ b/crates/assistant2/src/inline_assistant.rs @@ -118,7 +118,7 @@ 
impl InlineAssistant { }; let enabled = AssistantSettings::get_global(cx).enabled; terminal_panel.update(cx, |terminal_panel, cx| { - terminal_panel.asssistant_enabled(enabled, cx) + terminal_panel.set_assistant_enabled(enabled, cx) }); }) .detach(); diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index 3dbfa0e737..32c5bbb21b 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -79,7 +79,7 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result { Ok(existing_path) => PathWithPosition::from_path(existing_path), Err(_) => { let path = PathWithPosition::parse_str(argument_str); - let curdir = env::current_dir().context("reteiving current directory")?; + let curdir = env::current_dir().context("retrieving current directory")?; path.map_path(|path| match fs::canonicalize(&path) { Ok(path) => Ok(path), Err(e) => { diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index c59091d66d..a43146b938 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -106,6 +106,22 @@ CREATE TABLE "worktree_repositories" ( CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id"); CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id"); +CREATE TABLE "worktree_repository_statuses" ( + "project_id" INTEGER NOT NULL, + "worktree_id" INT8 NOT NULL, + "work_directory_id" INT8 NOT NULL, + "repo_path" VARCHAR NOT NULL, + "status" INT8 NOT NULL, + "scan_id" INT8 NOT NULL, + "is_deleted" BOOL NOT NULL, + PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path), + FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE, + FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, 
worktree_id, id) ON DELETE CASCADE +); +CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id"); +CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id"); + CREATE TABLE "worktree_settings_files" ( "project_id" INTEGER NOT NULL, "worktree_id" INTEGER NOT NULL, diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 7ff8aa7a9f..f2a5988064 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -1,4 +1,5 @@ use anyhow::Context as _; + use util::ResultExt; use super::*; @@ -274,8 +275,8 @@ impl Database { mtime_nanos: ActiveValue::set(mtime.nanos as i32), canonical_path: ActiveValue::set(entry.canonical_path.clone()), is_ignored: ActiveValue::set(entry.is_ignored), + git_status: ActiveValue::set(None), is_external: ActiveValue::set(entry.is_external), - git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)), is_deleted: ActiveValue::set(false), scan_id: ActiveValue::set(update.scan_id as i64), is_fifo: ActiveValue::set(entry.is_fifo), @@ -295,7 +296,6 @@ impl Database { worktree_entry::Column::MtimeNanos, worktree_entry::Column::CanonicalPath, worktree_entry::Column::IsIgnored, - worktree_entry::Column::GitStatus, worktree_entry::Column::ScanId, ]) .to_owned(), @@ -349,6 +349,79 @@ impl Database { ) .exec(&*tx) .await?; + + let has_any_statuses = update + .updated_repositories + .iter() + .any(|repository| !repository.updated_statuses.is_empty()); + + if has_any_statuses { + worktree_repository_statuses::Entity::insert_many( + update.updated_repositories.iter().flat_map( + |repository: &proto::RepositoryEntry| { + repository.updated_statuses.iter().map(|status_entry| { + 
worktree_repository_statuses::ActiveModel { + project_id: ActiveValue::set(project_id), + worktree_id: ActiveValue::set(worktree_id), + work_directory_id: ActiveValue::set( + repository.work_directory_id as i64, + ), + scan_id: ActiveValue::set(update.scan_id as i64), + is_deleted: ActiveValue::set(false), + repo_path: ActiveValue::set(status_entry.repo_path.clone()), + status: ActiveValue::set(status_entry.status as i64), + } + }) + }, + ), + ) + .on_conflict( + OnConflict::columns([ + worktree_repository_statuses::Column::ProjectId, + worktree_repository_statuses::Column::WorktreeId, + worktree_repository_statuses::Column::WorkDirectoryId, + worktree_repository_statuses::Column::RepoPath, + ]) + .update_columns([ + worktree_repository_statuses::Column::ScanId, + worktree_repository_statuses::Column::Status, + ]) + .to_owned(), + ) + .exec(&*tx) + .await?; + } + + let has_any_removed_statuses = update + .updated_repositories + .iter() + .any(|repository| !repository.removed_statuses.is_empty()); + + if has_any_removed_statuses { + worktree_repository_statuses::Entity::update_many() + .filter( + worktree_repository_statuses::Column::ProjectId + .eq(project_id) + .and( + worktree_repository_statuses::Column::WorktreeId + .eq(worktree_id), + ) + .and( + worktree_repository_statuses::Column::RepoPath.is_in( + update.updated_repositories.iter().flat_map(|repository| { + repository.removed_statuses.iter() + }), + ), + ), + ) + .set(worktree_repository_statuses::ActiveModel { + is_deleted: ActiveValue::Set(true), + scan_id: ActiveValue::Set(update.scan_id as i64), + ..Default::default() + }) + .exec(&*tx) + .await?; + } } if !update.removed_repositories.is_empty() { @@ -643,7 +716,6 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, - git_status: db_entry.git_status.map(|status| status as i32), // This is only used in the summarization backlog, so if it's None, // that just means we won't be 
able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go @@ -657,23 +729,49 @@ impl Database { // Populate repository entries. { - let mut db_repository_entries = worktree_repository::Entity::find() + let db_repository_entries = worktree_repository::Entity::find() .filter( Condition::all() .add(worktree_repository::Column::ProjectId.eq(project.id)) .add(worktree_repository::Column::IsDeleted.eq(false)), ) - .stream(tx) + .all(tx) .await?; - while let Some(db_repository_entry) = db_repository_entries.next().await { - let db_repository_entry = db_repository_entry?; + for db_repository_entry in db_repository_entries { if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64)) { + let mut repository_statuses = worktree_repository_statuses::Entity::find() + .filter( + Condition::all() + .add(worktree_repository_statuses::Column::ProjectId.eq(project.id)) + .add( + worktree_repository_statuses::Column::WorktreeId + .eq(worktree.id), + ) + .add( + worktree_repository_statuses::Column::WorkDirectoryId + .eq(db_repository_entry.work_directory_id), + ) + .add(worktree_repository_statuses::Column::IsDeleted.eq(false)), + ) + .stream(tx) + .await?; + let mut updated_statuses = Vec::new(); + while let Some(status_entry) = repository_statuses.next().await { + let status_entry: worktree_repository_statuses::Model = status_entry?; + updated_statuses.push(proto::StatusEntry { + repo_path: status_entry.repo_path, + status: status_entry.status as i32, + }); + } + worktree.repository_entries.insert( db_repository_entry.work_directory_id as u64, proto::RepositoryEntry { work_directory_id: db_repository_entry.work_directory_id as u64, branch: db_repository_entry.branch, + updated_statuses, + removed_statuses: Vec::new(), }, ); } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index a3a99bee71..6036a8fddc 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ 
b/crates/collab/src/db/queries/rooms.rs @@ -662,7 +662,6 @@ impl Database { canonical_path: db_entry.canonical_path, is_ignored: db_entry.is_ignored, is_external: db_entry.is_external, - git_status: db_entry.git_status.map(|status| status as i32), // This is only used in the summarization backlog, so if it's None, // that just means we won't be able to detect when to resummarize // based on total number of backlogged bytes - instead, we'd go @@ -682,26 +681,69 @@ impl Database { worktree_repository::Column::IsDeleted.eq(false) }; - let mut db_repositories = worktree_repository::Entity::find() + let db_repositories = worktree_repository::Entity::find() .filter( Condition::all() .add(worktree_repository::Column::ProjectId.eq(project.id)) .add(worktree_repository::Column::WorktreeId.eq(worktree.id)) .add(repository_entry_filter), ) - .stream(tx) + .all(tx) .await?; - while let Some(db_repository) = db_repositories.next().await { - let db_repository = db_repository?; + for db_repository in db_repositories.into_iter() { if db_repository.is_deleted { worktree .removed_repositories .push(db_repository.work_directory_id as u64); } else { + let status_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree + { + worktree_repository_statuses::Column::ScanId + .gt(rejoined_worktree.scan_id) + } else { + worktree_repository_statuses::Column::IsDeleted.eq(false) + }; + + let mut db_statuses = worktree_repository_statuses::Entity::find() + .filter( + Condition::all() + .add( + worktree_repository_statuses::Column::ProjectId + .eq(project.id), + ) + .add( + worktree_repository_statuses::Column::WorktreeId + .eq(worktree.id), + ) + .add( + worktree_repository_statuses::Column::WorkDirectoryId + .eq(db_repository.work_directory_id), + ) + .add(status_entry_filter), + ) + .stream(tx) + .await?; + let mut removed_statuses = Vec::new(); + let mut updated_statuses = Vec::new(); + + while let Some(db_status) = db_statuses.next().await { + let db_status: 
worktree_repository_statuses::Model = db_status?; + if db_status.is_deleted { + removed_statuses.push(db_status.repo_path); + } else { + updated_statuses.push(proto::StatusEntry { + repo_path: db_status.repo_path, + status: db_status.status as i32, + }); + } + } + worktree.updated_repositories.push(proto::RepositoryEntry { work_directory_id: db_repository.work_directory_id as u64, branch: db_repository.branch, + updated_statuses, + removed_statuses, }); } } diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index f2320c7e3e..6ef0d964de 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2925,8 +2925,6 @@ async fn test_git_status_sync( assert_eq!(snapshot.status_for_file(file), status); } - // Smoke test status reading - project_local.read_with(cx_a, |project, cx| { assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx); assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx); @@ -6669,6 +6667,10 @@ async fn test_remote_git_branches( client_a .fs() .insert_branches(Path::new("/project/.git"), &branches); + let branches_set = branches + .into_iter() + .map(ToString::to_string) + .collect::>(); let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await; let project_id = active_call_a @@ -6690,10 +6692,10 @@ async fn test_remote_git_branches( let branches_b = branches_b .into_iter() - .map(|branch| branch.name) - .collect::>(); + .map(|branch| branch.name.to_string()) + .collect::>(); - assert_eq!(&branches_b, &branches); + assert_eq!(branches_b, branches_set); cx_b.update(|cx| { project_b.update(cx, |project, cx| { diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index cb045f14f0..0698bc4007 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ 
b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -229,6 +229,10 @@ async fn test_ssh_collaboration_git_branches( .await; let branches = ["main", "dev", "feature-1"]; + let branches_set = branches + .iter() + .map(ToString::to_string) + .collect::>(); remote_fs.insert_branches(Path::new("/project/.git"), &branches); // User A connects to the remote project via SSH. @@ -281,10 +285,10 @@ async fn test_ssh_collaboration_git_branches( let branches_b = branches_b .into_iter() - .map(|branch| branch.name) - .collect::>(); + .map(|branch| branch.name.to_string()) + .collect::>(); - assert_eq!(&branches_b, &branches); + assert_eq!(&branches_b, &branches_set); cx_b.update(|cx| { project_b.update(cx, |project, cx| { diff --git a/crates/collections/Cargo.toml b/crates/collections/Cargo.toml index b16b4c1300..3daaf83c69 100644 --- a/crates/collections/Cargo.toml +++ b/crates/collections/Cargo.toml @@ -16,4 +16,5 @@ doctest = false test-support = [] [dependencies] -rustc-hash = "1.1" +indexmap.workspace = true +rustc-hash.workspace = true diff --git a/crates/collections/src/collections.rs b/crates/collections/src/collections.rs index 25f6135c1f..be7bbdb59f 100644 --- a/crates/collections/src/collections.rs +++ b/crates/collections/src/collections.rs @@ -4,12 +4,24 @@ pub type HashMap = FxHashMap; #[cfg(feature = "test-support")] pub type HashSet = FxHashSet; +#[cfg(feature = "test-support")] +pub type IndexMap = indexmap::IndexMap; + +#[cfg(feature = "test-support")] +pub type IndexSet = indexmap::IndexSet; + #[cfg(not(feature = "test-support"))] pub type HashMap = std::collections::HashMap; #[cfg(not(feature = "test-support"))] pub type HashSet = std::collections::HashSet; +#[cfg(not(feature = "test-support"))] +pub type IndexMap = indexmap::IndexMap; + +#[cfg(not(feature = "test-support"))] +pub type IndexSet = indexmap::IndexSet; + pub use rustc_hash::FxHasher; pub use rustc_hash::{FxHashMap, FxHashSet}; pub use std::collections::*; diff --git 
a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 73e1b32c55..7de2797079 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -2748,7 +2748,7 @@ mod tests { .iter() .filter(|(_, block)| matches!(block, Block::FoldedBuffer { .. })) .count(), - "Should have one folded block, prodicing a header of the second buffer" + "Should have one folded block, producing a header of the second buffer" ); assert_eq!( blocks_snapshot.text(), @@ -2994,7 +2994,7 @@ mod tests { } }) .count(), - "Should have one folded block, prodicing a header of the second buffer" + "Should have one folded block, producing a header of the second buffer" ); assert_eq!(blocks_snapshot.text(), "\n"); assert_eq!( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 88297f72e4..9dfc804de6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11780,7 +11780,7 @@ impl Editor { } /// Merges all anchor ranges for all context types ever set, picking the last highlight added in case of a row conflict. - /// Rerturns a map of display rows that are highlighted and their corresponding highlight color. + /// Returns a map of display rows that are highlighted and their corresponding highlight color. /// Allows to ignore certain kinds of highlights. pub fn highlighted_display_rows( &mut self, @@ -12573,7 +12573,7 @@ impl Editor { .file() .is_none() .then(|| { - // Handle file-less buffers separately: those are not really the project items, so won't have a paroject path or entity id, + // Handle file-less buffers separately: those are not really the project items, so won't have a project path or entity id, // so `workspace.open_project_item` will never find them, always opening a new editor. // Instead, we try to activate the existing editor in the pane first. 
let (editor, pane_item_index) = diff --git a/crates/editor/src/git/project_diff.rs b/crates/editor/src/git/project_diff.rs index e76e5922db..f06841e445 100644 --- a/crates/editor/src/git/project_diff.rs +++ b/crates/editor/src/git/project_diff.rs @@ -194,14 +194,24 @@ impl ProjectDiffEditor { let open_tasks = project .update(&mut cx, |project, cx| { let worktree = project.worktree_for_id(id, cx)?; - let applicable_entries = worktree - .read(cx) - .entries(false, 0) - .filter(|entry| !entry.is_external) - .filter(|entry| entry.is_file()) - .filter_map(|entry| Some((entry.git_status?, entry))) - .filter_map(|(git_status, entry)| { - Some((git_status, entry.id, project.path_for_entry(entry.id, cx)?)) + let snapshot = worktree.read(cx).snapshot(); + let applicable_entries = snapshot + .repositories() + .flat_map(|entry| { + entry.status().map(|git_entry| { + (git_entry.status, entry.join(git_entry.repo_path)) + }) + }) + .filter_map(|(status, path)| { + let id = snapshot.entry_for_path(&path)?.id; + Some(( + status, + id, + ProjectPath { + worktree_id: snapshot.id(), + path: path.into(), + }, + )) }) .collect::>(); Some( diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 48b7af086b..620fcd5ec4 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -615,9 +615,20 @@ impl Item for Editor { .read(cx) .as_singleton() .and_then(|buffer| buffer.read(cx).project_path(cx)) - .and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx)) - .map(|entry| { - entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected) + .and_then(|path| { + let project = self.project.as_ref()?.read(cx); + let entry = project.entry_for_path(&path, cx)?; + let git_status = project + .worktree_for_id(path.worktree_id, cx)? 
+ .read(cx) + .snapshot() + .status_for_file(path.path); + + Some(entry_git_aware_label_color( + git_status, + entry.is_ignored, + params.selected, + )) }) .unwrap_or_else(|| entry_label_color(params.selected)) } else { @@ -1559,10 +1570,10 @@ pub fn entry_git_aware_label_color( Color::Ignored } else { match git_status { - Some(GitFileStatus::Added) => Color::Created, + Some(GitFileStatus::Added) | Some(GitFileStatus::Untracked) => Color::Created, Some(GitFileStatus::Modified) => Color::Modified, Some(GitFileStatus::Conflict) => Color::Conflict, - None => entry_label_color(selected), + Some(GitFileStatus::Deleted) | None => entry_label_color(selected), } } } diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index fd890b839d..3831ca963f 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -257,7 +257,8 @@ impl EditorLspTestContext { Self::new(language, Default::default(), cx).await } - // Constructs lsp range using a marked string with '[', ']' range delimiters + /// Constructs lsp range using a marked string with '[', ']' range delimiters + #[track_caller] pub fn lsp_range(&mut self, marked_text: &str) -> lsp::Range { let ranges = self.ranges(marked_text); self.to_lsp_range(ranges[0].clone()) diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index aecdfa91c2..1cbd238e7d 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -230,6 +230,7 @@ impl EditorTestContext { self.cx.background_executor.run_until_parked(); } + #[track_caller] pub fn ranges(&mut self, marked_text: &str) -> Vec> { let (unmarked_text, ranges) = marked_text_ranges(marked_text, false); assert_eq!(self.buffer_text(), unmarked_text); diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index cf07b74ac5..c608c23cf3 100644 --- a/crates/git/src/git.rs 
+++ b/crates/git/src/git.rs @@ -16,6 +16,7 @@ use std::sync::LazyLock; pub use crate::hosting_provider::*; pub use crate::remote::*; pub use git2 as libgit; +pub use repository::WORK_DIRECTORY_REPO_PATH; pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies")); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index b37e517d43..bb890150e5 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -7,6 +7,8 @@ use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; use serde::{Deserialize, Serialize}; +use std::borrow::Borrow; +use std::sync::LazyLock; use std::{ cmp::Ordering, path::{Component, Path, PathBuf}, @@ -37,7 +39,8 @@ pub trait GitRepository: Send + Sync { /// Returns the SHA of the current HEAD. fn head_sha(&self) -> Option; - fn status(&self, path_prefixes: &[PathBuf]) -> Result; + /// Returns the list of git statuses, sorted by path + fn status(&self, path_prefixes: &[RepoPath]) -> Result; fn branches(&self) -> Result>; fn change_branch(&self, _: &str) -> Result<()>; @@ -132,7 +135,7 @@ impl GitRepository for RealGitRepository { Some(self.repository.lock().head().ok()?.target()?.to_string()) } - fn status(&self, path_prefixes: &[PathBuf]) -> Result { + fn status(&self, path_prefixes: &[RepoPath]) -> Result { let working_directory = self .repository .lock() @@ -289,8 +292,9 @@ impl GitRepository for FakeGitRepository { state.dot_git_dir.clone() } - fn status(&self, path_prefixes: &[PathBuf]) -> Result { + fn status(&self, path_prefixes: &[RepoPath]) -> Result { let state = self.state.lock(); + let mut entries = state .worktree_statuses .iter() @@ -306,6 +310,7 @@ impl GitRepository for FakeGitRepository { }) .collect::>(); entries.sort_unstable_by(|a, b| a.0.cmp(&b.0)); + Ok(GitStatus { entries: entries.into(), }) @@ -394,6 +399,8 @@ pub enum GitFileStatus { Added, Modified, Conflict, + 
Deleted, + Untracked, } impl GitFileStatus { @@ -421,20 +428,34 @@ impl GitFileStatus { } } +pub static WORK_DIRECTORY_REPO_PATH: LazyLock = + LazyLock::new(|| RepoPath(Path::new("").into())); + #[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)] -pub struct RepoPath(pub PathBuf); +pub struct RepoPath(pub Arc); impl RepoPath { pub fn new(path: PathBuf) -> Self { debug_assert!(path.is_relative(), "Repo paths must be relative"); - RepoPath(path) + RepoPath(path.into()) + } + + pub fn from_str(path: &str) -> Self { + let path = Path::new(path); + debug_assert!(path.is_relative(), "Repo paths must be relative"); + + RepoPath(path.into()) + } + + pub fn to_proto(&self) -> String { + self.0.to_string_lossy().to_string() } } impl From<&Path> for RepoPath { fn from(value: &Path) -> Self { - RepoPath::new(value.to_path_buf()) + RepoPath::new(value.into()) } } @@ -444,9 +465,15 @@ impl From for RepoPath { } } +impl From<&str> for RepoPath { + fn from(value: &str) -> Self { + Self::from_str(value) + } +} + impl Default for RepoPath { fn default() -> Self { - RepoPath(PathBuf::new()) + RepoPath(Path::new("").into()) } } @@ -457,13 +484,19 @@ impl AsRef for RepoPath { } impl std::ops::Deref for RepoPath { - type Target = PathBuf; + type Target = Path; fn deref(&self) -> &Self::Target { &self.0 } } +impl Borrow for RepoPath { + fn borrow(&self) -> &Path { + self.0.as_ref() + } +} + #[derive(Debug)] pub struct RepoPathDescendants<'a>(pub &'a Path); diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index f8ffdc6714..0d62cfaae9 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -1,10 +1,6 @@ use crate::repository::{GitFileStatus, RepoPath}; use anyhow::{anyhow, Result}; -use std::{ - path::{Path, PathBuf}, - process::Stdio, - sync::Arc, -}; +use std::{path::Path, process::Stdio, sync::Arc}; #[derive(Clone)] pub struct GitStatus { @@ -15,7 +11,7 @@ impl GitStatus { pub(crate) fn new( git_binary: &Path, working_directory: &Path, - 
path_prefixes: &[PathBuf], + path_prefixes: &[RepoPath], ) -> Result { let child = util::command::new_std_command(git_binary) .current_dir(working_directory) @@ -27,7 +23,7 @@ impl GitStatus { "-z", ]) .args(path_prefixes.iter().map(|path_prefix| { - if *path_prefix == Path::new("") { + if path_prefix.0.as_ref() == Path::new("") { Path::new(".") } else { path_prefix @@ -55,10 +51,12 @@ impl GitStatus { let (status, path) = entry.split_at(3); let status = status.trim(); Some(( - RepoPath(PathBuf::from(path)), + RepoPath(Path::new(path).into()), match status { - "A" | "??" => GitFileStatus::Added, + "A" => GitFileStatus::Added, "M" => GitFileStatus::Modified, + "D" => GitFileStatus::Deleted, + "??" => GitFileStatus::Untracked, _ => return None, }, )) @@ -75,7 +73,7 @@ impl GitStatus { pub fn get(&self, path: &Path) -> Option { self.entries - .binary_search_by(|(repo_path, _)| repo_path.0.as_path().cmp(path)) + .binary_search_by(|(repo_path, _)| repo_path.0.as_ref().cmp(path)) .ok() .map(|index| self.entries[index].1) } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 69e70805e7..38c50f0c80 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -14,9 +14,11 @@ path = "src/git_ui.rs" [dependencies] anyhow.workspace = true +collections.workspace = true db.workspace = true editor.workspace = true futures.workspace = true +git.workspace = true gpui.workspace = true language.workspace = true menu.workspace = true @@ -29,8 +31,7 @@ settings.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true -git.workspace = true -collections.workspace = true +worktree.workspace = true [target.'cfg(windows)'.dependencies] windows.workspace = true diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index be3f4485fb..33eee26957 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1,11 +1,16 @@ +use crate::{git_status_icon, settings::GitPanelSettings}; +use 
crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll}; use anyhow::{Context as _, Result}; -use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use editor::{ scroll::{Autoscroll, AutoscrollStrategy}, Editor, MultiBuffer, DEFAULT_MULTIBUFFER_CONTEXT, }; -use git::{diff::DiffHunk, repository::GitFileStatus}; +use git::{ + diff::DiffHunk, + repository::{GitFileStatus, RepoPath}, +}; +use gpui::*; use gpui::{ actions, prelude::*, uniform_list, Action, AppContext, AsyncWindowContext, ClickEvent, CursorStyle, EventEmitter, FocusHandle, FocusableView, KeyContext, @@ -14,7 +19,7 @@ use gpui::{ }; use language::{Buffer, BufferRow, OffsetRangeExt}; use menu::{SelectNext, SelectPrev}; -use project::{Entry, EntryKind, Fs, Project, ProjectEntryId, WorktreeId}; +use project::{EntryKind, Fs, Project, ProjectEntryId, ProjectPath, WorktreeId}; use serde::{Deserialize, Serialize}; use settings::Settings as _; use std::{ @@ -22,7 +27,7 @@ use std::{ collections::HashSet, ffi::OsStr, ops::{Deref, Range}, - path::{Path, PathBuf}, + path::PathBuf, rc::Rc, sync::Arc, time::Duration, @@ -37,9 +42,7 @@ use workspace::{ dock::{DockPosition, Panel, PanelEvent}, ItemHandle, Workspace, }; - -use crate::{git_status_icon, settings::GitPanelSettings}; -use crate::{CommitAllChanges, CommitStagedChanges, DiscardAll, StageAll, UnstageAll}; +use worktree::StatusEntry; actions!(git_panel, [ToggleFocus]); @@ -69,7 +72,7 @@ pub struct GitStatusEntry {} struct EntryDetails { filename: String, display_name: String, - path: Arc, + path: RepoPath, kind: EntryKind, depth: usize, is_expanded: bool, @@ -101,7 +104,8 @@ pub struct GitPanel { scrollbar_state: ScrollbarState, selected_item: Option, show_scrollbar: bool, - expanded_dir_ids: HashMap>, + // TODO Reintroduce expanded directories, once we're deriving directories from paths + // expanded_dir_ids: HashMap>, // The entries that are currently shown in the panel, aka // not hidden by folding or such @@ -115,18 +119,20 @@ pub 
struct GitPanel { #[derive(Debug, Clone)] struct WorktreeEntries { worktree_id: WorktreeId, + // TODO support multiple repositories per worktree + work_directory: worktree::WorkDirectory, visible_entries: Vec, - paths: Rc>>>, + paths: Rc>>, } #[derive(Debug, Clone)] struct GitPanelEntry { - entry: Entry, + entry: worktree::StatusEntry, hunks: Rc>>, } impl Deref for GitPanelEntry { - type Target = Entry; + type Target = worktree::StatusEntry; fn deref(&self) -> &Self::Target { &self.entry @@ -134,11 +140,11 @@ impl Deref for GitPanelEntry { } impl WorktreeEntries { - fn paths(&self) -> &HashSet> { + fn paths(&self) -> &HashSet { self.paths.get_or_init(|| { self.visible_entries .iter() - .map(|e| (e.entry.path.clone())) + .map(|e| (e.entry.repo_path.clone())) .collect() }) } @@ -165,8 +171,11 @@ impl GitPanel { }) .detach(); cx.subscribe(&project, |this, _, event, cx| match event { - project::Event::WorktreeRemoved(id) => { - this.expanded_dir_ids.remove(id); + project::Event::GitRepositoryUpdated => { + this.update_visible_entries(None, None, cx); + } + project::Event::WorktreeRemoved(_id) => { + // this.expanded_dir_ids.remove(id); this.update_visible_entries(None, None, cx); cx.notify(); } @@ -183,7 +192,7 @@ impl GitPanel { project::Event::Closed => { this.git_diff_editor_updates = Task::ready(()); this.reveal_in_editor = Task::ready(()); - this.expanded_dir_ids.clear(); + // this.expanded_dir_ids.clear(); this.visible_entries.clear(); this.git_diff_editor = None; } @@ -200,8 +209,7 @@ impl GitPanel { pending_serialization: Task::ready(None), visible_entries: Vec::new(), current_modifiers: cx.modifiers(), - expanded_dir_ids: Default::default(), - + // expanded_dir_ids: Default::default(), width: Some(px(360.)), scrollbar_state: ScrollbarState::new(scroll_handle.clone()).parent_view(cx.view()), scroll_handle, @@ -288,16 +296,16 @@ impl GitPanel { } fn calculate_depth_and_difference( - entry: &Entry, - visible_worktree_entries: &HashSet>, + entry: &StatusEntry, + 
visible_worktree_entries: &HashSet, ) -> (usize, usize) { let (depth, difference) = entry - .path + .repo_path .ancestors() .skip(1) // Skip the entry itself .find_map(|ancestor| { if let Some(parent_entry) = visible_worktree_entries.get(ancestor) { - let entry_path_components_count = entry.path.components().count(); + let entry_path_components_count = entry.repo_path.components().count(); let parent_path_components_count = parent_entry.components().count(); let difference = entry_path_components_count - parent_path_components_count; let depth = parent_entry @@ -432,13 +440,7 @@ impl GitPanel { fn entry_count(&self) -> usize { self.visible_entries .iter() - .map(|worktree_entries| { - worktree_entries - .visible_entries - .iter() - .filter(|entry| entry.git_status.is_some()) - .count() - }) + .map(|worktree_entries| worktree_entries.visible_entries.len()) .sum() } @@ -446,7 +448,7 @@ impl GitPanel { &self, range: Range, cx: &mut ViewContext, - mut callback: impl FnMut(ProjectEntryId, EntryDetails, &mut ViewContext), + mut callback: impl FnMut(usize, EntryDetails, &mut ViewContext), ) { let mut ix = 0; for worktree_entries in &self.visible_entries { @@ -468,11 +470,11 @@ impl GitPanel { { let snapshot = worktree.read(cx).snapshot(); let root_name = OsStr::new(snapshot.root_name()); - let expanded_entry_ids = self - .expanded_dir_ids - .get(&snapshot.id()) - .map(Vec::as_slice) - .unwrap_or(&[]); + // let expanded_entry_ids = self + // .expanded_dir_ids + // .get(&snapshot.id()) + // .map(Vec::as_slice) + // .unwrap_or(&[]); let entry_range = range.start.saturating_sub(ix)..end_ix - ix; let entries = worktree_entries.paths(); @@ -483,22 +485,22 @@ impl GitPanel { .enumerate() { let index = index_start + i; - let status = entry.git_status; - let is_expanded = expanded_entry_ids.binary_search(&entry.id).is_ok(); + let status = entry.status; + let is_expanded = true; //expanded_entry_ids.binary_search(&entry.id).is_ok(); let (depth, difference) = 
Self::calculate_depth_and_difference(entry, entries); let filename = match difference { diff if diff > 1 => entry - .path + .repo_path .iter() - .skip(entry.path.components().count() - diff) + .skip(entry.repo_path.components().count() - diff) .collect::() .to_str() .unwrap_or_default() .to_string(), _ => entry - .path + .repo_path .file_name() .map(|name| name.to_string_lossy().into_owned()) .unwrap_or_else(|| root_name.to_string_lossy().to_string()), @@ -506,16 +508,17 @@ impl GitPanel { let details = EntryDetails { filename, - display_name: entry.path.to_string_lossy().into_owned(), - kind: entry.kind, + display_name: entry.repo_path.to_string_lossy().into_owned(), + // TODO get it from StatusEntry? + kind: EntryKind::File, is_expanded, - path: entry.path.clone(), - status, + path: entry.repo_path.clone(), + status: Some(status), hunks: entry.hunks.clone(), depth, index, }; - callback(entry.id, details, cx); + callback(ix, details, cx); } } ix = end_ix; @@ -527,7 +530,7 @@ impl GitPanel { fn update_visible_entries( &mut self, for_worktree: Option, - new_selected_entry: Option<(WorktreeId, ProjectEntryId)>, + _new_selected_entry: Option<(WorktreeId, ProjectEntryId)>, cx: &mut ViewContext, ) { let project = self.project.read(cx); @@ -549,24 +552,36 @@ impl GitPanel { None => false, }); for worktree in project.visible_worktrees(cx) { - let worktree_id = worktree.read(cx).id(); + let snapshot = worktree.read(cx).snapshot(); + let worktree_id = snapshot.id(); + if for_worktree.is_some() && for_worktree != Some(worktree_id) { continue; } - let snapshot = worktree.read(cx).snapshot(); - let mut visible_worktree_entries = snapshot - .entries(false, 0) - .filter(|entry| !entry.is_external) - .filter(|entry| entry.git_status.is_some()) - .cloned() - .collect::>(); - snapshot.propagate_git_statuses(&mut visible_worktree_entries); - project::sort_worktree_entries(&mut visible_worktree_entries); + let mut visible_worktree_entries = Vec::new(); + // Only use the first 
repository for now + let repositories = snapshot.repositories().take(1); + let mut work_directory = None; + for repository in repositories { + visible_worktree_entries.extend(repository.status()); + work_directory = Some(worktree::WorkDirectory::clone(repository)); + } + + // TODO use the GitTraversal + // let mut visible_worktree_entries = snapshot + // .entries(false, 0) + // .filter(|entry| !entry.is_external) + // .filter(|entry| entry.git_status.is_some()) + // .cloned() + // .collect::>(); + // snapshot.propagate_git_statuses(&mut visible_worktree_entries); + // project::sort_worktree_entries(&mut visible_worktree_entries); if !visible_worktree_entries.is_empty() { self.visible_entries.push(WorktreeEntries { worktree_id, + work_directory: work_directory.unwrap(), visible_entries: visible_worktree_entries .into_iter() .map(|entry| GitPanelEntry { @@ -580,24 +595,25 @@ impl GitPanel { } self.visible_entries.extend(after_update); - if let Some((worktree_id, entry_id)) = new_selected_entry { - self.selected_item = self.visible_entries.iter().enumerate().find_map( - |(worktree_index, worktree_entries)| { - if worktree_entries.worktree_id == worktree_id { - worktree_entries - .visible_entries - .iter() - .position(|entry| entry.id == entry_id) - .map(|entry_index| { - worktree_index * worktree_entries.visible_entries.len() - + entry_index - }) - } else { - None - } - }, - ); - } + // TODO re-implement this + // if let Some((worktree_id, entry_id)) = new_selected_entry { + // self.selected_item = self.visible_entries.iter().enumerate().find_map( + // |(worktree_index, worktree_entries)| { + // if worktree_entries.worktree_id == worktree_id { + // worktree_entries + // .visible_entries + // .iter() + // .position(|entry| entry.id == entry_id) + // .map(|entry_index| { + // worktree_index * worktree_entries.visible_entries.len() + // + entry_index + // }) + // } else { + // None + // } + // }, + // ); + // } let project = self.project.downgrade(); 
self.git_diff_editor_updates = cx.spawn(|git_panel, mut cx| async move { @@ -612,12 +628,14 @@ impl GitPanel { .visible_entries .iter() .filter_map(|entry| { - let git_status = entry.git_status()?; + let git_status = entry.status; let entry_hunks = entry.hunks.clone(); let (entry_path, unstaged_changes_task) = project.update(cx, |project, cx| { - let entry_path = - project.path_for_entry(entry.id, cx)?; + let entry_path = ProjectPath { + worktree_id: worktree_entries.worktree_id, + path: worktree_entries.work_directory.unrelativize(&entry.repo_path)?, + }; let open_task = project.open_path(entry_path.clone(), cx); let unstaged_changes_task = @@ -682,8 +700,8 @@ impl GitPanel { ) .collect() } - // TODO support conflicts display - GitFileStatus::Conflict => Vec::new(), + // TODO support these + GitFileStatus::Conflict | GitFileStatus::Deleted | GitFileStatus::Untracked => Vec::new(), } }).clone() })?; @@ -992,18 +1010,17 @@ impl GitPanel { fn render_entry( &self, - id: ProjectEntryId, + ix: usize, selected: bool, details: EntryDetails, cx: &ViewContext, ) -> impl IntoElement { - let id = id.to_proto() as usize; - let checkbox_id = ElementId::Name(format!("checkbox_{}", id).into()); + let checkbox_id = ElementId::Name(format!("checkbox_{}", ix).into()); let is_staged = ToggleState::Selected; let handle = cx.view().downgrade(); h_flex() - .id(id) + .id(("git-panel-entry", ix)) .h(px(28.)) .w_full() .pl(px(12. + 12. 
* details.depth as f32)) @@ -1019,7 +1036,7 @@ impl GitPanel { this.child(git_status_icon(status)) }) .child( - ListItem::new(("label", id)) + ListItem::new(details.path.0.clone()) .toggle_state(selected) .child(h_flex().gap_1p5().child(details.display_name.clone())) .on_click(move |e, cx| { diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index 5aa9a361fa..c1c3bd3ac0 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -44,10 +44,13 @@ const REMOVED_COLOR: Hsla = Hsla { // TODO: Add updated status colors to theme pub fn git_status_icon(status: GitFileStatus) -> impl IntoElement { match status { - GitFileStatus::Added => Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)), + GitFileStatus::Added | GitFileStatus::Untracked => { + Icon::new(IconName::SquarePlus).color(Color::Custom(ADDED_COLOR)) + } GitFileStatus::Modified => { Icon::new(IconName::SquareDot).color(Color::Custom(MODIFIED_COLOR)) } GitFileStatus::Conflict => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)), + GitFileStatus::Deleted => Icon::new(IconName::Warning).color(Color::Custom(REMOVED_COLOR)), } } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index dd6b022fa0..58f53be020 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1580,7 +1580,7 @@ impl LinuxClient for X11Client { } } -// Adatpted from: +// Adapted from: // https://docs.rs/winit/0.29.11/src/winit/platform_impl/linux/x11/monitor.rs.html#103-111 pub fn mode_refresh_rate(mode: &randr::ModeInfo) -> Duration { if mode.dot_clock == 0 || mode.htotal == 0 || mode.vtotal == 0 { diff --git a/crates/gpui/src/platform/mac/events.rs b/crates/gpui/src/platform/mac/events.rs index e1aae9db39..56256aeffd 100644 --- a/crates/gpui/src/platform/mac/events.rs +++ b/crates/gpui/src/platform/mac/events.rs @@ -322,7 +322,7 @@ unsafe fn parse_keystroke(native_event: 
id) -> Keystroke { let mut chars_with_shift = chars_for_modified_key(native_event.keyCode(), SHIFT_MOD); let always_use_cmd_layout = always_use_command_layout(); - // Handle Dvorak+QWERTY / Russian / Armeniam + // Handle Dvorak+QWERTY / Russian / Armenian if command || always_use_cmd_layout { let chars_with_cmd = chars_for_modified_key(native_event.keyCode(), CMD_MOD); let chars_with_both = diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 4c144ff61f..2a4a664361 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -4880,6 +4880,8 @@ pub enum ElementId { FocusHandle(FocusId), /// A combination of a name and an integer. NamedInteger(SharedString, usize), + /// A path + Path(Arc), } impl Display for ElementId { @@ -4891,6 +4893,7 @@ impl Display for ElementId { ElementId::FocusHandle(_) => write!(f, "FocusHandle")?, ElementId::NamedInteger(s, i) => write!(f, "{}-{}", s, i)?, ElementId::Uuid(uuid) => write!(f, "{}", uuid)?, + ElementId::Path(path) => write!(f, "{}", path.display())?, } Ok(()) @@ -4927,6 +4930,12 @@ impl From for ElementId { } } +impl From> for ElementId { + fn from(path: Arc) -> Self { + ElementId::Path(path) + } +} + impl From<&'static str> for ElementId { fn from(name: &'static str) -> Self { ElementId::Name(name.into()) diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 570948a822..b78f1bd085 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -96,12 +96,18 @@ impl Item for ImageView { fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { let project_path = self.image_item.read(cx).project_path(cx); + let label_color = if ItemSettings::get_global(cx).git_status { + let git_status = self + .project + .read(cx) + .project_path_git_status(&project_path, cx); + self.project .read(cx) .entry_for_path(&project_path, cx) .map(|entry| { - entry_git_aware_label_color(entry.git_status, 
entry.is_ignored, params.selected) + entry_git_aware_label_color(git_status, entry.is_ignored, params.selected) }) .unwrap_or_else(|| params.text_color()) } else { diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 6dfe1ceccc..0333e487cc 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -19,8 +19,8 @@ db.workspace = true editor.workspace = true file_icons.workspace = true fuzzy.workspace = true -itertools.workspace = true gpui.workspace = true +itertools.workspace = true language.workspace = true log.workspace = true menu.workspace = true @@ -36,8 +36,8 @@ smol.workspace = true theme.workspace = true ui.workspace = true util.workspace = true -worktree.workspace = true workspace.workspace = true +worktree.workspace = true [dev-dependencies] search = { workspace = true, features = ["test-support"] } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index d43b76671a..6dd6689f18 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -56,7 +56,7 @@ use workspace::{ }, OpenInTerminal, WeakItemHandle, Workspace, }; -use worktree::{Entry, ProjectEntryId, WorktreeId}; +use worktree::{Entry, GitEntry, ProjectEntryId, WorktreeId}; actions!( outline_panel, @@ -348,10 +348,17 @@ enum ExcerptOutlines { NotFetched, } +#[derive(Clone, Debug, PartialEq, Eq)] +struct FoldedDirsEntry { + worktree_id: WorktreeId, + entries: Vec, +} + +// TODO: collapse the inner enums into panel entry #[derive(Clone, Debug)] enum PanelEntry { Fs(FsEntry), - FoldedDirs(WorktreeId, Vec), + FoldedDirs(FoldedDirsEntry), Outline(OutlineEntry), Search(SearchEntry), } @@ -383,7 +390,18 @@ impl PartialEq for PanelEntry { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::Fs(a), Self::Fs(b)) => a == b, - (Self::FoldedDirs(a1, a2), Self::FoldedDirs(b1, b2)) => a1 == b1 && a2 == b2, + ( + Self::FoldedDirs(FoldedDirsEntry { + 
worktree_id: worktree_id_a, + entries: entries_a, + .. + }), + Self::FoldedDirs(FoldedDirsEntry { + worktree_id: worktree_id_b, + entries: entries_b, + .. + }), + ) => worktree_id_a == worktree_id_b && entries_a == entries_b, (Self::Outline(a), Self::Outline(b)) => a == b, ( Self::Search(SearchEntry { @@ -505,54 +523,124 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq)] -enum OutlineEntry { - Excerpt(BufferId, ExcerptId, ExcerptRange), - Outline(BufferId, ExcerptId, Outline), +#[derive(Clone, Debug, Eq)] +struct OutlineEntryExcerpt { + id: ExcerptId, + buffer_id: BufferId, + range: ExcerptRange, +} + +impl PartialEq for OutlineEntryExcerpt { + fn eq(&self, other: &Self) -> bool { + self.buffer_id == other.buffer_id && self.id == other.id + } +} + +impl Hash for OutlineEntryExcerpt { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.id).hash(state) + } } #[derive(Clone, Debug, Eq)] -enum FsEntry { - ExternalFile(BufferId, Vec), - Directory(WorktreeId, Entry), - File(WorktreeId, Entry, BufferId, Vec), +struct OutlineEntryOutline { + buffer_id: BufferId, + excerpt_id: ExcerptId, + outline: Outline, } -impl PartialEq for FsEntry { +impl PartialEq for OutlineEntryOutline { fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::ExternalFile(id_a, _), Self::ExternalFile(id_b, _)) => id_a == id_b, - (Self::Directory(id_a, entry_a), Self::Directory(id_b, entry_b)) => { - id_a == id_b && entry_a.id == entry_b.id - } - ( - Self::File(worktree_a, entry_a, id_a, ..), - Self::File(worktree_b, entry_b, id_b, ..), - ) => worktree_a == worktree_b && entry_a.id == entry_b.id && id_a == id_b, - _ => false, + self.buffer_id == other.buffer_id && self.excerpt_id == other.excerpt_id + } +} + +impl Hash for OutlineEntryOutline { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.excerpt_id).hash(state); + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +enum OutlineEntry { + Excerpt(OutlineEntryExcerpt), + Outline(OutlineEntryOutline), +} + 
+impl OutlineEntry { + fn ids(&self) -> (BufferId, ExcerptId) { + match self { + OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), + OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), } } } -impl Hash for FsEntry { - fn hash(&self, state: &mut H) { - match self { - Self::ExternalFile(buffer_id, _) => { - buffer_id.hash(state); - } - Self::Directory(worktree_id, entry) => { - worktree_id.hash(state); - entry.id.hash(state); - } - Self::File(worktree_id, entry, buffer_id, _) => { - worktree_id.hash(state); - entry.id.hash(state); - buffer_id.hash(state); - } - } +#[derive(Debug, Clone, Eq)] +struct FsEntryFile { + worktree_id: WorktreeId, + entry: GitEntry, + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryFile { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id + && self.entry.id == other.entry.id + && self.buffer_id == other.buffer_id } } +impl Hash for FsEntryFile { + fn hash(&self, state: &mut H) { + (self.buffer_id, self.entry.id, self.worktree_id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryDirectory { + worktree_id: WorktreeId, + entry: GitEntry, +} + +impl PartialEq for FsEntryDirectory { + fn eq(&self, other: &Self) -> bool { + self.worktree_id == other.worktree_id && self.entry.id == other.entry.id + } +} + +impl Hash for FsEntryDirectory { + fn hash(&self, state: &mut H) { + (self.worktree_id, self.entry.id).hash(state); + } +} + +#[derive(Debug, Clone, Eq)] +struct FsEntryExternalFile { + buffer_id: BufferId, + excerpts: Vec, +} + +impl PartialEq for FsEntryExternalFile { + fn eq(&self, other: &Self) -> bool { + self.buffer_id == other.buffer_id + } +} + +impl Hash for FsEntryExternalFile { + fn hash(&self, state: &mut H) { + self.buffer_id.hash(state); + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +enum FsEntry { + ExternalFile(FsEntryExternalFile), + Directory(FsEntryDirectory), + File(FsEntryFile), +} + struct ActiveItem { item_handle: 
Box, active_editor: WeakView, @@ -775,7 +863,12 @@ impl OutlinePanel { } fn unfold_directory(&mut self, _: &UnfoldDirectory, cx: &mut ViewContext) { - if let Some(PanelEntry::FoldedDirs(worktree_id, entries)) = self.selected_entry().cloned() { + if let Some(PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + })) = self.selected_entry().cloned() + { self.unfolded_dirs .entry(worktree_id) .or_default() @@ -786,11 +879,11 @@ impl OutlinePanel { fn fold_directory(&mut self, _: &FoldDirectory, cx: &mut ViewContext) { let (worktree_id, entry) = match self.selected_entry().cloned() { - Some(PanelEntry::Fs(FsEntry::Directory(worktree_id, entry))) => { - (worktree_id, Some(entry)) + Some(PanelEntry::Fs(FsEntry::Directory(directory))) => { + (directory.worktree_id, Some(directory.entry)) } - Some(PanelEntry::FoldedDirs(worktree_id, entries)) => { - (worktree_id, entries.last().cloned()) + Some(PanelEntry::FoldedDirs(folded_dirs)) => { + (folded_dirs.worktree_id, folded_dirs.entries.last().cloned()) } _ => return, }; @@ -875,12 +968,12 @@ impl OutlinePanel { let mut scroll_to_buffer = None; let scroll_target = match entry { PanelEntry::FoldedDirs(..) 
| PanelEntry::Fs(FsEntry::Directory(..)) => None, - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { + PanelEntry::Fs(FsEntry::ExternalFile(file)) => { change_selection = false; - scroll_to_buffer = Some(*buffer_id); + scroll_to_buffer = Some(file.buffer_id); multi_buffer_snapshot.excerpts().find_map( |(excerpt_id, buffer_snapshot, excerpt_range)| { - if &buffer_snapshot.remote_id() == buffer_id { + if buffer_snapshot.remote_id() == file.buffer_id { multi_buffer_snapshot .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) } else { @@ -889,13 +982,14 @@ impl OutlinePanel { }, ) } - PanelEntry::Fs(FsEntry::File(_, file_entry, buffer_id, _)) => { + + PanelEntry::Fs(FsEntry::File(file)) => { change_selection = false; - scroll_to_buffer = Some(*buffer_id); + scroll_to_buffer = Some(file.buffer_id); self.project .update(cx, |project, cx| { project - .path_for_entry(file_entry.id, cx) + .path_for_entry(file.entry.id, cx) .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { @@ -909,18 +1003,17 @@ impl OutlinePanel { .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) }) } - PanelEntry::Outline(OutlineEntry::Outline(_, excerpt_id, outline)) => { - multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, outline.range.start) - .or_else(|| { - multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, outline.range.end) - }) - } - PanelEntry::Outline(OutlineEntry::Excerpt(_, excerpt_id, excerpt_range)) => { + PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) + .or_else(|| { + multi_buffer_snapshot + .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) + }), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { change_selection = false; - multi_buffer_snapshot.anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) + multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) } - 
PanelEntry::Search(SearchEntry { match_range, .. }) => Some(match_range.start), + PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; if let Some(anchor) = scroll_target { @@ -960,8 +1053,10 @@ impl OutlinePanel { .iter() .rev() .filter_map(|entry| match entry { - FsEntry::File(_, _, buffer_id, _) - | FsEntry::ExternalFile(buffer_id, _) => Some(*buffer_id), + FsEntry::File(file) => Some(file.buffer_id), + FsEntry::ExternalFile(external_file) => { + Some(external_file.buffer_id) + } FsEntry::Directory(..) => None, }) .skip_while(|id| *id != buffer_id) @@ -1044,69 +1139,68 @@ impl OutlinePanel { match &selected_entry { PanelEntry::Fs(fs_entry) => match fs_entry { FsEntry::ExternalFile(..) => None, - FsEntry::File(worktree_id, entry, ..) - | FsEntry::Directory(worktree_id, entry) => { - entry.path.parent().and_then(|parent_path| { - previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) => { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path - } - PanelEntry::FoldedDirs(dirs_worktree_id, dirs) => { - dirs_worktree_id == worktree_id - && dirs - .last() - .map_or(false, |dir| dir.path.as_ref() == parent_path) - } - _ => false, - }) + FsEntry::File(FsEntryFile { + worktree_id, entry, .. + }) + | FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + }) => entry.path.parent().and_then(|parent_path| { + previous_entries.find(|entry| match entry { + PanelEntry::Fs(FsEntry::Directory(directory)) => { + directory.worktree_id == *worktree_id + && directory.entry.path.as_ref() == parent_path + } + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id: dirs_worktree_id, + entries: dirs, + .. 
+ }) => { + dirs_worktree_id == worktree_id + && dirs + .last() + .map_or(false, |dir| dir.path.as_ref() == parent_path) + } + _ => false, }) - } + }), }, - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .first() .and_then(|entry| entry.path.parent()) .and_then(|parent_path| { previous_entries.find(|entry| { - if let PanelEntry::Fs(FsEntry::Directory(dir_worktree_id, dir_entry)) = - entry - { - dir_worktree_id == worktree_id - && dir_entry.path.as_ref() == parent_path + if let PanelEntry::Fs(FsEntry::Directory(directory)) = entry { + directory.worktree_id == folded_dirs.worktree_id + && directory.entry.path.as_ref() == parent_path } else { false } }) }), - PanelEntry::Outline(OutlineEntry::Excerpt(excerpt_buffer_id, excerpt_id, _)) => { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { - PanelEntry::Fs(FsEntry::File(_, _, file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::File(file)) => { + file.buffer_id == excerpt.buffer_id + && file.excerpts.contains(&excerpt.id) } - PanelEntry::Fs(FsEntry::ExternalFile(file_buffer_id, file_excerpts)) => { - file_buffer_id == excerpt_buffer_id - && file_excerpts.contains(excerpt_id) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + external_file.buffer_id == excerpt.buffer_id + && external_file.excerpts.contains(&excerpt.id) } _ => false, }) } - PanelEntry::Outline(OutlineEntry::Outline( - outline_buffer_id, - outline_excerpt_id, - _, - )) => previous_entries.find(|entry| { - if let PanelEntry::Outline(OutlineEntry::Excerpt( - excerpt_buffer_id, - excerpt_id, - _, - )) = entry - { - outline_buffer_id == excerpt_buffer_id && outline_excerpt_id == excerpt_id - } else { - false - } - }), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + previous_entries.find(|entry| { + if let 
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { + outline.buffer_id == excerpt.buffer_id + && outline.excerpt_id == excerpt.id + } else { + false + } + }) + } PanelEntry::Search(_) => { previous_entries.find(|entry| !matches!(entry, PanelEntry::Search(_))) } @@ -1164,8 +1258,12 @@ impl OutlinePanel { ) { self.select_entry(entry.clone(), true, cx); let is_root = match &entry { - PanelEntry::Fs(FsEntry::File(worktree_id, entry, ..)) - | PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, entry, .. + })) + | PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -1173,7 +1271,11 @@ impl OutlinePanel { worktree.read(cx).root_entry().map(|entry| entry.id) == Some(entry.id) }) .unwrap_or(false), - PanelEntry::FoldedDirs(worktree_id, entries) => entries + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => entries .first() .and_then(|entry| { self.project @@ -1232,9 +1334,11 @@ impl OutlinePanel { fn is_foldable(&self, entry: &PanelEntry) -> bool { let (directory_worktree, directory_entry) = match entry { - PanelEntry::Fs(FsEntry::Directory(directory_worktree, directory_entry)) => { - (*directory_worktree, Some(directory_entry)) - } + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: directory_entry, + .. + })) => (*worktree_id, Some(directory_entry)), _ => return false, }; let Some(directory_entry) = directory_entry else { @@ -1270,24 +1374,34 @@ impl OutlinePanel { }; let mut buffers_to_unfold = HashSet::default(); let entry_to_expand = match &selected_entry { - PanelEntry::FoldedDirs(worktree_id, dir_entries) => dir_entries.last().map(|entry| { + PanelEntry::FoldedDirs(FoldedDirsEntry { + entries: dir_entries, + worktree_id, + .. 
+ }) => dir_entries.last().map(|entry| { buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); CollapsedEntry::Dir(*worktree_id, entry.id) }), - PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => { - buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, dir_entry)); - Some(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => { + buffers_to_unfold.extend(self.buffers_inside_directory(*worktree_id, entry)); + Some(CollapsedEntry::Dir(*worktree_id, entry.id)) } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { buffers_to_unfold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - buffers_to_unfold.insert(*buffer_id); - Some(CollapsedEntry::ExternalFile(*buffer_id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_unfold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } PanelEntry::Search(_) | PanelEntry::Outline(..) => return, }; @@ -1330,19 +1444,24 @@ impl OutlinePanel { let mut buffers_to_fold = HashSet::default(); let collapsed = match &selected_entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, selected_dir_entry)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. 
+ })) => { if self .collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, selected_dir_entry.id)) + .insert(CollapsedEntry::Dir(*worktree_id, entry.id)) { - buffers_to_fold - .extend(self.buffers_inside_directory(*worktree_id, selected_dir_entry)); + buffers_to_fold.extend(self.buffers_inside_directory(*worktree_id, entry)); true } else { false } } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { if self .collapsed_entries .insert(CollapsedEntry::File(*worktree_id, *buffer_id)) @@ -1353,34 +1472,35 @@ impl OutlinePanel { false } } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { if self .collapsed_entries - .insert(CollapsedEntry::ExternalFile(*buffer_id)) + .insert(CollapsedEntry::ExternalFile(external_file.buffer_id)) { - buffers_to_fold.insert(*buffer_id); + buffers_to_fold.insert(external_file.buffer_id); true } else { false } } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { + PanelEntry::FoldedDirs(folded_dirs) => { let mut folded = false; - if let Some(dir_entry) = dir_entries.last() { + if let Some(dir_entry) = folded_dirs.entries.last() { if self .collapsed_entries - .insert(CollapsedEntry::Dir(*worktree_id, dir_entry.id)) + .insert(CollapsedEntry::Dir(folded_dirs.worktree_id, dir_entry.id)) { folded = true; - buffers_to_fold - .extend(self.buffers_inside_directory(*worktree_id, dir_entry)); + buffers_to_fold.extend( + self.buffers_inside_directory(folded_dirs.worktree_id, dir_entry), + ); } } folded } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => self + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .collapsed_entries - .insert(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)), + .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), PanelEntry::Search(_) | PanelEntry::Outline(..) 
=> false, }; @@ -1409,30 +1529,41 @@ impl OutlinePanel { .iter() .fold(HashSet::default(), |mut entries, fs_entry| { match fs_entry { - FsEntry::ExternalFile(buffer_id, _) => { - buffers_to_unfold.insert(*buffer_id); - entries.insert(CollapsedEntry::ExternalFile(*buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, *excerpt_id) - }) - }, + FsEntry::ExternalFile(external_file) => { + buffers_to_unfold.insert(external_file.buffer_id); + entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id)); + entries.extend( + self.excerpts + .get(&external_file.buffer_id) + .into_iter() + .flat_map(|excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt( + external_file.buffer_id, + *excerpt_id, + ) + }) + }), + ); + } + FsEntry::Directory(directory) => { + entries.insert(CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, )); } - FsEntry::Directory(worktree_id, entry) => { - entries.insert(CollapsedEntry::Dir(*worktree_id, entry.id)); - } - FsEntry::File(worktree_id, _, buffer_id, _) => { - buffers_to_unfold.insert(*buffer_id); - entries.insert(CollapsedEntry::File(*worktree_id, *buffer_id)); - entries.extend(self.excerpts.get(buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.iter().map(|(excerpt_id, _)| { - CollapsedEntry::Excerpt(*buffer_id, *excerpt_id) - }) - }, - )); + FsEntry::File(file) => { + buffers_to_unfold.insert(file.buffer_id); + entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id)); + entries.extend( + self.excerpts.get(&file.buffer_id).into_iter().flat_map( + |excerpts| { + excerpts.iter().map(|(excerpt_id, _)| { + CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id) + }) + }, + ), + ); } }; entries @@ -1459,22 +1590,28 @@ impl OutlinePanel { .cached_entries .iter() .flat_map(|cached_entry| match &cached_entry.entry { - 
PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - Some(CollapsedEntry::Dir(*worktree_id, entry.id)) - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)), + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { buffers_to_fold.insert(*buffer_id); Some(CollapsedEntry::File(*worktree_id, *buffer_id)) } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - buffers_to_fold.insert(*buffer_id); - Some(CollapsedEntry::ExternalFile(*buffer_id)) + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_fold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } - PanelEntry::FoldedDirs(worktree_id, entries) => { - Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)) - } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - Some(CollapsedEntry::Excerpt(*buffer_id, *excerpt_id)) + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) } PanelEntry::Search(_) | PanelEntry::Outline(..) => None, }) @@ -1498,7 +1635,11 @@ impl OutlinePanel { let mut fold = false; let mut buffers_to_toggle = HashSet::default(); match entry { - PanelEntry::Fs(FsEntry::Directory(worktree_id, dir_entry)) => { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry: dir_entry, + .. 
+ })) => { let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); buffers_to_toggle.extend(self.buffers_inside_directory(*worktree_id, dir_entry)); @@ -1514,7 +1655,11 @@ impl OutlinePanel { fold = true; } } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { let collapsed_entry = CollapsedEntry::File(*worktree_id, *buffer_id); buffers_to_toggle.insert(*buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { @@ -1522,15 +1667,19 @@ impl OutlinePanel { fold = true; } } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - let collapsed_entry = CollapsedEntry::ExternalFile(*buffer_id); - buffers_to_toggle.insert(*buffer_id); + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + let collapsed_entry = CollapsedEntry::ExternalFile(external_file.buffer_id); + buffers_to_toggle.insert(external_file.buffer_id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); fold = true; } } - PanelEntry::FoldedDirs(worktree_id, dir_entries) => { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dir_entries, + .. 
+ }) => { if let Some(dir_entry) = dir_entries.first() { let entry_id = dir_entry.id; let collapsed_entry = CollapsedEntry::Dir(*worktree_id, entry_id); @@ -1549,8 +1698,8 @@ impl OutlinePanel { } } } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) => { - let collapsed_entry = CollapsedEntry::Excerpt(*buffer_id, *excerpt_id); + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -1625,7 +1774,9 @@ impl OutlinePanel { .selected_entry() .and_then(|entry| match entry { PanelEntry::Fs(entry) => self.relative_path(entry, cx), - PanelEntry::FoldedDirs(_, dirs) => dirs.last().map(|entry| entry.path.clone()), + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs.entries.last().map(|entry| entry.path.clone()) + } PanelEntry::Search(_) | PanelEntry::Outline(..) => None, }) .map(|p| p.to_string_lossy().to_string()) @@ -1679,23 +1830,24 @@ impl OutlinePanel { return Ok(()); }; let related_buffer_entry = match &entry_with_selection { - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - project.update(&mut cx, |project, cx| { - let entry_id = project - .buffer_for_id(*buffer_id, cx) - .and_then(|buffer| buffer.read(cx).entry_id(cx)); - project - .worktree_for_id(*worktree_id, cx) - .zip(entry_id) - .and_then(|(worktree, entry_id)| { - let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); - Some((worktree, entry)) - }) - })? - } + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. 
+ })) => project.update(&mut cx, |project, cx| { + let entry_id = project + .buffer_for_id(*buffer_id, cx) + .and_then(|buffer| buffer.read(cx).entry_id(cx)); + project + .worktree_for_id(*worktree_id, cx) + .zip(entry_id) + .and_then(|(worktree, entry_id)| { + let entry = worktree.read(cx).entry_for_id(entry_id)?.clone(); + Some((worktree, entry)) + }) + })?, PanelEntry::Outline(outline_entry) => { - let &(OutlineEntry::Outline(buffer_id, excerpt_id, _) - | OutlineEntry::Excerpt(buffer_id, excerpt_id, _)) = outline_entry; + let (buffer_id, excerpt_id) = outline_entry.ids(); outline_panel.update(&mut cx, |outline_panel, cx| { outline_panel .collapsed_entries @@ -1808,25 +1960,21 @@ impl OutlinePanel { fn render_excerpt( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - range: &ExcerptRange, + excerpt: &OutlineEntryExcerpt, depth: usize, cx: &mut ViewContext, ) -> Option> { - let item_id = ElementId::from(excerpt_id.to_proto() as usize); + let item_id = ElementId::from(excerpt.id.to_proto() as usize); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Excerpt( - selected_buffer_id, - selected_excerpt_id, - _, - ))) => selected_buffer_id == &buffer_id && selected_excerpt_id == &excerpt_id, + Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { + selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + } _ => false, }; let has_outlines = self .excerpts - .get(&buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt_id)?.outlines { + .get(&excerpt.buffer_id) + .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { ExcerptOutlines::Outlines(outlines) => Some(outlines), ExcerptOutlines::Invalidated(outlines) => Some(outlines), ExcerptOutlines::NotFetched => None, @@ -1834,7 +1982,7 @@ impl OutlinePanel { .map_or(false, |outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + 
.contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); let color = entry_git_aware_label_color(None, false, is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -1844,14 +1992,14 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(buffer_id, range, cx)?; + let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; let label_element = Label::new(label) .single_line() .color(color) .into_any_element(); Some(self.entry_element( - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, excerpt_id, range.clone())), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), item_id, depth, Some(icon), @@ -1878,50 +2026,40 @@ impl OutlinePanel { fn render_outline( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, - rendered_outline: &Outline, + outline: &OutlineEntryOutline, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ - let (item_id, label_element) = ( - ElementId::from(SharedString::from(format!( - "{buffer_id:?}|{excerpt_id:?}{:?}|{:?}", - rendered_outline.range, &rendered_outline.text, - ))), - outline::render_item( - rendered_outline, - string_match - .map(|string_match| string_match.ranges().collect::>()) - .unwrap_or_default(), - cx, - ) - .into_any_element(), - ); + let item_id = ElementId::from(SharedString::from(format!( + "{:?}|{:?}{:?}|{:?}", + outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + ))); + + let label_element = outline::render_item( + &outline.outline, + string_match + .map(|string_match| string_match.ranges().collect::>()) + .unwrap_or_default(), + cx, + ) + .into_any_element(); + let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline( - selected_buffer_id, - selected_excerpt_id, - selected_entry, - ))) => { - selected_buffer_id == &buffer_id - && selected_excerpt_id == &excerpt_id - && selected_entry == rendered_outline + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { + outline == selected && outline.outline == selected.outline } _ => false, }; + let icon = if self.is_singleton_active(cx) { None } else { Some(empty_icon()) }; + self.entry_element( - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - rendered_outline.clone(), - )), + PanelEntry::Outline(OutlineEntry::Outline(outline.clone())), item_id, depth, icon, @@ -1944,7 +2082,9 @@ impl OutlinePanel { _ => false, }; let (item_id, label_element, icon) = match rendered_entry { - FsEntry::File(worktree_id, entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, entry, .. 
+ }) => { let name = self.entry_name(worktree_id, entry, cx); let color = entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); @@ -1967,14 +2107,18 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::Directory(worktree_id, entry) => { - let name = self.entry_name(worktree_id, entry, cx); + FsEntry::Directory(directory) => { + let name = self.entry_name(&directory.worktree_id, &directory.entry, cx); - let is_expanded = !self - .collapsed_entries - .contains(&CollapsedEntry::Dir(*worktree_id, entry.id)); - let color = - entry_git_aware_label_color(entry.git_status, entry.is_ignored, is_active); + let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Dir( + directory.worktree_id, + directory.entry.id, + )); + let color = entry_git_aware_label_color( + directory.entry.git_status, + directory.entry.is_ignored, + is_active, + ); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) } else { @@ -1983,7 +2127,7 @@ impl OutlinePanel { .map(Icon::from_path) .map(|icon| icon.color(color).into_any_element()); ( - ElementId::from(entry.id.to_proto() as usize), + ElementId::from(directory.entry.id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -1995,9 +2139,9 @@ impl OutlinePanel { icon.unwrap_or_else(empty_icon), ) } - FsEntry::ExternalFile(buffer_id, _) => { + FsEntry::ExternalFile(external_file) => { let color = entry_label_color(is_active); - let (icon, name) = match self.buffer_snapshot_for_id(*buffer_id, cx) { + let (icon, name) = match self.buffer_snapshot_for_id(external_file.buffer_id, cx) { Some(buffer_snapshot) => match buffer_snapshot.file() { Some(file) => { let path = file.path(); @@ -2015,7 +2159,7 @@ impl OutlinePanel { None => (None, "Unknown buffer".to_string()), }; ( - ElementId::from(buffer_id.to_proto() as usize), + ElementId::from(external_file.buffer_id.to_proto() as usize), HighlightedLabel::new( name, string_match @@ -2042,29 +2186,32 @@ impl 
OutlinePanel { fn render_folded_dirs( &self, - worktree_id: WorktreeId, - dir_entries: &[Entry], + folded_dir: &FoldedDirsEntry, depth: usize, string_match: Option<&StringMatch>, cx: &mut ViewContext, ) -> Stateful
{ let settings = OutlinePanelSettings::get_global(cx); let is_active = match self.selected_entry() { - Some(PanelEntry::FoldedDirs(selected_worktree_id, selected_entries)) => { - selected_worktree_id == &worktree_id && selected_entries == dir_entries + Some(PanelEntry::FoldedDirs(selected_dirs)) => { + selected_dirs.worktree_id == folded_dir.worktree_id + && selected_dirs.entries == folded_dir.entries } _ => false, }; let (item_id, label_element, icon) = { - let name = self.dir_names_string(dir_entries, worktree_id, cx); + let name = self.dir_names_string(&folded_dir.entries, folded_dir.worktree_id, cx); - let is_expanded = dir_entries.iter().all(|dir| { + let is_expanded = folded_dir.entries.iter().all(|dir| { !self .collapsed_entries - .contains(&CollapsedEntry::Dir(worktree_id, dir.id)) + .contains(&CollapsedEntry::Dir(folded_dir.worktree_id, dir.id)) }); - let is_ignored = dir_entries.iter().any(|entry| entry.is_ignored); - let git_status = dir_entries.first().and_then(|entry| entry.git_status); + let is_ignored = folded_dir.entries.iter().any(|entry| entry.is_ignored); + let git_status = folded_dir + .entries + .first() + .and_then(|entry| entry.git_status); let color = entry_git_aware_label_color(git_status, is_ignored, is_active); let icon = if settings.folder_icons { FileIcons::get_folder_icon(is_expanded, cx) @@ -2075,10 +2222,12 @@ impl OutlinePanel { .map(|icon| icon.color(color).into_any_element()); ( ElementId::from( - dir_entries + folded_dir + .entries .last() .map(|entry| entry.id.to_proto()) - .unwrap_or_else(|| worktree_id.to_proto()) as usize, + .unwrap_or_else(|| folded_dir.worktree_id.to_proto()) + as usize, ), HighlightedLabel::new( name, @@ -2093,7 +2242,7 @@ impl OutlinePanel { }; self.entry_element( - PanelEntry::FoldedDirs(worktree_id, dir_entries.to_vec()), + PanelEntry::FoldedDirs(folded_dir.clone()), item_id, depth, Some(icon), @@ -2366,10 +2515,8 @@ impl OutlinePanel { .background_executor() .spawn(async move { let mut 
processed_external_buffers = HashSet::default(); - let mut new_worktree_entries = HashMap::< - WorktreeId, - (worktree::Snapshot, HashMap), - >::default(); + let mut new_worktree_entries = + HashMap::>::default(); let mut worktree_excerpts = HashMap::< WorktreeId, HashMap)>, @@ -2409,12 +2556,13 @@ impl OutlinePanel { match entry_id.and_then(|id| worktree.entry_for_id(id)).cloned() { Some(entry) => { - let mut traversal = worktree.traverse_from_path( - true, - true, - true, - entry.path.as_ref(), - ); + let entry = GitEntry { + git_status: worktree.status_for_file(&entry.path), + entry, + }; + let mut traversal = worktree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); let mut entries_to_add = HashMap::default(); worktree_excerpts @@ -2446,7 +2594,7 @@ impl OutlinePanel { .is_none(); if new_entry_added && traversal.back_to_parent() { if let Some(parent_entry) = traversal.entry() { - current_entry = parent_entry.clone(); + current_entry = parent_entry.to_owned(); continue; } } @@ -2454,8 +2602,7 @@ impl OutlinePanel { } new_worktree_entries .entry(worktree_id) - .or_insert_with(|| (worktree.clone(), HashMap::default())) - .1 + .or_insert_with(HashMap::default) .extend(entries_to_add); } None => { @@ -2480,11 +2627,9 @@ impl OutlinePanel { let worktree_entries = new_worktree_entries .into_iter() - .map(|(worktree_id, (worktree_snapshot, entries))| { + .map(|(worktree_id, entries)| { let mut entries = entries.into_values().collect::>(); - // For a proper git status propagation, we have to keep the entries sorted lexicographically. 
entries.sort_by(|a, b| a.path.as_ref().cmp(b.path.as_ref())); - worktree_snapshot.propagate_git_statuses(&mut entries); (worktree_id, entries) }) .flat_map(|(worktree_id, entries)| { @@ -2508,19 +2653,22 @@ impl OutlinePanel { } if entry.is_dir() { - Some(FsEntry::Directory(worktree_id, entry)) + Some(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry, + })) } else { let (buffer_id, excerpts) = worktree_excerpts .get_mut(&worktree_id) .and_then(|worktree_excerpts| { worktree_excerpts.remove(&entry.id) })?; - Some(FsEntry::File( + Some(FsEntry::File(FsEntryFile { worktree_id, - entry, buffer_id, + entry, excerpts, - )) + })) } }) .collect::>() @@ -2533,25 +2681,29 @@ impl OutlinePanel { let new_visible_entries = external_excerpts .into_iter() .sorted_by_key(|(id, _)| *id) - .map(|(buffer_id, excerpts)| FsEntry::ExternalFile(buffer_id, excerpts)) + .map(|(buffer_id, excerpts)| { + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + }) + }) .chain(worktree_entries) .filter(|visible_item| { match visible_item { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, - worktree_id, - dir_entry, + &directory.worktree_id, + &directory.entry, ); - let depth = if root_entries.contains(&dir_entry.id) { - 0 - } else { + let mut depth = 0; + if !root_entries.contains(&directory.entry.id) { if auto_fold_dirs { let children = new_children_count - .get(worktree_id) + .get(&directory.worktree_id) .and_then(|children_count| { - children_count.get(&dir_entry.path) + children_count.get(&directory.entry.path) }) .copied() .unwrap_or_default(); @@ -2562,7 +2714,7 @@ impl OutlinePanel { .last() .map(|(parent_dir_id, _)| { new_unfolded_dirs - .get(worktree_id) + .get(&directory.worktree_id) .map_or(true, |unfolded_dirs| { unfolded_dirs .contains(parent_dir_id) @@ -2571,23 +2723,29 @@ impl OutlinePanel { .unwrap_or(true)) { new_unfolded_dirs - .entry(*worktree_id) 
+ .entry(directory.worktree_id) .or_default() - .insert(dir_entry.id); + .insert(directory.entry.id); } } - parent_id + depth = parent_id .and_then(|(worktree_id, id)| { new_depth_map.get(&(worktree_id, id)).copied() }) .unwrap_or(0) - + 1 + + 1; }; - visited_dirs.push((dir_entry.id, dir_entry.path.clone())); - new_depth_map.insert((*worktree_id, dir_entry.id), depth); + visited_dirs + .push((directory.entry.id, directory.entry.path.clone())); + new_depth_map + .insert((directory.worktree_id, directory.entry.id), depth); } - FsEntry::File(worktree_id, file_entry, ..) => { + FsEntry::File(FsEntryFile { + worktree_id, + entry: file_entry, + .. + }) => { let parent_id = back_to_common_visited_parent( &mut visited_dirs, worktree_id, @@ -2718,8 +2876,14 @@ impl OutlinePanel { .iter() .find(|fs_entry| match fs_entry { FsEntry::Directory(..) => false, - FsEntry::File(_, _, file_buffer_id, _) - | FsEntry::ExternalFile(file_buffer_id, _) => *file_buffer_id == buffer_id, + FsEntry::File(FsEntryFile { + buffer_id: other_buffer_id, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id: other_buffer_id, + .. 
+ }) => buffer_id == *other_buffer_id, }) .cloned() .map(PanelEntry::Fs); @@ -2869,26 +3033,31 @@ impl OutlinePanel { .cloned(); let closest_container = match outline_item { - Some(outline) => { - PanelEntry::Outline(OutlineEntry::Outline(buffer_id, excerpt_id, outline)) - } + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { + buffer_id, + excerpt_id, + outline, + })), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { - PanelEntry::Outline(OutlineEntry::Excerpt( - entry_buffer_id, - entry_excerpt_id, - _, - )) => { - if entry_buffer_id == &buffer_id && entry_excerpt_id == &excerpt_id { + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { Some(cached_entry.entry.clone()) } else { None } } PanelEntry::Fs( - FsEntry::ExternalFile(file_buffer_id, file_excerpts) - | FsEntry::File(_, _, file_buffer_id, file_excerpts), + FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id: file_buffer_id, + excerpts: file_excerpts, + }) + | FsEntry::File(FsEntryFile { + buffer_id: file_buffer_id, + excerpts: file_excerpts, + .. + }), ) => { if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { Some(cached_entry.entry.clone()) @@ -2987,8 +3156,15 @@ impl OutlinePanel { .iter() .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(_, _, buffer_id, file_excerpts) - | FsEntry::ExternalFile(buffer_id, file_excerpts) => { + FsEntry::File(FsEntryFile { + buffer_id, + excerpts: file_excerpts, + .. 
+ }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts: file_excerpts, + }) => { let excerpts = self.excerpts.get(buffer_id); for &file_excerpt in file_excerpts { if let Some(excerpt) = excerpts @@ -3038,21 +3214,28 @@ impl OutlinePanel { fn abs_path(&self, entry: &PanelEntry, cx: &AppContext) -> Option { match entry { PanelEntry::Fs( - FsEntry::File(_, _, buffer_id, _) | FsEntry::ExternalFile(buffer_id, _), + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }), ) => self .buffer_snapshot_for_id(*buffer_id, cx) .and_then(|buffer_snapshot| { let file = File::from_dyn(buffer_snapshot.file())?; file.worktree.read(cx).absolutize(&file.path).ok() }), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => self + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. + })) => self .project .read(cx) .worktree_for_id(*worktree_id, cx)? .read(cx) .absolutize(&entry.path) .ok(), - PanelEntry::FoldedDirs(worktree_id, dirs) => dirs.last().and_then(|entry| { + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries: dirs, + .. + }) => dirs.last().and_then(|entry| { self.project .read(cx) .worktree_for_id(*worktree_id, cx) @@ -3064,12 +3247,12 @@ impl OutlinePanel { fn relative_path(&self, entry: &FsEntry, cx: &AppContext) -> Option> { match entry { - FsEntry::ExternalFile(buffer_id, _) => { + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { let buffer_snapshot = self.buffer_snapshot_for_id(*buffer_id, cx)?; Some(buffer_snapshot.file()?.path().clone()) } - FsEntry::Directory(_, entry) => Some(entry.path.clone()), - FsEntry::File(_, entry, ..) => Some(entry.path.clone()), + FsEntry::Directory(FsEntryDirectory { entry, .. }) => Some(entry.path.clone()), + FsEntry::File(FsEntryFile { entry, .. 
}) => Some(entry.path.clone()), } } @@ -3135,7 +3318,7 @@ impl OutlinePanel { let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; - let mut folded_dirs_entry = None::<(usize, WorktreeId, Vec)>; + let mut folded_dirs_entry = None::<(usize, FoldedDirsEntry)>; let track_matches = query.is_some(); #[derive(Debug)] @@ -3149,29 +3332,29 @@ impl OutlinePanel { for entry in outline_panel.fs_entries.clone() { let is_expanded = outline_panel.is_expanded(&entry); let (depth, should_add) = match &entry { - FsEntry::Directory(worktree_id, dir_entry) => { + FsEntry::Directory(directory_entry) => { let mut should_add = true; let is_root = project .read(cx) - .worktree_for_id(*worktree_id, cx) + .worktree_for_id(directory_entry.worktree_id, cx) .map_or(false, |worktree| { - worktree.read(cx).root_entry() == Some(dir_entry) + worktree.read(cx).root_entry() == Some(&directory_entry.entry) }); let folded = auto_fold_dirs && !is_root && outline_panel .unfolded_dirs - .get(worktree_id) + .get(&directory_entry.worktree_id) .map_or(true, |unfolded_dirs| { - !unfolded_dirs.contains(&dir_entry.id) + !unfolded_dirs.contains(&directory_entry.entry.id) }); let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, dir_entry.id)) + .get(&(directory_entry.worktree_id, directory_entry.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if dir_entry.path.starts_with(&parent.path) { + if directory_entry.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); @@ -3179,11 +3362,14 @@ impl OutlinePanel { let auto_fold = match parent_dirs.last() { Some(parent) => { parent.folded - && Some(parent.path.as_ref()) == dir_entry.path.parent() + && Some(parent.path.as_ref()) + == directory_entry.entry.path.parent() && outline_panel .fs_children_count - .get(worktree_id) - .and_then(|entries| entries.get(&dir_entry.path)) + .get(&directory_entry.worktree_id) + 
.and_then(|entries| { + entries.get(&directory_entry.entry.path) + }) .copied() .unwrap_or_default() .may_be_fold_part() @@ -3201,7 +3387,7 @@ impl OutlinePanel { parent.depth + 1 }; parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: parent_expanded && is_expanded, depth: new_depth, @@ -3210,7 +3396,7 @@ impl OutlinePanel { } None => { parent_dirs.push(ParentStats { - path: dir_entry.path.clone(), + path: directory_entry.entry.path.clone(), folded, expanded: is_expanded, depth: fs_depth, @@ -3219,37 +3405,38 @@ impl OutlinePanel { } }; - if let Some((folded_depth, folded_worktree_id, mut folded_dirs)) = - folded_dirs_entry.take() + if let Some((folded_depth, mut folded_dirs)) = folded_dirs_entry.take() { if folded - && worktree_id == &folded_worktree_id - && dir_entry.path.parent() - == folded_dirs.last().map(|entry| entry.path.as_ref()) + && directory_entry.worktree_id == folded_dirs.worktree_id + && directory_entry.entry.path.parent() + == folded_dirs + .entries + .last() + .map(|entry| entry.path.as_ref()) { - folded_dirs.push(dir_entry.clone()); - folded_dirs_entry = - Some((folded_depth, folded_worktree_id, folded_dirs)) + folded_dirs.entries.push(directory_entry.entry.clone()); + folded_dirs_entry = Some((folded_depth, folded_dirs)) } else { if !is_singleton { let start_of_collapsed_dir_sequence = !parent_expanded && parent_dirs .iter() .rev() - .nth(folded_dirs.len() + 1) + .nth(folded_dirs.entries.len() + 1) .map_or(true, |parent| parent.expanded); if start_of_collapsed_dir_sequence || parent_expanded || query.is_some() { if parent_folded { - folded_dirs.push(dir_entry.clone()); + folded_dirs + .entries + .push(directory_entry.entry.clone()); should_add = false; } - let new_folded_dirs = PanelEntry::FoldedDirs( - folded_worktree_id, - folded_dirs, - ); + let new_folded_dirs = + PanelEntry::FoldedDirs(folded_dirs.clone()); outline_panel.push_entry( &mut generation_state, track_matches, 
@@ -3263,12 +3450,23 @@ impl OutlinePanel { folded_dirs_entry = if parent_folded { None } else { - Some((depth, *worktree_id, vec![dir_entry.clone()])) + Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )) }; } } else if folded { - folded_dirs_entry = - Some((depth, *worktree_id, vec![dir_entry.clone()])); + folded_dirs_entry = Some(( + depth, + FoldedDirsEntry { + worktree_id: directory_entry.worktree_id, + entries: vec![directory_entry.entry.clone()], + }, + )); } let should_add = @@ -3276,21 +3474,22 @@ impl OutlinePanel { (depth, should_add) } FsEntry::ExternalFile(..) => { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + if let Some((folded_depth, folded_dir)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dir + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dir), folded_depth, cx, ); @@ -3299,22 +3498,23 @@ impl OutlinePanel { parent_dirs.clear(); (0, true) } - FsEntry::File(worktree_id, file_entry, ..) 
=> { - if let Some((folded_depth, worktree_id, folded_dirs)) = - folded_dirs_entry.take() - { + FsEntry::File(file) => { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() .find(|parent| { - folded_dirs.iter().all(|entry| entry.path != parent.path) + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) }) .map_or(true, |parent| parent.expanded); if !is_singleton && (parent_expanded || query.is_some()) { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3323,23 +3523,22 @@ impl OutlinePanel { let fs_depth = outline_panel .fs_entries_depth - .get(&(*worktree_id, file_entry.id)) + .get(&(file.worktree_id, file.entry.id)) .copied() .unwrap_or(0); while let Some(parent) = parent_dirs.last() { - if file_entry.path.starts_with(&parent.path) { + if file.entry.path.starts_with(&parent.path) { break; } parent_dirs.pop(); } - let (depth, should_add) = match parent_dirs.last() { + match parent_dirs.last() { Some(parent) => { let new_depth = parent.depth + 1; (new_depth, parent.expanded) } None => (fs_depth, true), - }; - (depth, should_add) + } } }; @@ -3373,12 +3572,16 @@ impl OutlinePanel { let excerpts_to_consider = if is_singleton || query.is_some() || (should_add && is_expanded) { match &entry { - FsEntry::File(_, _, buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } - FsEntry::ExternalFile(buffer_id, entry_excerpts) => { - Some((*buffer_id, entry_excerpts)) - } + FsEntry::File(FsEntryFile { + buffer_id, + excerpts, + .. + }) + | FsEntry::ExternalFile(FsEntryExternalFile { + buffer_id, + excerpts, + .. 
+ }) => Some((*buffer_id, excerpts)), _ => None, } } else { @@ -3417,17 +3620,22 @@ impl OutlinePanel { } } - if let Some((folded_depth, worktree_id, folded_dirs)) = folded_dirs_entry.take() { + if let Some((folded_depth, folded_dirs)) = folded_dirs_entry.take() { let parent_expanded = parent_dirs .iter() .rev() - .find(|parent| folded_dirs.iter().all(|entry| entry.path != parent.path)) + .find(|parent| { + folded_dirs + .entries + .iter() + .all(|entry| entry.path != parent.path) + }) .map_or(true, |parent| parent.expanded); if parent_expanded || query.is_some() { outline_panel.push_entry( &mut generation_state, track_matches, - PanelEntry::FoldedDirs(worktree_id, folded_dirs), + PanelEntry::FoldedDirs(folded_dirs), folded_depth, cx, ); @@ -3490,13 +3698,16 @@ impl OutlinePanel { depth: usize, cx: &mut WindowContext, ) { - let entry = if let PanelEntry::FoldedDirs(worktree_id, entries) = &entry { - match entries.len() { + let entry = if let PanelEntry::FoldedDirs(folded_dirs_entry) = &entry { + match folded_dirs_entry.entries.len() { 0 => { debug_panic!("Empty folded dirs receiver"); return; } - 1 => PanelEntry::Fs(FsEntry::Directory(*worktree_id, entries[0].clone())), + 1 => PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id: folded_dirs_entry.worktree_id, + entry: folded_dirs_entry.entries[0].clone(), + })), _ => entry, } } else { @@ -3515,22 +3726,22 @@ impl OutlinePanel { .push(StringMatchCandidate::new(id, &file_name)); } } - PanelEntry::FoldedDirs(worktree_id, entries) => { - let dir_names = self.dir_names_string(entries, *worktree_id, cx); + PanelEntry::FoldedDirs(folded_dir_entry) => { + let dir_names = self.dir_names_string( + &folded_dir_entry.entries, + folded_dir_entry.worktree_id, + cx, + ); { state .match_candidates .push(StringMatchCandidate::new(id, &dir_names)); } } - PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Outline(_, _, outline) => { - state - .match_candidates - 
.push(StringMatchCandidate::new(id, &outline.text)); - } - OutlineEntry::Excerpt(..) => {} - }, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state + .match_candidates + .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { state @@ -3558,7 +3769,7 @@ impl OutlinePanel { fn dir_names_string( &self, - entries: &[Entry], + entries: &[GitEntry], worktree_id: WorktreeId, cx: &AppContext, ) -> String { @@ -3580,11 +3791,17 @@ impl OutlinePanel { fn is_expanded(&self, entry: &FsEntry) -> bool { let entry_to_check = match entry { - FsEntry::ExternalFile(buffer_id, _) => CollapsedEntry::ExternalFile(*buffer_id), - FsEntry::File(worktree_id, _, buffer_id, _) => { - CollapsedEntry::File(*worktree_id, *buffer_id) + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + CollapsedEntry::ExternalFile(*buffer_id) } - FsEntry::Directory(worktree_id, entry) => CollapsedEntry::Dir(*worktree_id, entry.id), + FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + }) => CollapsedEntry::File(*worktree_id, *buffer_id), + FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. 
+ }) => CollapsedEntry::Dir(*worktree_id, entry.id), }; !self.collapsed_entries.contains(&entry_to_check) } @@ -3708,11 +3925,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt( + PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { buffer_id, - excerpt_id, - excerpt.range.clone(), - )), + id: excerpt_id, + range: excerpt.range.clone(), + })), excerpt_depth, cx, ); @@ -3733,11 +3950,11 @@ impl OutlinePanel { self.push_entry( state, track_matches, - PanelEntry::Outline(OutlineEntry::Outline( + PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { buffer_id, excerpt_id, - outline.clone(), - )), + outline: outline.clone(), + })), outline_base_depth + outline.depth, cx, ); @@ -3763,9 +3980,9 @@ impl OutlinePanel { let kind = search_state.kind; let related_excerpts = match &parent_entry { - FsEntry::Directory(_, _) => return, - FsEntry::ExternalFile(_, excerpts) => excerpts, - FsEntry::File(_, _, _, excerpts) => excerpts, + FsEntry::Directory(_) => return, + FsEntry::ExternalFile(external) => &external.excerpts, + FsEntry::File(file) => &file.excerpts, } .iter() .copied() @@ -4031,24 +4248,28 @@ impl OutlinePanel { fn width_estimate(&self, depth: usize, entry: &PanelEntry, cx: &AppContext) -> u64 { let item_text_chars = match entry { - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => self - .buffer_snapshot_for_id(*buffer_id, cx) + PanelEntry::Fs(FsEntry::ExternalFile(external)) => self + .buffer_snapshot_for_id(external.buffer_id, cx) .and_then(|snapshot| { Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) }) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::Directory(_, directory)) => directory + PanelEntry::Fs(FsEntry::Directory(directory)) => directory + .entry .path .file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - PanelEntry::Fs(FsEntry::File(_, file, _, _)) => file + PanelEntry::Fs(FsEntry::File(file)) => file + .entry .path 
.file_name() .map(|name| name.to_string_lossy().len()) .unwrap_or_default(), - PanelEntry::FoldedDirs(_, dirs) => { - dirs.iter() + PanelEntry::FoldedDirs(folded_dirs) => { + folded_dirs + .entries + .iter() .map(|dir| { dir.path .file_name() @@ -4056,13 +4277,13 @@ impl OutlinePanel { .unwrap_or_default() }) .sum::() - + dirs.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() + + folded_dirs.entries.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() } - PanelEntry::Outline(OutlineEntry::Excerpt(buffer_id, _, range)) => self - .excerpt_label(*buffer_id, range, cx) + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self + .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) .map(|label| label.len()) .unwrap_or_default(), - PanelEntry::Outline(OutlineEntry::Outline(_, _, outline)) => outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -4136,38 +4357,25 @@ impl OutlinePanel { cached_entry.string_match.as_ref(), cx, )), - PanelEntry::FoldedDirs(worktree_id, entries) => { + PanelEntry::FoldedDirs(folded_dirs_entry) => { Some(outline_panel.render_folded_dirs( - worktree_id, - &entries, + &folded_dirs_entry, + cached_entry.depth, + cached_entry.string_match.as_ref(), + cx, + )) + } + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + outline_panel.render_excerpt(&excerpt, cached_entry.depth, cx) + } + PanelEntry::Outline(OutlineEntry::Outline(entry)) => { + Some(outline_panel.render_outline( + &entry, cached_entry.depth, cached_entry.string_match.as_ref(), cx, )) } - PanelEntry::Outline(OutlineEntry::Excerpt( - buffer_id, - excerpt_id, - excerpt, - )) => outline_panel.render_excerpt( - buffer_id, - excerpt_id, - &excerpt, - cached_entry.depth, - cx, - ), - PanelEntry::Outline(OutlineEntry::Outline( - buffer_id, - excerpt_id, - outline, - )) => Some(outline_panel.render_outline( - buffer_id, - excerpt_id, - &outline, - cached_entry.depth, - 
cached_entry.string_match.as_ref(), - cx, - )), PanelEntry::Search(SearchEntry { match_range, render_data, @@ -4304,7 +4512,7 @@ impl OutlinePanel { fn buffers_inside_directory( &self, dir_worktree: WorktreeId, - dir_entry: &Entry, + dir_entry: &GitEntry, ) -> HashSet { if !dir_entry.is_dir() { debug_panic!("buffers_inside_directory called on a non-directory entry {dir_entry:?}"); @@ -4314,23 +4522,24 @@ impl OutlinePanel { self.fs_entries .iter() .skip_while(|fs_entry| match fs_entry { - FsEntry::Directory(worktree_id, entry) => { - *worktree_id != dir_worktree || entry != dir_entry + FsEntry::Directory(directory) => { + directory.worktree_id != dir_worktree || &directory.entry != dir_entry } _ => true, }) .skip(1) .take_while(|fs_entry| match fs_entry { FsEntry::ExternalFile(..) => false, - FsEntry::Directory(worktree_id, entry) => { - *worktree_id == dir_worktree && entry.path.starts_with(&dir_entry.path) + FsEntry::Directory(directory) => { + directory.worktree_id == dir_worktree + && directory.entry.path.starts_with(&dir_entry.path) } - FsEntry::File(worktree_id, entry, ..) => { - *worktree_id == dir_worktree && entry.path.starts_with(&dir_entry.path) + FsEntry::File(file) => { + file.worktree_id == dir_worktree && file.entry.path.starts_with(&dir_entry.path) } }) .filter_map(|fs_entry| match fs_entry { - FsEntry::File(_, _, buffer_id, _) => Some(*buffer_id), + FsEntry::File(file) => Some(file.buffer_id), _ => None, }) .collect() @@ -4678,14 +4887,14 @@ fn subscribe_for_editor_events( .fs_entries .iter() .find_map(|fs_entry| match fs_entry { - FsEntry::ExternalFile(buffer_id, _) => { - if *buffer_id == toggled_buffer_id { + FsEntry::ExternalFile(external) => { + if external.buffer_id == toggled_buffer_id { Some(fs_entry.clone()) } else { None } } - FsEntry::File(_, _, buffer_id, _) => { + FsEntry::File(FsEntryFile { buffer_id, .. 
}) => { if *buffer_id == toggled_buffer_id { Some(fs_entry.clone()) } else { @@ -5545,41 +5754,46 @@ mod tests { } display_string += &match &entry.entry { PanelEntry::Fs(entry) => match entry { - FsEntry::ExternalFile(_, _) => { + FsEntry::ExternalFile(_) => { panic!("Did not cover external files with tests") } - FsEntry::Directory(_, dir_entry) => format!( + FsEntry::Directory(directory) => format!( "{}/", - dir_entry + directory + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default() ), - FsEntry::File(_, file_entry, ..) => file_entry + FsEntry::File(file) => file + .entry .path .file_name() .map(|name| name.to_string_lossy().to_string()) .unwrap_or_default(), }, - PanelEntry::FoldedDirs(_, dirs) => dirs + PanelEntry::FoldedDirs(folded_dirs) => folded_dirs + .entries .iter() .filter_map(|dir| dir.path.file_name()) .map(|name| name.to_string_lossy().to_string() + "/") .collect(), PanelEntry::Outline(outline_entry) => match outline_entry { - OutlineEntry::Excerpt(_, _, _) => continue, - OutlineEntry::Outline(_, _, outline) => format!("outline: {}", outline.text), + OutlineEntry::Excerpt(_) => continue, + OutlineEntry::Outline(outline_entry) => { + format!("outline: {}", outline_entry.outline.text) + } }, - PanelEntry::Search(SearchEntry { - render_data, - match_range, - .. 
- }) => { + PanelEntry::Search(search_entry) => { format!( "search: {}", - render_data - .get_or_init(|| SearchData::new(match_range, &multi_buffer_snapshot)) + search_entry + .render_data + .get_or_init(|| SearchData::new( + &search_entry.match_range, + &multi_buffer_snapshot + )) .context_text ) } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index c88383b193..e5e0ce85a2 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -569,9 +569,9 @@ impl LocalBufferStore { buffer_change_sets .into_iter() .filter_map(|(change_set, buffer_snapshot, path)| { - let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?; - let relative_path = repo_entry.relativize(&snapshot, &path).ok()?; - let base_text = local_repo_entry.repo().load_index_text(&relative_path); + let local_repo = snapshot.local_repo_for_path(&path)?; + let relative_path = local_repo.relativize(&path).ok()?; + let base_text = local_repo.repo().load_index_text(&relative_path); Some((change_set, buffer_snapshot, base_text)) }) .collect::>() @@ -1161,16 +1161,16 @@ impl BufferStore { Worktree::Local(worktree) => { let worktree = worktree.snapshot(); let blame_params = maybe!({ - let (repo_entry, local_repo_entry) = match worktree.repo_for_path(&file.path) { + let local_repo = match worktree.local_repo_for_path(&file.path) { Some(repo_for_path) => repo_for_path, None => return Ok(None), }; - let relative_path = repo_entry - .relativize(&worktree, &file.path) + let relative_path = local_repo + .relativize(&file.path) .context("failed to relativize buffer path")?; - let repo = local_repo_entry.repo().clone(); + let repo = local_repo.repo().clone(); let content = match version { Some(version) => buffer.rope_for_version(&version).clone(), @@ -1247,7 +1247,7 @@ impl BufferStore { }); }; - let path = match repo_entry.relativize(worktree, file.path()) { + let path = match repo_entry.relativize(file.path()) { Ok(RepoPath(path)) => path, 
Err(e) => return Task::ready(Err(e)), }; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index e06858eeda..e4b8c850d0 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -87,9 +87,8 @@ pub use language::Location; #[cfg(any(test, feature = "test-support"))] pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); @@ -3109,6 +3108,7 @@ impl LspStore { WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); } + WorktreeStoreEvent::GitRepositoryUpdated => {} } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 177e05bd62..b729b72e50 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -39,7 +39,10 @@ use futures::{ pub use image_store::{ImageItem, ImageStore}; use image_store::{ImageItemEvent, ImageStoreEvent}; -use git::{blame::Blame, repository::GitRepository}; +use git::{ + blame::Blame, + repository::{GitFileStatus, GitRepository}, +}; use gpui::{ AnyModel, AppContext, AsyncAppContext, BorrowAppContext, Context as _, EventEmitter, Hsla, Model, ModelContext, SharedString, Task, WeakModel, WindowContext, @@ -95,9 +98,8 @@ pub use task_inventory::{ BasicContextProvider, ContextProviderWithTasks, Inventory, TaskSourceKind, }; pub use worktree::{ - Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, RepositoryEntry, - UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, 
WorktreeId, WorktreeSettings, - FS_WATCH_LATENCY, + Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId, UpdatedEntriesSet, + UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings, FS_WATCH_LATENCY, }; pub use buffer_store::ProjectTransaction; @@ -242,6 +244,7 @@ pub enum Event { ActivateProjectPanel, WorktreeAdded(WorktreeId), WorktreeOrderChanged, + GitRepositoryUpdated, WorktreeRemoved(WorktreeId), WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet), WorktreeUpdatedGitRepositories(WorktreeId), @@ -1433,6 +1436,15 @@ impl Project { .unwrap_or(false) } + pub fn project_path_git_status( + &self, + project_path: &ProjectPath, + cx: &AppContext, + ) -> Option { + self.worktree_for_id(project_path.worktree_id, cx) + .and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path)) + } + pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option { paths .iter() @@ -2295,6 +2307,7 @@ impl Project { } WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), WorktreeStoreEvent::WorktreeUpdateSent(_) => {} + WorktreeStoreEvent::GitRepositoryUpdated => cx.emit(Event::GitRepositoryUpdated), } } @@ -3516,17 +3529,6 @@ impl Project { ) } - pub fn get_repo( - &self, - project_path: &ProjectPath, - cx: &AppContext, - ) -> Option> { - self.worktree_for_id(project_path.worktree_id, cx)? - .read(cx) - .as_local()? 
- .local_git_repo(&project_path.path) - } - pub fn get_first_worktree_root_repo(&self, cx: &AppContext) -> Option> { let worktree = self.visible_worktrees(cx).next()?.read(cx).as_local()?; let root_entry = worktree.root_git_entry()?; @@ -4426,8 +4428,10 @@ impl Completion { } } -pub fn sort_worktree_entries(entries: &mut [Entry]) { +pub fn sort_worktree_entries(entries: &mut [impl AsRef]) { entries.sort_by(|entry_a, entry_b| { + let entry_a = entry_a.as_ref(); + let entry_b = entry_b.as_ref(); compare_paths( (&entry_a.path, entry_a.is_file()), (&entry_b.path, entry_b.is_file()), diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 2a31710df6..ede820e3e9 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -109,7 +109,7 @@ impl Inventory { /// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContext`] given. /// Joins the new resolutions with the resolved tasks that were used (spawned) before, /// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first. - /// Deduplicates the tasks by their labels and contenxt and splits the ordered list into two: used tasks and the rest, newly resolved tasks. + /// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks. 
pub fn used_and_current_resolved_tasks( &self, worktree: Option, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index c39b88cd40..0b6c9b4c0d 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -62,6 +62,7 @@ pub enum WorktreeStoreEvent { WorktreeReleased(EntityId, WorktreeId), WorktreeOrderChanged, WorktreeUpdateSent(Model), + GitRepositoryUpdated, } impl EventEmitter for WorktreeStore {} @@ -322,6 +323,7 @@ impl WorktreeStore { let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await; let worktree = worktree?; + this.update(&mut cx, |this, cx| this.add(&worktree, cx))?; if visible { @@ -374,6 +376,17 @@ impl WorktreeStore { this.send_project_updates(cx); }) .detach(); + + cx.subscribe( + worktree, + |_this, _, event: &worktree::Event, cx| match event { + worktree::Event::UpdatedGitRepositories(_) => { + cx.emit(WorktreeStoreEvent::GitRepositoryUpdated); + } + worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedEntries(_) => {} + }, + ) + .detach(); } pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { @@ -583,11 +596,11 @@ impl WorktreeStore { pub fn shared( &mut self, remote_id: u64, - downsteam_client: AnyProtoClient, + downstream_client: AnyProtoClient, cx: &mut ModelContext, ) { self.retain_worktrees = true; - self.downstream_client = Some((downsteam_client, remote_id)); + self.downstream_client = Some((downstream_client, remote_id)); // When shared, retain all worktrees for worktree_handle in self.worktrees.iter_mut() { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 03d335c3c6..78a2fd4267 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -63,7 +63,7 @@ use workspace::{ notifications::{DetachAndPromptErr, NotifyTaskExt}, DraggedSelection, OpenInTerminal, PreviewTabsSettings, SelectedEntry, 
Workspace, }; -use worktree::CreatedEntry; +use worktree::{CreatedEntry, GitEntry, GitEntryRef}; const PROJECT_PANEL_KEY: &str = "ProjectPanel"; const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX; @@ -76,7 +76,7 @@ pub struct ProjectPanel { // An update loop that keeps incrementing/decrementing scroll offset while there is a dragged entry that's // hovered over the start/end of a list. hover_scroll_task: Option>, - visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, + visible_entries: Vec<(WorktreeId, Vec, OnceCell>>)>, /// Maps from leaf project entry ID to the currently selected ancestor. /// Relevant only for auto-fold dirs, where a single project panel entry may actually consist of several /// project entries (and all non-leaf nodes are guaranteed to be directories). @@ -311,7 +311,8 @@ impl ProjectPanel { this.update_visible_entries(None, cx); cx.notify(); } - project::Event::WorktreeUpdatedEntries(_, _) + project::Event::GitRepositoryUpdated + | project::Event::WorktreeUpdatedEntries(_, _) | project::Event::WorktreeAdded(_) | project::Event::WorktreeOrderChanged => { this.update_visible_entries(None, cx); @@ -1366,9 +1367,10 @@ impl ProjectPanel { let parent_entry = worktree.entry_for_path(parent_path)?; // Remove all siblings that are being deleted except the last marked entry - let mut siblings: Vec = worktree + let mut siblings: Vec<_> = worktree .snapshot() .child_entries(parent_path) + .with_git_statuses() .filter(|sibling| { sibling.id == latest_entry.id || !marked_entries_in_worktree.contains(&&SelectedEntry { @@ -1376,7 +1378,7 @@ impl ProjectPanel { entry_id: sibling.id, }) }) - .cloned() + .map(|entry| entry.to_owned()) .collect(); project::sort_worktree_entries(&mut siblings); @@ -2334,7 +2336,7 @@ impl ProjectPanel { } let mut visible_worktree_entries = Vec::new(); - let mut entry_iter = snapshot.entries(true, 0); + let mut entry_iter = snapshot.entries(true, 0).with_git_statuses(); let mut auto_folded_ancestors = vec![]; while let 
Some(entry) = entry_iter.entry() { if auto_collapse_dirs && entry.kind.is_dir() { @@ -2376,7 +2378,7 @@ impl ProjectPanel { } } auto_folded_ancestors.clear(); - visible_worktree_entries.push(entry.clone()); + visible_worktree_entries.push(entry.to_owned()); let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id { entry.id == new_entry_id || { self.ancestors.get(&entry.id).map_or(false, |entries| { @@ -2390,25 +2392,27 @@ impl ProjectPanel { false }; if precedes_new_entry { - visible_worktree_entries.push(Entry { - id: NEW_ENTRY_ID, - kind: new_entry_kind, - path: entry.path.join("\0").into(), - inode: 0, - mtime: entry.mtime, - size: entry.size, - is_ignored: entry.is_ignored, - is_external: false, - is_private: false, - is_always_included: entry.is_always_included, + visible_worktree_entries.push(GitEntry { + entry: Entry { + id: NEW_ENTRY_ID, + kind: new_entry_kind, + path: entry.path.join("\0").into(), + inode: 0, + mtime: entry.mtime, + size: entry.size, + is_ignored: entry.is_ignored, + is_external: false, + is_private: false, + is_always_included: entry.is_always_included, + canonical_path: entry.canonical_path.clone(), + char_bag: entry.char_bag, + is_fifo: entry.is_fifo, + }, git_status: entry.git_status, - canonical_path: entry.canonical_path.clone(), - char_bag: entry.char_bag, - is_fifo: entry.is_fifo, }); } let worktree_abs_path = worktree.read(cx).abs_path(); - let (depth, path) = if Some(entry) == worktree.read(cx).root_entry() { + let (depth, path) = if Some(entry.entry) == worktree.read(cx).root_entry() { let Some(path_name) = worktree_abs_path .file_name() .with_context(|| { @@ -2485,8 +2489,8 @@ impl ProjectPanel { entry_iter.advance(); } - snapshot.propagate_git_statuses(&mut visible_worktree_entries); project::sort_worktree_entries(&mut visible_worktree_entries); + self.visible_entries .push((worktree_id, visible_worktree_entries, OnceCell::new())); } @@ -2714,13 +2718,13 @@ impl ProjectPanel { None } - fn entry_at_index(&self, 
index: usize) -> Option<(WorktreeId, &Entry)> { + fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef)> { let mut offset = 0; for (worktree_id, visible_worktree_entries, _) in &self.visible_entries { if visible_worktree_entries.len() > offset + index { return visible_worktree_entries .get(index) - .map(|entry| (*worktree_id, entry)); + .map(|entry| (*worktree_id, entry.to_ref())); } offset += visible_worktree_entries.len(); } @@ -2753,7 +2757,7 @@ impl ProjectPanel { .collect() }); for entry in visible_worktree_entries[entry_range].iter() { - callback(entry, entries, cx); + callback(&entry, entries, cx); } ix = end_ix; } @@ -2822,7 +2826,7 @@ impl ProjectPanel { }; let (depth, difference) = - ProjectPanel::calculate_depth_and_difference(entry, entries); + ProjectPanel::calculate_depth_and_difference(&entry, entries); let filename = match difference { diff if diff > 1 => entry @@ -2951,9 +2955,9 @@ impl ProjectPanel { worktree_id: WorktreeId, reverse_search: bool, only_visible_entries: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, - ) -> Option { + ) -> Option { if only_visible_entries { let entries = self .visible_entries @@ -2968,15 +2972,18 @@ impl ProjectPanel { .clone(); return utils::ReversibleIterable::new(entries.iter(), reverse_search) - .find(|ele| predicate(ele, worktree_id)) + .find(|ele| predicate(ele.to_ref(), worktree_id)) .cloned(); } let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?; worktree.update(cx, |tree, _| { - utils::ReversibleIterable::new(tree.entries(true, 0usize), reverse_search) - .find_single_ended(|ele| predicate(ele, worktree_id)) - .cloned() + utils::ReversibleIterable::new( + tree.entries(true, 0usize).with_git_statuses(), + reverse_search, + ) + .find_single_ended(|ele| predicate(*ele, worktree_id)) + .map(|ele| ele.to_owned()) }) } @@ -2984,7 +2991,7 @@ impl ProjectPanel { &self, start: 
Option<&SelectedEntry>, reverse_search: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, ) -> Option { let mut worktree_ids: Vec<_> = self @@ -3006,7 +3013,9 @@ impl ProjectPanel { let root_entry = tree.root_entry()?; let tree_id = tree.id(); - let mut first_iter = tree.traverse_from_path(true, true, true, entry.path.as_ref()); + let mut first_iter = tree + .traverse_from_path(true, true, true, entry.path.as_ref()) + .with_git_statuses(); if reverse_search { first_iter.next(); @@ -3014,25 +3023,25 @@ impl ProjectPanel { let first = first_iter .enumerate() - .take_until(|(count, ele)| *ele == root_entry && *count != 0usize) - .map(|(_, ele)| ele) - .find(|ele| predicate(ele, tree_id)) - .cloned(); + .take_until(|(count, entry)| entry.entry == root_entry && *count != 0usize) + .map(|(_, entry)| entry) + .find(|ele| predicate(*ele, tree_id)) + .map(|ele| ele.to_owned()); - let second_iter = tree.entries(true, 0usize); + let second_iter = tree.entries(true, 0usize).with_git_statuses(); let second = if reverse_search { second_iter .take_until(|ele| ele.id == start.entry_id) - .filter(|ele| predicate(ele, tree_id)) + .filter(|ele| predicate(*ele, tree_id)) .last() - .cloned() + .map(|ele| ele.to_owned()) } else { second_iter .take_while(|ele| ele.id != start.entry_id) - .filter(|ele| predicate(ele, tree_id)) + .filter(|ele| predicate(*ele, tree_id)) .last() - .cloned() + .map(|ele| ele.to_owned()) }; if reverse_search { @@ -3089,7 +3098,7 @@ impl ProjectPanel { &self, start: Option<&SelectedEntry>, reverse_search: bool, - predicate: impl Fn(&Entry, WorktreeId) -> bool, + predicate: impl Fn(GitEntryRef, WorktreeId) -> bool, cx: &mut ViewContext, ) -> Option { let mut worktree_ids: Vec<_> = self @@ -3131,8 +3140,8 @@ impl ProjectPanel { ) }; - let first_search = first_iter.find(|ele| predicate(ele, start.worktree_id)); - let second_search = second_iter.find(|ele| predicate(ele, 
start.worktree_id)); + let first_search = first_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id)); + let second_search = second_iter.find(|ele| predicate(ele.to_ref(), start.worktree_id)); if first_search.is_some() { return first_search.map(|entry| SelectedEntry { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 29e90cc71e..3f426f0214 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1768,7 +1768,7 @@ message Entry { bool is_ignored = 7; bool is_external = 8; reserved 6; - optional GitStatus git_status = 9; + reserved 9; bool is_fifo = 10; optional uint64 size = 11; optional string canonical_path = 12; @@ -1777,6 +1777,8 @@ message Entry { message RepositoryEntry { uint64 work_directory_id = 1; optional string branch = 2; + repeated StatusEntry updated_statuses = 3; + repeated string removed_statuses = 4; } message StatusEntry { @@ -1788,6 +1790,7 @@ enum GitStatus { Added = 0; Modified = 1; Conflict = 2; + Deleted = 3; } message BufferState { diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index dc4f4b10b8..8c2e50d68e 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -20,6 +20,7 @@ use serde_json::json; use settings::{initial_server_settings_content, Settings, SettingsLocation, SettingsStore}; use smol::stream::StreamExt; use std::{ + collections::HashSet, path::{Path, PathBuf}, sync::Arc, }; @@ -1150,6 +1151,10 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA let (project, headless_project) = init_test(&fs, cx, server_cx).await; let branches = ["main", "dev", "feature-1"]; + let branches_set = branches + .iter() + .map(ToString::to_string) + .collect::>(); fs.insert_branches(Path::new("/code/project1/.git"), &branches); let (worktree, _) = project @@ -1173,10 +1178,10 @@ async fn test_remote_git_branches(cx: &mut 
TestAppContext, server_cx: &mut TestA let remote_branches = remote_branches .into_iter() - .map(|branch| branch.name) - .collect::>(); + .map(|branch| branch.name.to_string()) + .collect::>(); - assert_eq!(&remote_branches, &branches); + assert_eq!(&remote_branches, &branches_set); cx.update(|cx| { project.update(cx, |project, cx| { diff --git a/crates/repl/src/kernels/remote_kernels.rs b/crates/repl/src/kernels/remote_kernels.rs index 5a3a22ce87..c5e53cda97 100644 --- a/crates/repl/src/kernels/remote_kernels.rs +++ b/crates/repl/src/kernels/remote_kernels.rs @@ -39,7 +39,7 @@ pub async fn launch_remote_kernel( let kernel_launch_request = KernelLaunchRequest { name: kernel_name.to_string(), // Note: since the path we have locally may not be the same as the one on the remote server, - // we don't send it. We'll have to evaluate this decisiion along the way. + // we don't send it. We'll have to evaluate this decision along the way. path: None, }; diff --git a/crates/settings/src/key_equivalents.rs b/crates/settings/src/key_equivalents.rs index a0029aabbe..bf08de97ae 100644 --- a/crates/settings/src/key_equivalents.rs +++ b/crates/settings/src/key_equivalents.rs @@ -5,7 +5,7 @@ use collections::HashMap; // for those users. // // The way macOS solves this problem is to move shortcuts around so that they are all reachable, -// even if the mnemoic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct +// even if the mnemonic changes. https://developer.apple.com/documentation/swiftui/keyboardshortcut/localization-swift.struct // // For example, cmd-> is the "switch window" shortcut because the > key is right above tab. 
// To ensure this doesn't cause problems for shortcuts defined for a QWERTY layout, apple moves diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 773e7db88b..3e33d8b43e 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -9,6 +9,15 @@ struct StackEntry<'a, T: Item, D> { position: D, } +impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for StackEntry<'a, T, D> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("StackEntry") + .field("index", &self.index) + .field("position", &self.position) + .finish() + } +} + #[derive(Clone)] pub struct Cursor<'a, T: Item, D> { tree: &'a SumTree, @@ -18,6 +27,21 @@ pub struct Cursor<'a, T: Item, D> { at_end: bool, } +impl<'a, T: Item + fmt::Debug, D: fmt::Debug> fmt::Debug for Cursor<'a, T, D> +where + T::Summary: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Cursor") + .field("tree", &self.tree) + .field("stack", &self.stack) + .field("position", &self.position) + .field("did_seek", &self.did_seek) + .field("at_end", &self.at_end) + .finish() + } +} + pub struct Iter<'a, T: Item> { tree: &'a SumTree, stack: ArrayVec, 16>, @@ -60,6 +84,7 @@ where } } + /// Item is `None` when the list is empty or when this cursor is at the end of the list.
#[track_caller] pub fn item(&self) -> Option<&'a T> { self.assert_did_seek(); diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fbfe3b06f3..fa37c67599 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -42,6 +42,21 @@ pub trait Summary: Clone { fn add_summary(&mut self, summary: &Self, cx: &Self::Context); } +/// This type exists because we can't implement Summary for () without causing +/// type resolution errors +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct Unit; + +impl Summary for Unit { + type Context = (); + + fn zero(_: &()) -> Self { + Unit + } + + fn add_summary(&mut self, _: &Self, _: &()) {} +} + /// Each [`Summary`] type can have more than one [`Dimension`] type that it measures. /// /// You can use dimensions to seek to a specific location in the [`SumTree`] @@ -761,6 +776,55 @@ impl SumTree { None } } + + #[inline] + pub fn contains(&self, key: &T::Key, cx: &::Context) -> bool { + self.get(key, cx).is_some() + } + + pub fn update( + &mut self, + key: &T::Key, + cx: &::Context, + f: F, + ) -> Option + where + F: FnOnce(&mut T) -> R, + { + let mut cursor = self.cursor::(cx); + let mut new_tree = cursor.slice(key, Bias::Left, cx); + let mut result = None; + if Ord::cmp(key, &cursor.end(cx)) == Ordering::Equal { + let mut updated = cursor.item().unwrap().clone(); + result = Some(f(&mut updated)); + new_tree.push(updated, cx); + cursor.next(cx); + } + new_tree.append(cursor.suffix(cx), cx); + drop(cursor); + *self = new_tree; + result + } + + pub fn retain bool>( + &mut self, + cx: &::Context, + mut predicate: F, + ) { + let mut new_map = SumTree::new(cx); + + let mut cursor = self.cursor::(cx); + cursor.next(cx); + while let Some(item) = cursor.item() { + if predicate(&item) { + new_map.push(item.clone(), cx); + } + cursor.next(cx); + } + drop(cursor); + + *self = new_map; + } } impl Default for SumTree diff --git a/crates/tab_switcher/src/tab_switcher.rs 
b/crates/tab_switcher/src/tab_switcher.rs index 64d9da71fa..f076a4f1bc 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -358,13 +358,14 @@ impl PickerDelegate for TabSwitcherDelegate { .item .project_path(cx) .as_ref() - .and_then(|path| self.project.read(cx).entry_for_path(path, cx)) - .map(|entry| { - entry_git_aware_label_color( - entry.git_status, - entry.is_ignored, - selected, - ) + .and_then(|path| { + let project = self.project.read(cx); + let entry = project.entry_for_path(path, cx)?; + let git_status = project.project_path_git_status(path, cx); + Some((entry, git_status)) + }) + .map(|(entry, git_status)| { + entry_git_aware_label_color(git_status, entry.is_ignored, selected) }) }) .flatten(); diff --git a/crates/task/src/lib.rs b/crates/task/src/lib.rs index 7b81ae078c..af31f56450 100644 --- a/crates/task/src/lib.rs +++ b/crates/task/src/lib.rs @@ -60,7 +60,7 @@ pub struct SpawnInTerminal { pub show_command: bool, } -/// A final form of the [`TaskTemplate`], that got resolved with a particualar [`TaskContext`] and now is ready to spawn the actual task. +/// A final form of the [`TaskTemplate`], that got resolved with a particular [`TaskContext`] and now is ready to spawn the actual task. #[derive(Clone, Debug, PartialEq, Eq)] pub struct ResolvedTask { /// A way to distinguish tasks produced by the same template, but different contexts. 
diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index 7d553a374d..2d13a7e18b 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -143,13 +143,13 @@ impl TaskTemplate { let truncated_variables = truncate_variables(&task_variables); let cwd = match self.cwd.as_deref() { Some(cwd) => { - let substitured_cwd = substitute_all_template_variables_in_str( + let substituted_cwd = substitute_all_template_variables_in_str( cwd, &task_variables, &variable_names, &mut substituted_variables, )?; - Some(PathBuf::from(substitured_cwd)) + Some(PathBuf::from(substituted_cwd)) } None => None, } diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 3203f0455f..125c1384b5 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -100,7 +100,7 @@ impl TerminalPanel { terminal_panel } - pub fn asssistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { + pub fn set_assistant_enabled(&mut self, enabled: bool, cx: &mut ViewContext) { self.assistant_enabled = enabled; if enabled { let focus_handle = self diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 440ac332c4..3338665366 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -270,7 +270,7 @@ pub struct ThemeColorsContent { /// Fill Color. Used for the muted or deemphasized fill color of an icon. /// - /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight. + /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight. 
#[serde(rename = "icon.muted")] pub icon_muted: Option, diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 99c1656215..62ab46610a 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -97,7 +97,7 @@ pub struct ThemeColors { pub icon: Hsla, /// Fill Color. Used for the muted or deemphasized fill color of an icon. /// - /// This might be used to show an icon in an inactive pane, or to demphasize a series of icons to give them less visual weight. + /// This might be used to show an icon in an inactive pane, or to deemphasize a series of icons to give them less visual weight. pub icon_muted: Hsla, /// Fill Color. Used for the disabled fill color of an icon. /// diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index cf860ad452..2a4802b4eb 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -71,7 +71,7 @@ impl From for Appearance { } } -/// Which themes should be loaded. This is used primarlily for testing. +/// Which themes should be loaded. This is used primarily for testing. pub enum LoadThemes { /// Only load the base theme. 
/// diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 23449ddeb0..cab85f6f19 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -21,7 +21,7 @@ use gpui::{ Interactivity, IntoElement, Model, MouseButton, ParentElement, Render, Stateful, StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; -use project::{Project, RepositoryEntry}; +use project::Project; use rpc::proto; use settings::Settings as _; use smallvec::SmallVec; @@ -487,7 +487,7 @@ impl TitleBar { let workspace = self.workspace.upgrade()?; let branch_name = entry .as_ref() - .and_then(RepositoryEntry::branch) + .and_then(|entry| entry.branch()) .map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?; Some( Button::new("project_branch_trigger", branch_name) diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 82de2bc684..8bd5c87286 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -179,7 +179,7 @@ define_connection! 
{ // group_id: usize, // Primary key for pane_groups // workspace_id: usize, // References workspaces table // parent_group_id: Option, // None indicates that this is the root node - // position: Optiopn, // None indicates that this is the root node + // position: Option, // None indicates that this is the root node // axis: Option, // 'Vertical', 'Horizontal' // flexes: Option>, // A JSON array of floats // ) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 9ee909f73e..2c4c8f7c09 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -21,7 +21,6 @@ use fuzzy::CharBag; use git::GitHostingProviderRegistry; use git::{ repository::{GitFileStatus, GitRepository, RepoPath}, - status::GitStatus, COOKIES, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, }; use gpui::{ @@ -30,6 +29,7 @@ use gpui::{ }; use ignore::IgnoreStack; use language::DiskState; + use parking_lot::Mutex; use paths::local_settings_folder_relative_path; use postage::{ @@ -53,8 +53,8 @@ use std::{ ffi::OsStr, fmt, future::Future, - mem, - ops::{AddAssign, Deref, DerefMut, Sub}, + mem::{self}, + ops::{Deref, DerefMut}, path::{Path, PathBuf}, pin::Pin, sync::{ @@ -63,7 +63,9 @@ use std::{ }, time::{Duration, Instant}, }; -use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; +use sum_tree::{ + Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit, +}; use text::{LineEnding, Rope}; use util::{ paths::{home_dir, PathMatcher, SanitizedPath}, @@ -154,7 +156,7 @@ pub struct Snapshot { entries_by_path: SumTree, entries_by_id: SumTree, always_included_entries: Vec>, - repository_entries: TreeMap, + repositories: SumTree, /// A number that increases every time the worktree begins scanning /// a set of paths from the filesystem. 
This scanning could be caused @@ -171,8 +173,133 @@ pub struct Snapshot { #[derive(Clone, Debug, PartialEq, Eq)] pub struct RepositoryEntry { - pub(crate) work_directory: WorkDirectoryEntry, + /// The git status entries for this repository. + /// Note that the paths on this repository are relative to the git work directory. + /// If the .git folder is external to Zed, these paths will be relative to that folder, + /// and this data structure might reference files external to this worktree. + /// + /// For example: + /// + /// my_root_folder/ <-- repository root + /// .git + /// my_sub_folder_1/ + /// project_root/ <-- Project root, Zed opened here + /// changed_file_1 <-- File with changes, in worktree + /// my_sub_folder_2/ + /// changed_file_2 <-- File with changes, out of worktree + /// ... + /// + /// With this setup, this field would contain 2 entries, like so: + /// - my_sub_folder_1/project_root/changed_file_1 + /// - my_sub_folder_2/changed_file_2 + pub(crate) statuses_by_path: SumTree, + pub(crate) work_directory_id: ProjectEntryId, + pub(crate) work_directory: WorkDirectory, pub(crate) branch: Option>, +} + +impl Deref for RepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + +impl AsRef for RepositoryEntry { + fn as_ref(&self) -> &Path { + &self.path + } +} + +impl RepositoryEntry { + pub fn branch(&self) -> Option> { + self.branch.clone() + } + + pub fn work_directory_id(&self) -> ProjectEntryId { + self.work_directory_id + } + + pub fn status(&self) -> impl Iterator + '_ { + self.statuses_by_path.iter().cloned() + } + + pub fn initial_update(&self) -> proto::RepositoryEntry { + proto::RepositoryEntry { + work_directory_id: self.work_directory_id.to_proto(), + branch: self.branch.as_ref().map(|branch| branch.to_string()), + updated_statuses: self + .statuses_by_path + .iter() + .map(|entry| proto::StatusEntry { + repo_path: entry.repo_path.to_string_lossy().to_string(), + status: 
git_status_to_proto(entry.status), + }) + .collect(), + removed_statuses: Default::default(), + } + } + + pub fn build_update(&self, old: &Self) -> proto::RepositoryEntry { + let mut updated_statuses: Vec = Vec::new(); + let mut removed_statuses: Vec = Vec::new(); + + let mut new_statuses = self.statuses_by_path.iter().peekable(); + let mut old_statuses = old.statuses_by_path.iter().peekable(); + + let mut current_new_entry = new_statuses.next(); + let mut current_old_entry = old_statuses.next(); + loop { + match (current_new_entry, current_old_entry) { + (Some(new_entry), Some(old_entry)) => { + match new_entry.repo_path.cmp(&old_entry.repo_path) { + Ordering::Less => { + updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + Ordering::Equal => { + if new_entry.status != old_entry.status { + updated_statuses.push(new_entry.to_proto()); + } + current_old_entry = old_statuses.next(); + current_new_entry = new_statuses.next(); + } + Ordering::Greater => { + removed_statuses.push(old_entry.repo_path.to_proto()); + current_old_entry = old_statuses.next(); + } + } + } + (None, Some(old_entry)) => { + removed_statuses.push(old_entry.repo_path.to_proto()); + current_old_entry = old_statuses.next(); + } + (Some(new_entry), None) => { + updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + (None, None) => break, + } + } + + proto::RepositoryEntry { + work_directory_id: self.work_directory_id.to_proto(), + branch: self.branch.as_ref().map(|branch| branch.to_string()), + updated_statuses, + removed_statuses, + } + } +} + +/// This path corresponds to the 'content path' of a repository in relation +/// to Zed's project root. +/// In the majority of the cases, this is the folder that contains the .git folder. +/// But if a sub-folder of a git repository is opened, this corresponds to the +/// project root and the .git folder is located in a parent directory. 
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct WorkDirectory { + path: Arc, /// If location_in_repo is set, it means the .git folder is external /// and in a parent folder of the project root. @@ -195,23 +322,19 @@ pub struct RepositoryEntry { pub(crate) location_in_repo: Option>, } -impl RepositoryEntry { - pub fn branch(&self) -> Option> { - self.branch.clone() +impl WorkDirectory { + pub fn path_key(&self) -> PathKey { + PathKey(self.path.clone()) } - pub fn work_directory_id(&self) -> ProjectEntryId { - *self.work_directory - } - - pub fn work_directory(&self, snapshot: &Snapshot) -> Option { - snapshot - .entry_for_id(self.work_directory_id()) - .map(|entry| RepositoryWorkDirectory(entry.path.clone())) - } - - pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry { - self.into() + /// Returns true if the given path is a child of the work directory. + /// + /// Note that the path may not be a member of this repository, if there + /// is a repository in a directory between these two paths + /// external .git folder in a parent folder of the project root. + pub fn directory_contains(&self, path: impl AsRef) -> bool { + let path = path.as_ref(); + path.starts_with(&self.path) } /// relativize returns the given project path relative to the root folder of the @@ -219,53 +342,50 @@ impl RepositoryEntry { /// If the root of the repository (and its .git folder) are located in a parent folder /// of the project root folder, then the returned RepoPath is relative to the root /// of the repository and not a valid path inside the project. 
- pub fn relativize(&self, worktree: &Snapshot, path: &Path) -> Result { - let relativize_path = |path: &Path| { - let entry = worktree - .entry_for_id(self.work_directory.0) - .ok_or_else(|| anyhow!("entry not found"))?; - + pub fn relativize(&self, path: &Path) -> Result { + if let Some(location_in_repo) = &self.location_in_repo { + Ok(location_in_repo.join(path).into()) + } else { let relativized_path = path - .strip_prefix(&entry.path) - .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, entry.path))?; + .strip_prefix(&self.path) + .map_err(|_| anyhow!("could not relativize {:?} against {:?}", path, self.path))?; Ok(relativized_path.into()) - }; + } + } - if let Some(location_in_repo) = &self.location_in_repo { - relativize_path(&location_in_repo.join(path)) + /// This is the opposite operation to `relativize` above + pub fn unrelativize(&self, path: &RepoPath) -> Option> { + if let Some(location) = &self.location_in_repo { + // If we fail to strip the prefix, that means this status entry is + // external to this worktree, and we definitely won't have an entry_id + path.strip_prefix(location).ok().map(Into::into) } else { - relativize_path(path) + Some(self.path.join(path).into()) } } } -impl From<&RepositoryEntry> for proto::RepositoryEntry { - fn from(value: &RepositoryEntry) -> Self { - proto::RepositoryEntry { - work_directory_id: value.work_directory.to_proto(), - branch: value.branch.as_ref().map(|str| str.to_string()), - } - } -} - -/// This path corresponds to the 'content path' of a repository in relation -/// to Zed's project root. -/// In the majority of the cases, this is the folder that contains the .git folder. -/// But if a sub-folder of a git repository is opened, this corresponds to the -/// project root and the .git folder is located in a parent directory. 
-#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] -pub struct RepositoryWorkDirectory(pub(crate) Arc); - -impl Default for RepositoryWorkDirectory { +impl Default for WorkDirectory { fn default() -> Self { - RepositoryWorkDirectory(Arc::from(Path::new(""))) + Self { + path: Arc::from(Path::new("")), + location_in_repo: None, + } } } -impl AsRef for RepositoryWorkDirectory { +impl Deref for WorkDirectory { + type Target = Path; + + fn deref(&self) -> &Self::Target { + self.as_ref() + } +} + +impl AsRef for WorkDirectory { fn as_ref(&self) -> &Path { - self.0.as_ref() + self.path.as_ref() } } @@ -317,7 +437,9 @@ struct BackgroundScannerState { #[derive(Debug, Clone)] pub struct LocalRepositoryEntry { + pub(crate) work_directory: WorkDirectory, pub(crate) git_dir_scan_id: usize, + pub(crate) status_scan_id: usize, pub(crate) repo_ptr: Arc, /// Absolute path to the actual .git folder. /// Note: if .git is a file, this points to the folder indicated by the .git file @@ -326,12 +448,39 @@ pub struct LocalRepositoryEntry { pub(crate) dot_git_worktree_abs_path: Option>, } +impl sum_tree::Item for LocalRepositoryEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, + } + } +} + +impl KeyedItem for LocalRepositoryEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.work_directory.path.clone()) + } +} + impl LocalRepositoryEntry { pub fn repo(&self) -> &Arc { &self.repo_ptr } } +impl Deref for LocalRepositoryEntry { + type Target = WorkDirectory; + + fn deref(&self) -> &Self::Target { + &self.work_directory + } +} + impl Deref for LocalSnapshot { type Target = Snapshot; @@ -716,9 +865,9 @@ impl Worktree { let snapshot = this.snapshot(); cx.background_executor().spawn(async move { if let Some(repo) = snapshot.repository_for_path(&path) { - if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() { + if let 
Some(repo_path) = repo.relativize(&path).log_err() { if let Some(git_repo) = - snapshot.git_repositories.get(&*repo.work_directory) + snapshot.git_repositories.get(&repo.work_directory_id) { return Ok(git_repo.repo_ptr.load_index_text(&repo_path)); } @@ -1215,6 +1364,7 @@ impl LocalWorktree { let mut changes = Vec::new(); let mut old_repos = old_snapshot.git_repositories.iter().peekable(); let mut new_repos = new_snapshot.git_repositories.iter().peekable(); + loop { match (new_repos.peek().map(clone), old_repos.peek().map(clone)) { (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => { @@ -1231,11 +1381,13 @@ impl LocalWorktree { new_repos.next(); } Ordering::Equal => { - if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id { + if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id + || new_repo.status_scan_id != old_repo.status_scan_id + { if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1251,8 +1403,8 @@ impl LocalWorktree { Ordering::Greater => { if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1279,8 +1431,8 @@ impl LocalWorktree { (None, Some((entry_id, _))) => { if let Some(entry) = old_snapshot.entry_for_id(entry_id) { let old_repo = old_snapshot - .repository_entries - .get(&RepositoryWorkDirectory(entry.path.clone())) + .repositories + .get(&PathKey(entry.path.clone()), &()) .cloned(); changes.push(( entry.path.clone(), @@ -1325,12 +1477,12 @@ impl LocalWorktree { } pub fn local_git_repo(&self, path: &Path) -> Option> { - self.repo_for_path(path) - .map(|(_, entry)| entry.repo_ptr.clone()) + 
self.local_repo_for_path(path) + .map(|local_repo| local_repo.repo_ptr.clone()) } pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> { - self.git_repositories.get(&repo.work_directory.0) + self.git_repositories.get(&repo.work_directory_id) } fn load_binary_file( @@ -2086,7 +2238,7 @@ impl Snapshot { always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), - repository_entries: Default::default(), + repositories: Default::default(), scan_id: 1, completed_scan_id: 0, } @@ -2121,9 +2273,9 @@ impl Snapshot { updated_entries.sort_unstable_by_key(|e| e.id); let mut updated_repositories = self - .repository_entries - .values() - .map(proto::RepositoryEntry::from) + .repositories + .iter() + .map(|repository| repository.initial_update()) .collect::>(); updated_repositories.sort_unstable_by_key(|e| e.work_directory_id); @@ -2186,7 +2338,7 @@ impl Snapshot { self.entries_by_path = { let mut cursor = self.entries_by_path.cursor::(&()); let mut new_entries_by_path = - cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &()); + cursor.slice(&TraversalTarget::path(&removed_entry.path), Bias::Left, &()); while let Some(entry) = cursor.item() { if entry.path.starts_with(&removed_entry.path) { self.entries_by_id.remove(&entry.id, &()); @@ -2202,12 +2354,14 @@ impl Snapshot { Some(removed_entry.path) } - #[cfg(any(test, feature = "test-support"))] - pub fn status_for_file(&self, path: impl Into) -> Option { - let path = path.into(); - self.entries_by_path - .get(&PathKey(Arc::from(path)), &()) - .and_then(|entry| entry.git_status) + pub fn status_for_file(&self, path: impl AsRef) -> Option { + let path = path.as_ref(); + self.repository_for_path(path).and_then(|repo| { + let repo_path = repo.relativize(path).unwrap(); + repo.statuses_by_path + .get(&PathKey(repo_path.0), &()) + .map(|entry| entry.status) + }) } fn update_abs_path(&mut self, abs_path: SanitizedPath, 
root_name: String) { @@ -2267,38 +2421,66 @@ impl Snapshot { self.entries_by_id.edit(entries_by_id_edits, &()); update.removed_repositories.sort_unstable(); - self.repository_entries.retain(|_, entry| { + self.repositories.retain(&(), |entry: &RepositoryEntry| { update .removed_repositories - .binary_search(&entry.work_directory.to_proto()) + .binary_search(&entry.work_directory_id.to_proto()) .is_err() }); for repository in update.updated_repositories { - let work_directory_entry: WorkDirectoryEntry = - ProjectEntryId::from_proto(repository.work_directory_id).into(); + let work_directory_id = ProjectEntryId::from_proto(repository.work_directory_id); + if let Some(work_dir_entry) = self.entry_for_id(work_directory_id) { + if self + .repositories + .contains(&PathKey(work_dir_entry.path.clone()), &()) + { + let edits = repository + .removed_statuses + .into_iter() + .map(|path| Edit::Remove(PathKey(Path::new(&path).into()))) + .chain(repository.updated_statuses.into_iter().filter_map( + |updated_status| { + Some(Edit::Insert(updated_status.try_into().log_err()?)) + }, + )) + .collect::>(); - if let Some(entry) = self.entry_for_id(*work_directory_entry) { - let work_directory = RepositoryWorkDirectory(entry.path.clone()); - if self.repository_entries.get(&work_directory).is_some() { - self.repository_entries.update(&work_directory, |repo| { - repo.branch = repository.branch.map(Into::into); - }); + self.repositories + .update(&PathKey(work_dir_entry.path.clone()), &(), |repo| { + repo.branch = repository.branch.map(Into::into); + repo.statuses_by_path.edit(edits, &()); + }); } else { - self.repository_entries.insert( - work_directory, + let statuses = SumTree::from_iter( + repository + .updated_statuses + .into_iter() + .filter_map(|updated_status| updated_status.try_into().log_err()), + &(), + ); + + self.repositories.insert_or_replace( RepositoryEntry { - work_directory: work_directory_entry, + work_directory_id, + work_directory: WorkDirectory { + path: 
work_dir_entry.path.clone(), + // When syncing repository entries from a peer, we don't need + // the location_in_repo field, since git operations don't happen locally + // anyway. + location_in_repo: None, + }, branch: repository.branch.map(Into::into), - // When syncing repository entries from a peer, we don't need - // the location_in_repo field, since git operations don't happen locally - // anyway. - location_in_repo: None, + statuses_by_path: statuses, }, - ) + &(), + ); } } else { - log::error!("no work directory entry for repository {:?}", repository) + log::error!( + "no work directory entry for repository {:?}", + repository.work_directory_id + ) } } @@ -2355,6 +2537,7 @@ impl Snapshot { &(), ); Traversal { + snapshot: self, cursor, include_files, include_dirs, @@ -2369,13 +2552,7 @@ impl Snapshot { include_ignored: bool, path: &Path, ) -> Traversal { - Traversal::new( - &self.entries_by_path, - include_files, - include_dirs, - include_ignored, - path, - ) + Traversal::new(self, include_files, include_dirs, include_ignored, path) } pub fn files(&self, include_ignored: bool, start: usize) -> Traversal { @@ -2390,33 +2567,39 @@ impl Snapshot { self.traverse_from_offset(true, true, include_ignored, start) } - pub fn repositories(&self) -> impl Iterator, &RepositoryEntry)> { - self.repository_entries - .iter() - .map(|(path, entry)| (&path.0, entry)) + #[cfg(any(feature = "test-support", test))] + pub fn git_status(&self, work_dir: &Path) -> Option> { + self.repositories + .get(&PathKey(work_dir.into()), &()) + .map(|repo| repo.status().collect()) } - pub fn repository_for_work_directory(&self, path: &Path) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(path.into())) - .cloned() + pub fn repositories(&self) -> impl Iterator { + self.repositories.iter() + } + + /// Get the repository whose work directory corresponds to the given path. 
+ pub(crate) fn repository(&self, work_directory: PathKey) -> Option { + self.repositories.get(&work_directory, &()).cloned() } /// Get the repository whose work directory contains the given path. - pub fn repository_for_path(&self, path: &Path) -> Option { - self.repository_and_work_directory_for_path(path) - .map(|e| e.1) - } + pub fn repository_for_path(&self, path: &Path) -> Option<&RepositoryEntry> { + let mut cursor = self.repositories.cursor::(&()); + let mut repository = None; - pub fn repository_and_work_directory_for_path( - &self, - path: &Path, - ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> { - self.repository_entries - .iter() - .filter(|(workdir_path, _)| path.starts_with(workdir_path)) - .last() - .map(|(path, repo)| (path.clone(), repo.clone())) + // Git repositories may contain other git repositories. As a side effect of + // lexicographic sorting by path, deeper repositories will be after higher repositories + // So, let's loop through every matching repository until we can't find any more to find + // the deepest repository that could contain this path. 
+ while cursor.seek_forward(&PathTarget::Contains(path), Bias::Left, &()) + && cursor.item().is_some() + { + repository = cursor.item(); + cursor.next(&()); + } + + repository } /// Given an ordered iterator of entries, returns an iterator of those entries, @@ -2425,86 +2608,28 @@ impl Snapshot { &'a self, entries: impl 'a + Iterator, ) -> impl 'a + Iterator)> { - let mut containing_repos = Vec::<(&Arc, &RepositoryEntry)>::new(); + let mut containing_repos = Vec::<&RepositoryEntry>::new(); let mut repositories = self.repositories().peekable(); entries.map(move |entry| { - while let Some((repo_path, _)) = containing_repos.last() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = containing_repos.last() { + if repository.directory_contains(&entry.path) { break; } else { containing_repos.pop(); } } - while let Some((repo_path, _)) = repositories.peek() { - if entry.path.starts_with(repo_path) { + while let Some(repository) = repositories.peek() { + if repository.directory_contains(&entry.path) { containing_repos.push(repositories.next().unwrap()); } else { break; } } - let repo = containing_repos.last().map(|(_, repo)| *repo); + let repo = containing_repos.last().copied(); (entry, repo) }) } - /// Updates the `git_status` of the given entries such that files' - /// statuses bubble up to their ancestor directories. 
- pub fn propagate_git_statuses(&self, result: &mut [Entry]) { - let mut cursor = self - .entries_by_path - .cursor::<(TraversalProgress, GitStatuses)>(&()); - let mut entry_stack = Vec::<(usize, GitStatuses)>::new(); - - let mut result_ix = 0; - loop { - let next_entry = result.get(result_ix); - let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]); - - let entry_to_finish = match (containing_entry, next_entry) { - (Some(_), None) => entry_stack.pop(), - (Some(containing_entry), Some(next_path)) => { - if next_path.path.starts_with(&containing_entry.path) { - None - } else { - entry_stack.pop() - } - } - (None, Some(_)) => None, - (None, None) => break, - }; - - if let Some((entry_ix, prev_statuses)) = entry_to_finish { - cursor.seek_forward( - &TraversalTarget::PathSuccessor(&result[entry_ix].path), - Bias::Left, - &(), - ); - - let statuses = cursor.start().1 - prev_statuses; - - result[entry_ix].git_status = if statuses.conflict > 0 { - Some(GitFileStatus::Conflict) - } else if statuses.modified > 0 { - Some(GitFileStatus::Modified) - } else if statuses.added > 0 { - Some(GitFileStatus::Added) - } else { - None - }; - } else { - if result[result_ix].is_dir() { - cursor.seek_forward( - &TraversalTarget::Path(&result[result_ix].path), - Bias::Left, - &(), - ); - entry_stack.push((result_ix, cursor.start().1)); - } - result_ix += 1; - } - } - } - pub fn paths(&self) -> impl Iterator> { let empty_path = Path::new(""); self.entries_by_path @@ -2515,8 +2640,9 @@ impl Snapshot { pub fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> { let mut cursor = self.entries_by_path.cursor(&()); - cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &()); + cursor.seek(&TraversalTarget::path(parent_path), Bias::Right, &()); let traversal = Traversal { + snapshot: self, cursor, include_files: true, include_dirs: true, @@ -2543,19 +2669,19 @@ impl Snapshot { } pub fn root_git_entry(&self) -> Option { - self.repository_entries - 
.get(&RepositoryWorkDirectory(Path::new("").into())) + self.repositories + .get(&PathKey(Path::new("").into()), &()) .map(|entry| entry.to_owned()) } pub fn git_entry(&self, work_directory_path: Arc) -> Option { - self.repository_entries - .get(&RepositoryWorkDirectory(work_directory_path)) + self.repositories + .get(&PathKey(work_directory_path), &()) .map(|entry| entry.to_owned()) } pub fn git_entries(&self) -> impl Iterator { - self.repository_entries.values() + self.repositories.iter() } pub fn scan_id(&self) -> usize { @@ -2586,10 +2712,10 @@ impl Snapshot { } impl LocalSnapshot { - pub fn repo_for_path(&self, path: &Path) -> Option<(RepositoryEntry, &LocalRepositoryEntry)> { - let (_, repo_entry) = self.repository_and_work_directory_for_path(path)?; - let work_directory_id = repo_entry.work_directory_id(); - Some((repo_entry, self.git_repositories.get(&work_directory_id)?)) + pub fn local_repo_for_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { + let repository_entry = self.repository_for_path(path)?; + let work_directory_id = repository_entry.work_directory_id(); + self.git_repositories.get(&work_directory_id) } fn build_update( @@ -2613,18 +2739,16 @@ impl LocalSnapshot { } for (work_dir_path, change) in repo_changes.iter() { - let new_repo = self - .repository_entries - .get(&RepositoryWorkDirectory(work_dir_path.clone())); + let new_repo = self.repositories.get(&PathKey(work_dir_path.clone()), &()); match (&change.old_repository, new_repo) { (Some(old_repo), Some(new_repo)) => { updated_repositories.push(new_repo.build_update(old_repo)); } (None, Some(new_repo)) => { - updated_repositories.push(proto::RepositoryEntry::from(new_repo)); + updated_repositories.push(new_repo.initial_update()); } (Some(old_repo), None) => { - removed_repositories.push(old_repo.work_directory.0.to_proto()); + removed_repositories.push(old_repo.work_directory_id.to_proto()); } _ => {} } @@ -2827,15 +2951,15 @@ impl LocalSnapshot { .map(|repo| 
repo.1.dot_git_dir_abs_path.clone()) .collect::>(); let work_dir_paths = self - .repository_entries + .repositories .iter() - .map(|repo| repo.0.clone().0) + .map(|repo| repo.work_directory.path.clone()) .collect::>(); assert_eq!(dotgit_paths.len(), work_dir_paths.len()); - assert_eq!(self.repository_entries.iter().count(), work_dir_paths.len()); + assert_eq!(self.repositories.iter().count(), work_dir_paths.len()); assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len()); - for (_, entry) in self.repository_entries.iter() { - self.git_repositories.get(&entry.work_directory).unwrap(); + for entry in self.repositories.iter() { + self.git_repositories.get(&entry.work_directory_id).unwrap(); } } @@ -2872,23 +2996,7 @@ impl BackgroundScannerState { let path = entry.path.clone(); let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); - let mut containing_repository = None; - if !ignore_stack.is_abs_path_ignored(&abs_path, true) { - if let Some((repo_entry, repo)) = self.snapshot.repo_for_path(&path) { - if let Some(workdir_path) = repo_entry.work_directory(&self.snapshot) { - if let Ok(repo_path) = repo_entry.relativize(&self.snapshot, &path) { - containing_repository = Some(ScanJobContainingRepository { - work_directory: workdir_path, - statuses: repo - .repo_ptr - .status(&[repo_path.0]) - .log_err() - .unwrap_or_default(), - }); - } - } - } - } + if !ancestor_inodes.contains(&entry.inode) { ancestor_inodes.insert(entry.inode); scan_job_tx @@ -2899,7 +3007,6 @@ impl BackgroundScannerState { scan_queue: scan_job_tx.clone(), ancestor_inodes, is_external: entry.is_external, - containing_repository, }) .unwrap(); } @@ -3006,8 +3113,8 @@ impl BackgroundScannerState { .snapshot .entries_by_path .cursor::(&()); - new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); - removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, 
&()); + new_entries = cursor.slice(&TraversalTarget::path(path), Bias::Left, &()); + removed_entries = cursor.slice(&TraversalTarget::successor(path), Bias::Left, &()); new_entries.append(cursor.suffix(&()), &()); } self.snapshot.entries_by_path = new_entries; @@ -3053,9 +3160,9 @@ impl BackgroundScannerState { self.snapshot .git_repositories .retain(|id, _| removed_ids.binary_search(id).is_err()); - self.snapshot - .repository_entries - .retain(|repo_path, _| !repo_path.0.starts_with(path)); + self.snapshot.repositories.retain(&(), |repository| { + !repository.work_directory.starts_with(path) + }); #[cfg(test)] self.snapshot.check_invariants(false); @@ -3066,7 +3173,7 @@ impl BackgroundScannerState { dot_git_path: Arc, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_path: Arc = match dot_git_path.parent() { Some(parent_dir) => { // Guard against repositories inside the repository metadata @@ -3102,7 +3209,7 @@ impl BackgroundScannerState { location_in_repo: Option>, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option<(RepositoryWorkDirectory, Arc)> { + ) -> Option { let work_dir_id = self .snapshot .entry_for_path(work_dir_path.clone()) @@ -3134,7 +3241,10 @@ impl BackgroundScannerState { }; log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); - let work_directory = RepositoryWorkDirectory(work_dir_path.clone()); + let work_directory = WorkDirectory { + path: work_dir_path.clone(), + location_in_repo, + }; if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { git_hosting_providers::register_additional_providers( @@ -3143,25 +3253,30 @@ impl BackgroundScannerState { ); } - self.snapshot.repository_entries.insert( - work_directory.clone(), + self.snapshot.repositories.insert_or_replace( RepositoryEntry { - work_directory: work_dir_id.into(), + work_directory_id: work_dir_id, + work_directory: work_directory.clone(), branch: 
repository.branch_name().map(Into::into), - location_in_repo, - }, - ); - self.snapshot.git_repositories.insert( - work_dir_id, - LocalRepositoryEntry { - git_dir_scan_id: 0, - repo_ptr: repository.clone(), - dot_git_dir_abs_path: actual_dot_git_dir_abs_path, - dot_git_worktree_abs_path, + statuses_by_path: Default::default(), }, + &(), ); - Some((work_directory, repository)) + let local_repository = LocalRepositoryEntry { + work_directory: work_directory.clone(), + git_dir_scan_id: 0, + status_scan_id: 0, + repo_ptr: repository.clone(), + dot_git_dir_abs_path: actual_dot_git_dir_abs_path, + dot_git_worktree_abs_path, + }; + + self.snapshot + .git_repositories + .insert(work_dir_id, local_repository.clone()); + + Some(local_repository) } } @@ -3466,7 +3581,7 @@ pub struct Entry { /// directory is expanded. External entries are treated like gitignored /// entries in that they are not included in searches. pub is_external: bool, - pub git_status: Option, + /// Whether this entry is considered to be a `.env` file. pub is_private: bool, /// The entry's size on disk, in bytes. @@ -3499,6 +3614,7 @@ pub enum PathChange { Loaded, } +#[derive(Debug)] pub struct GitRepositoryChange { /// The previous state of the repository, if it already existed. 
pub old_repository: Option, @@ -3507,6 +3623,230 @@ pub struct GitRepositoryChange { pub type UpdatedEntriesSet = Arc<[(Arc, ProjectEntryId, PathChange)]>; pub type UpdatedGitRepositoriesSet = Arc<[(Arc, GitRepositoryChange)]>; +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StatusEntry { + pub repo_path: RepoPath, + pub status: GitFileStatus, +} + +impl StatusEntry { + fn to_proto(&self) -> proto::StatusEntry { + proto::StatusEntry { + repo_path: self.repo_path.to_proto(), + status: git_status_to_proto(self.status), + } + } +} + +impl TryFrom for StatusEntry { + type Error = anyhow::Error; + fn try_from(value: proto::StatusEntry) -> Result { + Ok(Self { + repo_path: RepoPath(Path::new(&value.repo_path).into()), + status: git_status_from_proto(Some(value.status)) + .ok_or_else(|| anyhow!("Unable to parse status value {}", value.status))?, + }) + } +} + +#[derive(Clone, Debug)] +struct PathProgress<'a> { + max_path: &'a Path, +} + +#[derive(Clone, Debug)] +pub struct PathSummary { + max_path: Arc, + item_summary: S, +} + +impl Summary for PathSummary { + type Context = S::Context; + + fn zero(cx: &Self::Context) -> Self { + Self { + max_path: Path::new("").into(), + item_summary: S::zero(cx), + } + } + + fn add_summary(&mut self, rhs: &Self, cx: &Self::Context) { + self.max_path = rhs.max_path.clone(); + self.item_summary.add_summary(&rhs.item_summary, cx); + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathProgress<'a> { + fn zero(_: & as Summary>::Context) -> Self { + Self { + max_path: Path::new(""), + } + } + + fn add_summary( + &mut self, + summary: &'a PathSummary, + _: & as Summary>::Context, + ) { + self.max_path = summary.max_path.as_ref() + } +} + +impl sum_tree::Item for RepositoryEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.work_directory.path.clone(), + item_summary: Unit, + } + } +} + +impl sum_tree::KeyedItem for RepositoryEntry { + type Key 
= PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.work_directory.path.clone()) + } +} + +impl sum_tree::Summary for GitStatuses { + type Context = (); + + fn zero(_: &Self::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, rhs: &Self, _: &Self::Context) { + *self += *rhs; + } +} + +impl sum_tree::Item for StatusEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.repo_path.0.clone(), + item_summary: match self.status { + GitFileStatus::Added => GitStatuses { + added: 1, + ..Default::default() + }, + GitFileStatus::Modified => GitStatuses { + modified: 1, + ..Default::default() + }, + GitFileStatus::Conflict => GitStatuses { + conflict: 1, + ..Default::default() + }, + GitFileStatus::Deleted => Default::default(), + GitFileStatus::Untracked => GitStatuses { + untracked: 1, + ..Default::default() + }, + }, + } + } +} + +impl sum_tree::KeyedItem for StatusEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.repo_path.0.clone()) + } +} + +#[derive(Clone, Debug, Default, Copy, PartialEq, Eq)] +pub struct GitStatuses { + added: usize, + modified: usize, + conflict: usize, + untracked: usize, +} + +impl GitStatuses { + pub fn to_status(&self) -> Option { + if self.conflict > 0 { + Some(GitFileStatus::Conflict) + } else if self.modified > 0 { + Some(GitFileStatus::Modified) + } else if self.added > 0 || self.untracked > 0 { + Some(GitFileStatus::Added) + } else { + None + } + } +} + +impl std::ops::Add for GitStatuses { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + GitStatuses { + added: self.added + rhs.added, + modified: self.modified + rhs.modified, + conflict: self.conflict + rhs.conflict, + untracked: self.untracked + rhs.untracked, + } + } +} + +impl std::ops::AddAssign for GitStatuses { + fn add_assign(&mut self, rhs: Self) { + self.added += rhs.added; + self.modified += rhs.modified; + self.conflict += rhs.conflict; + 
self.untracked += rhs.untracked; + } +} + +impl std::ops::Sub for GitStatuses { + type Output = GitStatuses; + + fn sub(self, rhs: Self) -> Self::Output { + GitStatuses { + added: self.added - rhs.added, + modified: self.modified - rhs.modified, + conflict: self.conflict - rhs.conflict, + untracked: self.untracked - rhs.untracked, + } + } +} + +impl<'a> sum_tree::Dimension<'a, PathSummary> for GitStatuses { + fn zero(_cx: &()) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &()) { + *self += summary.item_summary + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathKey { + fn zero(_: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.0 = summary.max_path.clone(); + } +} + +impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for TraversalProgress<'a> { + fn zero(_cx: &S::Context) -> Self { + Default::default() + } + + fn add_summary(&mut self, summary: &'a PathSummary, _: &S::Context) { + self.max_path = summary.max_path.as_ref(); + } +} + impl Entry { fn new( path: Arc, @@ -3532,7 +3872,6 @@ impl Entry { is_always_included: false, is_external: false, is_private: false, - git_status: None, char_bag, is_fifo: metadata.is_fifo, } @@ -3549,10 +3888,6 @@ impl Entry { pub fn is_file(&self) -> bool { self.kind.is_file() } - - pub fn git_status(&self) -> Option { - self.git_status - } } impl EntryKind { @@ -3592,22 +3927,12 @@ impl sum_tree::Item for Entry { non_ignored_file_count = 0; } - let mut statuses = GitStatuses::default(); - if let Some(status) = self.git_status { - match status { - GitFileStatus::Added => statuses.added = 1, - GitFileStatus::Modified => statuses.modified = 1, - GitFileStatus::Conflict => statuses.conflict = 1, - } - } - EntrySummary { max_path: self.path.clone(), count: 1, non_ignored_count, file_count, non_ignored_file_count, - statuses, } } } @@ -3627,7 +3952,6 @@ pub struct EntrySummary { 
non_ignored_count: usize, file_count: usize, non_ignored_file_count: usize, - statuses: GitStatuses, } impl Default for EntrySummary { @@ -3638,7 +3962,6 @@ impl Default for EntrySummary { non_ignored_count: 0, file_count: 0, non_ignored_file_count: 0, - statuses: Default::default(), } } } @@ -3656,7 +3979,6 @@ impl sum_tree::Summary for EntrySummary { self.non_ignored_count += rhs.non_ignored_count; self.file_count += rhs.file_count; self.non_ignored_file_count += rhs.non_ignored_file_count; - self.statuses += rhs.statuses; } } @@ -4224,7 +4546,6 @@ impl BackgroundScanner { let next_entry_id = self.next_entry_id.clone(); let mut ignore_stack = job.ignore_stack.clone(); - let mut containing_repository = job.containing_repository.clone(); let mut new_ignore = None; let mut root_canonical_path = None; let mut new_entries: Vec = Vec::new(); @@ -4261,16 +4582,9 @@ impl BackgroundScanner { self.watcher.as_ref(), ); - if let Some((work_directory, repository)) = repo { - let t0 = Instant::now(); - let statuses = repository - .status(&[PathBuf::from("")]) - .log_err() - .unwrap_or_default(); - log::trace!("computed git status in {:?}", t0.elapsed()); - containing_repository = Some(ScanJobContainingRepository { - work_directory, - statuses, + if let Some(local_repo) = repo { + self.update_git_statuses(UpdateGitStatusesJob { + local_repository: local_repo, }); } } else if child_name == *GITIGNORE { @@ -4370,20 +4684,11 @@ impl BackgroundScanner { }, ancestor_inodes, scan_queue: job.scan_queue.clone(), - containing_repository: containing_repository.clone(), })); } } else { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); child_entry.is_always_included = self.settings.is_path_always_included(&child_path); - if !child_entry.is_ignored { - if let Some(repo) = &containing_repository { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repo.work_directory) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = 
repo.statuses.get(&repo_path); - } - } - } } { @@ -4440,6 +4745,7 @@ impl BackgroundScanner { abs_paths: Vec, scan_queue_tx: Option>, ) { + // grab metadata for all requested paths let metadata = futures::future::join_all( abs_paths .iter() @@ -4487,28 +4793,56 @@ impl BackgroundScanner { // Group all relative paths by their git repository. let mut paths_by_git_repo = HashMap::default(); for relative_path in relative_paths.iter() { - if let Some((repo_entry, repo)) = state.snapshot.repo_for_path(relative_path) { - if let Ok(repo_path) = repo_entry.relativize(&state.snapshot, relative_path) { + if let Some(local_repo) = state.snapshot.local_repo_for_path(relative_path) { + if let Ok(repo_path) = local_repo.relativize(relative_path) { paths_by_git_repo - .entry(repo.dot_git_dir_abs_path.clone()) + .entry(local_repo.work_directory.clone()) .or_insert_with(|| RepoPaths { - repo: repo.repo_ptr.clone(), - repo_paths: Vec::new(), - relative_paths: Vec::new(), + repo: local_repo.repo_ptr.clone(), + repo_paths: Default::default(), }) - .add_paths(relative_path, repo_path); + .add_path(repo_path); } } } - // Now call `git status` once per repository and collect each file's git status. 
- let mut git_statuses_by_relative_path = - paths_by_git_repo - .into_values() - .fold(HashMap::default(), |mut map, repo_paths| { - map.extend(repo_paths.into_git_file_statuses()); - map - }); + for (work_directory, mut paths) in paths_by_git_repo { + if let Ok(status) = paths.repo.status(&paths.repo_paths) { + let mut changed_path_statuses = Vec::new(); + for (repo_path, status) in &*status.entries { + paths.remove_repo_path(repo_path); + + changed_path_statuses.push(Edit::Insert(StatusEntry { + repo_path: repo_path.clone(), + status: *status, + })); + } + for path in paths.repo_paths { + changed_path_statuses.push(Edit::Remove(PathKey(path.0))); + } + + let work_directory_id = state.snapshot.repositories.update( + &work_directory.path_key(), + &(), + move |repository_entry| { + repository_entry + .statuses_by_path + .edit(changed_path_statuses, &()); + repository_entry.work_directory_id + }, + ); + + if let Some(work_directory_id) = work_directory_id { + let scan_id = state.snapshot.scan_id; + state.snapshot.git_repositories.update( + &work_directory_id, + |local_repository_entry| { + local_repository_entry.status_scan_id = scan_id; + }, + ); + } + } + } for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) { let abs_path: Arc = root_abs_path.join(path).into(); @@ -4547,10 +4881,6 @@ impl BackgroundScanner { } } - if !is_dir && !fs_entry.is_ignored && !fs_entry.is_external { - fs_entry.git_status = git_statuses_by_relative_path.remove(path); - } - state.insert_entry(fs_entry.clone(), self.fs.as_ref(), self.watcher.as_ref()); } Ok(None) => { @@ -4570,18 +4900,19 @@ impl BackgroundScanner { ); } - fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> { + fn remove_repo_path(&self, path: &Arc, snapshot: &mut LocalSnapshot) -> Option<()> { if !path .components() .any(|component| component.as_os_str() == *DOT_GIT) { - if let Some(repository) = snapshot.repository_for_work_directory(path) { - let entry = 
repository.work_directory.0; - snapshot.git_repositories.remove(&entry); + if let Some(repository) = snapshot.repository(PathKey(path.clone())) { + snapshot + .git_repositories + .remove(&repository.work_directory_id); snapshot .snapshot - .repository_entries - .remove(&RepositoryWorkDirectory(path.into())); + .repositories + .remove(&PathKey(repository.work_directory.path.clone()), &()); return Some(()); } } @@ -4684,7 +5015,7 @@ impl BackgroundScanner { .abs_path .strip_prefix(snapshot.abs_path.as_path()) .unwrap(); - let repo = snapshot.repo_for_path(path); + for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); @@ -4720,18 +5051,6 @@ impl BackgroundScanner { let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone(); path_entry.scan_id = snapshot.scan_id; path_entry.is_ignored = entry.is_ignored; - if !entry.is_dir() && !entry.is_ignored && !entry.is_external { - if let Some((ref repo_entry, local_repo)) = repo { - if let Ok(repo_path) = repo_entry.relativize(snapshot, &entry.path) { - let status = local_repo - .repo_ptr - .status(&[repo_path.0.clone()]) - .ok() - .and_then(|status| status.get(&repo_path)); - entry.git_status = status; - } - } - } entries_by_id_edits.push(Edit::Insert(path_entry)); entries_by_path_edits.push(Edit::Insert(entry)); } @@ -4776,7 +5095,7 @@ impl BackgroundScanner { } }); - let (work_directory, repository) = match existing_repository_entry { + let local_repository = match existing_repository_entry { None => { match state.insert_git_repository( dot_git_dir.into(), @@ -4787,45 +5106,36 @@ impl BackgroundScanner { None => continue, } } - Some((entry_id, repository)) => { - if repository.git_dir_scan_id == scan_id { + Some((entry_id, local_repository)) => { + if local_repository.git_dir_scan_id == scan_id { continue; } let Some(work_dir) = state .snapshot .entry_for_id(entry_id) - .map(|entry| 
RepositoryWorkDirectory(entry.path.clone())) + .map(|entry| entry.path.clone()) else { continue; }; - let repo = &repository.repo_ptr; - let branch = repo.branch_name(); - repo.reload_index(); + let branch = local_repository.repo_ptr.branch_name(); + local_repository.repo_ptr.reload_index(); - state - .snapshot - .git_repositories - .update(&entry_id, |entry| entry.git_dir_scan_id = scan_id); - state - .snapshot - .snapshot - .repository_entries - .update(&work_dir, |entry| entry.branch = branch.map(Into::into)); - (work_dir, repository.repo_ptr.clone()) + state.snapshot.git_repositories.update(&entry_id, |entry| { + entry.git_dir_scan_id = scan_id; + entry.status_scan_id = scan_id; + }); + state.snapshot.snapshot.repositories.update( + &PathKey(work_dir.clone()), + &(), + |entry| entry.branch = branch.map(Into::into), + ); + + local_repository } }; - repo_updates.push(UpdateGitStatusesJob { - location_in_repo: state - .snapshot - .repository_entries - .get(&work_directory) - .and_then(|repo| repo.location_in_repo.clone()) - .clone(), - work_directory, - repository, - }); + repo_updates.push(UpdateGitStatusesJob { local_repository }); } // Remove any git repositories whose .git entry no longer exists. @@ -4851,9 +5161,9 @@ impl BackgroundScanner { snapshot .git_repositories .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); - snapshot - .repository_entries - .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); + snapshot.repositories.retain(&(), |entry| { + ids_to_preserve.contains(&entry.work_directory_id) + }); } let (mut updates_done_tx, mut updates_done_rx) = barrier::channel(); @@ -4887,59 +5197,72 @@ impl BackgroundScanner { /// Update the git statuses for a given batch of entries. 
fn update_git_statuses(&self, job: UpdateGitStatusesJob) { - log::trace!("updating git statuses for repo {:?}", job.work_directory.0); + log::trace!( + "updating git statuses for repo {:?}", + job.local_repository.work_directory.path + ); let t0 = Instant::now(); - let Some(statuses) = job.repository.status(&[PathBuf::from("")]).log_err() else { + + let Some(statuses) = job + .local_repository + .repo() + .status(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) + .log_err() + else { return; }; log::trace!( "computed git statuses for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed() ); let t0 = Instant::now(); - let mut changes = Vec::new(); + let mut changed_paths = Vec::new(); let snapshot = self.state.lock().snapshot.snapshot.clone(); - for file in snapshot.traverse_from_path(true, false, false, job.work_directory.0.as_ref()) { - let Ok(repo_path) = file.path.strip_prefix(&job.work_directory.0) else { - break; - }; - let git_status = if let Some(location) = &job.location_in_repo { - statuses.get(&location.join(repo_path)) - } else { - statuses.get(repo_path) - }; - if file.git_status != git_status { - let mut entry = file.clone(); - entry.git_status = git_status; - changes.push((entry.path, git_status)); + + let Some(mut repository) = + snapshot.repository(job.local_repository.work_directory.path_key()) + else { + log::error!("Got an UpdateGitStatusesJob for a repository that isn't in the snapshot"); + debug_assert!(false); + return; + }; + + let mut new_entries_by_path = SumTree::new(&()); + for (repo_path, status) in statuses.entries.iter() { + let project_path = repository.work_directory.unrelativize(repo_path); + + new_entries_by_path.insert_or_replace( + StatusEntry { + repo_path: repo_path.clone(), + status: *status, + }, + &(), + ); + + if let Some(path) = project_path { + changed_paths.push(path); } } + repository.statuses_by_path = new_entries_by_path; let mut state = self.state.lock(); - let edits = changes - 
.iter() - .filter_map(|(path, git_status)| { - let entry = state.snapshot.entry_for_path(path)?.clone(); - Some(Edit::Insert(Entry { - git_status: *git_status, - ..entry.clone() - })) - }) - .collect(); + state + .snapshot + .repositories + .insert_or_replace(repository, &()); - // Apply the git status changes. util::extend_sorted( &mut state.changed_paths, - changes.iter().map(|p| p.0.clone()), + changed_paths, usize::MAX, Ord::cmp, ); - state.snapshot.entries_by_path.edit(edits, &()); + log::trace!( "applied git status updates for repo {:?} in {:?}", - job.work_directory.0, + job.local_repository.work_directory.path, t0.elapsed(), ); } @@ -5109,28 +5432,28 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { result } +#[derive(Debug)] struct RepoPaths { repo: Arc, - relative_paths: Vec>, - repo_paths: Vec, + // sorted + repo_paths: Vec, } impl RepoPaths { - fn add_paths(&mut self, relative_path: &Arc, repo_path: RepoPath) { - self.relative_paths.push(relative_path.clone()); - self.repo_paths.push(repo_path.0); + fn add_path(&mut self, repo_path: RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(_) => {} + Err(ix) => self.repo_paths.insert(ix, repo_path), + } } - fn into_git_file_statuses(self) -> HashMap, GitFileStatus> { - let mut statuses = HashMap::default(); - if let Ok(status) = self.repo.status(&self.repo_paths) { - for (repo_path, relative_path) in self.repo_paths.into_iter().zip(self.relative_paths) { - if let Some(path_status) = status.get(&repo_path) { - statuses.insert(relative_path, path_status); - } + fn remove_repo_path(&mut self, repo_path: &RepoPath) { + match self.repo_paths.binary_search(&repo_path) { + Ok(ix) => { + self.repo_paths.remove(ix); } + Err(_) => {} } - statuses } } @@ -5141,13 +5464,6 @@ struct ScanJob { scan_queue: Sender, ancestor_inodes: TreeSet, is_external: bool, - containing_repository: Option, -} - -#[derive(Clone)] -struct ScanJobContainingRepository { - work_directory: 
RepositoryWorkDirectory, - statuses: GitStatus, } struct UpdateIgnoreStatusJob { @@ -5158,9 +5474,7 @@ struct UpdateIgnoreStatusJob { } struct UpdateGitStatusesJob { - work_directory: RepositoryWorkDirectory, - location_in_repo: Option>, - repository: Arc, + local_repository: LocalRepositoryEntry, } pub trait WorktreeModelHandle { @@ -5333,44 +5647,166 @@ impl<'a> Default for TraversalProgress<'a> { } } -#[derive(Clone, Debug, Default, Copy)] -struct GitStatuses { - added: usize, - modified: usize, - conflict: usize, +#[derive(Debug, Clone, Copy)] +pub struct GitEntryRef<'a> { + pub entry: &'a Entry, + pub git_status: Option, } -impl AddAssign for GitStatuses { - fn add_assign(&mut self, rhs: Self) { - self.added += rhs.added; - self.modified += rhs.modified; - self.conflict += rhs.conflict; - } -} - -impl Sub for GitStatuses { - type Output = GitStatuses; - - fn sub(self, rhs: Self) -> Self::Output { - GitStatuses { - added: self.added - rhs.added, - modified: self.modified - rhs.modified, - conflict: self.conflict - rhs.conflict, +impl<'a> GitEntryRef<'a> { + pub fn to_owned(&self) -> GitEntry { + GitEntry { + entry: self.entry.clone(), + git_status: self.git_status, } } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses { - fn zero(_cx: &()) -> Self { - Default::default() - } +impl<'a> Deref for GitEntryRef<'a> { + type Target = Entry; - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { - *self += summary.statuses + fn deref(&self) -> &Self::Target { + &self.entry } } +impl<'a> AsRef for GitEntryRef<'a> { + fn as_ref(&self) -> &Entry { + self.entry + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct GitEntry { + pub entry: Entry, + pub git_status: Option, +} + +impl GitEntry { + pub fn to_ref(&self) -> GitEntryRef { + GitEntryRef { + entry: &self.entry, + git_status: self.git_status, + } + } +} + +impl Deref for GitEntry { + type Target = Entry; + + fn deref(&self) -> &Self::Target { + &self.entry + } +} + +impl AsRef for 
GitEntry { + fn as_ref(&self) -> &Entry { + &self.entry + } +} + +/// Walks the worktree entries and their associated git statuses. +pub struct GitTraversal<'a> { + traversal: Traversal<'a>, + current_entry_status: Option, + repo_location: Option<( + &'a RepositoryEntry, + Cursor<'a, StatusEntry, PathProgress<'a>>, + )>, +} + +impl<'a> GitTraversal<'a> { + fn synchronize_statuses(&mut self, reset: bool) { + self.current_entry_status = None; + + let Some(entry) = self.traversal.cursor.item() else { + return; + }; + + let Some(repo) = self.traversal.snapshot.repository_for_path(&entry.path) else { + self.repo_location = None; + return; + }; + + // Update our state if we changed repositories. + if reset || self.repo_location.as_ref().map(|(prev_repo, _)| prev_repo) != Some(&repo) { + self.repo_location = Some((repo, repo.statuses_by_path.cursor::(&()))); + } + + let Some((repo, statuses)) = &mut self.repo_location else { + return; + }; + + let repo_path = repo.relativize(&entry.path).unwrap(); + + if entry.is_dir() { + let mut statuses = statuses.clone(); + statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()); + let summary: GitStatuses = + statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &()); + + self.current_entry_status = summary.to_status(); + } else if entry.is_file() { + // For a file entry, park the cursor on the corresponding status + if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) { + self.current_entry_status = Some(statuses.item().unwrap().status); + } + } + } + + pub fn advance(&mut self) -> bool { + self.advance_by(1) + } + + pub fn advance_by(&mut self, count: usize) -> bool { + let found = self.traversal.advance_by(count); + self.synchronize_statuses(false); + found + } + + pub fn advance_to_sibling(&mut self) -> bool { + let found = self.traversal.advance_to_sibling(); + self.synchronize_statuses(false); + found + } + + pub fn back_to_parent(&mut self) -> bool { + let 
found = self.traversal.back_to_parent(); + self.synchronize_statuses(true); + found + } + + pub fn start_offset(&self) -> usize { + self.traversal.start_offset() + } + + pub fn end_offset(&self) -> usize { + self.traversal.end_offset() + } + + pub fn entry(&self) -> Option> { + Some(GitEntryRef { + entry: self.traversal.cursor.item()?, + git_status: self.current_entry_status, + }) + } +} + +impl<'a> Iterator for GitTraversal<'a> { + type Item = GitEntryRef<'a>; + fn next(&mut self) -> Option { + if let Some(item) = self.entry() { + self.advance(); + Some(item) + } else { + None + } + } +} + +#[derive(Debug)] pub struct Traversal<'a> { + snapshot: &'a Snapshot, cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>, include_ignored: bool, include_files: bool, @@ -5379,15 +5815,16 @@ pub struct Traversal<'a> { impl<'a> Traversal<'a> { fn new( - entries: &'a SumTree, + snapshot: &'a Snapshot, include_files: bool, include_dirs: bool, include_ignored: bool, start_path: &Path, ) -> Self { - let mut cursor = entries.cursor(&()); - cursor.seek(&TraversalTarget::Path(start_path), Bias::Left, &()); + let mut cursor = snapshot.entries_by_path.cursor(&()); + cursor.seek(&TraversalTarget::path(start_path), Bias::Left, &()); let mut traversal = Self { + snapshot, cursor, include_files, include_dirs, @@ -5398,6 +5835,17 @@ impl<'a> Traversal<'a> { } traversal } + + pub fn with_git_statuses(self) -> GitTraversal<'a> { + let mut this = GitTraversal { + traversal: self, + current_entry_status: None, + repo_location: None, + }; + this.synchronize_statuses(true); + this + } + pub fn advance(&mut self) -> bool { self.advance_by(1) } @@ -5417,11 +5865,8 @@ impl<'a> Traversal<'a> { pub fn advance_to_sibling(&mut self) -> bool { while let Some(entry) = self.cursor.item() { - self.cursor.seek_forward( - &TraversalTarget::PathSuccessor(&entry.path), - Bias::Left, - &(), - ); + self.cursor + .seek_forward(&TraversalTarget::successor(&entry.path), Bias::Left, &()); if let Some(entry) = 
self.cursor.item() { if (self.include_files || !entry.is_file()) && (self.include_dirs || !entry.is_dir()) @@ -5439,7 +5884,7 @@ impl<'a> Traversal<'a> { return false; }; self.cursor - .seek(&TraversalTarget::Path(parent_path), Bias::Left, &()) + .seek(&TraversalTarget::path(parent_path), Bias::Left, &()) } pub fn entry(&self) -> Option<&'a Entry> { @@ -5472,10 +5917,58 @@ impl<'a> Iterator for Traversal<'a> { } } +#[derive(Debug, Clone, Copy)] +enum PathTarget<'a> { + Path(&'a Path), + Successor(&'a Path), + Contains(&'a Path), +} + +impl<'a> PathTarget<'a> { + fn cmp_path(&self, other: &Path) -> Ordering { + match self { + PathTarget::Path(path) => path.cmp(&other), + PathTarget::Successor(path) => { + if other.starts_with(path) { + Ordering::Greater + } else { + Ordering::Equal + } + } + PathTarget::Contains(path) => { + if path.starts_with(other) { + Ordering::Equal + } else { + Ordering::Greater + } + } + } + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, PathProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &PathProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b, S: Summary> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for PathTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &S::Context) -> Ordering { + self.cmp_path(&cursor_location.max_path) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, (TraversalProgress<'a>, GitStatuses)> + for PathTarget<'b> +{ + fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { + self.cmp_path(&cursor_location.0.max_path) + } +} + #[derive(Debug)] enum TraversalTarget<'a> { - Path(&'a Path), - PathSuccessor(&'a Path), + Path(PathTarget<'a>), Count { count: usize, include_files: bool, @@ -5484,17 +5977,18 @@ enum TraversalTarget<'a> { }, } -impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { - fn cmp(&self, cursor_location: 
&TraversalProgress<'a>, _: &()) -> Ordering { +impl<'a> TraversalTarget<'a> { + fn path(path: &'a Path) -> Self { + Self::Path(PathTarget::Path(path)) + } + + fn successor(path: &'a Path) -> Self { + Self::Path(PathTarget::Successor(path)) + } + + fn cmp_progress(&self, progress: &TraversalProgress) -> Ordering { match self { - TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path), - TraversalTarget::PathSuccessor(path) => { - if cursor_location.max_path.starts_with(path) { - Ordering::Greater - } else { - Ordering::Equal - } - } + TraversalTarget::Path(path) => path.cmp_path(&progress.max_path), TraversalTarget::Count { count, include_files, @@ -5502,17 +5996,21 @@ impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTa include_ignored, } => Ord::cmp( count, - &cursor_location.count(*include_files, *include_dirs, *include_ignored), + &progress.count(*include_files, *include_dirs, *include_ignored), ), } } } -impl<'a, 'b> SeekTarget<'a, EntrySummary, (TraversalProgress<'a>, GitStatuses)> - for TraversalTarget<'b> -{ - fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering { - self.cmp(&cursor_location.0, &()) +impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) + } +} + +impl<'a, 'b> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for TraversalTarget<'b> { + fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering { + self.cmp_progress(cursor_location) } } @@ -5521,6 +6019,20 @@ pub struct ChildEntriesIter<'a> { traversal: Traversal<'a>, } +impl<'a> ChildEntriesIter<'a> { + pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> { + ChildEntriesGitIter { + parent_path: self.parent_path, + traversal: self.traversal.with_git_statuses(), + } + } +} + +pub struct ChildEntriesGitIter<'a> { + parent_path: &'a Path, + 
traversal: GitTraversal<'a>, +} + impl<'a> Iterator for ChildEntriesIter<'a> { type Item = &'a Entry; @@ -5535,6 +6047,20 @@ impl<'a> Iterator for ChildEntriesIter<'a> { } } +impl<'a> Iterator for ChildEntriesGitIter<'a> { + type Item = GitEntryRef<'a>; + + fn next(&mut self) -> Option { + if let Some(item) = self.traversal.entry() { + if item.path.starts_with(self.parent_path) { + self.traversal.advance_to_sibling(); + return Some(item); + } + } + None + } +} + impl<'a> From<&'a Entry> for proto::Entry { fn from(entry: &'a Entry) -> Self { Self { @@ -5545,7 +6071,6 @@ impl<'a> From<&'a Entry> for proto::Entry { mtime: entry.mtime.map(|time| time.into()), is_ignored: entry.is_ignored, is_external: entry.is_external, - git_status: entry.git_status.map(git_status_to_proto), is_fifo: entry.is_fifo, size: Some(entry.size), canonical_path: entry @@ -5582,7 +6107,6 @@ impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry { is_ignored: entry.is_ignored, is_always_included: always_included.is_match(path.as_ref()), is_external: entry.is_external, - git_status: git_status_from_proto(entry.git_status), is_private: false, char_bag, is_fifo: entry.is_fifo, @@ -5596,6 +6120,7 @@ fn git_status_from_proto(git_status: Option) -> Option { proto::GitStatus::Added => GitFileStatus::Added, proto::GitStatus::Modified => GitFileStatus::Modified, proto::GitStatus::Conflict => GitFileStatus::Conflict, + proto::GitStatus::Deleted => GitFileStatus::Deleted, }) }) } @@ -5605,6 +6130,8 @@ fn git_status_to_proto(status: GitFileStatus) -> i32 { GitFileStatus::Added => proto::GitStatus::Added as i32, GitFileStatus::Modified => proto::GitStatus::Modified as i32, GitFileStatus::Conflict => proto::GitStatus::Conflict as i32, + GitFileStatus::Deleted => proto::GitStatus::Deleted as i32, + GitFileStatus::Untracked => proto::GitStatus::Added as i32, // TODO } } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 8b93396e24..4df3d98469 100644 
--- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1497,7 +1497,8 @@ async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) { cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - check_propagated_statuses( + + check_git_statuses( &snapshot, &[ (Path::new(""), Some(GitFileStatus::Modified)), @@ -2178,15 +2179,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project1")); assert_eq!( tree.status_for_file(Path::new("projects/project1/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project1/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2199,15 +2200,15 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { cx.read(|cx| { let tree = tree.read(cx); - let (work_dir, _) = tree.repositories().next().unwrap(); - assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); + let repo = tree.repositories().next().unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("projects/project2")); assert_eq!( tree.status_for_file(Path::new("projects/project2/a")), Some(GitFileStatus::Modified) ); assert_eq!( tree.status_for_file(Path::new("projects/project2/b")), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } @@ -2253,23 +2254,13 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); - let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1").to_owned()) - ); + let repo = 
tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); + assert_eq!(repo.path.as_ref(), Path::new("dir1")); - let entry = tree + let repo = tree .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) .unwrap(); - assert_eq!( - entry - .work_directory(tree) - .map(|directory| directory.as_ref().to_owned()), - Some(Path::new("dir1/deps/dep1").to_owned()) - ); + assert_eq!(repo.path.as_ref(), Path::new("dir1/deps/dep1")); let entries = tree.files(false, 0); @@ -2278,10 +2269,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { .map(|(entry, repo)| { ( entry.path.as_ref(), - repo.and_then(|repo| { - repo.work_directory(tree) - .map(|work_directory| work_directory.0.to_path_buf()) - }), + repo.map(|repo| repo.path.to_path_buf()), ) }) .collect::>(); @@ -2334,7 +2322,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_git_status(cx: &mut TestAppContext) { +async fn test_file_status(cx: &mut TestAppContext) { init_test(cx); cx.executor().allow_parking(); const IGNORE_RULE: &str = "**/target"; @@ -2393,17 +2381,17 @@ async fn test_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = snapshot.repositories().next().unwrap(); - assert_eq!(dir.as_ref(), Path::new("project")); + let repo_entry = snapshot.repositories().next().unwrap(); + assert_eq!(repo_entry.path.as_ref(), Path::new("project")); assert!(repo_entry.location_in_repo.is_none()); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2433,7 +2421,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(F_TXT)), - 
Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); @@ -2455,7 +2443,7 @@ async fn test_git_status(cx: &mut TestAppContext) { assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); assert_eq!( snapshot.status_for_file(project_path.join(B_TXT)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); assert_eq!( snapshot.status_for_file(project_path.join(E_TXT)), @@ -2494,7 +2482,7 @@ async fn test_git_status(cx: &mut TestAppContext) { let snapshot = tree.snapshot(); assert_eq!( snapshot.status_for_file(project_path.join(renamed_dir_name).join(RENAMED_FILE)), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2518,11 +2506,125 @@ async fn test_git_status(cx: &mut TestAppContext) { .join(Path::new(renamed_dir_name)) .join(RENAMED_FILE) ), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); } +#[gpui::test] +async fn test_git_repository_status(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", // Modified + "b.txt": "bb", // Added + "c.txt": "ccc", // Unchanged + "d.txt": "dddd", // Deleted + }, + + })); + + // Set up git repository before creating the worktree. 
+ let work_dir = root.path().join("project"); + let repo = git_init(work_dir.as_path()); + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_add("d.txt", &repo); + git_commit("Initial commit", &repo); + std::fs::remove_file(work_dir.join("d.txt")).unwrap(); + std::fs::write(work_dir.join("a.txt"), "aa").unwrap(); + + let tree = Worktree::local( + root.path(), + true, + Arc::new(RealFs::default()), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + assert_eq!(entries.len(), 3); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].status, GitFileStatus::Untracked); + assert_eq!(entries[2].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[2].status, GitFileStatus::Deleted); + }); + + std::fs::write(work_dir.join("c.txt"), "some changes").unwrap(); + eprintln!("File c.txt has been modified"); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repository = snapshot.repositories().next().unwrap(); + let entries = repository.status().collect::>(); + + std::assert_eq!(entries.len(), 4, "entries: {entries:?}"); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Modified); + assert_eq!(entries[1].repo_path.as_ref(), Path::new("b.txt")); + assert_eq!(entries[1].status, 
GitFileStatus::Untracked); + // Status updated + assert_eq!(entries[2].repo_path.as_ref(), Path::new("c.txt")); + assert_eq!(entries[2].status, GitFileStatus::Modified); + assert_eq!(entries[3].repo_path.as_ref(), Path::new("d.txt")); + assert_eq!(entries[3].status, GitFileStatus::Deleted); + }); + + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_remove_index(Path::new("d.txt"), &repo); + git_commit("Another commit", &repo); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + std::fs::remove_file(work_dir.join("a.txt")).unwrap(); + std::fs::remove_file(work_dir.join("b.txt")).unwrap(); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + let repo = snapshot.repositories().next().unwrap(); + let entries = repo.status().collect::>(); + + // Deleting an untracked entry, b.txt, should leave no status + // a.txt was tracked, and so should have a status + assert_eq!( + entries.len(), + 1, + "Entries length was incorrect\n{:#?}", + &entries + ); + assert_eq!(entries[0].repo_path.as_ref(), Path::new("a.txt")); + assert_eq!(entries[0].status, GitFileStatus::Deleted); + }); +} + #[gpui::test] async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { init_test(cx); @@ -2575,22 +2677,22 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { tree.read_with(cx, |tree, _cx| { let snapshot = tree.snapshot(); assert_eq!(snapshot.repositories().count(), 1); - let (dir, repo_entry) = snapshot.repositories().next().unwrap(); + let repo = snapshot.repositories().next().unwrap(); // Path is blank because the working directory of // the git repository is located at the root of the project - assert_eq!(dir.as_ref(), Path::new("")); + assert_eq!(repo.path.as_ref(), Path::new("")); // 
This is the missing path between the root of the project (sub-folder-2) and its // location relative to the root of the repository. assert_eq!( - repo_entry.location_in_repo, + repo.location_in_repo, Some(Arc::from(Path::new("sub-folder-1/sub-folder-2"))) ); assert_eq!(snapshot.status_for_file("c.txt"), None); assert_eq!( snapshot.status_for_file("d/e.txt"), - Some(GitFileStatus::Added) + Some(GitFileStatus::Untracked) ); }); @@ -2612,6 +2714,93 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_traverse_with_git_status(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + let mut traversal = snapshot + .traverse_from_path(true, false, true, Path::new("x")) + .with_git_statuses(); + + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt")); + 
assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Modified)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Conflict)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("x/z.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt")); + assert_eq!(entry.git_status, None); + let entry = traversal.next().unwrap(); + assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt")); + assert_eq!(entry.git_status, Some(GitFileStatus::Added)); +} + #[gpui::test] async fn test_propagate_git_statuses(cx: &mut TestAppContext) { init_test(cx); @@ -2638,7 +2827,6 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { "h1.txt": "", "h2.txt": "" }, - }), ) .await; @@ -2668,7 +2856,16 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { cx.executor().run_until_parked(); let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - check_propagated_statuses( + check_git_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), + (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + check_git_statuses( &snapshot, &[ (Path::new(""), Some(GitFileStatus::Conflict)), @@ -2685,7 +2882,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ], ); - check_propagated_statuses( + check_git_statuses( &snapshot, &[ (Path::new("a/b"), Some(GitFileStatus::Added)), @@ -2700,7 +2897,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ], ); - 
check_propagated_statuses( + check_git_statuses( &snapshot, &[ (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), @@ -2712,6 +2909,246 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_propagate_statuses_for_repos_under_project(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar" + }, + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[(Path::new("x1.txt"), GitFileStatus::Added)], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/y/.git"), + &[ + (Path::new("y1.txt"), GitFileStatus::Conflict), + (Path::new("y2.txt"), GitFileStatus::Modified), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Modified)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + ); + + 
check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + ], + ); + + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Added)), + (Path::new("x/x1.txt"), Some(GitFileStatus::Added)), + (Path::new("x/x2.txt"), None), + (Path::new("y"), Some(GitFileStatus::Conflict)), + (Path::new("y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("y/y2.txt"), Some(GitFileStatus::Modified)), + (Path::new("z"), Some(GitFileStatus::Modified)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Modified)), + ], + ); +} + +#[gpui::test] +async fn test_propagate_statuses_for_nested_repos(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "x": { + ".git": {}, + "x1.txt": "foo", + "x2.txt": "bar", + "y": { + ".git": {}, + "y1.txt": "baz", + "y2.txt": "qux" + }, + "z.txt": "sneaky..." 
+ }, + "z": { + ".git": {}, + "z1.txt": "quux", + "z2.txt": "quuux" + } + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/.git"), + &[ + (Path::new("x2.txt"), GitFileStatus::Modified), + (Path::new("z.txt"), GitFileStatus::Added), + ], + ); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/x/y/.git"), + &[(Path::new("y1.txt"), GitFileStatus::Conflict)], + ); + + fs.set_status_for_repo_via_git_operation( + Path::new("/root/z/.git"), + &[(Path::new("z2.txt"), GitFileStatus::Added)], + ); + + let tree = Worktree::local( + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + // Sanity check the propagation for x/y and z + check_git_statuses( + &snapshot, + &[ + (Path::new("x/y"), Some(GitFileStatus::Conflict)), // the y git repository has conflict file in it, and so should have a conflict status + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y2.txt"), None), + ], + ); + check_git_statuses( + &snapshot, + &[ + (Path::new("z"), Some(GitFileStatus::Added)), + (Path::new("z/z1.txt"), None), + (Path::new("z/z2.txt"), Some(GitFileStatus::Added)), + ], + ); + + // Test one of the fundamental cases of propagation blocking, the transition from one git repository to another + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), Some(GitFileStatus::Conflict)), + (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + // Sanity check everything around it + check_git_statuses( + &snapshot, + &[ + (Path::new("x"), Some(GitFileStatus::Modified)), + (Path::new("x/x1.txt"), None), + (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)), + (Path::new("x/y"), 
Some(GitFileStatus::Conflict)),
+            (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+            (Path::new("x/y/y2.txt"), None),
+            (Path::new("x/z.txt"), Some(GitFileStatus::Added)),
+        ],
+    );
+
+    // Test the other fundamental case, transitioning from git repository to non-git repository
+    check_git_statuses(
+        &snapshot,
+        &[
+            (Path::new(""), None),
+            (Path::new("x"), Some(GitFileStatus::Modified)),
+            (Path::new("x/x1.txt"), None),
+        ],
+    );
+
+    // And all together now
+    check_git_statuses(
+        &snapshot,
+        &[
+            (Path::new(""), None),
+            (Path::new("x"), Some(GitFileStatus::Modified)),
+            (Path::new("x/x1.txt"), None),
+            (Path::new("x/x2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("x/y"), Some(GitFileStatus::Conflict)),
+            (Path::new("x/y/y1.txt"), Some(GitFileStatus::Conflict)),
+            (Path::new("x/y/y2.txt"), None),
+            (Path::new("x/z.txt"), Some(GitFileStatus::Added)),
+            (Path::new("z"), Some(GitFileStatus::Added)),
+            (Path::new("z/z1.txt"), None),
+            (Path::new("z/z2.txt"), Some(GitFileStatus::Added)),
+        ],
+    );
+}
+
 #[gpui::test]
 async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
     init_test(cx);
@@ -2736,22 +3173,20 @@ async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
 }
 
 #[track_caller]
-fn check_propagated_statuses(
-    snapshot: &Snapshot,
-    expected_statuses: &[(&Path, Option)],
-) {
-    let mut entries = expected_statuses
+fn check_git_statuses(snapshot: &Snapshot, expected_statuses: &[(&Path, Option)]) {
+    let mut traversal = snapshot
+        .traverse_from_path(true, true, false, "".as_ref())
+        .with_git_statuses();
+    let found_statuses = expected_statuses
         .iter()
-        .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+        .map(|&(path, _)| {
+            let git_entry = traversal
+                .find(|git_entry| &*git_entry.path == path)
+                .unwrap_or_else(|| panic!("Traversal has no entry for {path:?}"));
+            (path, git_entry.git_status)
+        })
         .collect::>();
-    snapshot.propagate_git_statuses(&mut entries);
-    assert_eq!(
-        entries
-            .iter()
-            .map(|e| 
(e.path.as_ref(), e.git_status))
-            .collect::>(),
-        expected_statuses
-    );
+    assert_eq!(found_statuses, expected_statuses);
 }
 
 #[track_caller]
@@ -2763,14 +3198,14 @@ fn git_init(path: &Path) -> git2::Repository {
 fn git_add>(path: P, repo: &git2::Repository) {
     let path = path.as_ref();
     let mut index = repo.index().expect("Failed to get index");
-    index.add_path(path).expect("Failed to add a.txt");
+    index.add_path(path).expect("Failed to add file");
     index.write().expect("Failed to write index");
 }
 
 #[track_caller]
 fn git_remove_index(path: &Path, repo: &git2::Repository) {
     let mut index = repo.index().expect("Failed to get index");
-    index.remove_path(path).expect("Failed to add a.txt");
+    index.remove_path(path).expect("Failed to remove file");
     index.write().expect("Failed to write index");
 }
 
@@ -2900,7 +3335,8 @@ fn assert_entry_git_state(
 ) {
     let entry = tree.entry_for_path(path).expect("entry {path} not found");
     assert_eq!(
-        entry.git_status, git_status,
+        tree.status_for_file(Path::new(path)),
+        git_status,
         "expected {path} to have git status: {git_status:?}"
     );
     assert_eq!(
diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs
index fef3f14f00..b010e836d5 100644
--- a/crates/zed/src/main.rs
+++ b/crates/zed/src/main.rs
@@ -70,7 +70,7 @@ use util::load_shell_from_passwd;
 #[global_allocator]
 static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
 
-fn files_not_createad_on_launch(errors: HashMap>) {
+fn files_not_created_on_launch(errors: HashMap>) {
     let message = "Zed failed to launch";
     let error_details = errors
         .into_iter()
@@ -179,7 +179,7 @@ fn main() {
 
     let file_errors = init_paths();
     if !file_errors.is_empty() {
-        files_not_createad_on_launch(file_errors);
+        files_not_created_on_launch(file_errors);
         return;
     }
 
diff --git a/extensions/perplexity/README.md b/extensions/perplexity/README.md
index 405356dc53..337c24325b 100644
--- a/extensions/perplexity/README.md
+++ b/extensions/perplexity/README.md
@@ -38,6 +38,6 @@ Open the AI Assistant panel (`cmd-r` or 
`ctrl-r`) and enter:
 
 This extension requires a Perplexity API key to be available via the `PERPLEXITY_API_KEY` environment variable.
 
-To onbtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently cheapest model available which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD.
+To obtain a Perplexity.ai API token, login to your Perplexity.ai account and go [Settings->API](https://www.perplexity.ai/settings/api) and under "API Keys" click "Generate". This will require you to have [Perplexity Pro](https://www.perplexity.ai/pro) or to buy API credits. By default the extension uses `llama-3.1-sonar-small-128k-online`, currently the cheapest model available, which is roughly half a penny per request + a penny per 50,000 tokens. So most requests will cost less than $0.01 USD.
 
 Take your API key and add it to your environment by adding `export PERPLEXITY_API_KEY="pplx-0123456789abcdef..."` to your `~/.zshrc` or `~/.bashrc`. Reload close and reopen your terminal session. Check with `env |grep PERPLEXITY_API_KEY`.
 
diff --git a/script/bundle-mac b/script/bundle-mac
index 54247645cc..e70aa89097 100755
--- a/script/bundle-mac
+++ b/script/bundle-mac
@@ -14,7 +14,7 @@ can_code_sign=false
 
 # This must match the team in the provisioning profile.
 IDENTITY="Zed Industries, Inc."
-APPLE_NOTORIZATION_TEAM="MQ55VZLNZQ" +APPLE_NOTARIZATION_TEAM="MQ55VZLNZQ" # Function for displaying help info help_info() { @@ -317,7 +317,7 @@ function sign_app_binaries() { /usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v echo "Notarizing DMG with Apple" - "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTORIZATION_TEAM" "${dmg_file_path}" + "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}" echo "Removing temporary DMG (used only for notarization)" rm "${dmg_file_path}" @@ -344,7 +344,7 @@ function sign_app_binaries() { if [[ $can_code_sign = true ]]; then echo "Notarizing DMG with Apple" /usr/bin/codesign --deep --force --timestamp --options runtime --sign "$IDENTITY" "$(pwd)/${dmg_file_path}" -v - "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTORIZATION_TEAM" "${dmg_file_path}" + "${xcode_bin_dir_path}/notarytool" submit --wait --apple-id "$APPLE_NOTARIZATION_USERNAME" --password "$APPLE_NOTARIZATION_PASSWORD" --team-id "$APPLE_NOTARIZATION_TEAM" "${dmg_file_path}" "${xcode_bin_dir_path}/stapler" staple "${dmg_file_path}" fi