diff --git a/Cargo.lock b/Cargo.lock index 724550ea0a..b13fbbf33f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10591,6 +10591,7 @@ dependencies = [ "fuzzy", "git", "git2", + "git_hosting_providers", "globset", "gpui", "http_client", @@ -17217,7 +17218,6 @@ dependencies = [ "fuzzy", "git", "git2", - "git_hosting_providers", "gpui", "http_client", "ignore", diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 8a0df784a8..7fe4789591 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -10,10 +10,10 @@ use gpui::{ use language::{BinaryStatus, LanguageRegistry, LanguageServerId}; use project::{ EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project, - ProjectEnvironmentEvent, WorktreeId, + ProjectEnvironmentEvent, }; use smallvec::SmallVec; -use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration}; +use std::{cmp::Reverse, fmt::Write, path::Path, sync::Arc, time::Duration}; use ui::{ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*}; use util::truncate_and_trailoff; use workspace::{StatusItemView, Workspace, item::ItemHandle}; @@ -218,13 +218,14 @@ impl ActivityIndicator { fn pending_environment_errors<'a>( &'a self, cx: &'a App, - ) -> impl Iterator { + ) -> impl Iterator, &'a EnvironmentErrorMessage)> { self.project.read(cx).shell_environment_errors(cx) } fn content_to_render(&mut self, cx: &mut Context) -> Option { // Show if any direnv calls failed - if let Some((&worktree_id, error)) = self.pending_environment_errors(cx).next() { + if let Some((abs_path, error)) = self.pending_environment_errors(cx).next() { + let abs_path = abs_path.clone(); return Some(Content { icon: Some( Icon::new(IconName::Warning) @@ -234,7 +235,7 @@ impl ActivityIndicator { message: error.0.clone(), on_click: Some(Arc::new(move |this, window, cx| { this.project.update(cx, |project, cx| { - 
project.remove_environment_error(worktree_id, cx); + project.remove_environment_error(&abs_path, cx); }); window.dispatch_action(Box::new(workspace::OpenLog), cx); })), diff --git a/crates/assistant2/src/thread.rs b/crates/assistant2/src/thread.rs index 9ea43dff93..8115ed4fdc 100644 --- a/crates/assistant2/src/thread.rs +++ b/crates/assistant2/src/thread.rs @@ -11,7 +11,7 @@ use collections::{BTreeMap, HashMap, HashSet}; use fs::Fs; use futures::future::Shared; use futures::{FutureExt, StreamExt as _}; -use git; +use git::repository::DiffType; use gpui::{App, AppContext, Context, Entity, EventEmitter, SharedString, Task, WeakEntity}; use language_model::{ LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest, @@ -19,7 +19,7 @@ use language_model::{ LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError, Role, StopReason, TokenUsage, }; -use project::git_store::{GitStore, GitStoreCheckpoint}; +use project::git_store::{GitStore, GitStoreCheckpoint, RepositoryState}; use project::{Project, Worktree}; use prompt_store::{ AssistantSystemPromptContext, PromptBuilder, RulesFile, WorktreeInfoForSystemPrompt, @@ -1446,48 +1446,61 @@ impl Thread { (path, snapshot) }); - let Ok((worktree_path, snapshot)) = worktree_info else { + let Ok((worktree_path, _snapshot)) = worktree_info else { return WorktreeSnapshot { worktree_path: String::new(), git_state: None, }; }; - let repo_info = git_store + let git_state = git_store .update(cx, |git_store, cx| { git_store .repositories() .values() - .find(|repo| repo.read(cx).worktree_id == Some(snapshot.id())) - .and_then(|repo| { - let repo = repo.read(cx); - Some((repo.branch().cloned(), repo.local_repository()?)) + .find(|repo| { + repo.read(cx) + .abs_path_to_repo_path(&worktree.read(cx).abs_path()) + .is_some() }) + .cloned() }) .ok() - .flatten(); + .flatten() + .map(|repo| { + repo.read_with(cx, |repo, _| { + let current_branch = + 
repo.branch.as_ref().map(|branch| branch.name.to_string()); + repo.send_job(|state, _| async move { + let RepositoryState::Local { backend, .. } = state else { + return GitState { + remote_url: None, + head_sha: None, + current_branch, + diff: None, + }; + }; - // Extract git information - let git_state = match repo_info { - None => None, - Some((branch, repo)) => { - let current_branch = branch.map(|branch| branch.name.to_string()); - let remote_url = repo.remote_url("origin"); - let head_sha = repo.head_sha(); + let remote_url = backend.remote_url("origin"); + let head_sha = backend.head_sha(); + let diff = backend.diff(DiffType::HeadToWorktree).await.ok(); - // Get diff asynchronously - let diff = repo - .diff(git::repository::DiffType::HeadToWorktree) - .await - .ok(); - - Some(GitState { - remote_url, - head_sha, - current_branch, - diff, + GitState { + remote_url, + head_sha, + current_branch, + diff, + } + }) }) - } + }); + + let git_state = match git_state { + Some(git_state) => match git_state.ok() { + Some(git_state) => git_state.await.ok(), + None => None, + }, + None => None, }; WorktreeSnapshot { diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index 0b9e951ae5..d8ce452dd0 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -469,7 +469,7 @@ impl Room { let repository = repository.read(cx); repositories.push(proto::RejoinRepository { id: entry_id.to_proto(), - scan_id: repository.completed_scan_id as u64, + scan_id: repository.scan_id, }); } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index e675bc5e68..194bf7a10b 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -334,7 +334,7 @@ impl Database { project_repository::ActiveModel { project_id: ActiveValue::set(project_id), legacy_worktree_id: ActiveValue::set(Some(worktree_id)), - id: ActiveValue::set(repository.work_directory_id 
as i64), + id: ActiveValue::set(repository.repository_id as i64), scan_id: ActiveValue::set(update.scan_id as i64), is_deleted: ActiveValue::set(false), branch_summary: ActiveValue::Set( @@ -384,7 +384,7 @@ impl Database { project_repository_statuses::ActiveModel { project_id: ActiveValue::set(project_id), repository_id: ActiveValue::set( - repository.work_directory_id as i64, + repository.repository_id as i64, ), scan_id: ActiveValue::set(update.scan_id as i64), is_deleted: ActiveValue::set(false), @@ -424,7 +424,7 @@ impl Database { .eq(project_id) .and( project_repository_statuses::Column::RepositoryId - .eq(repo.work_directory_id), + .eq(repo.repository_id), ) .and( project_repository_statuses::Column::RepoPath @@ -936,7 +936,7 @@ impl Database { worktree.legacy_repository_entries.insert( db_repository_entry.id as u64, proto::RepositoryEntry { - work_directory_id: db_repository_entry.id as u64, + repository_id: db_repository_entry.id as u64, updated_statuses, removed_statuses: Vec::new(), current_merge_conflicts, @@ -955,6 +955,7 @@ impl Database { current_merge_conflicts, branch_summary, scan_id: db_repository_entry.scan_id as u64, + is_last_update: true, }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index a9032ac42f..85852ccd36 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -764,7 +764,7 @@ impl Database { .find(|worktree| worktree.id as i64 == legacy_worktree_id) { worktree.updated_repositories.push(proto::RepositoryEntry { - work_directory_id: db_repository.id as u64, + repository_id: db_repository.id as u64, updated_statuses, removed_statuses, current_merge_conflicts, @@ -782,6 +782,7 @@ impl Database { id: db_repository.id as u64, abs_path: db_repository.abs_path, scan_id: db_repository.scan_id as u64, + is_last_update: true, }); } } diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 
0c3ee91d6c..1cc550b011 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2898,8 +2898,8 @@ async fn test_git_branch_name( assert_eq!( repository .read(cx) - .repository_entry - .branch() + .branch + .as_ref() .map(|branch| branch.name.to_string()), branch_name ) @@ -3033,7 +3033,6 @@ async fn test_git_status_sync( let repo = repos.into_iter().next().unwrap(); assert_eq!( repo.read(cx) - .repository_entry .status_for_path(&file.into()) .map(|entry| entry.status), status @@ -6882,7 +6881,8 @@ async fn test_remote_git_branches( .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) @@ -6919,7 +6919,8 @@ async fn test_remote_git_branches( .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 0eca664b50..474a0fc2e3 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -1181,6 +1181,10 @@ impl RandomizedTest for ProjectCollaborationTest { (worktree.id(), worktree.snapshot()) }) .collect::>(); + let host_repository_snapshots = host_project.read_with(host_cx, |host_project, cx| { + host_project.git_store().read(cx).repo_snapshots(cx) + }); + let guest_repository_snapshots = guest_project.git_store().read(cx).repo_snapshots(cx); assert_eq!( guest_worktree_snapshots.values().map(|w| w.abs_path()).collect::>(), @@ -1189,6 +1193,13 @@ impl RandomizedTest for ProjectCollaborationTest { client.username, guest_project.remote_id(), ); + assert_eq!( + guest_repository_snapshots.values().collect::>(), + host_repository_snapshots.values().collect::>(), + "{} has different repositories than the host for project {:?}", + client.username, guest_project.remote_id(), + ); + for (id, host_snapshot) in 
&host_worktree_snapshots { let guest_snapshot = &guest_worktree_snapshots[id]; assert_eq!( @@ -1216,12 +1227,6 @@ impl RandomizedTest for ProjectCollaborationTest { id, guest_project.remote_id(), ); - assert_eq!(guest_snapshot.repositories().iter().collect::>(), host_snapshot.repositories().iter().collect::>(), - "{} has different repositories than the host for worktree {:?} and project {:?}", - client.username, - host_snapshot.abs_path(), - guest_project.remote_id(), - ); assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id(), "{} has different scan id than the host for worktree {:?} and project {:?}", client.username, diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 03c7702dd8..de7bb1c23b 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -313,7 +313,8 @@ async fn test_ssh_collaboration_git_branches( .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) @@ -352,7 +353,8 @@ async fn test_ssh_collaboration_git_branches( .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index e804bd5829..87fa8d13b3 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -12,7 +12,10 @@ use gpui::{ }; use language::{Bias, Buffer, BufferSnapshot, Edit}; use multi_buffer::RowInfo; -use project::{Project, ProjectItem, git_store::Repository}; +use project::{ + Project, ProjectItem, + git_store::{GitStoreEvent, Repository, RepositoryEvent}, +}; use smallvec::SmallVec; use std::{sync::Arc, time::Duration}; use sum_tree::SumTree; @@ -202,13 +205,21 @@ impl GitBlame { this.generate(cx); } } - project::Event::GitStateUpdated => { + _ => {} + } + }); + + let git_store = project.read(cx).git_store().clone(); + let 
git_store_subscription = + cx.subscribe(&git_store, move |this, _, event, cx| match event { + GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated, _) + | GitStoreEvent::RepositoryAdded(_) + | GitStoreEvent::RepositoryRemoved(_) => { log::debug!("Status of git repositories updated. Regenerating blame data...",); this.generate(cx); } _ => {} - } - }); + }); let buffer_snapshot = buffer.read(cx).snapshot(); let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe()); @@ -226,7 +237,11 @@ impl GitBlame { task: Task::ready(Ok(())), generated: false, regenerate_on_edit_task: Task::ready(Ok(())), - _regenerate_subscriptions: vec![buffer_subscriptions, project_subscription], + _regenerate_subscriptions: vec![ + buffer_subscriptions, + project_subscription, + git_store_subscription, + ], }; this.generate(cx); this diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 892b75a2fd..584abd4cf7 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -123,7 +123,7 @@ impl GitRepository for FakeGitRepository { &self, path: RepoPath, content: Option, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { self.with_state_async(true, move |state| { if let Some(message) = state.simulated_index_write_error_message.clone() { @@ -157,7 +157,7 @@ impl GitRepository for FakeGitRepository { &self, _commit: String, _mode: ResetMode, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { unimplemented!() } @@ -166,7 +166,7 @@ impl GitRepository for FakeGitRepository { &self, _commit: String, _paths: Vec, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { unimplemented!() } @@ -179,7 +179,11 @@ impl GitRepository for FakeGitRepository { self.path() } - fn status_blocking(&self, path_prefixes: &[RepoPath]) -> Result { + fn merge_message(&self) -> BoxFuture> { + async move { None }.boxed() + } + + fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture> { let workdir_path = self.dot_git_path.parent().unwrap(); // Load gitignores @@ 
-221,7 +225,7 @@ impl GitRepository for FakeGitRepository { }) .collect(); - self.fs.with_git_state(&self.dot_git_path, false, |state| { + let result = self.fs.with_git_state(&self.dot_git_path, false, |state| { let mut entries = Vec::new(); let paths = state .head_contents @@ -302,10 +306,11 @@ impl GitRepository for FakeGitRepository { } } entries.sort_by(|a, b| a.0.cmp(&b.0)); - Ok(GitStatus { + anyhow::Ok(GitStatus { entries: entries.into(), }) - })? + }); + async move { result? }.boxed() } fn branches(&self) -> BoxFuture>> { @@ -351,7 +356,7 @@ impl GitRepository for FakeGitRepository { fn stage_paths( &self, _paths: Vec, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { unimplemented!() } @@ -359,7 +364,7 @@ impl GitRepository for FakeGitRepository { fn unstage_paths( &self, _paths: Vec, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { unimplemented!() } @@ -368,7 +373,7 @@ impl GitRepository for FakeGitRepository { &self, _message: gpui::SharedString, _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>, - _env: HashMap, + _env: Arc>, ) -> BoxFuture> { unimplemented!() } @@ -379,7 +384,7 @@ impl GitRepository for FakeGitRepository { _remote: String, _options: Option, _askpass: AskPassDelegate, - _env: HashMap, + _env: Arc>, _cx: AsyncApp, ) -> BoxFuture> { unimplemented!() @@ -390,7 +395,7 @@ impl GitRepository for FakeGitRepository { _branch: String, _remote: String, _askpass: AskPassDelegate, - _env: HashMap, + _env: Arc>, _cx: AsyncApp, ) -> BoxFuture> { unimplemented!() @@ -399,7 +404,7 @@ impl GitRepository for FakeGitRepository { fn fetch( &self, _askpass: AskPassDelegate, - _env: HashMap, + _env: Arc>, _cx: AsyncApp, ) -> BoxFuture> { unimplemented!() diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index a76625eb60..ebd9ea411f 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -188,7 +188,7 @@ pub trait GitRepository: Send + Sync { &self, path: RepoPath, content: Option, - env: HashMap, 
+ env: Arc>, ) -> BoxFuture>; /// Returns the URL of the remote with the given name. @@ -199,7 +199,9 @@ pub trait GitRepository: Send + Sync { fn merge_head_shas(&self) -> Vec; - fn status_blocking(&self, path_prefixes: &[RepoPath]) -> Result; + fn merge_message(&self) -> BoxFuture>; + + fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture>; fn branches(&self) -> BoxFuture>>; @@ -210,14 +212,14 @@ pub trait GitRepository: Send + Sync { &self, commit: String, mode: ResetMode, - env: HashMap, + env: Arc>, ) -> BoxFuture>; fn checkout_files( &self, commit: String, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture>; fn show(&self, commit: String) -> BoxFuture>; @@ -243,7 +245,7 @@ pub trait GitRepository: Send + Sync { fn stage_paths( &self, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture>; /// Updates the index to match HEAD at the given paths. /// @@ -251,14 +253,14 @@ pub trait GitRepository: Send + Sync { fn unstage_paths( &self, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture>; fn commit( &self, message: SharedString, name_and_email: Option<(SharedString, SharedString)>, - env: HashMap, + env: Arc>, ) -> BoxFuture>; fn push( @@ -267,7 +269,7 @@ pub trait GitRepository: Send + Sync { upstream_name: String, options: Option, askpass: AskPassDelegate, - env: HashMap, + env: Arc>, // This method takes an AsyncApp to ensure it's invoked on the main thread, // otherwise git-credentials-manager won't work. cx: AsyncApp, @@ -278,7 +280,7 @@ pub trait GitRepository: Send + Sync { branch_name: String, upstream_name: String, askpass: AskPassDelegate, - env: HashMap, + env: Arc>, // This method takes an AsyncApp to ensure it's invoked on the main thread, // otherwise git-credentials-manager won't work. 
cx: AsyncApp, @@ -287,7 +289,7 @@ pub trait GitRepository: Send + Sync { fn fetch( &self, askpass: AskPassDelegate, - env: HashMap, + env: Arc>, // This method takes an AsyncApp to ensure it's invoked on the main thread, // otherwise git-credentials-manager won't work. cx: AsyncApp, @@ -528,7 +530,7 @@ impl GitRepository for RealGitRepository { &self, commit: String, mode: ResetMode, - env: HashMap, + env: Arc>, ) -> BoxFuture> { async move { let working_directory = self.working_directory(); @@ -539,7 +541,7 @@ impl GitRepository for RealGitRepository { }; let output = new_smol_command(&self.git_binary_path) - .envs(env) + .envs(env.iter()) .current_dir(&working_directory?) .args(["reset", mode_flag, &commit]) .output() @@ -559,7 +561,7 @@ impl GitRepository for RealGitRepository { &self, commit: String, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture> { let working_directory = self.working_directory(); let git_binary_path = self.git_binary_path.clone(); @@ -570,7 +572,7 @@ impl GitRepository for RealGitRepository { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory?) 
- .envs(env) + .envs(env.iter()) .args(["checkout", &commit, "--"]) .args(paths.iter().map(|path| path.as_ref())) .output() @@ -640,7 +642,7 @@ impl GitRepository for RealGitRepository { &self, path: RepoPath, content: Option, - env: HashMap, + env: Arc>, ) -> BoxFuture> { let working_directory = self.working_directory(); let git_binary_path = self.git_binary_path.clone(); @@ -650,7 +652,7 @@ impl GitRepository for RealGitRepository { if let Some(content) = content { let mut child = new_smol_command(&git_binary_path) .current_dir(&working_directory) - .envs(&env) + .envs(env.iter()) .args(["hash-object", "-w", "--stdin"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) @@ -668,7 +670,7 @@ impl GitRepository for RealGitRepository { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory) - .envs(env) + .envs(env.iter()) .args(["update-index", "--add", "--cacheinfo", "100644", &sha]) .arg(path.to_unix_style()) .output() @@ -683,7 +685,7 @@ impl GitRepository for RealGitRepository { } else { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory) - .envs(env) + .envs(env.iter()) .args(["update-index", "--force-remove"]) .arg(path.to_unix_style()) .output() @@ -733,18 +735,30 @@ impl GitRepository for RealGitRepository { shas } - fn status_blocking(&self, path_prefixes: &[RepoPath]) -> Result { - let output = new_std_command(&self.git_binary_path) - .current_dir(self.working_directory()?) 
- .args(git_status_args(path_prefixes)) - .output()?; - if output.status.success() { - let stdout = String::from_utf8_lossy(&output.stdout); - stdout.parse() - } else { - let stderr = String::from_utf8_lossy(&output.stderr); - Err(anyhow!("git status failed: {}", stderr)) - } + fn merge_message(&self) -> BoxFuture> { + let path = self.path().join("MERGE_MSG"); + async move { std::fs::read_to_string(&path).ok() }.boxed() + } + + fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture> { + let git_binary_path = self.git_binary_path.clone(); + let working_directory = self.working_directory(); + let path_prefixes = path_prefixes.to_owned(); + self.executor + .spawn(async move { + let output = new_std_command(&git_binary_path) + .current_dir(working_directory?) + .args(git_status_args(&path_prefixes)) + .output()?; + if output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + stdout.parse() + } else { + let stderr = String::from_utf8_lossy(&output.stderr); + Err(anyhow!("git status failed: {}", stderr)) + } + }) + .boxed() } fn branches(&self) -> BoxFuture>> { @@ -891,7 +905,7 @@ impl GitRepository for RealGitRepository { fn stage_paths( &self, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture> { let working_directory = self.working_directory(); let git_binary_path = self.git_binary_path.clone(); @@ -900,7 +914,7 @@ impl GitRepository for RealGitRepository { if !paths.is_empty() { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory?) 
- .envs(env) + .envs(env.iter()) .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.to_unix_style())) .output() @@ -921,7 +935,7 @@ impl GitRepository for RealGitRepository { fn unstage_paths( &self, paths: Vec, - env: HashMap, + env: Arc>, ) -> BoxFuture> { let working_directory = self.working_directory(); let git_binary_path = self.git_binary_path.clone(); @@ -931,7 +945,7 @@ impl GitRepository for RealGitRepository { if !paths.is_empty() { let output = new_smol_command(&git_binary_path) .current_dir(&working_directory?) - .envs(env) + .envs(env.iter()) .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_ref())) .output() @@ -953,14 +967,14 @@ impl GitRepository for RealGitRepository { &self, message: SharedString, name_and_email: Option<(SharedString, SharedString)>, - env: HashMap, + env: Arc>, ) -> BoxFuture> { let working_directory = self.working_directory(); self.executor .spawn(async move { let mut cmd = new_smol_command("git"); cmd.current_dir(&working_directory?) 
- .envs(env) + .envs(env.iter()) .args(["commit", "--quiet", "-m"]) .arg(&message.to_string()) .arg("--cleanup=strip"); @@ -988,7 +1002,7 @@ impl GitRepository for RealGitRepository { remote_name: String, options: Option, ask_pass: AskPassDelegate, - env: HashMap, + env: Arc>, cx: AsyncApp, ) -> BoxFuture> { let working_directory = self.working_directory(); @@ -997,7 +1011,7 @@ impl GitRepository for RealGitRepository { let working_directory = working_directory?; let mut command = new_smol_command("git"); command - .envs(&env) + .envs(env.iter()) .env("GIT_HTTP_USER_AGENT", "Zed") .current_dir(&working_directory) .args(["push"]) @@ -1021,7 +1035,7 @@ impl GitRepository for RealGitRepository { branch_name: String, remote_name: String, ask_pass: AskPassDelegate, - env: HashMap, + env: Arc>, cx: AsyncApp, ) -> BoxFuture> { let working_directory = self.working_directory(); @@ -1029,7 +1043,7 @@ impl GitRepository for RealGitRepository { async move { let mut command = new_smol_command("git"); command - .envs(&env) + .envs(env.iter()) .env("GIT_HTTP_USER_AGENT", "Zed") .current_dir(&working_directory?) .args(["pull"]) @@ -1046,7 +1060,7 @@ impl GitRepository for RealGitRepository { fn fetch( &self, ask_pass: AskPassDelegate, - env: HashMap, + env: Arc>, cx: AsyncApp, ) -> BoxFuture> { let working_directory = self.working_directory(); @@ -1054,7 +1068,7 @@ impl GitRepository for RealGitRepository { async move { let mut command = new_smol_command("git"); command - .envs(&env) + .envs(env.iter()) .env("GIT_HTTP_USER_AGENT", "Zed") .current_dir(&working_directory?) 
.args(["fetch", "--all"]) @@ -1467,7 +1481,7 @@ struct GitBinaryCommandError { } async fn run_git_command( - env: HashMap, + env: Arc>, ask_pass: AskPassDelegate, mut command: smol::process::Command, executor: &BackgroundExecutor, @@ -1769,12 +1783,19 @@ mod tests { let repo = RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); - repo.stage_paths(vec![RepoPath::from_str("file")], HashMap::default()) - .await - .unwrap(); - repo.commit("Initial commit".into(), None, checkpoint_author_envs()) - .await - .unwrap(); + repo.stage_paths( + vec![RepoPath::from_str("file")], + Arc::new(HashMap::default()), + ) + .await + .unwrap(); + repo.commit( + "Initial commit".into(), + None, + Arc::new(checkpoint_author_envs()), + ) + .await + .unwrap(); smol::fs::write(&file_path, "modified before checkpoint") .await @@ -1791,13 +1812,16 @@ mod tests { smol::fs::write(&file_path, "modified after checkpoint") .await .unwrap(); - repo.stage_paths(vec![RepoPath::from_str("file")], HashMap::default()) - .await - .unwrap(); + repo.stage_paths( + vec![RepoPath::from_str("file")], + Arc::new(HashMap::default()), + ) + .await + .unwrap(); repo.commit( "Commit after checkpoint".into(), None, - checkpoint_author_envs(), + Arc::new(checkpoint_author_envs()), ) .await .unwrap(); @@ -1889,12 +1913,19 @@ mod tests { let repo = RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); - repo.stage_paths(vec![RepoPath::from_str("file")], HashMap::default()) - .await - .unwrap(); - repo.commit("Initial commit".into(), None, checkpoint_author_envs()) - .await - .unwrap(); + repo.stage_paths( + vec![RepoPath::from_str("file")], + Arc::new(HashMap::default()), + ) + .await + .unwrap(); + repo.commit( + "Initial commit".into(), + None, + Arc::new(checkpoint_author_envs()), + ) + .await + .unwrap(); let initial_commit_sha = repo.head_sha().unwrap(); @@ -1912,13 +1943,17 @@ mod tests { RepoPath::from_str("new_file1"), 
RepoPath::from_str("new_file2"), ], - HashMap::default(), + Arc::new(HashMap::default()), + ) + .await + .unwrap(); + repo.commit( + "Commit new files".into(), + None, + Arc::new(checkpoint_author_envs()), ) .await .unwrap(); - repo.commit("Commit new files".into(), None, checkpoint_author_envs()) - .await - .unwrap(); repo.restore_checkpoint(checkpoint).await.unwrap(); assert_eq!(repo.head_sha().unwrap(), initial_commit_sha); @@ -1935,7 +1970,7 @@ mod tests { "content2" ); assert_eq!( - repo.status_blocking(&[]).unwrap().entries.as_ref(), + repo.status(&[]).await.unwrap().entries.as_ref(), &[ (RepoPath::from_str("new_file1"), FileStatus::Untracked), (RepoPath::from_str("new_file2"), FileStatus::Untracked) diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 4e380d74c6..c607916256 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -336,7 +336,7 @@ impl PickerDelegate for BranchListDelegate { let current_branch = self.repo.as_ref().map(|repo| { repo.update(cx, |repo, _| { - repo.current_branch().map(|branch| branch.name.clone()) + repo.branch.as_ref().map(|branch| branch.name.clone()) }) }); @@ -463,7 +463,7 @@ impl PickerDelegate for BranchListDelegate { let message = if entry.is_new { if let Some(current_branch) = self.repo.as_ref().and_then(|repo| { - repo.read(cx).current_branch().map(|b| b.name.clone()) + repo.read(cx).branch.as_ref().map(|b| b.name.clone()) }) { format!("based off {}", current_branch) diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 77be960612..16b8525f75 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -234,7 +234,7 @@ impl CommitModal { let branch = active_repo .as_ref() - .and_then(|repo| repo.read(cx).repository_entry.branch()) + .and_then(|repo| repo.read(cx).branch.as_ref()) .map(|b| b.name.clone()) .unwrap_or_else(|| "".into()); diff --git a/crates/git_ui/src/git_panel.rs 
b/crates/git_ui/src/git_panel.rs index db9d76e158..9c2e801ada 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -45,9 +45,10 @@ use panel::{ PanelHeader, panel_button, panel_editor_container, panel_editor_style, panel_filled_button, panel_icon_button, }; +use project::git_store::RepositoryEvent; use project::{ Fs, Project, ProjectPath, - git_store::{GitEvent, Repository}, + git_store::{GitStoreEvent, Repository}, }; use serde::{Deserialize, Serialize}; use settings::{Settings as _, SettingsStore}; @@ -340,7 +341,7 @@ const MAX_PANEL_EDITOR_LINES: usize = 6; pub(crate) fn commit_message_editor( commit_message_buffer: Entity, - placeholder: Option<&str>, + placeholder: Option, project: Entity, in_panel: bool, window: &mut Window, @@ -361,7 +362,7 @@ pub(crate) fn commit_message_editor( commit_editor.set_show_wrap_guides(false, cx); commit_editor.set_show_indent_guides(false, cx); commit_editor.set_hard_wrap(Some(72), cx); - let placeholder = placeholder.unwrap_or("Enter commit message"); + let placeholder = placeholder.unwrap_or("Enter commit message".into()); commit_editor.set_placeholder_text(placeholder, cx); commit_editor } @@ -403,14 +404,18 @@ impl GitPanel { &git_store, window, move |this, git_store, event, window, cx| match event { - GitEvent::FileSystemUpdated => { - this.schedule_update(false, window, cx); - } - GitEvent::ActiveRepositoryChanged | GitEvent::GitStateUpdated => { + GitStoreEvent::ActiveRepositoryChanged(_) => { this.active_repository = git_store.read(cx).active_repository(); this.schedule_update(true, window, cx); } - GitEvent::IndexWriteError(error) => { + GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated, true) => { + this.schedule_update(true, window, cx); + } + GitStoreEvent::RepositoryUpdated(_, _, _) => {} + GitStoreEvent::RepositoryAdded(_) | GitStoreEvent::RepositoryRemoved(_) => { + this.schedule_update(false, window, cx); + } + GitStoreEvent::IndexWriteError(error) => { this.workspace 
.update(cx, |workspace, cx| { workspace.show_error(error, cx); @@ -828,7 +833,7 @@ impl GitPanel { .active_repository .as_ref() .map_or(false, |active_repository| { - active_repository.read(cx).entry_count() > 0 + active_repository.read(cx).status_summary().count > 0 }); if have_entries && self.selected_entry.is_none() { self.selected_entry = Some(1); @@ -1415,7 +1420,7 @@ impl GitPanel { let message = self.commit_editor.read(cx).text(cx); if !message.trim().is_empty() { - return Some(message.to_string()); + return Some(message); } self.suggest_commit_message(cx) @@ -1593,7 +1598,7 @@ impl GitPanel { .as_ref() .and_then(|repo| repo.read(cx).merge_message.as_ref()) { - return Some(merge_message.clone()); + return Some(merge_message.to_string()); } let git_status_entry = if let Some(staged_entry) = &self.single_staged_entry { @@ -1849,7 +1854,7 @@ impl GitPanel { let Some(repo) = self.active_repository.clone() else { return; }; - let Some(branch) = repo.read(cx).current_branch() else { + let Some(branch) = repo.read(cx).branch.as_ref() else { return; }; telemetry::event!("Git Pulled"); @@ -1906,7 +1911,7 @@ impl GitPanel { let Some(repo) = self.active_repository.clone() else { return; }; - let Some(branch) = repo.read(cx).current_branch() else { + let Some(branch) = repo.read(cx).branch.as_ref() else { return; }; telemetry::event!("Git Pushed"); @@ -2019,7 +2024,7 @@ impl GitPanel { let mut current_remotes: Vec = repo .update(&mut cx, |repo, _| { - let Some(current_branch) = repo.current_branch() else { + let Some(current_branch) = repo.branch.as_ref() else { return Err(anyhow::anyhow!("No active branch")); }; @@ -2215,7 +2220,7 @@ impl GitPanel { git_panel.commit_editor = cx.new(|cx| { commit_message_editor( buffer, - git_panel.suggest_commit_message(cx).as_deref(), + git_panel.suggest_commit_message(cx).map(SharedString::from), git_panel.project.clone(), true, window, @@ -2275,10 +2280,7 @@ impl GitPanel { continue; } - let abs_path = repo - .repository_entry - 
.work_directory_abs_path - .join(&entry.repo_path.0); + let abs_path = repo.work_directory_abs_path.join(&entry.repo_path.0); let entry = GitStatusEntry { repo_path: entry.repo_path.clone(), abs_path, @@ -2392,9 +2394,7 @@ impl GitPanel { self.select_first_entry_if_none(cx); let suggested_commit_message = self.suggest_commit_message(cx); - let placeholder_text = suggested_commit_message - .as_deref() - .unwrap_or("Enter commit message"); + let placeholder_text = suggested_commit_message.unwrap_or("Enter commit message".into()); self.commit_editor.update(cx, |editor, cx| { editor.set_placeholder_text(Arc::from(placeholder_text), cx) @@ -2823,12 +2823,7 @@ impl GitPanel { } pub(crate) fn render_remote_button(&self, cx: &mut Context) -> Option { - let branch = self - .active_repository - .as_ref()? - .read(cx) - .current_branch() - .cloned(); + let branch = self.active_repository.as_ref()?.read(cx).branch.clone(); if !self.can_push_and_pull(cx) { return None; } @@ -2868,7 +2863,7 @@ impl GitPanel { let commit_tooltip_focus_handle = editor_focus_handle.clone(); let expand_tooltip_focus_handle = editor_focus_handle.clone(); - let branch = active_repository.read(cx).current_branch().cloned(); + let branch = active_repository.read(cx).branch.clone(); let footer_size = px(32.); let gap = px(9.0); @@ -2999,7 +2994,7 @@ impl GitPanel { fn render_previous_commit(&self, cx: &mut Context) -> Option { let active_repository = self.active_repository.as_ref()?; - let branch = active_repository.read(cx).current_branch()?; + let branch = active_repository.read(cx).branch.as_ref()?; let commit = branch.most_recent_commit.as_ref()?.clone(); let workspace = self.workspace.clone(); diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 010154f8ef..2be1aeea9f 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -24,7 +24,7 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt}; use multi_buffer::{MultiBuffer, 
PathKey}; use project::{ Project, ProjectPath, - git_store::{GitEvent, GitStore}, + git_store::{GitStore, GitStoreEvent, RepositoryEvent}, }; use std::any::{Any, TypeId}; use theme::ActiveTheme; @@ -153,9 +153,8 @@ impl ProjectDiff { &git_store, window, move |this, _git_store, event, _window, _cx| match event { - GitEvent::ActiveRepositoryChanged - | GitEvent::FileSystemUpdated - | GitEvent::GitStateUpdated => { + GitStoreEvent::ActiveRepositoryChanged(_) + | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::Updated, true) => { *this.update_needed.borrow_mut() = (); } _ => {} @@ -452,13 +451,11 @@ impl ProjectDiff { ) -> Result<()> { while let Some(_) = recv.next().await { this.update(cx, |this, cx| { - let new_branch = - this.git_store - .read(cx) - .active_repository() - .and_then(|active_repository| { - active_repository.read(cx).current_branch().cloned() - }); + let new_branch = this + .git_store + .read(cx) + .active_repository() + .and_then(|active_repository| active_repository.read(cx).branch.clone()); if new_branch != this.current_branch { this.current_branch = new_branch; cx.notify(); @@ -1499,6 +1496,7 @@ mod tests { .unindent(), ); + eprintln!(">>>>>>>> git restore"); let prev_buffer_hunks = cx.update_window_entity(&buffer_editor, |buffer_editor, window, cx| { let snapshot = buffer_editor.snapshot(window, cx); @@ -1516,14 +1514,13 @@ mod tests { cx.update_window_entity(&buffer_editor, |buffer_editor, window, cx| { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot; - let new_buffer_hunks = buffer_editor + buffer_editor .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) - .collect::>(); - buffer_editor.git_restore(&Default::default(), window, cx); - new_buffer_hunks + .collect::>() }); assert_eq!(new_buffer_hunks.as_slice(), &[]); + eprintln!(">>>>>>>> modify"); cx.update_window_entity(&buffer_editor, |buffer_editor, window, cx| { buffer_editor.set_text("different\n", window, cx); 
buffer_editor.save(false, project.clone(), window, cx) @@ -1533,6 +1530,20 @@ mod tests { cx.run_until_parked(); + cx.update_window_entity(&buffer_editor, |buffer_editor, window, cx| { + buffer_editor.expand_all_diff_hunks(&Default::default(), window, cx); + }); + + assert_state_with_diff( + &buffer_editor, + cx, + &" + - original + + different + ˇ" + .unindent(), + ); + assert_state_with_diff( &diff_editor, cx, diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 6678489d86..a928e1bf72 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -2475,6 +2475,7 @@ impl MultiBuffer { let buffer_id = diff.buffer_id; let buffers = self.buffers.borrow(); let Some(buffer_state) = buffers.get(&buffer_id) else { + eprintln!("no buffer"); return; }; diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index e0f667d861..49345bc506 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -43,6 +43,7 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true git.workspace = true +git_hosting_providers.workspace = true globset.workspace = true gpui.workspace = true http_client.workspace = true diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index c3d5f3b528..aeba7f2d55 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -872,21 +872,6 @@ impl BufferStore { cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } - pub(crate) fn worktree_for_buffer( - &self, - buffer: &Entity, - cx: &App, - ) -> Option<(Entity, Arc)> { - let file = buffer.read(cx).file()?; - let worktree_id = file.worktree_id(cx); - let path = file.path().clone(); - let worktree = self - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx)?; - Some((worktree, path)) - } - pub fn create_buffer(&mut self, cx: &mut Context) -> Task>> { match &self.state { 
BufferStoreState::Local(this) => this.create_buffer(cx), diff --git a/crates/project/src/connection_manager.rs b/crates/project/src/connection_manager.rs index 6418fa1a8e..253d5d32a1 100644 --- a/crates/project/src/connection_manager.rs +++ b/crates/project/src/connection_manager.rs @@ -91,7 +91,7 @@ impl Manager { for (id, repository) in project.repositories(cx) { repositories.push(proto::RejoinRepository { id: id.to_proto(), - scan_id: repository.read(cx).completed_scan_id as u64, + scan_id: repository.read(cx).scan_id, }); } for worktree in project.worktrees(cx) { diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index e17d3eef2b..ed426a39c1 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -339,7 +339,7 @@ impl DapStore { local_store.toolchain_store.clone(), local_store.environment.update(cx, |env, cx| { let worktree = worktree.read(cx); - env.get_environment(Some(worktree.id()), Some(worktree.abs_path()), cx) + env.get_environment(worktree.abs_path().into(), cx) }), ); let session_id = local_store.next_session_id(); @@ -407,7 +407,7 @@ impl DapStore { local_store.toolchain_store.clone(), local_store.environment.update(cx, |env, cx| { let worktree = worktree.read(cx); - env.get_environment(Some(worktree.id()), Some(worktree.abs_path()), cx) + env.get_environment(Some(worktree.abs_path()), cx) }), ); let session_id = local_store.next_session_id(); diff --git a/crates/project/src/environment.rs b/crates/project/src/environment.rs index 7f178c2d70..aee8fbb6e8 100644 --- a/crates/project/src/environment.rs +++ b/crates/project/src/environment.rs @@ -1,11 +1,13 @@ -use futures::{FutureExt, future::Shared}; +use futures::{ + FutureExt, + future::{Shared, WeakShared}, +}; use std::{path::Path, sync::Arc}; use util::ResultExt; use collections::HashMap; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task}; use settings::Settings as _; -use 
worktree::WorktreeId; use crate::{ project_settings::{DirenvSettings, ProjectSettings}, @@ -13,10 +15,9 @@ use crate::{ }; pub struct ProjectEnvironment { - worktree_store: Entity, cli_environment: Option>, - environments: HashMap>>>>, - environment_error_messages: HashMap, + environments: HashMap, WeakShared>>>>, + environment_error_messages: HashMap, EnvironmentErrorMessage>, } pub enum ProjectEnvironmentEvent { @@ -33,14 +34,15 @@ impl ProjectEnvironment { ) -> Entity { cx.new(|cx| { cx.subscribe(worktree_store, |this: &mut Self, _, event, _| { - if let WorktreeStoreEvent::WorktreeRemoved(_, id) = event { - this.remove_worktree_environment(*id); + if let WorktreeStoreEvent::WorktreeRemoved(_, _) = event { + this.environments.retain(|_, weak| weak.upgrade().is_some()); + this.environment_error_messages + .retain(|abs_path, _| this.environments.contains_key(abs_path)); } }) .detach(); Self { - worktree_store: worktree_store.clone(), cli_environment, environments: Default::default(), environment_error_messages: Default::default(), @@ -48,11 +50,6 @@ impl ProjectEnvironment { }) } - pub(crate) fn remove_worktree_environment(&mut self, worktree_id: WorktreeId) { - self.environment_error_messages.remove(&worktree_id); - self.environments.remove(&worktree_id); - } - /// Returns the inherited CLI environment, if this project was opened from the Zed CLI. pub(crate) fn get_cli_environment(&self) -> Option> { if let Some(mut env) = self.cli_environment.clone() { @@ -67,28 +64,22 @@ impl ProjectEnvironment { /// environment errors associated with this project environment. 
pub(crate) fn environment_errors( &self, - ) -> impl Iterator { + ) -> impl Iterator, &EnvironmentErrorMessage)> { self.environment_error_messages.iter() } - pub(crate) fn remove_environment_error( - &mut self, - worktree_id: WorktreeId, - cx: &mut Context, - ) { - self.environment_error_messages.remove(&worktree_id); + pub(crate) fn remove_environment_error(&mut self, abs_path: &Path, cx: &mut Context) { + self.environment_error_messages.remove(abs_path); cx.emit(ProjectEnvironmentEvent::ErrorsUpdated); } /// Returns the project environment, if possible. /// If the project was opened from the CLI, then the inherited CLI environment is returned. - /// If it wasn't opened from the CLI, and a worktree is given, then a shell is spawned in - /// the worktree's path, to get environment variables as if the user has `cd`'d into - /// the worktrees path. + /// If it wasn't opened from the CLI, and an absolute path is given, then a shell is spawned in + /// that directory, to get environment variables as if the user has `cd`'d there. 
pub(crate) fn get_environment( &mut self, - worktree_id: Option, - worktree_abs_path: Option>, + abs_path: Option>, cx: &Context, ) -> Shared>>> { if cfg!(any(test, feature = "test-support")) { @@ -111,74 +102,26 @@ impl ProjectEnvironment { .shared(); } - let Some((worktree_id, worktree_abs_path)) = worktree_id.zip(worktree_abs_path) else { + let Some(abs_path) = abs_path else { return Task::ready(None).shared(); }; - if self - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - .map(|w| !w.read(cx).is_local()) - .unwrap_or(true) + if let Some(existing) = self + .environments + .get(&abs_path) + .and_then(|weak| weak.upgrade()) { - return Task::ready(None).shared(); - } - - if let Some(task) = self.environments.get(&worktree_id) { - task.clone() + existing } else { - let task = self - .get_worktree_env(worktree_id, worktree_abs_path, cx) - .shared(); - self.environments.insert(worktree_id, task.clone()); - task + let env = get_directory_env(abs_path.clone(), cx).shared(); + self.environments.insert( + abs_path.clone(), + env.downgrade() + .expect("environment task has not been polled yet"), + ); + env } } - - fn get_worktree_env( - &mut self, - worktree_id: WorktreeId, - worktree_abs_path: Arc, - cx: &Context, - ) -> Task>> { - let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); - - cx.spawn(async move |this, cx| { - let (mut shell_env, error_message) = cx - .background_spawn({ - let worktree_abs_path = worktree_abs_path.clone(); - async move { - load_worktree_shell_environment(&worktree_abs_path, &load_direnv).await - } - }) - .await; - - if let Some(shell_env) = shell_env.as_mut() { - let path = shell_env - .get("PATH") - .map(|path| path.as_str()) - .unwrap_or_default(); - log::info!( - "using project environment variables shell launched in {:?}. 
PATH={:?}", - worktree_abs_path, - path - ); - - set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); - } - - if let Some(error) = error_message { - this.update(cx, |this, cx| { - this.environment_error_messages.insert(worktree_id, error); - cx.emit(ProjectEnvironmentEvent::ErrorsUpdated) - }) - .log_err(); - } - - shell_env - }) - } } fn set_origin_marker(env: &mut HashMap, origin: EnvironmentOrigin) { @@ -210,25 +153,25 @@ impl EnvironmentErrorMessage { } } -async fn load_worktree_shell_environment( - worktree_abs_path: &Path, +async fn load_directory_shell_environment( + abs_path: &Path, load_direnv: &DirenvSettings, ) -> ( Option>, Option, ) { - match smol::fs::metadata(worktree_abs_path).await { + match smol::fs::metadata(abs_path).await { Ok(meta) => { let dir = if meta.is_dir() { - worktree_abs_path - } else if let Some(parent) = worktree_abs_path.parent() { + abs_path + } else if let Some(parent) = abs_path.parent() { parent } else { return ( None, Some(EnvironmentErrorMessage(format!( "Failed to load shell environment in {}: not a directory", - worktree_abs_path.display() + abs_path.display() ))), ); }; @@ -239,7 +182,7 @@ async fn load_worktree_shell_environment( None, Some(EnvironmentErrorMessage(format!( "Failed to load shell environment in {}: {}", - worktree_abs_path.display(), + abs_path.display(), err ))), ), @@ -387,3 +330,43 @@ async fn load_shell_environment( (Some(parsed_env), direnv_error) } + +fn get_directory_env( + abs_path: Arc, + cx: &Context, +) -> Task>> { + let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone(); + + cx.spawn(async move |this, cx| { + let (mut shell_env, error_message) = cx + .background_spawn({ + let abs_path = abs_path.clone(); + async move { load_directory_shell_environment(&abs_path, &load_direnv).await } + }) + .await; + + if let Some(shell_env) = shell_env.as_mut() { + let path = shell_env + .get("PATH") + .map(|path| path.as_str()) + .unwrap_or_default(); + log::info!( + "using project 
environment variables shell launched in {:?}. PATH={:?}", + abs_path, + path + ); + + set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell); + } + + if let Some(error) = error_message { + this.update(cx, |this, cx| { + this.environment_error_messages.insert(abs_path, error); + cx.emit(ProjectEnvironmentEvent::ErrorsUpdated) + }) + .log_err(); + } + + shell_env + }) +} diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 623473b423..f442d67900 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -14,17 +14,20 @@ use fs::Fs; use futures::{ FutureExt as _, StreamExt as _, channel::{mpsc, oneshot}, - future::{self, OptionFuture, Shared}, + future::{self, Shared}, }; use git::{ - BuildPermalinkParams, GitHostingProviderRegistry, + BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH, blame::Blame, parse_git_remote_url, repository::{ Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, + UpstreamTrackingStatus, + }, + status::{ + FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode, }, - status::FileStatus, }; use gpui::{ App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task, @@ -37,38 +40,40 @@ use language::{ use parking_lot::Mutex; use rpc::{ AnyProtoClient, TypedEnvelope, - proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset}, + proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update}, }; use serde::Deserialize; -use settings::WorktreeId; use std::{ - collections::{VecDeque, hash_map}, + cmp::Ordering, + collections::{BTreeSet, VecDeque}, future::Future, + mem, ops::Range, path::{Path, PathBuf}, - sync::Arc, + sync::{ + Arc, + atomic::{self, AtomicU64}, + }, }; -use sum_tree::TreeSet; -use text::BufferId; -use util::{ResultExt, debug_panic, maybe}; +use 
sum_tree::{Edit, SumTree, TreeSet}; +use text::{Bias, BufferId}; +use util::{ResultExt, debug_panic}; use worktree::{ - File, PathKey, ProjectEntryId, RepositoryEntry, StatusEntry, UpdatedGitRepositoriesSet, - Worktree, proto_to_branch, + File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree, }; pub struct GitStore { state: GitStoreState, buffer_store: Entity, worktree_store: Entity, - repositories: HashMap>, - active_repo_id: Option, + repositories: HashMap>, + active_repo_id: Option, #[allow(clippy::type_complexity)] loading_diffs: HashMap<(BufferId, DiffKind), Shared, Arc>>>>, diffs: HashMap>, - update_sender: mpsc::UnboundedSender, shared_diffs: HashMap>, - _subscriptions: [Subscription; 2], + _subscriptions: Vec, } #[derive(Default)] @@ -113,25 +118,25 @@ enum DiffKind { enum GitStoreState { Local { - downstream_client: Option, - environment: Entity, + next_repository_id: Arc, + downstream: Option, + project_environment: Entity, fs: Arc, }, Ssh { upstream_client: AnyProtoClient, upstream_project_id: ProjectId, - downstream_client: Option<(AnyProtoClient, ProjectId)>, - environment: Entity, + downstream: Option<(AnyProtoClient, ProjectId)>, }, Remote { upstream_client: AnyProtoClient, - project_id: ProjectId, + upstream_project_id: ProjectId, }, } enum DownstreamUpdate { - UpdateRepository(RepositoryEntry), - RemoveRepository(ProjectEntryId), + UpdateRepository(RepositorySnapshot), + RemoveRepository(RepositoryId), } struct LocalDownstreamState { @@ -143,54 +148,145 @@ struct LocalDownstreamState { #[derive(Clone)] pub struct GitStoreCheckpoint { - checkpoints_by_work_dir_abs_path: HashMap, + checkpoints_by_work_dir_abs_path: HashMap, GitRepositoryCheckpoint>, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct StatusEntry { + pub repo_path: RepoPath, + pub status: FileStatus, +} + +impl StatusEntry { + fn to_proto(&self) -> proto::StatusEntry { + let simple_status = match self.status { + FileStatus::Ignored | 
FileStatus::Untracked => proto::GitStatus::Added as i32, + FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32, + FileStatus::Tracked(TrackedStatus { + index_status, + worktree_status, + }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified { + worktree_status + } else { + index_status + }), + }; + + proto::StatusEntry { + repo_path: self.repo_path.as_ref().to_proto(), + simple_status, + status: Some(status_to_proto(self.status)), + } + } +} + +impl TryFrom for StatusEntry { + type Error = anyhow::Error; + + fn try_from(value: proto::StatusEntry) -> Result { + let repo_path = RepoPath(Arc::::from_proto(value.repo_path)); + let status = status_from_proto(value.simple_status, value.status)?; + Ok(Self { repo_path, status }) + } +} + +impl sum_tree::Item for StatusEntry { + type Summary = PathSummary; + + fn summary(&self, _: &::Context) -> Self::Summary { + PathSummary { + max_path: self.repo_path.0.clone(), + item_summary: self.status.summary(), + } + } +} + +impl sum_tree::KeyedItem for StatusEntry { + type Key = PathKey; + + fn key(&self) -> Self::Key { + PathKey(self.repo_path.0.clone()) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct RepositoryId(pub u64); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct RepositorySnapshot { + pub id: RepositoryId, + pub merge_message: Option, + pub statuses_by_path: SumTree, + pub work_directory_abs_path: Arc, + pub branch: Option, + pub merge_conflicts: TreeSet, + pub merge_head_shas: Vec, + pub scan_id: u64, } pub struct Repository { - pub repository_entry: RepositoryEntry, - pub merge_message: Option, - pub completed_scan_id: usize, + snapshot: RepositorySnapshot, commit_message_buffer: Option>, git_store: WeakEntity, - project_environment: Option>, - pub worktree_id: Option, - state: RepositoryState, + // For a local repository, holds paths that have had worktree events since the last status scan completed, + // and that should be examined 
during the next status scan. + paths_needing_status_update: BTreeSet, job_sender: mpsc::UnboundedSender, askpass_delegates: Arc>>, latest_askpass_id: u64, } +impl std::ops::Deref for Repository { + type Target = RepositorySnapshot; + + fn deref(&self) -> &Self::Target { + &self.snapshot + } +} + #[derive(Clone)] -enum RepositoryState { - Local(Arc), +pub enum RepositoryState { + Local { + backend: Arc, + environment: Arc>, + }, Remote { project_id: ProjectId, client: AnyProtoClient, - work_directory_id: ProjectEntryId, }, } +#[derive(Clone, Debug)] +pub enum RepositoryEvent { + Updated, + MergeHeadsChanged, +} + #[derive(Debug)] -pub enum GitEvent { - ActiveRepositoryChanged, - FileSystemUpdated, - GitStateUpdated, +pub enum GitStoreEvent { + ActiveRepositoryChanged(Option), + RepositoryUpdated(RepositoryId, RepositoryEvent, bool), + RepositoryAdded(RepositoryId), + RepositoryRemoved(RepositoryId), IndexWriteError(anyhow::Error), } +impl EventEmitter for Repository {} +impl EventEmitter for GitStore {} + struct GitJob { - job: Box Task<()>>, + job: Box Task<()>>, key: Option, } #[derive(PartialEq, Eq)] enum GitJobKey { WriteIndex(RepoPath), - BatchReadIndex(ProjectEntryId), + BatchReadIndex, + RefreshStatuses, + ReloadGitState, } -impl EventEmitter for GitStore {} - impl GitStore { pub fn local( worktree_store: &Entity, @@ -203,8 +299,9 @@ impl GitStore { worktree_store.clone(), buffer_store, GitStoreState::Local { - downstream_client: None, - environment, + next_repository_id: Arc::new(AtomicU64::new(1)), + downstream: None, + project_environment: environment, fs, }, cx, @@ -223,7 +320,7 @@ impl GitStore { buffer_store, GitStoreState::Remote { upstream_client, - project_id, + upstream_project_id: project_id, }, cx, ) @@ -232,7 +329,6 @@ impl GitStore { pub fn ssh( worktree_store: &Entity, buffer_store: Entity, - environment: Entity, upstream_client: AnyProtoClient, cx: &mut Context, ) -> Self { @@ -242,8 +338,7 @@ impl GitStore { GitStoreState::Ssh { 
upstream_client, upstream_project_id: ProjectId(SSH_PROJECT_ID), - downstream_client: None, - environment, + downstream: None, }, cx, ) @@ -255,8 +350,7 @@ impl GitStore { state: GitStoreState, cx: &mut Context, ) -> Self { - let update_sender = Self::spawn_git_worker(cx); - let _subscriptions = [ + let _subscriptions = vec![ cx.subscribe(&worktree_store, Self::on_worktree_store_event), cx.subscribe(&buffer_store, Self::on_buffer_store_event), ]; @@ -267,7 +361,6 @@ impl GitStore { worktree_store, repositories: HashMap::default(), active_repo_id: None, - update_sender, _subscriptions, loading_diffs: HashMap::default(), shared_diffs: HashMap::default(), @@ -312,24 +405,27 @@ impl GitStore { pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context) { match &mut self.state { GitStoreState::Ssh { - downstream_client, .. + downstream: downstream_client, + .. } => { for repo in self.repositories.values() { - client - .send(repo.read(cx).repository_entry.initial_update(project_id)) - .log_err(); + let update = repo.read(cx).snapshot.initial_update(project_id); + for update in split_repository_update(update) { + client.send(update).log_err(); + } } *downstream_client = Some((client, ProjectId(project_id))); } GitStoreState::Local { - downstream_client, .. + downstream: downstream_client, + .. 
} => { let mut snapshots = HashMap::default(); let (updates_tx, mut updates_rx) = mpsc::unbounded(); for repo in self.repositories.values() { updates_tx .unbounded_send(DownstreamUpdate::UpdateRepository( - repo.read(cx).repository_entry.clone(), + repo.read(cx).snapshot.clone(), )) .ok(); } @@ -342,17 +438,20 @@ impl GitStore { while let Some(update) = updates_rx.next().await { match update { DownstreamUpdate::UpdateRepository(snapshot) => { - if let Some(old_snapshot) = - snapshots.get_mut(&snapshot.work_directory_id) + if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id) { let update = snapshot.build_update(old_snapshot, project_id); *old_snapshot = snapshot; - client.send(update)?; + for update in split_repository_update(update) { + client.send(update)?; + } } else { let update = snapshot.initial_update(project_id); - client.send(update)?; - snapshots.insert(snapshot.work_directory_id, snapshot); + for update in split_repository_update(update) { + client.send(update)?; + } + snapshots.insert(snapshot.id, snapshot); } } DownstreamUpdate::RemoveRepository(id) => { @@ -369,7 +468,8 @@ impl GitStore { .ok(); this.update(cx, |this, _| { if let GitStoreState::Local { - downstream_client, .. + downstream: downstream_client, + .. } = &mut this.state { downstream_client.take(); @@ -389,12 +489,14 @@ impl GitStore { pub fn unshared(&mut self, _cx: &mut Context) { match &mut self.state { GitStoreState::Local { - downstream_client, .. + downstream: downstream_client, + .. } => { downstream_client.take(); } GitStoreState::Ssh { - downstream_client, .. + downstream: downstream_client, + .. 
} => { downstream_client.take(); } @@ -440,29 +542,32 @@ impl GitStore { } } - let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) { - hash_map::Entry::Occupied(e) => e.get().clone(), - hash_map::Entry::Vacant(entry) => { - let staged_text = self.state.load_staged_text(&buffer, &self.buffer_store, cx); - entry - .insert( - cx.spawn(async move |this, cx| { - Self::open_diff_internal( - this, - DiffKind::Unstaged, - staged_text.await.map(DiffBasesChange::SetIndex), - buffer, - cx, - ) - .await - .map_err(Arc::new) - }) - .shared(), - ) - .clone() - } + let Some((repo, repo_path)) = + self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx) + else { + return Task::ready(Err(anyhow!("failed to find git repository for buffer"))); }; + let task = self + .loading_diffs + .entry((buffer_id, DiffKind::Unstaged)) + .or_insert_with(|| { + let staged_text = repo.read(cx).load_staged_text(buffer_id, repo_path, cx); + cx.spawn(async move |this, cx| { + Self::open_diff_internal( + this, + DiffKind::Unstaged, + staged_text.await.map(DiffBasesChange::SetIndex), + buffer, + cx, + ) + .await + .map_err(Arc::new) + }) + .shared() + }) + .clone(); + cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } @@ -492,32 +597,26 @@ impl GitStore { } } - let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) { - hash_map::Entry::Occupied(e) => e.get().clone(), - hash_map::Entry::Vacant(entry) => { - let changes = self - .state - .load_committed_text(&buffer, &self.buffer_store, cx); - - entry - .insert( - cx.spawn(async move |this, cx| { - Self::open_diff_internal( - this, - DiffKind::Uncommitted, - changes.await, - buffer, - cx, - ) - .await - .map_err(Arc::new) - }) - .shared(), - ) - .clone() - } + let Some((repo, repo_path)) = + self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx) + else { + return Task::ready(Err(anyhow!("failed to find git repository for buffer"))); }; + let task = self 
+ .loading_diffs + .entry((buffer_id, DiffKind::Uncommitted)) + .or_insert_with(|| { + let changes = repo.read(cx).load_committed_text(buffer_id, repo_path, cx); + cx.spawn(async move |this, cx| { + Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx) + .await + .map_err(Arc::new) + }) + .shared() + }) + .clone(); + cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) } @@ -607,12 +706,7 @@ impl GitStore { cx: &App, ) -> Option { let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?; - Some( - repo.read(cx) - .repository_entry - .status_for_path(&repo_path)? - .status, - ) + Some(repo.read(cx).status_for_path(&repo_path)?.status) } pub fn checkpoint(&self, cx: &App) -> Task> { @@ -620,8 +714,7 @@ impl GitStore { let mut checkpoints = Vec::new(); for repository in self.repositories.values() { let repository = repository.read(cx); - work_directory_abs_paths - .push(repository.repository_entry.work_directory_abs_path.clone()); + work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone()); checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?)); } @@ -640,15 +733,7 @@ impl GitStore { let repositories_by_work_dir_abs_path = self .repositories .values() - .map(|repo| { - ( - repo.read(cx) - .repository_entry - .work_directory_abs_path - .clone(), - repo, - ) - }) + .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo)) .collect::>(); let mut tasks = Vec::new(); @@ -674,15 +759,7 @@ impl GitStore { let repositories_by_work_dir_abs_path = self .repositories .values() - .map(|repo| { - ( - repo.read(cx) - .repository_entry - .work_directory_abs_path - .clone(), - repo, - ) - }) + .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo)) .collect::>(); let mut tasks = Vec::new(); @@ -714,15 +791,7 @@ impl GitStore { let repositories_by_work_directory_abs_path = self .repositories .values() - .map(|repo| { - ( - 
repo.read(cx) - .repository_entry - .work_directory_abs_path - .clone(), - repo, - ) - }) + .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo)) .collect::>(); let mut tasks = Vec::new(); @@ -748,60 +817,39 @@ impl GitStore { cx: &App, ) -> Task>> { let buffer = buffer.read(cx); - let Some(file) = File::from_dyn(buffer.file()) else { - return Task::ready(Err(anyhow!("buffer has no file"))); + let Some((repo, repo_path)) = + self.repository_and_path_for_buffer_id(buffer.remote_id(), cx) + else { + return Task::ready(Err(anyhow!("failed to find a git repository for buffer"))); }; + let content = match &version { + Some(version) => buffer.rope_for_version(version).clone(), + None => buffer.as_rope().clone(), + }; + let version = version.unwrap_or(buffer.version()); + let buffer_id = buffer.remote_id(); - match file.worktree.clone().read(cx) { - Worktree::Local(worktree) => { - let worktree = worktree.snapshot(); - let blame_params = maybe!({ - let local_repo = match worktree.local_repo_containing_path(&file.path) { - Some(repo_for_path) => repo_for_path, - None => return Ok(None), - }; - - let relative_path = local_repo - .relativize(&file.path) - .context("failed to relativize buffer path")?; - - let repo = local_repo.repo().clone(); - - let content = match version { - Some(version) => buffer.rope_for_version(&version).clone(), - None => buffer.as_rope().clone(), - }; - - anyhow::Ok(Some((repo, relative_path, content))) - }); - - cx.spawn(async move |_cx| { - let Some((repo, relative_path, content)) = blame_params? 
else { - return Ok(None); - }; - repo.blame(relative_path.clone(), content) - .await - .with_context(|| format!("Failed to blame {:?}", relative_path.0)) - .map(Some) - }) - } - Worktree::Remote(worktree) => { - let buffer_id = buffer.remote_id(); - let version = buffer.version(); - let project_id = worktree.project_id(); - let client = worktree.client(); - cx.spawn(async move |_| { + let rx = repo.read(cx).send_job(move |state, _| async move { + match state { + RepositoryState::Local { backend, .. } => backend + .blame(repo_path.clone(), content) + .await + .with_context(|| format!("Failed to blame {:?}", repo_path.0)) + .map(Some), + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::BlameBuffer { - project_id, + project_id: project_id.to_proto(), buffer_id: buffer_id.into(), version: serialize_version(&version), }) .await?; Ok(deserialize_blame_buffer_response(response)) - }) + } } - } + }); + + cx.spawn(|_: &mut AsyncApp| async move { rx.await? }) } pub fn get_permalink_to_line( @@ -810,64 +858,53 @@ impl GitStore { selection: Range, cx: &App, ) -> Task> { - let buffer = buffer.read(cx); - let Some(file) = File::from_dyn(buffer.file()) else { + let Some(file) = File::from_dyn(buffer.read(cx).file()) else { return Task::ready(Err(anyhow!("buffer has no file"))); }; - match file.worktree.read(cx) { - Worktree::Local(worktree) => { - let repository = self - .repository_and_path_for_project_path( - &(worktree.id(), file.path.clone()).into(), - cx, - ) - .map(|(repository, _)| repository); - let Some((local_repo_entry, repo_entry)) = repository.and_then(|repository| { - let repository = repository.read(cx); - let repo_entry = repository.repository_entry.clone(); - Some((worktree.get_local_repo(&repo_entry)?, repo_entry)) - }) else { - // If we're not in a Git repo, check whether this is a Rust source - // file in the Cargo registry (presumably opened with go-to-definition - // from a normal Rust file). 
If so, we can put together a permalink - // using crate metadata. - if buffer - .language() - .is_none_or(|lang| lang.name() != "Rust".into()) - { - return Task::ready(Err(anyhow!("no permalink available"))); - } - let Some(file_path) = worktree.absolutize(&file.path).ok() else { - return Task::ready(Err(anyhow!("no permalink available"))); - }; - return cx.spawn(async move |cx| { - let provider_registry = - cx.update(GitHostingProviderRegistry::default_global)?; - get_permalink_in_rust_registry_src(provider_registry, file_path, selection) - .map_err(|_| anyhow!("no permalink available")) - }); - }; + let Some((repo, repo_path)) = self.repository_and_path_for_project_path( + &(file.worktree.read(cx).id(), file.path.clone()).into(), + cx, + ) else { + // If we're not in a Git repo, check whether this is a Rust source + // file in the Cargo registry (presumably opened with go-to-definition + // from a normal Rust file). If so, we can put together a permalink + // using crate metadata. + if buffer + .read(cx) + .language() + .is_none_or(|lang| lang.name() != "Rust".into()) + { + return Task::ready(Err(anyhow!("no permalink available"))); + } + let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else { + return Task::ready(Err(anyhow!("no permalink available"))); + }; + return cx.spawn(async move |cx| { + let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?; + get_permalink_in_rust_registry_src(provider_registry, file_path, selection) + .map_err(|_| anyhow!("no permalink available")) + }); - let path = match local_repo_entry.relativize(&file.path) { - Ok(RepoPath(path)) => path, - Err(e) => return Task::ready(Err(e)), - }; + // TODO remote case + }; - let remote = repo_entry - .branch() - .and_then(|b| b.upstream.as_ref()) - .and_then(|b| b.remote_name()) - .unwrap_or("origin") - .to_string(); - - let repo = local_repo_entry.repo().clone(); - cx.spawn(async move |cx| { - let origin_url = repo + let buffer_id = 
buffer.read(cx).remote_id(); + let branch = repo.read(cx).branch.clone(); + let remote = branch + .as_ref() + .and_then(|b| b.upstream.as_ref()) + .and_then(|b| b.remote_name()) + .unwrap_or("origin") + .to_string(); + let rx = repo.read(cx).send_job(move |state, cx| async move { + match state { + RepositoryState::Local { backend, .. } => { + let origin_url = backend .remote_url(&remote) .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?; - let sha = repo + let sha = backend .head_sha() .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?; @@ -878,7 +915,7 @@ impl GitStore { parse_git_remote_url(provider_registry, &origin_url) .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?; - let path = path + let path = repo_path .to_str() .ok_or_else(|| anyhow!("failed to convert path to string"))?; @@ -890,16 +927,11 @@ impl GitStore { selection: Some(selection), }, )) - }) - } - Worktree::Remote(worktree) => { - let buffer_id = buffer.remote_id(); - let project_id = worktree.project_id(); - let client = worktree.client(); - cx.spawn(async move |_| { + } + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::GetPermalinkToLine { - project_id, + project_id: project_id.to_proto(), buffer_id: buffer_id.into(), selection: Some(proto::Range { start: selection.start as u64, @@ -909,20 +941,23 @@ impl GitStore { .await?; url::Url::parse(&response.permalink).context("failed to parse permalink") - }) + } } - } + }); + cx.spawn(|_: &mut AsyncApp| async move { rx.await? }) } fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> { match &self.state { GitStoreState::Local { - downstream_client, .. + downstream: downstream_client, + .. } => downstream_client .as_ref() .map(|state| (state.client.clone(), state.project_id)), GitStoreState::Ssh { - downstream_client, .. + downstream: downstream_client, + .. } => downstream_client.clone(), GitStoreState::Remote { .. 
} => None, } @@ -940,160 +975,148 @@ impl GitStore { } } - fn project_environment(&self) -> Option> { - match &self.state { - GitStoreState::Local { environment, .. } => Some(environment.clone()), - GitStoreState::Ssh { environment, .. } => Some(environment.clone()), - GitStoreState::Remote { .. } => None, - } - } - - fn project_id(&self) -> Option { - match &self.state { - GitStoreState::Local { .. } => None, - GitStoreState::Ssh { .. } => Some(ProjectId(proto::SSH_PROJECT_ID)), - GitStoreState::Remote { project_id, .. } => Some(*project_id), - } - } - fn on_worktree_store_event( &mut self, worktree_store: Entity, event: &WorktreeStoreEvent, cx: &mut Context, ) { + let GitStoreState::Local { + project_environment, + downstream, + next_repository_id, + fs, + } = &self.state + else { + return; + }; + match event { - WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => { - // We should only get this event for a local project. - self.update_repositories(&worktree_store, cx); - if self.is_local() { - if let Some(worktree) = - worktree_store.read(cx).worktree_for_id(*worktree_id, cx) - { - self.local_worktree_git_repos_changed(worktree, changed_repos, cx); - } + WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => { + let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default(); + for (relative_path, _, _) in updated_entries.iter() { + let Some((repo, repo_path)) = self.repository_and_path_for_project_path( + &(*worktree_id, relative_path.clone()).into(), + cx, + ) else { + continue; + }; + paths_by_git_repo.entry(repo).or_default().push(repo_path) + } + + for (repo, paths) in paths_by_git_repo { + repo.update(cx, |repo, cx| { + repo.paths_changed( + paths, + downstream + .as_ref() + .map(|downstream| downstream.updates_tx.clone()), + cx, + ); + }); } - cx.emit(GitEvent::GitStateUpdated); } - WorktreeStoreEvent::WorktreeAdded(_) => {} - _ => { - cx.emit(GitEvent::FileSystemUpdated); + 
WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => { + self.update_repositories_from_worktrees( + project_environment.clone(), + next_repository_id.clone(), + downstream + .as_ref() + .map(|downstream| downstream.updates_tx.clone()), + changed_repos.clone(), + fs.clone(), + cx, + ); + if let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx) { + self.local_worktree_git_repos_changed(worktree, changed_repos, cx); + } } + _ => {} } } - fn update_repositories( + fn on_repository_event( &mut self, - worktree_store: &Entity, - cx: &mut Context, + repo: Entity, + event: &RepositoryEvent, + cx: &mut Context, ) { - let mut new_repositories = HashMap::default(); - let git_store = cx.weak_entity(); - worktree_store.update(cx, |worktree_store, cx| { - for worktree in worktree_store.worktrees() { - worktree.update(cx, |worktree, cx| { - let snapshot = worktree.snapshot(); - for repo_entry in snapshot.repositories().iter() { - let git_repo_and_merge_message = worktree - .as_local() - .and_then(|local_worktree| local_worktree.get_local_repo(repo_entry)) - .map(|local_repo| { - ( - RepositoryState::Local(local_repo.repo().clone()), - local_repo.merge_message.clone(), - ) - }) - .or_else(|| { - let git_repo = RepositoryState::Remote { - project_id: self.project_id()?, - client: self - .upstream_client() - .context("no upstream client") - .log_err()? - .clone(), - work_directory_id: repo_entry.work_directory_id(), - }; - Some((git_repo, None)) - }); + let id = repo.read(cx).id; + cx.emit(GitStoreEvent::RepositoryUpdated( + id, + event.clone(), + self.active_repo_id == Some(id), + )) + } - let Some((git_repo, merge_message)) = git_repo_and_merge_message else { - continue; - }; - - let existing_repo = self - .repositories - .values() - .find(|repo| repo.read(cx).id() == repo_entry.work_directory_id()); - - let repo = if let Some(existing_repo) = existing_repo { - // Update the statuses and merge message but keep everything else. 
- let existing_repo = existing_repo.clone(); - existing_repo.update(cx, |existing_repo, _| { - existing_repo.repository_entry = repo_entry.clone(); - if matches!(git_repo, RepositoryState::Local { .. }) { - existing_repo.merge_message = merge_message; - existing_repo.completed_scan_id = worktree.completed_scan_id(); - } - }); - existing_repo - } else { - cx.new(|_| Repository { - worktree_id: Some(worktree.id()), - project_environment: self - .project_environment() - .as_ref() - .map(|env| env.downgrade()), - git_store: git_store.clone(), - askpass_delegates: Default::default(), - latest_askpass_id: 0, - repository_entry: repo_entry.clone(), - job_sender: self.update_sender.clone(), - merge_message, - commit_message_buffer: None, - completed_scan_id: worktree.completed_scan_id(), - state: git_repo, - }) - }; - - // TODO only send out messages for repository snapshots that have changed - let snapshot = repo.read(cx).repository_entry.clone(); - if let GitStoreState::Local { - downstream_client: Some(state), - .. - } = &self.state - { - state - .updates_tx - .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot)) - .ok(); - } - new_repositories.insert(repo_entry.work_directory_id(), repo); - self.repositories.remove(&repo_entry.work_directory_id()); - } - }) - } - }); - - if let GitStoreState::Local { - downstream_client: Some(state), - .. 
- } = &self.state - { - for id in self.repositories.keys().cloned() { - state - .updates_tx - .unbounded_send(DownstreamUpdate::RemoveRepository(id)) - .ok(); + /// Update our list of repositories and schedule git scans in response to a notification from a worktree, + fn update_repositories_from_worktrees( + &mut self, + project_environment: Entity, + next_repository_id: Arc, + updates_tx: Option>, + updated_git_repositories: UpdatedGitRepositoriesSet, + fs: Arc, + cx: &mut Context, + ) { + let mut removed_ids = Vec::new(); + for update in updated_git_repositories.iter() { + if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| { + Some(&repo.read(cx).work_directory_abs_path) + == update.old_work_directory_abs_path.as_ref() + }) { + if let Some(new_work_directory_abs_path) = + update.new_work_directory_abs_path.clone() + { + existing.update(cx, |existing, cx| { + existing.snapshot.work_directory_abs_path = new_work_directory_abs_path; + existing.schedule_scan(updates_tx.clone(), cx); + }); + } else { + removed_ids.push(*id); + } + } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update + .new_work_directory_abs_path + .clone() + .zip(update.dot_git_abs_path.clone()) + { + let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); + let git_store = cx.weak_entity(); + let repo = cx.new(|cx| { + let mut repo = Repository::local( + id, + work_directory_abs_path, + dot_git_abs_path, + project_environment.downgrade(), + fs.clone(), + git_store, + cx, + ); + repo.schedule_scan(updates_tx.clone(), cx); + repo + }); + self._subscriptions + .push(cx.subscribe(&repo, Self::on_repository_event)); + self.repositories.insert(id, repo); + cx.emit(GitStoreEvent::RepositoryAdded(id)); + self.active_repo_id.get_or_insert_with(|| { + cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); + id + }); } } - self.repositories = new_repositories; - if let Some(id) = self.active_repo_id.as_ref() { - if 
!self.repositories.contains_key(id) { + for id in removed_ids { + if self.active_repo_id == Some(id) { self.active_repo_id = None; + cx.emit(GitStoreEvent::ActiveRepositoryChanged(None)); + } + self.repositories.remove(&id); + if let Some(updates_tx) = updates_tx.as_ref() { + updates_tx + .unbounded_send(DownstreamUpdate::RemoveRepository(id)) + .ok(); } - } else if let Some(&first_id) = self.repositories.keys().next() { - self.active_repo_id = Some(first_id); } } @@ -1185,7 +1208,7 @@ impl GitStore { diff.clear_pending_hunks(cx); }) .ok(); - this.update(cx, |_, cx| cx.emit(GitEvent::IndexWriteError(error))) + this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error))) .ok(); } }) @@ -1200,14 +1223,10 @@ impl GitStore { changed_repos: &UpdatedGitRepositoriesSet, cx: &mut Context, ) { + log::debug!("local worktree repos changed"); debug_assert!(worktree.read(cx).is_local()); - let Some(active_repo) = self.active_repository() else { - log::error!("local worktree changed but we have no active repository"); - return; - }; - - let mut diff_state_updates = HashMap::>::default(); + let mut diff_state_updates = HashMap::, Vec<_>>::default(); for (buffer_id, diff_state) in &self.diffs { let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else { continue; @@ -1218,13 +1237,19 @@ impl GitStore { if file.worktree != worktree { continue; } - let Some(repo_id) = changed_repos - .iter() - .map(|(entry, _)| entry.id) - .find(|repo_id| self.repositories().contains_key(&repo_id)) + let Some((repo, repo_path)) = + self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx) else { continue; }; + if !changed_repos.iter().any(|update| { + update.old_work_directory_abs_path.as_ref() + == Some(&repo.read(cx).work_directory_abs_path) + || update.new_work_directory_abs_path.as_ref() + == Some(&repo.read(cx).work_directory_abs_path) + }) { + continue; + } let diff_state = diff_state.read(cx); let has_unstaged_diff = diff_state @@ -1238,59 +1263,44 @@ impl 
GitStore { let update = ( buffer, - file.path.clone(), + repo_path, has_unstaged_diff.then(|| diff_state.index_text.clone()), has_uncommitted_diff.then(|| diff_state.head_text.clone()), diff_state.hunk_staging_operation_count, ); - diff_state_updates.entry(repo_id).or_default().push(update); + diff_state_updates.entry(repo).or_default().push(update); } if diff_state_updates.is_empty() { return; } - for (repo_id, repo_diff_state_updates) in diff_state_updates.into_iter() { - let worktree = worktree.downgrade(); + for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() { let git_store = cx.weak_entity(); - let _ = active_repo.read(cx).send_keyed_job( - Some(GitJobKey::BatchReadIndex(repo_id)), - |_, mut cx| async move { - let snapshot = worktree.update(&mut cx, |tree, _| { - tree.as_local().map(|local_tree| local_tree.snapshot()) - }); - let Ok(Some(snapshot)) = snapshot else { + let _ = repo.read(cx).send_keyed_job( + Some(GitJobKey::BatchReadIndex), + |state, mut cx| async move { + let RepositoryState::Local { backend, .. 
} = state else { + log::error!("tried to recompute diffs for a non-local repository"); return; }; - let mut diff_bases_changes_by_buffer = Vec::new(); for ( buffer, - path, + repo_path, current_index_text, current_head_text, hunk_staging_operation_count, ) in &repo_diff_state_updates { - let Some(local_repo) = snapshot.local_repo_containing_path(&path) else { - continue; - }; - let Some(relative_path) = local_repo.relativize(&path).ok() else { - continue; - }; - - log::debug!("reloading git state for buffer {}", path.display()); let index_text = if current_index_text.is_some() { - local_repo - .repo() - .load_index_text(relative_path.clone()) - .await + backend.load_index_text(repo_path.clone()).await } else { None }; let head_text = if current_head_text.is_some() { - local_repo.repo().load_committed_text(relative_path).await + backend.load_committed_text(repo_path.clone()).await } else { None }; @@ -1389,13 +1399,13 @@ impl GitStore { } } - pub fn repositories(&self) -> &HashMap> { + pub fn repositories(&self) -> &HashMap> { &self.repositories } pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?; - let status = repo.read(cx).repository_entry.status_for_path(&path)?; + let status = repo.read(cx).snapshot.status_for_path(&path)?; Some(status.status) } @@ -1417,48 +1427,11 @@ impl GitStore { let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?; self.repositories .values() - .filter_map(|repo_handle| { - let repo = repo_handle.read(cx); - let relative_path = repo.repository_entry.relativize_abs_path(&abs_path)?; - Some((repo_handle.clone(), relative_path)) + .filter_map(|repo| { + let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?; + Some((repo.clone(), repo_path)) }) - .max_by_key(|(repo, _)| { - repo.read(cx) - .repository_entry - .work_directory_abs_path - .clone() - }) - } - - fn spawn_git_worker(cx: &mut Context) -> mpsc::UnboundedSender 
{ - let (job_tx, mut job_rx) = mpsc::unbounded::(); - - cx.spawn(async move |_, cx| { - let mut jobs = VecDeque::new(); - loop { - while let Ok(Some(next_job)) = job_rx.try_next() { - jobs.push_back(next_job); - } - - if let Some(job) = jobs.pop_front() { - if let Some(current_key) = &job.key { - if jobs - .iter() - .any(|other_job| other_job.key.as_ref() == Some(current_key)) - { - continue; - } - } - (job.job)(cx).await; - } else if let Some(job) = job_rx.next().await { - jobs.push_back(job); - } else { - break; - } - } - }) - .detach(); - job_tx + .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone()) } pub fn git_init( @@ -1480,7 +1453,7 @@ impl GitStore { } | GitStoreState::Remote { upstream_client, - project_id, + upstream_project_id: project_id, .. } => { let client = upstream_client.clone(); @@ -1507,49 +1480,40 @@ impl GitStore { this.update(&mut cx, |this, cx| { let mut update = envelope.payload; - let work_directory_id = ProjectEntryId::from_proto(update.id); + let id = RepositoryId::from_proto(update.id); let client = this .upstream_client() .context("no upstream client")? 
.clone(); - let repo = this - .repositories - .entry(work_directory_id) - .or_insert_with(|| { - let git_store = cx.weak_entity(); - - cx.new(|_| Repository { - commit_message_buffer: None, + let mut is_new = false; + let repo = this.repositories.entry(id).or_insert_with(|| { + is_new = true; + let git_store = cx.weak_entity(); + cx.new(|cx| { + Repository::remote( + id, + Path::new(&update.abs_path).into(), + ProjectId(update.project_id), + client, git_store, - project_environment: None, - worktree_id: None, - repository_entry: RepositoryEntry { - work_directory_id, - current_branch: None, - statuses_by_path: Default::default(), - current_merge_conflicts: Default::default(), - work_directory_abs_path: update.abs_path.clone().into(), - worktree_scan_id: update.scan_id as usize, - }, - merge_message: None, - completed_scan_id: update.scan_id as usize, - state: RepositoryState::Remote { - project_id: ProjectId(update.project_id), - client, - work_directory_id, - }, - job_sender: this.update_sender.clone(), - askpass_delegates: Default::default(), - latest_askpass_id: 0, - }) - }); + cx, + ) + }) + }); + if is_new { + this._subscriptions + .push(cx.subscribe(&repo, Self::on_repository_event)) + } + + repo.update(cx, { + let update = update.clone(); + |repo, cx| repo.apply_remote_update(update, cx) + })?; - repo.update(cx, |repo, _cx| repo.apply_remote_update(update.clone()))?; - cx.emit(GitEvent::GitStateUpdated); this.active_repo_id.get_or_insert_with(|| { - cx.emit(GitEvent::ActiveRepositoryChanged); - work_directory_id + cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); + id }); if let Some((client, project_id)) = this.downstream_client() { @@ -1567,7 +1531,7 @@ impl GitStore { ) -> Result<()> { this.update(&mut cx, |this, cx| { let mut update = envelope.payload; - let id = ProjectEntryId::from_proto(update.id); + let id = RepositoryId::from_proto(update.id); this.repositories.remove(&id); if let Some((client, project_id)) = this.downstream_client() { 
update.project_id = project_id.to_proto(); @@ -1575,9 +1539,9 @@ impl GitStore { } if this.active_repo_id == Some(id) { this.active_repo_id = None; - cx.emit(GitEvent::ActiveRepositoryChanged); + cx.emit(GitStoreEvent::ActiveRepositoryChanged(None)); } - cx.emit(GitEvent::GitStateUpdated); + cx.emit(GitStoreEvent::RepositoryRemoved(id)); }) } @@ -1599,14 +1563,14 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let askpass_id = envelope.payload.askpass_id; let askpass = make_remote_delegate( this, envelope.payload.project_id, - work_directory_id, + repository_id, askpass_id, &mut cx, ); @@ -1628,14 +1592,14 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let askpass_id = envelope.payload.askpass_id; let askpass = make_remote_delegate( this, envelope.payload.project_id, - work_directory_id, + repository_id, askpass_id, &mut cx, ); @@ -1668,13 +1632,13 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = 
Self::repository_for_request(&this, repository_id, &mut cx)?; let askpass_id = envelope.payload.askpass_id; let askpass = make_remote_delegate( this, envelope.payload.project_id, - work_directory_id, + repository_id, askpass_id, &mut cx, ); @@ -1699,8 +1663,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let entries = envelope .payload @@ -1723,8 +1687,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let entries = envelope .payload @@ -1748,8 +1712,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -1768,8 +1732,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = 
RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let message = SharedString::from(envelope.payload.message); let name = envelope.payload.name.map(SharedString::from); @@ -1788,8 +1752,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branch_name = envelope.payload.branch_name; @@ -1814,8 +1778,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branches = repository_handle .update(&mut cx, |repository_handle, _| repository_handle.branches())? 
@@ -1824,7 +1788,7 @@ impl GitStore { Ok(proto::GitBranchesResponse { branches: branches .into_iter() - .map(|branch| worktree::branch_to_proto(&branch)) + .map(|branch| branch_to_proto(&branch)) .collect::>(), }) } @@ -1833,8 +1797,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branch_name = envelope.payload.branch_name; repository_handle @@ -1851,8 +1815,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branch_name = envelope.payload.branch_name; repository_handle @@ -1869,8 +1833,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let commit = repository_handle .update(&mut cx, |repository_handle, _| { @@ -1891,8 +1855,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = 
Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let commit_diff = repository_handle .update(&mut cx, |repository_handle, _| { @@ -1917,8 +1881,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let mode = match envelope.payload.mode() { git_reset::ResetMode::Soft => ResetMode::Soft, @@ -1938,8 +1902,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let paths = envelope .payload .paths @@ -1960,8 +1924,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository = Self::repository_for_request(&this, repository_id, &mut cx)?; let buffer = repository .update(&mut cx, |repository, cx| { repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx) @@ -1991,8 +1955,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let 
work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository = Self::repository_for_request(&this, repository_id, &mut cx)?; let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?; let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else { @@ -2014,8 +1978,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let branches = repository_handle .update(&mut cx, |repository_handle, _| { @@ -2035,8 +1999,8 @@ impl GitStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id); - let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?; + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let diff_type = match envelope.payload.diff_type() { proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex, proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree, @@ -2210,28 +2174,21 @@ impl GitStore { fn repository_for_request( this: &Entity, - work_directory_id: ProjectEntryId, + id: RepositoryId, cx: &mut AsyncApp, ) -> Result> { - this.update(cx, |this, cx| { + this.update(cx, |this, _| { this.repositories - .values() - .find(|repository_handle| { - repository_handle - .read(cx) - .repository_entry - 
.work_directory_id() - == work_directory_id - }) + .get(&id) .context("missing repository handle") .cloned() })? } - pub fn repo_snapshots(&self, cx: &App) -> HashMap { + pub fn repo_snapshots(&self, cx: &App) -> HashMap { self.repositories .iter() - .map(|(id, repo)| (*id, repo.read(cx).repository_entry.clone())) + .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone())) .collect() } } @@ -2409,6 +2366,7 @@ impl BufferDiffState { if this.update(cx, |this, _| { this.hunk_staging_operation_count > prev_hunk_staging_operation_count })? { + eprintln!("early return"); return Ok(()); } @@ -2457,7 +2415,7 @@ impl BufferDiffState { fn make_remote_delegate( this: Entity, project_id: u64, - work_directory_id: ProjectEntryId, + repository_id: RepositoryId, askpass_id: u64, cx: &mut AsyncApp, ) -> AskPassDelegate { @@ -2468,7 +2426,7 @@ fn make_remote_delegate( }; let response = client.request(proto::AskPassRequest { project_id, - work_directory_id: work_directory_id.to_proto(), + repository_id: repository_id.to_proto(), askpass_id, prompt, }); @@ -2482,134 +2440,204 @@ fn make_remote_delegate( }) } -impl GitStoreState { - fn load_staged_text( - &self, - buffer: &Entity, - buffer_store: &Entity, - cx: &App, - ) -> Task>> { - match self { - GitStoreState::Local { .. } => { - if let Some((worktree, path)) = - buffer_store.read(cx).worktree_for_buffer(buffer, cx) - { - worktree.read(cx).load_staged_file(path.as_ref(), cx) - } else { - return Task::ready(Err(anyhow!("no such worktree"))); - } - } - GitStoreState::Ssh { - upstream_client, - upstream_project_id: project_id, - .. 
- } - | GitStoreState::Remote { - upstream_client, - project_id, - } => { - let buffer_id = buffer.read(cx).remote_id(); - let project_id = *project_id; - let client = upstream_client.clone(); - cx.background_spawn(async move { - let response = client - .request(proto::OpenUnstagedDiff { - project_id: project_id.to_proto(), - buffer_id: buffer_id.to_proto(), - }) - .await?; - Ok(response.staged_text) - }) - } +impl RepositoryId { + pub fn to_proto(self) -> u64 { + self.0 + } + + pub fn from_proto(id: u64) -> Self { + RepositoryId(id) + } +} + +impl RepositorySnapshot { + fn empty(id: RepositoryId, work_directory_abs_path: Arc) -> Self { + Self { + id, + merge_message: None, + statuses_by_path: Default::default(), + work_directory_abs_path, + branch: None, + merge_conflicts: Default::default(), + merge_head_shas: Default::default(), + scan_id: 0, } } - fn load_committed_text( - &self, - buffer: &Entity, - buffer_store: &Entity, - cx: &App, - ) -> Task> { - match self { - GitStoreState::Local { .. } => { - if let Some((worktree, path)) = - buffer_store.read(cx).worktree_for_buffer(buffer, cx) - { - let worktree = worktree.read(cx); - let committed_text = worktree.load_committed_file(&path, cx); - let staged_text = worktree.load_staged_file(&path, cx); - cx.background_spawn(async move { - let committed_text = committed_text.await?; - let staged_text = staged_text.await?; - let diff_bases_change = if committed_text == staged_text { - DiffBasesChange::SetBoth(committed_text) - } else { - DiffBasesChange::SetEach { - index: staged_text, - head: committed_text, - } - }; - Ok(diff_bases_change) - }) - } else { - Task::ready(Err(anyhow!("no such worktree"))) - } - } - GitStoreState::Ssh { - upstream_client, - upstream_project_id: project_id, - .. 
- } - | GitStoreState::Remote { - upstream_client, - project_id, - } => { - use proto::open_uncommitted_diff_response::Mode; + fn initial_update(&self, project_id: u64) -> proto::UpdateRepository { + proto::UpdateRepository { + branch_summary: self.branch.as_ref().map(branch_to_proto), + updated_statuses: self + .statuses_by_path + .iter() + .map(|entry| entry.to_proto()) + .collect(), + removed_statuses: Default::default(), + current_merge_conflicts: self + .merge_conflicts + .iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), + project_id, + id: self.id.to_proto(), + abs_path: self.work_directory_abs_path.to_proto(), + entry_ids: vec![self.id.to_proto()], + scan_id: self.scan_id, + is_last_update: true, + } + } - let buffer_id = buffer.read(cx).remote_id(); - let project_id = *project_id; - let client = upstream_client.clone(); - cx.background_spawn(async move { - let response = client - .request(proto::OpenUncommittedDiff { - project_id: project_id.to_proto(), - buffer_id: buffer_id.to_proto(), - }) - .await?; - let mode = - Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?; - let bases = match mode { - Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text), - Mode::IndexAndHead => DiffBasesChange::SetEach { - head: response.committed_text, - index: response.staged_text, - }, - }; - Ok(bases) - }) + fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository { + let mut updated_statuses: Vec = Vec::new(); + let mut removed_statuses: Vec = Vec::new(); + + let mut new_statuses = self.statuses_by_path.iter().peekable(); + let mut old_statuses = old.statuses_by_path.iter().peekable(); + + let mut current_new_entry = new_statuses.next(); + let mut current_old_entry = old_statuses.next(); + loop { + match (current_new_entry, current_old_entry) { + (Some(new_entry), Some(old_entry)) => { + match new_entry.repo_path.cmp(&old_entry.repo_path) { + Ordering::Less => { + 
updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + Ordering::Equal => { + if new_entry.status != old_entry.status { + updated_statuses.push(new_entry.to_proto()); + } + current_old_entry = old_statuses.next(); + current_new_entry = new_statuses.next(); + } + Ordering::Greater => { + removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); + current_old_entry = old_statuses.next(); + } + } + } + (None, Some(old_entry)) => { + removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); + current_old_entry = old_statuses.next(); + } + (Some(new_entry), None) => { + updated_statuses.push(new_entry.to_proto()); + current_new_entry = new_statuses.next(); + } + (None, None) => break, } } + + proto::UpdateRepository { + branch_summary: self.branch.as_ref().map(branch_to_proto), + updated_statuses, + removed_statuses, + current_merge_conflicts: self + .merge_conflicts + .iter() + .map(|path| path.as_ref().to_proto()) + .collect(), + project_id, + id: self.id.to_proto(), + abs_path: self.work_directory_abs_path.to_proto(), + entry_ids: vec![], + scan_id: self.scan_id, + is_last_update: true, + } + } + + pub fn status(&self) -> impl Iterator + '_ { + self.statuses_by_path.iter().cloned() + } + + pub fn status_summary(&self) -> GitSummary { + self.statuses_by_path.summary().item_summary + } + + pub fn status_for_path(&self, path: &RepoPath) -> Option { + self.statuses_by_path + .get(&PathKey(path.0.clone()), &()) + .cloned() + } + + pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option { + abs_path + .strip_prefix(&self.work_directory_abs_path) + .map(RepoPath::from) + .ok() + } + + pub fn has_conflict(&self, repo_path: &RepoPath) -> bool { + self.statuses_by_path + .get(&PathKey(repo_path.0.clone()), &()) + .map_or(false, |entry| entry.status.is_conflicted()) + } + + /// This is the name that will be displayed in the repository selector for this repository. 
+ pub fn display_name(&self) -> SharedString { + self.work_directory_abs_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string() + .into() } } impl Repository { + fn local( + id: RepositoryId, + work_directory_abs_path: Arc, + dot_git_abs_path: Arc, + project_environment: WeakEntity, + fs: Arc, + git_store: WeakEntity, + cx: &mut Context, + ) -> Self { + let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone()); + Repository { + git_store, + snapshot, + commit_message_buffer: None, + askpass_delegates: Default::default(), + paths_needing_status_update: Default::default(), + latest_askpass_id: 0, + job_sender: Repository::spawn_local_git_worker( + work_directory_abs_path, + dot_git_abs_path, + project_environment, + fs, + cx, + ), + } + } + + fn remote( + id: RepositoryId, + work_directory_abs_path: Arc, + project_id: ProjectId, + client: AnyProtoClient, + git_store: WeakEntity, + cx: &mut Context, + ) -> Self { + let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path); + Self { + snapshot, + commit_message_buffer: None, + git_store, + paths_needing_status_update: Default::default(), + job_sender: Self::spawn_remote_git_worker(project_id, client, cx), + askpass_delegates: Default::default(), + latest_askpass_id: 0, + } + } + pub fn git_store(&self) -> Option> { self.git_store.upgrade() } - fn id(&self) -> ProjectEntryId { - self.repository_entry.work_directory_id() - } - - pub fn current_branch(&self) -> Option<&Branch> { - self.repository_entry.branch() - } - - pub fn status_for_path(&self, path: &RepoPath) -> Option { - self.repository_entry.status_for_path(path) - } - - fn send_job(&self, job: F) -> oneshot::Receiver + pub fn send_job(&self, job: F) -> oneshot::Receiver where F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static, Fut: Future + 'static, @@ -2625,12 +2653,11 @@ impl Repository { R: Send + 'static, { let (result_tx, result_rx) = futures::channel::oneshot::channel(); - let git_repo = 
self.state.clone(); self.job_sender .unbounded_send(GitJob { key, - job: Box::new(|cx: &mut AsyncApp| { - let job = job(git_repo, cx.clone()); + job: Box::new(|state, cx: &mut AsyncApp| { + let job = job(state, cx.clone()); cx.spawn(async move |_| { let result = job.await; result_tx.send(result).ok(); @@ -2641,17 +2668,6 @@ impl Repository { result_rx } - /// This is the name that will be displayed in the repository selector for this repository. - pub fn display_name(&self) -> SharedString { - self.repository_entry - .work_directory_abs_path - .file_name() - .unwrap_or_default() - .to_string_lossy() - .to_string() - .into() - } - pub fn set_as_active_repository(&self, cx: &mut Context) { let Some(git_store) = self.git_store.upgrade() else { return; @@ -2666,35 +2682,18 @@ impl Repository { return; }; git_store.active_repo_id = Some(id); - cx.emit(GitEvent::ActiveRepositoryChanged); + cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); }); } pub fn cached_status(&self) -> impl '_ + Iterator { - self.repository_entry.status() - } - - pub fn load_index_text(&self, path: RepoPath) -> oneshot::Receiver> { - self.send_job(move |repo, _cx| async move { - match repo { - RepositoryState::Local(git_repository) => { - git_repository.load_index_text(path).await - } - RepositoryState::Remote { .. 
} => None, - } - }) - } - - pub fn has_conflict(&self, path: &RepoPath) -> bool { - self.repository_entry - .current_merge_conflicts - .contains(&path) + self.snapshot.status() } pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option { let git_store = self.git_store.upgrade()?; let worktree_store = git_store.read(cx).worktree_store.read(cx); - let abs_path = self.repository_entry.work_directory_abs_path.join(&path.0); + let abs_path = self.snapshot.work_directory_abs_path.join(&path.0); let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?; Some(ProjectPath { worktree_id: worktree.read(cx).id(), @@ -2706,22 +2705,15 @@ impl Repository { let git_store = self.git_store.upgrade()?; let worktree_store = git_store.read(cx).worktree_store.read(cx); let abs_path = worktree_store.absolutize(path, cx)?; - self.repository_entry.relativize_abs_path(&abs_path) + self.snapshot.abs_path_to_repo_path(&abs_path) } pub fn contains_sub_repo(&self, other: &Entity, cx: &App) -> bool { other .read(cx) - .repository_entry + .snapshot .work_directory_abs_path - .starts_with(&self.repository_entry.work_directory_abs_path) - } - - pub fn local_repository(&self) -> Option> { - match &self.state { - RepositoryState::Local(git_repository) => Some(git_repository.clone()), - RepositoryState::Remote { .. 
} => None, - } + .starts_with(&self.snapshot.work_directory_abs_path) } pub fn open_commit_buffer( @@ -2730,48 +2722,54 @@ impl Repository { buffer_store: Entity, cx: &mut Context, ) -> Task>> { + let id = self.id; if let Some(buffer) = self.commit_message_buffer.clone() { return Task::ready(Ok(buffer)); } + let this = cx.weak_entity(); - if let RepositoryState::Remote { - project_id, - client, - work_directory_id, - } = self.state.clone() - { - let client = client.clone(); - cx.spawn(async move |repository, cx| { - let request = client.request(proto::OpenCommitMessageBuffer { - project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), - }); - let response = request.await.context("requesting to open commit buffer")?; - let buffer_id = BufferId::new(response.buffer_id)?; - let buffer = buffer_store - .update(cx, |buffer_store, cx| { - buffer_store.wait_for_remote_buffer(buffer_id, cx) + let rx = self.send_job(move |state, mut cx| async move { + let Some(this) = this.upgrade() else { + bail!("git store was dropped"); + }; + match state { + RepositoryState::Local { .. } => { + this.update(&mut cx, |_, cx| { + Self::open_local_commit_buffer(languages, buffer_store, cx) })? 
- .await?; - if let Some(language_registry) = languages { - let git_commit_language = - language_registry.language_for_name("Git Commit").await?; - buffer.update(cx, |buffer, cx| { - buffer.set_language(Some(git_commit_language), cx); - })?; + .await } - repository.update(cx, |repository, _| { - repository.commit_message_buffer = Some(buffer.clone()); - })?; - Ok(buffer) - }) - } else { - self.open_local_commit_buffer(languages, buffer_store, cx) - } + RepositoryState::Remote { project_id, client } => { + let request = client.request(proto::OpenCommitMessageBuffer { + project_id: project_id.0, + repository_id: id.to_proto(), + }); + let response = request.await.context("requesting to open commit buffer")?; + let buffer_id = BufferId::new(response.buffer_id)?; + let buffer = buffer_store + .update(&mut cx, |buffer_store, cx| { + buffer_store.wait_for_remote_buffer(buffer_id, cx) + })? + .await?; + if let Some(language_registry) = languages { + let git_commit_language = + language_registry.language_for_name("Git Commit").await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language(Some(git_commit_language), cx); + })?; + } + this.update(&mut cx, |this, _| { + this.commit_message_buffer = Some(buffer.clone()); + })?; + Ok(buffer) + } + } + }); + + cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? }) } fn open_local_commit_buffer( - &mut self, language_registry: Option>, buffer_store: Entity, cx: &mut Context, @@ -2799,23 +2797,27 @@ impl Repository { &self, commit: &str, paths: Vec, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { let commit = commit.to_string(); - let env = self.worktree_environment(cx); + let id = self.id; - self.send_job(|git_repo, _| async move { + self.send_job(move |git_repo, _| async move { match git_repo { - RepositoryState::Local(repo) => repo.checkout_files(commit, paths, env.await).await, - RepositoryState::Remote { - project_id, - client, - work_directory_id, + RepositoryState::Local { + backend, + environment, + .. 
} => { + backend + .checkout_files(commit, paths, environment.clone()) + .await + } + RepositoryState::Remote { project_id, client } => { client .request(proto::GitCheckoutFiles { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), commit, paths: paths .into_iter() @@ -2834,25 +2836,23 @@ impl Repository { &self, commit: String, reset_mode: ResetMode, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { let commit = commit.to_string(); - let env = self.worktree_environment(cx); - self.send_job(|git_repo, _| async move { + let id = self.id; + + self.send_job(move |git_repo, _| async move { match git_repo { - RepositoryState::Local(git_repo) => { - let env = env.await; - git_repo.reset(commit, reset_mode, env).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { + backend, + environment, + .. + } => backend.reset(commit, reset_mode, environment).await, + RepositoryState::Remote { project_id, client } => { client .request(proto::GitReset { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), commit, mode: match reset_mode { ResetMode::Soft => git_reset::ResetMode::Soft.into(), @@ -2868,18 +2868,15 @@ impl Repository { } pub fn show(&self, commit: String) -> oneshot::Receiver> { - self.send_job(|git_repo, _cx| async move { + let id = self.id; + self.send_job(move |git_repo, _cx| async move { match git_repo { - RepositoryState::Local(git_repository) => git_repository.show(commit).await, - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { backend, .. 
} => backend.show(commit).await, + RepositoryState::Remote { project_id, client } => { let resp = client .request(proto::GitShow { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), commit, }) .await?; @@ -2897,20 +2894,17 @@ impl Repository { } pub fn load_commit_diff(&self, commit: String) -> oneshot::Receiver> { - self.send_job(|git_repo, cx| async move { + let id = self.id; + self.send_job(move |git_repo, cx| async move { match git_repo { - RepositoryState::Local(git_repository) => { - git_repository.load_commit(commit, cx).await - } + RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await, RepositoryState::Remote { - client, - project_id, - work_directory_id, + client, project_id, .. } => { let response = client .request(proto::LoadCommitDiff { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), commit, }) .await?; @@ -2919,7 +2913,7 @@ impl Repository { .files .into_iter() .map(|file| CommitFile { - path: PathBuf::from(file.path).into(), + path: Path::new(&file.path).into(), old_text: file.old_text, new_text: file.new_text, }) @@ -2942,7 +2936,7 @@ impl Repository { if entries.is_empty() { return Task::ready(Ok(())); } - let env = self.worktree_environment(cx); + let id = self.id; let mut save_futures = Vec::new(); if let Some(buffer_store) = self.buffer_store(cx) { @@ -2968,21 +2962,20 @@ impl Repository { for save_future in save_futures { save_future.await?; } - let env = env.await; this.update(cx, |this, _| { - this.send_job(|git_repo, _cx| async move { + this.send_job(move |git_repo, _cx| async move { match git_repo { - RepositoryState::Local(repo) => repo.stage_paths(entries, env).await, - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { + backend, + environment, + .. 
+ } => backend.stage_paths(entries, environment.clone()).await, + RepositoryState::Remote { project_id, client } => { client .request(proto::Stage { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), paths: entries .into_iter() .map(|repo_path| repo_path.as_ref().to_proto()) @@ -3010,7 +3003,7 @@ impl Repository { if entries.is_empty() { return Task::ready(Ok(())); } - let env = self.worktree_environment(cx); + let id = self.id; let mut save_futures = Vec::new(); if let Some(buffer_store) = self.buffer_store(cx) { @@ -3036,21 +3029,20 @@ impl Repository { for save_future in save_futures { save_future.await?; } - let env = env.await; this.update(cx, |this, _| { - this.send_job(|git_repo, _cx| async move { + this.send_job(move |git_repo, _cx| async move { match git_repo { - RepositoryState::Local(repo) => repo.unstage_paths(entries, env).await, - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { + backend, + environment, + .. 
+ } => backend.unstage_paths(entries, environment).await, + RepositoryState::Remote { project_id, client } => { client .request(proto::Unstage { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), paths: entries .into_iter() .map(|repo_path| repo_path.as_ref().to_proto()) @@ -3072,8 +3064,7 @@ impl Repository { pub fn stage_all(&self, cx: &mut Context) -> Task> { let to_stage = self - .repository_entry - .status() + .cached_status() .filter(|entry| !entry.status.staging().is_fully_staged()) .map(|entry| entry.repo_path.clone()) .collect(); @@ -3082,65 +3073,34 @@ impl Repository { pub fn unstage_all(&self, cx: &mut Context) -> Task> { let to_unstage = self - .repository_entry - .status() + .cached_status() .filter(|entry| entry.status.staging().has_staged()) .map(|entry| entry.repo_path.clone()) .collect(); self.unstage_entries(to_unstage, cx) } - /// Get a count of all entries in the active repository, including - /// untracked files. 
- pub fn entry_count(&self) -> usize { - self.repository_entry.status_len() - } - - fn worktree_environment( - &self, - cx: &mut App, - ) -> impl Future> + 'static { - let task = self.project_environment.as_ref().and_then(|env| { - env.update(cx, |env, cx| { - env.get_environment( - self.worktree_id, - Some( - self.repository_entry - .work_directory_abs_path - .as_path() - .into(), - ), - cx, - ) - }) - .ok() - }); - async move { OptionFuture::from(task).await.flatten().unwrap_or_default() } - } - pub fn commit( &self, message: SharedString, name_and_email: Option<(SharedString, SharedString)>, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { - let env = self.worktree_environment(cx); - self.send_job(|git_repo, _cx| async move { + let id = self.id; + + self.send_job(move |git_repo, _cx| async move { match git_repo { - RepositoryState::Local(repo) => { - let env = env.await; - repo.commit(message, name_and_email, env).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { + backend, + environment, + .. 
+ } => backend.commit(message, name_and_email, environment).await, + RepositoryState::Remote { project_id, client } => { let (name, email) = name_and_email.unzip(); client .request(proto::Commit { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), message: String::from(message), name: name.map(String::from), email: email.map(String::from), @@ -3157,23 +3117,20 @@ impl Repository { pub fn fetch( &mut self, askpass: AskPassDelegate, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { let askpass_delegates = self.askpass_delegates.clone(); let askpass_id = util::post_inc(&mut self.latest_askpass_id); - let env = self.worktree_environment(cx); + let id = self.id; self.send_job(move |git_repo, cx| async move { match git_repo { - RepositoryState::Local(git_repository) => { - let env = env.await; - git_repository.fetch(askpass, env, cx).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { + backend, + environment, + .. 
+ } => backend.fetch(askpass, environment, cx).await, + RepositoryState::Remote { project_id, client } => { askpass_delegates.lock().insert(askpass_id, askpass); let _defer = util::defer(|| { let askpass_delegate = askpass_delegates.lock().remove(&askpass_id); @@ -3183,7 +3140,7 @@ impl Repository { let response = client .request(proto::Fetch { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), askpass_id, }) .await @@ -3204,33 +3161,31 @@ impl Repository { remote: SharedString, options: Option, askpass: AskPassDelegate, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { let askpass_delegates = self.askpass_delegates.clone(); let askpass_id = util::post_inc(&mut self.latest_askpass_id); - let env = self.worktree_environment(cx); + let id = self.id; self.send_job(move |git_repo, cx| async move { match git_repo { - RepositoryState::Local(git_repository) => { - let env = env.await; - // let askpass = AskPassSession::new(&executor, askpass).await?; - git_repository + RepositoryState::Local { + backend, + environment, + .. 
+ } => { + backend .push( branch.to_string(), remote.to_string(), options, askpass, - env, + environment.clone(), cx, ) .await } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Remote { project_id, client } => { askpass_delegates.lock().insert(askpass_id, askpass); let _defer = util::defer(|| { let askpass_delegate = askpass_delegates.lock().remove(&askpass_id); @@ -3239,7 +3194,7 @@ impl Repository { let response = client .request(proto::Push { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), askpass_id, branch_name: branch.to_string(), remote_name: remote.to_string(), @@ -3265,25 +3220,30 @@ impl Repository { branch: SharedString, remote: SharedString, askpass: AskPassDelegate, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { let askpass_delegates = self.askpass_delegates.clone(); let askpass_id = util::post_inc(&mut self.latest_askpass_id); - let env = self.worktree_environment(cx); + let id = self.id; self.send_job(move |git_repo, cx| async move { match git_repo { - RepositoryState::Local(git_repository) => { - let env = env.await; - git_repository - .pull(branch.to_string(), remote.to_string(), askpass, env, cx) + RepositoryState::Local { + backend, + environment, + .. 
+ } => { + backend + .pull( + branch.to_string(), + remote.to_string(), + askpass, + environment.clone(), + cx, + ) .await } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Remote { project_id, client } => { askpass_delegates.lock().insert(askpass_id, askpass); let _defer = util::defer(|| { let askpass_delegate = askpass_delegates.lock().remove(&askpass_id); @@ -3292,7 +3252,7 @@ impl Repository { let response = client .request(proto::Pull { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), askpass_id, branch_name: branch.to_string(), remote_name: remote.to_string(), @@ -3313,26 +3273,28 @@ impl Repository { &self, path: RepoPath, content: Option, - cx: &mut App, + _cx: &mut App, ) -> oneshot::Receiver> { - let env = self.worktree_environment(cx); + let id = self.id; self.send_keyed_job( Some(GitJobKey::WriteIndex(path.clone())), - |git_repo, _cx| async { + move |git_repo, _cx| async move { match git_repo { - RepositoryState::Local(repo) => { - repo.set_index_text(path, content, env.await).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, + RepositoryState::Local { + backend, + environment, + .. 
} => { + backend + .set_index_text(path, content, environment.clone()) + .await + } + RepositoryState::Remote { project_id, client } => { client .request(proto::SetIndexText { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), path: path.as_ref().to_proto(), text: content, }) @@ -3348,20 +3310,15 @@ impl Repository { &self, branch_name: Option, ) -> oneshot::Receiver>> { - self.send_job(|repo, _cx| async move { + let id = self.id; + self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.get_remotes(branch_name).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await, + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::GetRemotes { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), branch_name, }) .await?; @@ -3380,34 +3337,27 @@ impl Repository { }) } - pub fn branch(&self) -> Option<&Branch> { - self.repository_entry.branch() - } - pub fn branches(&self) -> oneshot::Receiver>> { - self.send_job(|repo, cx| async move { + let id = self.id; + self.send_job(move |repo, cx| async move { match repo { - RepositoryState::Local(git_repository) => { - let git_repository = git_repository.clone(); - cx.background_spawn(async move { git_repository.branches().await }) + RepositoryState::Local { backend, .. 
} => { + let backend = backend.clone(); + cx.background_spawn(async move { backend.branches().await }) .await } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::GitGetBranches { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), }) .await?; let branches = response .branches .into_iter() - .map(|branch| worktree::proto_to_branch(&branch)) + .map(|branch| proto_to_branch(&branch)) .collect(); Ok(branches) @@ -3417,19 +3367,15 @@ impl Repository { } pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver> { - self.send_job(|repo, _cx| async move { + let id = self.id; + self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => git_repository.diff(diff_type).await, - RepositoryState::Remote { - project_id, - client, - work_directory_id, - .. - } => { + RepositoryState::Local { backend, .. } => backend.diff(diff_type).await, + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::GitDiff { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), diff_type: match diff_type { DiffType::HeadToIndex => { proto::git_diff::DiffType::HeadToIndex.into() @@ -3448,20 +3394,15 @@ impl Repository { } pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver> { - self.send_job(|repo, _cx| async move { + let id = self.id; + self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.create_branch(branch_name).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { backend, .. 
} => backend.create_branch(branch_name).await, + RepositoryState::Remote { project_id, client } => { client .request(proto::GitCreateBranch { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), branch_name, }) .await?; @@ -3473,20 +3414,15 @@ impl Repository { } pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver> { - self.send_job(|repo, _cx| async move { + let id = self.id; + self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.change_branch(branch_name).await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { backend, .. } => backend.change_branch(branch_name).await, + RepositoryState::Remote { project_id, client } => { client .request(proto::GitChangeBranch { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), branch_name, }) .await?; @@ -3498,20 +3434,15 @@ impl Repository { } pub fn check_for_pushed_commits(&self) -> oneshot::Receiver>> { - self.send_job(|repo, _cx| async move { + let id = self.id; + self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.check_for_pushed_commit().await - } - RepositoryState::Remote { - project_id, - client, - work_directory_id, - } => { + RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await, + RepositoryState::Remote { project_id, client } => { let response = client .request(proto::CheckForPushedCommits { project_id: project_id.0, - work_directory_id: work_directory_id.to_proto(), + repository_id: id.to_proto(), }) .await?; @@ -3526,7 +3457,7 @@ impl Repository { pub fn checkpoint(&self) -> oneshot::Receiver> { self.send_job(|repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => git_repository.checkpoint().await, + RepositoryState::Local { backend, .. 
} => backend.checkpoint().await, RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")), } }) @@ -3538,23 +3469,27 @@ impl Repository { ) -> oneshot::Receiver> { self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.restore_checkpoint(checkpoint).await + RepositoryState::Local { backend, .. } => { + backend.restore_checkpoint(checkpoint).await } RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")), } }) } - pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateRepository) -> Result<()> { + pub(crate) fn apply_remote_update( + &mut self, + update: proto::UpdateRepository, + cx: &mut Context, + ) -> Result<()> { let conflicted_paths = TreeSet::from_ordered_entries( update .current_merge_conflicts .into_iter() .map(|path| RepoPath(Path::new(&path).into())), ); - self.repository_entry.current_branch = update.branch_summary.as_ref().map(proto_to_branch); - self.repository_entry.current_merge_conflicts = conflicted_paths; + self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch); + self.snapshot.merge_conflicts = conflicted_paths; let edits = update .removed_statuses @@ -3569,7 +3504,11 @@ impl Repository { }), ) .collect::>(); - self.repository_entry.statuses_by_path.edit(edits, &()); + self.snapshot.statuses_by_path.edit(edits, &()); + if update.is_last_update { + self.snapshot.scan_id = update.scan_id; + } + cx.emit(RepositoryEvent::Updated); Ok(()) } @@ -3580,8 +3519,8 @@ impl Repository { ) -> oneshot::Receiver> { self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.compare_checkpoints(left, right).await + RepositoryState::Local { backend, .. } => { + backend.compare_checkpoints(left, right).await } RepositoryState::Remote { .. 
} => Err(anyhow!("not implemented yet")), } @@ -3594,8 +3533,8 @@ impl Repository { ) -> oneshot::Receiver> { self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository.delete_checkpoint(checkpoint).await + RepositoryState::Local { backend, .. } => { + backend.delete_checkpoint(checkpoint).await } RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")), } @@ -3609,8 +3548,8 @@ impl Repository { ) -> oneshot::Receiver> { self.send_job(move |repo, _cx| async move { match repo { - RepositoryState::Local(git_repository) => { - git_repository + RepositoryState::Local { backend, .. } => { + backend .diff_checkpoints(base_checkpoint, target_checkpoint) .await } @@ -3618,6 +3557,306 @@ impl Repository { } }) } + + fn schedule_scan( + &mut self, + updates_tx: Option>, + cx: &mut Context, + ) { + self.paths_changed( + vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()], + updates_tx.clone(), + cx, + ); + + let this = cx.weak_entity(); + let _ = self.send_keyed_job( + Some(GitJobKey::ReloadGitState), + |state, mut cx| async move { + let Some(this) = this.upgrade() else { + return Ok(()); + }; + let RepositoryState::Local { backend, .. } = state else { + bail!("not a local repository") + }; + let (snapshot, events) = this + .update(&mut cx, |this, _| { + compute_snapshot( + this.id, + this.work_directory_abs_path.clone(), + this.snapshot.clone(), + backend.clone(), + ) + })? 
+ .await?; + this.update(&mut cx, |this, cx| { + this.snapshot = snapshot.clone(); + for event in events { + cx.emit(event); + } + })?; + if let Some(updates_tx) = updates_tx { + updates_tx + .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot)) + .ok(); + } + Ok(()) + }, + ); + } + + fn spawn_local_git_worker( + work_directory_abs_path: Arc, + dot_git_abs_path: Arc, + project_environment: WeakEntity, + fs: Arc, + cx: &mut Context, + ) -> mpsc::UnboundedSender { + let (job_tx, mut job_rx) = mpsc::unbounded::(); + + cx.spawn(async move |_, cx| { + let environment = project_environment + .upgrade() + .ok_or_else(|| anyhow!("missing project environment"))? + .update(cx, |project_environment, cx| { + project_environment.get_environment(Some(work_directory_abs_path), cx) + })? + .await + .ok_or_else(|| { + anyhow!("failed to get environment for repository working directory") + })?; + let backend = cx + .background_spawn(async move { + fs.open_repo(&dot_git_abs_path) + .ok_or_else(|| anyhow!("failed to build repository")) + }) + .await?; + + if let Some(git_hosting_provider_registry) = + cx.update(|cx| GitHostingProviderRegistry::try_global(cx))? 
+ { + git_hosting_providers::register_additional_providers( + git_hosting_provider_registry, + backend.clone(), + ); + } + + let state = RepositoryState::Local { + backend, + environment: Arc::new(environment), + }; + let mut jobs = VecDeque::new(); + loop { + while let Ok(Some(next_job)) = job_rx.try_next() { + jobs.push_back(next_job); + } + + if let Some(job) = jobs.pop_front() { + if let Some(current_key) = &job.key { + if jobs + .iter() + .any(|other_job| other_job.key.as_ref() == Some(current_key)) + { + continue; + } + } + (job.job)(state.clone(), cx).await; + } else if let Some(job) = job_rx.next().await { + jobs.push_back(job); + } else { + break; + } + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + + job_tx + } + + fn spawn_remote_git_worker( + project_id: ProjectId, + client: AnyProtoClient, + cx: &mut Context, + ) -> mpsc::UnboundedSender { + let (job_tx, mut job_rx) = mpsc::unbounded::(); + + cx.spawn(async move |_, cx| { + let state = RepositoryState::Remote { project_id, client }; + let mut jobs = VecDeque::new(); + loop { + while let Ok(Some(next_job)) = job_rx.try_next() { + jobs.push_back(next_job); + } + + if let Some(job) = jobs.pop_front() { + if let Some(current_key) = &job.key { + if jobs + .iter() + .any(|other_job| other_job.key.as_ref() == Some(current_key)) + { + continue; + } + } + (job.job)(state.clone(), cx).await; + } else if let Some(job) = job_rx.next().await { + jobs.push_back(job); + } else { + break; + } + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + + job_tx + } + + fn load_staged_text( + &self, + buffer_id: BufferId, + repo_path: RepoPath, + cx: &App, + ) -> Task>> { + let rx = self.send_job(move |state, _| async move { + match state { + RepositoryState::Local { backend, .. 
} => { + anyhow::Ok(backend.load_index_text(repo_path).await) + } + RepositoryState::Remote { project_id, client } => { + let response = client + .request(proto::OpenUnstagedDiff { + project_id: project_id.to_proto(), + buffer_id: buffer_id.to_proto(), + }) + .await?; + Ok(response.staged_text) + } + } + }); + cx.spawn(|_: &mut AsyncApp| async move { rx.await? }) + } + + fn load_committed_text( + &self, + buffer_id: BufferId, + repo_path: RepoPath, + cx: &App, + ) -> Task> { + let rx = self.send_job(move |state, _| async move { + match state { + RepositoryState::Local { backend, .. } => { + let committed_text = backend.load_committed_text(repo_path.clone()).await; + let staged_text = backend.load_index_text(repo_path).await; + let diff_bases_change = if committed_text == staged_text { + DiffBasesChange::SetBoth(committed_text) + } else { + DiffBasesChange::SetEach { + index: staged_text, + head: committed_text, + } + }; + anyhow::Ok(diff_bases_change) + } + RepositoryState::Remote { project_id, client } => { + use proto::open_uncommitted_diff_response::Mode; + + let response = client + .request(proto::OpenUncommittedDiff { + project_id: project_id.to_proto(), + buffer_id: buffer_id.to_proto(), + }) + .await?; + let mode = + Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?; + let bases = match mode { + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text), + Mode::IndexAndHead => DiffBasesChange::SetEach { + head: response.committed_text, + index: response.staged_text, + }, + }; + Ok(bases) + } + } + }); + + cx.spawn(|_: &mut AsyncApp| async move { rx.await? 
}) + } + + fn paths_changed( + &mut self, + paths: Vec, + updates_tx: Option>, + cx: &mut Context, + ) { + self.paths_needing_status_update.extend(paths); + + let this = cx.weak_entity(); + let _ = self.send_keyed_job( + Some(GitJobKey::RefreshStatuses), + |state, mut cx| async move { + let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| { + ( + this.snapshot.clone(), + mem::take(&mut this.paths_needing_status_update), + ) + })?; + let RepositoryState::Local { backend, .. } = state else { + bail!("not a local repository") + }; + + let paths = changed_paths.iter().cloned().collect::>(); + let statuses = backend.status(&paths).await?; + + let changed_path_statuses = cx + .background_spawn(async move { + let mut changed_path_statuses = Vec::new(); + let prev_statuses = prev_snapshot.statuses_by_path.clone(); + let mut cursor = prev_statuses.cursor::(&()); + + for (repo_path, status) in &*statuses.entries { + changed_paths.remove(repo_path); + if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) { + if &cursor.item().unwrap().status == status { + continue; + } + } + + changed_path_statuses.push(Edit::Insert(StatusEntry { + repo_path: repo_path.clone(), + status: *status, + })); + } + let mut cursor = prev_statuses.cursor::(&()); + for path in changed_paths.iter() { + if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) { + changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone()))); + } + } + changed_path_statuses + }) + .await; + + this.update(&mut cx, |this, cx| { + if !changed_path_statuses.is_empty() { + this.snapshot + .statuses_by_path + .edit(changed_path_statuses, &()); + this.snapshot.scan_id += 1; + if let Some(updates_tx) = updates_tx { + updates_tx + .unbounded_send(DownstreamUpdate::UpdateRepository( + this.snapshot.clone(), + )) + .ok(); + } + } + cx.emit(RepositoryEvent::Updated); + }) + }, + ); + } } fn get_permalink_in_rust_registry_src( @@ -3755,3 +3994,264 @@ fn 
deserialize_blame_buffer_response( remote_url: response.remote_url, }) } + +fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch { + proto::Branch { + is_head: branch.is_head, + name: branch.name.to_string(), + unix_timestamp: branch + .most_recent_commit + .as_ref() + .map(|commit| commit.commit_timestamp as u64), + upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream { + ref_name: upstream.ref_name.to_string(), + tracking: upstream + .tracking + .status() + .map(|upstream| proto::UpstreamTracking { + ahead: upstream.ahead as u64, + behind: upstream.behind as u64, + }), + }), + most_recent_commit: branch + .most_recent_commit + .as_ref() + .map(|commit| proto::CommitSummary { + sha: commit.sha.to_string(), + subject: commit.subject.to_string(), + commit_timestamp: commit.commit_timestamp, + }), + } +} + +fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch { + git::repository::Branch { + is_head: proto.is_head, + name: proto.name.clone().into(), + upstream: proto + .upstream + .as_ref() + .map(|upstream| git::repository::Upstream { + ref_name: upstream.ref_name.to_string().into(), + tracking: upstream + .tracking + .as_ref() + .map(|tracking| { + git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus { + ahead: tracking.ahead as u32, + behind: tracking.behind as u32, + }) + }) + .unwrap_or(git::repository::UpstreamTracking::Gone), + }), + most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| { + git::repository::CommitSummary { + sha: commit.sha.to_string().into(), + subject: commit.subject.to_string().into(), + commit_timestamp: commit.commit_timestamp, + has_parent: true, + } + }), + } +} + +async fn compute_snapshot( + id: RepositoryId, + work_directory_abs_path: Arc, + prev_snapshot: RepositorySnapshot, + backend: Arc, +) -> Result<(RepositorySnapshot, Vec)> { + let mut events = Vec::new(); + let branches = backend.branches().await?; + let branch = branches.into_iter().find(|branch| 
branch.is_head); + let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?; + let merge_message = backend + .merge_message() + .await + .and_then(|msg| Some(msg.lines().nth(0)?.to_owned().into())); + let merge_head_shas = backend + .merge_head_shas() + .into_iter() + .map(SharedString::from) + .collect(); + + let statuses_by_path = SumTree::from_iter( + statuses + .entries + .iter() + .map(|(repo_path, status)| StatusEntry { + repo_path: repo_path.clone(), + status: *status, + }), + &(), + ); + + let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas; + + if merge_head_shas_changed + || branch != prev_snapshot.branch + || statuses_by_path != prev_snapshot.statuses_by_path + { + events.push(RepositoryEvent::Updated); + } + + let mut current_merge_conflicts = TreeSet::default(); + for (repo_path, status) in statuses.entries.iter() { + if status.is_conflicted() { + current_merge_conflicts.insert(repo_path.clone()); + } + } + + // Cache merge conflict paths so they don't change from staging/unstaging, + // until the merge heads change (at commit time, etc.). 
+ let mut merge_conflicts = prev_snapshot.merge_conflicts.clone(); + if merge_head_shas_changed { + merge_conflicts = current_merge_conflicts; + events.push(RepositoryEvent::MergeHeadsChanged); + } + + let snapshot = RepositorySnapshot { + id, + merge_message, + statuses_by_path, + work_directory_abs_path, + scan_id: prev_snapshot.scan_id + 1, + branch, + merge_conflicts, + merge_head_shas, + }; + + Ok((snapshot, events)) +} + +fn status_from_proto( + simple_status: i32, + status: Option, +) -> anyhow::Result { + use proto::git_file_status::Variant; + + let Some(variant) = status.and_then(|status| status.variant) else { + let code = proto::GitStatus::from_i32(simple_status) + .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?; + let result = match code { + proto::GitStatus::Added => TrackedStatus { + worktree_status: StatusCode::Added, + index_status: StatusCode::Unmodified, + } + .into(), + proto::GitStatus::Modified => TrackedStatus { + worktree_status: StatusCode::Modified, + index_status: StatusCode::Unmodified, + } + .into(), + proto::GitStatus::Conflict => UnmergedStatus { + first_head: UnmergedStatusCode::Updated, + second_head: UnmergedStatusCode::Updated, + } + .into(), + proto::GitStatus::Deleted => TrackedStatus { + worktree_status: StatusCode::Deleted, + index_status: StatusCode::Unmodified, + } + .into(), + _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")), + }; + return Ok(result); + }; + + let result = match variant { + Variant::Untracked(_) => FileStatus::Untracked, + Variant::Ignored(_) => FileStatus::Ignored, + Variant::Unmerged(unmerged) => { + let [first_head, second_head] = + [unmerged.first_head, unmerged.second_head].map(|head| { + let code = proto::GitStatus::from_i32(head) + .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?; + let result = match code { + proto::GitStatus::Added => UnmergedStatusCode::Added, + proto::GitStatus::Updated => UnmergedStatusCode::Updated, + 
proto::GitStatus::Deleted => UnmergedStatusCode::Deleted, + _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")), + }; + Ok(result) + }); + let [first_head, second_head] = [first_head?, second_head?]; + UnmergedStatus { + first_head, + second_head, + } + .into() + } + Variant::Tracked(tracked) => { + let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status] + .map(|status| { + let code = proto::GitStatus::from_i32(status) + .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?; + let result = match code { + proto::GitStatus::Modified => StatusCode::Modified, + proto::GitStatus::TypeChanged => StatusCode::TypeChanged, + proto::GitStatus::Added => StatusCode::Added, + proto::GitStatus::Deleted => StatusCode::Deleted, + proto::GitStatus::Renamed => StatusCode::Renamed, + proto::GitStatus::Copied => StatusCode::Copied, + proto::GitStatus::Unmodified => StatusCode::Unmodified, + _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")), + }; + Ok(result) + }); + let [index_status, worktree_status] = [index_status?, worktree_status?]; + TrackedStatus { + index_status, + worktree_status, + } + .into() + } + }; + Ok(result) +} + +fn status_to_proto(status: FileStatus) -> proto::GitFileStatus { + use proto::git_file_status::{Tracked, Unmerged, Variant}; + + let variant = match status { + FileStatus::Untracked => Variant::Untracked(Default::default()), + FileStatus::Ignored => Variant::Ignored(Default::default()), + FileStatus::Unmerged(UnmergedStatus { + first_head, + second_head, + }) => Variant::Unmerged(Unmerged { + first_head: unmerged_status_to_proto(first_head), + second_head: unmerged_status_to_proto(second_head), + }), + FileStatus::Tracked(TrackedStatus { + index_status, + worktree_status, + }) => Variant::Tracked(Tracked { + index_status: tracked_status_to_proto(index_status), + worktree_status: tracked_status_to_proto(worktree_status), + }), + }; + proto::GitFileStatus { + variant: Some(variant), + 
} +} + +fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 { + match code { + UnmergedStatusCode::Added => proto::GitStatus::Added as _, + UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _, + UnmergedStatusCode::Updated => proto::GitStatus::Updated as _, + } +} + +fn tracked_status_to_proto(code: StatusCode) -> i32 { + match code { + StatusCode::Added => proto::GitStatus::Added as _, + StatusCode::Deleted => proto::GitStatus::Deleted as _, + StatusCode::Modified => proto::GitStatus::Modified as _, + StatusCode::Renamed => proto::GitStatus::Renamed as _, + StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _, + StatusCode::Copied => proto::GitStatus::Copied as _, + StatusCode::Unmodified => proto::GitStatus::Unmodified as _, + } +} diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index c8e20a02ca..531cd35b6c 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -3,21 +3,21 @@ use git::status::GitSummary; use std::{ops::Deref, path::Path}; use sum_tree::Cursor; use text::Bias; -use worktree::{ - Entry, PathProgress, PathTarget, ProjectEntryId, RepositoryEntry, StatusEntry, Traversal, -}; +use worktree::{Entry, PathProgress, PathTarget, Traversal}; + +use super::{RepositoryId, RepositorySnapshot, StatusEntry}; /// Walks the worktree entries and their associated git statuses. 
pub struct GitTraversal<'a> { traversal: Traversal<'a>, current_entry_summary: Option, - repo_snapshots: &'a HashMap, - repo_location: Option<(ProjectEntryId, Cursor<'a, StatusEntry, PathProgress<'a>>)>, + repo_snapshots: &'a HashMap, + repo_location: Option<(RepositoryId, Cursor<'a, StatusEntry, PathProgress<'a>>)>, } impl<'a> GitTraversal<'a> { pub fn new( - repo_snapshots: &'a HashMap, + repo_snapshots: &'a HashMap, traversal: Traversal<'a>, ) -> GitTraversal<'a> { let mut this = GitTraversal { @@ -46,8 +46,8 @@ impl<'a> GitTraversal<'a> { .repo_snapshots .values() .filter_map(|repo_snapshot| { - let relative_path = repo_snapshot.relativize_abs_path(&abs_path)?; - Some((repo_snapshot, relative_path)) + let repo_path = repo_snapshot.abs_path_to_repo_path(&abs_path)?; + Some((repo_snapshot, repo_path)) }) .max_by_key(|(repo, _)| repo.work_directory_abs_path.clone()) else { @@ -61,12 +61,9 @@ impl<'a> GitTraversal<'a> { .repo_location .as_ref() .map(|(prev_repo_id, _)| *prev_repo_id) - != Some(repo.work_directory_id()) + != Some(repo.id) { - self.repo_location = Some(( - repo.work_directory_id(), - repo.statuses_by_path.cursor::(&()), - )); + self.repo_location = Some((repo.id, repo.statuses_by_path.cursor::(&()))); } let Some((_, statuses)) = &mut self.repo_location else { @@ -148,7 +145,7 @@ pub struct ChildEntriesGitIter<'a> { impl<'a> ChildEntriesGitIter<'a> { pub fn new( - repo_snapshots: &'a HashMap, + repo_snapshots: &'a HashMap, worktree_snapshot: &'a worktree::Snapshot, parent_path: &'a Path, ) -> Self { @@ -771,7 +768,7 @@ mod tests { #[track_caller] fn check_git_statuses( - repo_snapshots: &HashMap, + repo_snapshots: &HashMap, worktree_snapshot: &worktree::Snapshot, expected_statuses: &[(&Path, GitSummary)], ) { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index aacf10dcb7..424163b346 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -8078,9 +8078,7 @@ impl LspStore { }); if let 
Some(environment) = &self.as_local().map(|local| local.environment.clone()) { - environment.update(cx, |env, cx| { - env.get_environment(worktree_id, worktree_abs_path, cx) - }) + environment.update(cx, |env, cx| env.get_environment(worktree_abs_path, cx)) } else { Task::ready(None).shared() } @@ -9864,13 +9862,10 @@ impl LocalLspAdapterDelegate { fs: Arc, cx: &mut App, ) -> Arc { - let (worktree_id, worktree_abs_path) = { - let worktree = worktree.read(cx); - (worktree.id(), worktree.abs_path()) - }; + let worktree_abs_path = worktree.read(cx).abs_path(); let load_shell_env_task = environment.update(cx, |env, cx| { - env.get_environment(Some(worktree_id), Some(worktree_abs_path), cx) + env.get_environment(Some(worktree_abs_path), cx) }); Arc::new(Self { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 625108fc15..68a76b3faa 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -24,7 +24,7 @@ mod direnv; mod environment; use buffer_diff::BufferDiff; pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent}; -use git_store::{GitEvent, Repository}; +use git_store::{Repository, RepositoryId}; pub mod search_history; mod yarn; @@ -300,8 +300,6 @@ pub enum Event { RevealInProjectPanel(ProjectEntryId), SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>), ExpandedAllForEntry(WorktreeId, ProjectEntryId), - GitStateUpdated, - ActiveRepositoryChanged, } pub enum DebugAdapterClientState { @@ -924,7 +922,6 @@ impl Project { cx, ) }); - cx.subscribe(&git_store, Self::on_git_store_event).detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); @@ -1064,13 +1061,7 @@ impl Project { }); let git_store = cx.new(|cx| { - GitStore::ssh( - &worktree_store, - buffer_store.clone(), - environment.clone(), - ssh_proto.clone(), - cx, - ) + GitStore::ssh(&worktree_store, buffer_store.clone(), ssh_proto.clone(), cx) }); cx.subscribe(&ssh, Self::on_ssh_event).detach(); @@ -1655,13 +1646,13 @@ impl Project { 
pub fn shell_environment_errors<'a>( &'a self, cx: &'a App, - ) -> impl Iterator { + ) -> impl Iterator, &'a EnvironmentErrorMessage)> { self.environment.read(cx).environment_errors() } - pub fn remove_environment_error(&mut self, worktree_id: WorktreeId, cx: &mut Context) { + pub fn remove_environment_error(&mut self, abs_path: &Path, cx: &mut Context) { self.environment.update(cx, |environment, cx| { - environment.remove_environment_error(worktree_id, cx); + environment.remove_environment_error(abs_path, cx); }); } @@ -2760,19 +2751,6 @@ impl Project { } } - fn on_git_store_event( - &mut self, - _: Entity, - event: &GitEvent, - cx: &mut Context, - ) { - match event { - GitEvent::GitStateUpdated => cx.emit(Event::GitStateUpdated), - GitEvent::ActiveRepositoryChanged => cx.emit(Event::ActiveRepositoryChanged), - GitEvent::FileSystemUpdated | GitEvent::IndexWriteError(_) => {} - } - } - fn on_ssh_event( &mut self, _: Entity, @@ -4794,7 +4772,7 @@ impl Project { self.git_store.read(cx).active_repository() } - pub fn repositories<'a>(&self, cx: &'a App) -> &'a HashMap> { + pub fn repositories<'a>(&self, cx: &'a App) -> &'a HashMap> { self.git_store.read(cx).repositories() } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 9aa3482833..a9f7fb017d 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1,12 +1,19 @@ #![allow(clippy::format_collect)] -use crate::{Event, task_inventory::TaskContexts, task_store::TaskSettingsLocation, *}; +use crate::{ + Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation, + *, +}; use buffer_diff::{ BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks, }; use fs::FakeFs; use futures::{StreamExt, future}; -use git::repository::RepoPath; +use git::{ + repository::RepoPath, + status::{StatusCode, TrackedStatus}, +}; +use git2::RepositoryInitOptions; use gpui::{App, 
BackgroundExecutor, SemanticVersion, UpdateGlobal}; use http_client::Url; use language::{ @@ -21,6 +28,7 @@ use lsp::{ }; use parking_lot::Mutex; use paths::tasks_file; +use postage::stream::Stream as _; use pretty_assertions::{assert_eq, assert_matches}; use serde_json::json; #[cfg(not(windows))] @@ -7067,7 +7075,7 @@ async fn test_repository_and_path_for_project_path( ( path, result.map(|(repo, repo_path)| { - (Path::new(repo).to_owned(), RepoPath::from(repo_path)) + (Path::new(repo).into(), RepoPath::from(repo_path)) }), ) }) @@ -7079,13 +7087,7 @@ async fn test_repository_and_path_for_project_path( let result = maybe!({ let (repo, repo_path) = git_store.repository_and_path_for_project_path(&project_path, cx)?; - Some(( - repo.read(cx) - .repository_entry - .work_directory_abs_path - .clone(), - repo_path, - )) + Some((repo.read(cx).work_directory_abs_path.clone(), repo_path)) }); (path, result) }) @@ -7160,13 +7162,830 @@ async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) { .unwrap() .0 .read(cx) - .repository_entry - .work_directory_abs_path, + .work_directory_abs_path + .as_ref(), Path::new(path!("/root/home")) ); }); } +#[gpui::test] +async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = TempTree::new(json!({ + "project": { + "a.txt": "a", // Modified + "b.txt": "bb", // Added + "c.txt": "ccc", // Unchanged + "d.txt": "dddd", // Deleted + }, + })); + + // Set up git repository before creating the project. 
+ let work_dir = root.path().join("project"); + let repo = git_init(work_dir.as_path()); + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_add("d.txt", &repo); + git_commit("Initial commit", &repo); + std::fs::remove_file(work_dir.join("d.txt")).unwrap(); + std::fs::write(work_dir.join("a.txt"), "aa").unwrap(); + + let project = Project::test( + Arc::new(RealFs::new(None, cx.executor())), + [root.path()], + cx, + ) + .await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // Check that the right git state is observed on startup + repository.read_with(cx, |repository, _| { + let entries = repository.cached_status().collect::>(); + assert_eq!( + entries, + [ + StatusEntry { + repo_path: "a.txt".into(), + status: StatusCode::Modified.worktree(), + }, + StatusEntry { + repo_path: "b.txt".into(), + status: FileStatus::Untracked, + }, + StatusEntry { + repo_path: "d.txt".into(), + status: StatusCode::Deleted.worktree(), + }, + ] + ); + }); + + std::fs::write(work_dir.join("c.txt"), "some changes").unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + repository.read_with(cx, |repository, _| { + let entries = repository.cached_status().collect::>(); + assert_eq!( + entries, + [ + StatusEntry { + repo_path: "a.txt".into(), + status: StatusCode::Modified.worktree(), + }, + StatusEntry { + repo_path: "b.txt".into(), + status: FileStatus::Untracked, + }, + StatusEntry { + repo_path: "c.txt".into(), + status: StatusCode::Modified.worktree(), + }, + StatusEntry { + repo_path: "d.txt".into(), + status: StatusCode::Deleted.worktree(), + }, + ] + ); + 
}); + + git_add("a.txt", &repo); + git_add("c.txt", &repo); + git_remove_index(Path::new("d.txt"), &repo); + git_commit("Another commit", &repo); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + std::fs::remove_file(work_dir.join("a.txt")).unwrap(); + std::fs::remove_file(work_dir.join("b.txt")).unwrap(); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + repository.read_with(cx, |repository, _cx| { + let entries = repository.cached_status().collect::>(); + + // Deleting an untracked entry, b.txt, should leave no status + // a.txt was tracked, and so should have a status + assert_eq!( + entries, + [StatusEntry { + repo_path: "a.txt".into(), + status: StatusCode::Deleted.worktree(), + }] + ); + }); +} + +#[gpui::test] +async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = TempTree::new(json!({ + "project": { + "sub": {}, + "a.txt": "", + }, + })); + + let work_dir = root.path().join("project"); + let repo = git_init(work_dir.as_path()); + // a.txt exists in HEAD and the working copy but is deleted in the index. + git_add("a.txt", &repo); + git_commit("Initial commit", &repo); + git_remove_index("a.txt".as_ref(), &repo); + // `sub` is a nested git repository. 
+ let _sub = git_init(&work_dir.join("sub")); + + let project = Project::test( + Arc::new(RealFs::new(None, cx.executor())), + [root.path()], + cx, + ) + .await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .repositories(cx) + .values() + .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project")) + .unwrap() + .clone() + }); + + repository.read_with(cx, |repository, _cx| { + let entries = repository.cached_status().collect::>(); + + // `sub` doesn't appear in our computed statuses. + // a.txt appears with a combined `DA` status. + assert_eq!( + entries, + [StatusEntry { + repo_path: "a.txt".into(), + status: TrackedStatus { + index_status: StatusCode::Deleted, + worktree_status: StatusCode::Added + } + .into(), + }] + ) + }); +} + +#[gpui::test] +async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = TempTree::new(json!({ + "my-repo": { + // .git folder will go here + "a.txt": "a", + "sub-folder-1": { + "sub-folder-2": { + "c.txt": "cc", + "d": { + "e.txt": "eee" + } + }, + } + }, + })); + + const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt"; + const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt"; + + // Set up git repository before creating the worktree. 
+ let git_repo_work_dir = root.path().join("my-repo"); + let repo = git_init(git_repo_work_dir.as_path()); + git_add(C_TXT, &repo); + git_commit("Initial commit", &repo); + + // Open the worktree in subfolder + let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2"); + + let project = Project::test( + Arc::new(RealFs::new(None, cx.executor())), + [root.path().join(project_root).as_path()], + cx, + ) + .await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // Ensure that the git status is loaded correctly + repository.read_with(cx, |repository, _cx| { + assert_eq!( + repository.work_directory_abs_path.canonicalize().unwrap(), + root.path().join("my-repo").canonicalize().unwrap() + ); + + assert_eq!(repository.status_for_path(&C_TXT.into()), None); + assert_eq!( + repository.status_for_path(&E_TXT.into()).unwrap().status, + FileStatus::Untracked + ); + }); + + // Now we simulate FS events, but ONLY in the .git folder that's outside + // of out project root. + // Meaning: we don't produce any FS events for files inside the project. + git_add(E_TXT, &repo); + git_commit("Second commit", &repo); + tree.flush_fs_events_in_root_git_repository(cx).await; + cx.executor().run_until_parked(); + + repository.read_with(cx, |repository, _cx| { + assert_eq!(repository.status_for_path(&C_TXT.into()), None); + assert_eq!(repository.status_for_path(&E_TXT.into()), None); + }); +} + +// TODO: this test fails on Windows because upon cherry-picking we don't get an event in the .git directory, +// despite CHERRY_PICK_HEAD existing after the `git_cherry_pick` call and the conflicted path showing up in git status. 
+#[cfg(not(windows))] +#[gpui::test] +async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let root = TempTree::new(json!({ + "project": { + "a.txt": "a", + }, + })); + let root_path = root.path(); + + let repo = git_init(&root_path.join("project")); + git_add("a.txt", &repo); + git_commit("init", &repo); + + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + git_branch("other-branch", &repo); + git_checkout("refs/heads/other-branch", &repo); + std::fs::write(root_path.join("project/a.txt"), "A").unwrap(); + git_add("a.txt", &repo); + git_commit("capitalize", &repo); + let commit = repo + .head() + .expect("Failed to get HEAD") + .peel_to_commit() + .expect("HEAD is not a commit"); + git_checkout("refs/heads/main", &repo); + std::fs::write(root_path.join("project/a.txt"), "b").unwrap(); + git_add("a.txt", &repo); + git_commit("improve letter", &repo); + git_cherry_pick(&commit, &repo); + std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD")) + .expect("No CHERRY_PICK_HEAD"); + pretty_assertions::assert_eq!( + git_status(&repo), + collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)]) + ); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + let conflicts = repository.update(cx, |repository, _| { + repository + .merge_conflicts + .iter() + .cloned() + .collect::>() + }); + pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]); + + 
git_add("a.txt", &repo); + // Attempt to manually simulate what `git cherry-pick --continue` would do. + git_commit("whatevs", &repo); + std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD")) + .expect("Failed to remove CHERRY_PICK_HEAD"); + pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default()); + tree.flush_fs_events(cx).await; + let conflicts = repository.update(cx, |repository, _| { + repository + .merge_conflicts + .iter() + .cloned() + .collect::>() + }); + pretty_assertions::assert_eq!(conflicts, []); +} + +#[gpui::test] +async fn test_update_gitignore(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + ".git": {}, + ".gitignore": "*.txt\n", + "a.xml": "", + "b.txt": "Some text" + }), + ) + .await; + + fs.set_head_and_index_for_repo( + path!("/root/.git").as_ref(), + &[ + (".gitignore".into(), "*.txt\n".into()), + ("a.xml".into(), "".into()), + ], + ); + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // One file is unmodified, the other is ignored. + cx.read(|cx| { + assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false); + assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true); + }); + + // Change the gitignore, and stage the newly non-ignored file. 
+ fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into()) + .await + .unwrap(); + fs.set_index_for_repo( + Path::new(path!("/root/.git")), + &[ + (".gitignore".into(), "*.txt\n".into()), + ("a.xml".into(), "".into()), + ("b.txt".into(), "Some text".into()), + ], + ); + + cx.executor().run_until_parked(); + cx.read(|cx| { + assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true); + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "b.txt", + Some(StatusCode::Added), + false, + ); + }); +} + +// NOTE: +// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename +// a directory which some program has already open. +// This is a limitation of Windows. +// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +#[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let root = TempTree::new(json!({ + "projects": { + "project1": { + "a": "", + "b": "", + } + }, + + })); + let root_path = root.path(); + + let repo = git_init(&root_path.join("projects/project1")); + git_add("a", &repo); + git_commit("init", &repo); + std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap(); + + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repository, _| { + assert_eq!( + repository.work_directory_abs_path.as_ref(), + root_path.join("projects/project1").as_path() + ); + assert_eq!( + repository + 
.status_for_path(&"a".into()) + .map(|entry| entry.status), + Some(StatusCode::Modified.worktree()), + ); + assert_eq!( + repository + .status_for_path(&"b".into()) + .map(|entry| entry.status), + Some(FileStatus::Untracked), + ); + }); + + std::fs::rename( + root_path.join("projects/project1"), + root_path.join("projects/project2"), + ) + .unwrap(); + tree.flush_fs_events(cx).await; + + repository.read_with(cx, |repository, _| { + assert_eq!( + repository.work_directory_abs_path.as_ref(), + root_path.join("projects/project2").as_path() + ); + assert_eq!( + repository.status_for_path(&"a".into()).unwrap().status, + StatusCode::Modified.worktree(), + ); + assert_eq!( + repository.status_for_path(&"b".into()).unwrap().status, + FileStatus::Untracked, + ); + }); +} + +// NOTE: This test always fails on Windows, because on Windows, unlike on Unix, +// you can't rename a directory which some program has already open. This is a +// limitation of Windows. See: +// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder +#[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] +async fn test_file_status(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + const IGNORE_RULE: &str = "**/target"; + + let root = TempTree::new(json!({ + "project": { + "a.txt": "a", + "b.txt": "bb", + "c": { + "d": { + "e.txt": "eee" + } + }, + "f.txt": "ffff", + "target": { + "build_file": "???" + }, + ".gitignore": IGNORE_RULE + }, + + })); + let root_path = root.path(); + + const A_TXT: &str = "a.txt"; + const B_TXT: &str = "b.txt"; + const E_TXT: &str = "c/d/e.txt"; + const F_TXT: &str = "f.txt"; + const DOTGITIGNORE: &str = ".gitignore"; + const BUILD_FILE: &str = "target/build_file"; + + // Set up git repository before creating the worktree. 
+ let work_dir = root.path().join("project"); + let mut repo = git_init(work_dir.as_path()); + repo.add_ignore_rule(IGNORE_RULE).unwrap(); + git_add(A_TXT, &repo); + git_add(E_TXT, &repo); + git_add(DOTGITIGNORE, &repo); + git_commit("Initial commit", &repo); + + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + // Check that the right git state is observed on startup + repository.read_with(cx, |repository, _cx| { + assert_eq!( + repository.work_directory_abs_path.as_ref(), + root_path.join("project").as_path() + ); + + assert_eq!( + repository.status_for_path(&B_TXT.into()).unwrap().status, + FileStatus::Untracked, + ); + assert_eq!( + repository.status_for_path(&F_TXT.into()).unwrap().status, + FileStatus::Untracked, + ); + }); + + // Modify a file in the working copy. + std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // The worktree detects that the file's git status has changed. + repository.read_with(cx, |repository, _| { + assert_eq!( + repository.status_for_path(&A_TXT.into()).unwrap().status, + StatusCode::Modified.worktree(), + ); + }); + + // Create a commit in the git repository. + git_add(A_TXT, &repo); + git_add(B_TXT, &repo); + git_commit("Committing modified and added", &repo); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // The worktree detects that the files' git status have changed. 
+ repository.read_with(cx, |repository, _cx| { + assert_eq!( + repository.status_for_path(&F_TXT.into()).unwrap().status, + FileStatus::Untracked, + ); + assert_eq!(repository.status_for_path(&B_TXT.into()), None); + assert_eq!(repository.status_for_path(&A_TXT.into()), None); + }); + + // Modify files in the working copy and perform git operations on other files. + git_reset(0, &repo); + git_remove_index(Path::new(B_TXT), &repo); + git_stash(&mut repo); + std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); + std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // Check that more complex repo changes are tracked + repository.read_with(cx, |repository, _cx| { + assert_eq!(repository.status_for_path(&A_TXT.into()), None); + assert_eq!( + repository.status_for_path(&B_TXT.into()).unwrap().status, + FileStatus::Untracked, + ); + assert_eq!( + repository.status_for_path(&E_TXT.into()).unwrap().status, + StatusCode::Modified.worktree(), + ); + }); + + std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); + std::fs::remove_dir_all(work_dir.join("c")).unwrap(); + std::fs::write( + work_dir.join(DOTGITIGNORE), + [IGNORE_RULE, "f.txt"].join("\n"), + ) + .unwrap(); + + git_add(Path::new(DOTGITIGNORE), &repo); + git_commit("Committing modified git ignore", &repo); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + let mut renamed_dir_name = "first_directory/second_directory"; + const RENAMED_FILE: &str = "rf.txt"; + + std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); + std::fs::write( + work_dir.join(renamed_dir_name).join(RENAMED_FILE), + "new-contents", + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + repository.read_with(cx, |repository, _cx| { + assert_eq!( + repository + .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()) + .unwrap() + .status, + 
FileStatus::Untracked, + ); + }); + + renamed_dir_name = "new_first_directory/second_directory"; + + std::fs::rename( + work_dir.join("first_directory"), + work_dir.join("new_first_directory"), + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + repository.read_with(cx, |repository, _cx| { + assert_eq!( + repository + .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()) + .unwrap() + .status, + FileStatus::Untracked, + ); + }); +} + +#[gpui::test(iterations = 10)] +async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/root"), + json!({ + ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", + "tree": { + ".git": {}, + ".gitignore": "ignored-dir\n", + "tracked-dir": { + "tracked-file1": "", + "ancestor-ignored-file1": "", + }, + "ignored-dir": { + "ignored-file1": "" + } + } + }), + ) + .await; + fs.set_head_and_index_for_repo( + path!("/root/tree/.git").as_ref(), + &[ + (".gitignore".into(), "ignored-dir\n".into()), + ("tracked-dir/tracked-file1".into(), "".into()), + ], + ); + + let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await; + + let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) + }) + .recv() + .await; + + 
cx.read(|cx| { + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "tracked-dir/tracked-file1", + None, + false, + ); + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "tracked-dir/ancestor-ignored-file1", + None, + false, + ); + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "ignored-dir/ignored-file1", + None, + true, + ); + }); + + fs.create_file( + path!("/root/tree/tracked-dir/tracked-file2").as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.set_index_for_repo( + path!("/root/tree/.git").as_ref(), + &[ + (".gitignore".into(), "ignored-dir\n".into()), + ("tracked-dir/tracked-file1".into(), "".into()), + ("tracked-dir/tracked-file2".into(), "".into()), + ], + ); + fs.create_file( + path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.create_file( + path!("/root/tree/ignored-dir/ignored-file2").as_ref(), + Default::default(), + ) + .await + .unwrap(); + + cx.executor().run_until_parked(); + cx.read(|cx| { + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "tracked-dir/tracked-file2", + Some(StatusCode::Added), + false, + ); + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "tracked-dir/ancestor-ignored-file2", + None, + false, + ); + assert_entry_git_state( + tree.read(cx), + repository.read(cx), + "ignored-dir/ignored-file2", + None, + true, + ); + assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored); + }); +} + async fn search( project: &Entity, query: SearchQuery, @@ -7303,3 +8122,143 @@ fn get_all_tasks( old.extend(new); old } + +#[track_caller] +fn assert_entry_git_state( + tree: &Worktree, + repository: &Repository, + path: &str, + index_status: Option, + is_ignored: bool, +) { + assert_eq!(tree.abs_path(), repository.work_directory_abs_path); + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("entry {path} not found")); + let status = repository + 
.status_for_path(&path.into()) + .map(|entry| entry.status); + let expected = index_status.map(|index_status| { + TrackedStatus { + index_status, + worktree_status: StatusCode::Unmodified, + } + .into() + }); + assert_eq!( + status, expected, + "expected {path} to have git status: {expected:?}" + ); + assert_eq!( + entry.is_ignored, is_ignored, + "expected {path} to have is_ignored: {is_ignored}" + ); +} + +#[track_caller] +fn git_init(path: &Path) -> git2::Repository { + let mut init_opts = RepositoryInitOptions::new(); + init_opts.initial_head("main"); + git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository") +} + +#[track_caller] +fn git_add>(path: P, repo: &git2::Repository) { + let path = path.as_ref(); + let mut index = repo.index().expect("Failed to get index"); + index.add_path(path).expect("Failed to add file"); + index.write().expect("Failed to write index"); +} + +#[track_caller] +fn git_remove_index(path: &Path, repo: &git2::Repository) { + let mut index = repo.index().expect("Failed to get index"); + index.remove_path(path).expect("Failed to add file"); + index.write().expect("Failed to write index"); +} + +#[track_caller] +fn git_commit(msg: &'static str, repo: &git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + let oid = repo.index().unwrap().write_tree().unwrap(); + let tree = repo.find_tree(oid).unwrap(); + if let Ok(head) = repo.head() { + let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); + + let parent_commit = parent_obj.as_commit().unwrap(); + + repo.commit( + Some("HEAD"), + &signature, + &signature, + msg, + &tree, + &[parent_commit], + ) + .expect("Failed to commit with parent"); + } else { + repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) + .expect("Failed to commit"); + } +} + +#[cfg(not(windows))] +#[track_caller] +fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) { + 
repo.cherrypick(commit, None).expect("Failed to cherrypick"); +} + +#[track_caller] +fn git_stash(repo: &mut git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + repo.stash_save(&signature, "N/A", None) + .expect("Failed to stash"); +} + +#[track_caller] +fn git_reset(offset: usize, repo: &git2::Repository) { + let head = repo.head().expect("Couldn't get repo head"); + let object = head.peel(git2::ObjectType::Commit).unwrap(); + let commit = object.as_commit().unwrap(); + let new_head = commit + .parents() + .inspect(|parnet| { + parnet.message(); + }) + .nth(offset) + .expect("Not enough history"); + repo.reset(new_head.as_object(), git2::ResetType::Soft, None) + .expect("Could not reset"); +} + +#[cfg(not(windows))] +#[track_caller] +fn git_branch(name: &str, repo: &git2::Repository) { + let head = repo + .head() + .expect("Couldn't get repo head") + .peel_to_commit() + .expect("HEAD is not a commit"); + repo.branch(name, &head, false).expect("Failed to commit"); +} + +#[cfg(not(windows))] +#[track_caller] +fn git_checkout(name: &str, repo: &git2::Repository) { + repo.set_head(name).expect("Failed to set head"); + repo.checkout_head(None).expect("Failed to check out head"); +} + +#[cfg(not(windows))] +#[track_caller] +fn git_status(repo: &git2::Repository) -> collections::HashMap { + repo.statuses(None) + .unwrap() + .iter() + .map(|status| (status.path().unwrap().to_string(), status.status())) + .collect() +} diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 188534d75e..2defadaee0 100644 --- a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -298,7 +298,7 @@ fn local_task_context_for_location( let worktree_abs_path = worktree_abs_path.clone(); let project_env = environment .update(cx, |environment, cx| { - environment.get_environment(worktree_id, worktree_abs_path.clone(), cx) + environment.get_environment(worktree_abs_path.clone(), cx) }) 
.ok()? .await; diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 054379231e..dc9a126864 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -331,11 +331,7 @@ impl LocalToolchainStore { cx.spawn(async move |cx| { let project_env = environment .update(cx, |environment, cx| { - environment.get_environment( - Some(path.worktree_id), - Some(Arc::from(abs_path.as_path())), - cx, - ) + environment.get_environment(Some(root.clone()), cx) }) .ok()? .await; diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 35f84c0ce4..6808c514e6 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -29,7 +29,8 @@ use language::DiagnosticSeverity; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::{ Entry, EntryKind, Fs, GitEntry, GitEntryRef, GitTraversal, Project, ProjectEntryId, - ProjectPath, Worktree, WorktreeId, git_store::git_traversal::ChildEntriesGitIter, + ProjectPath, Worktree, WorktreeId, + git_store::{GitStoreEvent, git_traversal::ChildEntriesGitIter}, relativize_path, }; use project_panel_settings::{ @@ -298,6 +299,7 @@ impl ProjectPanel { cx: &mut Context, ) -> Entity { let project = workspace.project().clone(); + let git_store = project.read(cx).git_store().clone(); let project_panel = cx.new(|cx| { let focus_handle = cx.focus_handle(); cx.on_focus(&focus_handle, window, Self::focus_in).detach(); @@ -306,6 +308,18 @@ impl ProjectPanel { this.hide_scrollbar(window, cx); }) .detach(); + + cx.subscribe(&git_store, |this, _, event, cx| match event { + GitStoreEvent::RepositoryUpdated(_, _, _) + | GitStoreEvent::RepositoryAdded(_) + | GitStoreEvent::RepositoryRemoved(_) => { + this.update_visible_entries(None, cx); + cx.notify(); + } + _ => {} + }) + .detach(); + cx.subscribe(&project, |this, project, event, cx| match event { 
project::Event::ActiveEntryChanged(Some(entry_id)) => { if ProjectPanelSettings::get_global(cx).auto_reveal_entries { @@ -335,9 +349,7 @@ impl ProjectPanel { this.update_visible_entries(None, cx); cx.notify(); } - project::Event::GitStateUpdated - | project::Event::ActiveRepositoryChanged - | project::Event::WorktreeUpdatedEntries(_, _) + project::Event::WorktreeUpdatedEntries(_, _) | project::Event::WorktreeAdded(_) | project::Event::WorktreeOrderChanged => { this.update_visible_entries(None, cx); diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 01b1476106..3e819b3d4e 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1937,7 +1937,7 @@ message Entry { } message RepositoryEntry { - uint64 work_directory_id = 1; + uint64 repository_id = 1; reserved 2; repeated StatusEntry updated_statuses = 3; repeated string removed_statuses = 4; @@ -1955,6 +1955,7 @@ message UpdateRepository { repeated string removed_statuses = 7; repeated string current_merge_conflicts = 8; uint64 scan_id = 9; + bool is_last_update = 10; } message RemoveRepository { @@ -2247,7 +2248,7 @@ message OpenUncommittedDiffResponse { message SetIndexText { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string path = 4; optional string text = 5; } @@ -3356,7 +3357,7 @@ message GetPanicFiles { message GitShow { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string commit = 4; } @@ -3371,7 +3372,7 @@ message GitCommitDetails { message LoadCommitDiff { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string commit = 4; } @@ -3388,7 +3389,7 @@ message CommitFile { message GitReset { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string commit = 4; ResetMode mode = 5; enum ResetMode { @@ -3400,7 +3401,7 @@ message GitReset { message GitCheckoutFiles { uint64 
project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string commit = 4; repeated string paths = 5; } @@ -3455,21 +3456,21 @@ message RegisterBufferWithLanguageServers{ message Stage { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; repeated string paths = 4; } message Unstage { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; repeated string paths = 4; } message Commit { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; optional string name = 4; optional string email = 5; string message = 6; @@ -3478,13 +3479,13 @@ message Commit { message OpenCommitMessageBuffer { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; } message Push { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string remote_name = 4; string branch_name = 5; optional PushOptions options = 6; @@ -3499,14 +3500,14 @@ message Push { message Fetch { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; uint64 askpass_id = 4; } message GetRemotes { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; optional string branch_name = 4; } @@ -3521,7 +3522,7 @@ message GetRemotesResponse { message Pull { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string remote_name = 4; string branch_name = 5; uint64 askpass_id = 6; @@ -3535,7 +3536,7 @@ message RemoteMessageResponse { message AskPassRequest { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; uint64 askpass_id = 4; string prompt = 5; } @@ -3547,27 +3548,27 @@ message AskPassResponse { message GitGetBranches { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; } message 
GitCreateBranch { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string branch_name = 4; } message GitChangeBranch { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; string branch_name = 4; } message CheckForPushedCommits { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; } message CheckForPushedCommitsResponse { @@ -3577,7 +3578,7 @@ message CheckForPushedCommitsResponse { message GitDiff { uint64 project_id = 1; reserved 2; - uint64 work_directory_id = 3; + uint64 repository_id = 3; DiffType diff_type = 4; enum DiffType { diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index b8045c1749..2fe3ef93cd 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -834,7 +834,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator impl Iterator { let mut updated_statuses_iter = mem::take(&mut update.updated_statuses).into_iter().fuse(); let mut removed_statuses_iter = mem::take(&mut update.removed_statuses).into_iter().fuse(); - let mut is_first = true; - std::iter::from_fn(move || { - let updated_statuses = updated_statuses_iter - .by_ref() - .take(MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE) - .collect::>(); - let removed_statuses = removed_statuses_iter - .by_ref() - .take(MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE) - .collect::>(); - if updated_statuses.is_empty() && removed_statuses.is_empty() && !is_first { - return None; + std::iter::from_fn({ + let update = update.clone(); + move || { + let updated_statuses = updated_statuses_iter + .by_ref() + .take(MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE) + .collect::>(); + let removed_statuses = removed_statuses_iter + .by_ref() + .take(MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE) + .collect::>(); + if updated_statuses.is_empty() && removed_statuses.is_empty() { + return None; + } + Some(UpdateRepository { + updated_statuses, + removed_statuses, + is_last_update: 
false, + ..update.clone() + }) } - is_first = false; - Some(UpdateRepository { - updated_statuses, - removed_statuses, - ..update.clone() - }) }) + .chain([UpdateRepository { + updated_statuses: Vec::new(), + removed_statuses: Vec::new(), + is_last_update: true, + ..update + }]) } #[cfg(test)] diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 30fd2d8940..9b5a98bf38 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -28,6 +28,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; +#[cfg(not(windows))] use unindent::Unindent as _; use util::{path, separator}; @@ -1203,6 +1204,8 @@ async fn test_remote_rename_entry(cx: &mut TestAppContext, server_cx: &mut TestA }); } +// TODO: this test fails on Windows. +#[cfg(not(windows))] #[gpui::test] async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let text_2 = " @@ -1379,7 +1382,8 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) @@ -1418,7 +1422,8 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA .next() .unwrap() .read(cx) - .current_branch() + .branch + .as_ref() .unwrap() .clone() }) diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 5f60afa492..884042b722 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -317,10 +317,18 @@ where )) } + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + pub fn insert(&mut self, key: K) { self.0.insert(key, ()); } + pub fn remove(&mut self, key: &K) -> bool { + self.0.remove(key).is_some() + } + pub fn extend(&mut self, iter: impl IntoIterator) { self.0.extend(iter.into_iter().map(|key| (key, ()))); } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs 
index d19e973394..ed984e44c1 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -522,7 +522,7 @@ impl TitleBar { pub fn render_project_branch(&self, cx: &mut Context) -> Option { let repository = self.project.read(cx).active_repository(cx)?; let workspace = self.workspace.upgrade()?; - let branch_name = repository.read(cx).current_branch()?.name.clone(); + let branch_name = repository.read(cx).branch.as_ref()?.name.clone(); let branch_name = util::truncate_and_trailoff(&branch_name, MAX_BRANCH_NAME_LENGTH); Some( Button::new("project_branch_trigger", branch_name) diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 11818ce8a9..8eb22f2fe2 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -783,6 +783,7 @@ mod test { async fn test_non_vim_search(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, false).await; cx.cx.set_state("ˇone one one one"); + cx.run_until_parked(); cx.simulate_keystrokes("cmd-f"); cx.run_until_parked(); diff --git a/crates/worktree/Cargo.toml b/crates/worktree/Cargo.toml index 8630f20194..8d7e57187c 100644 --- a/crates/worktree/Cargo.toml +++ b/crates/worktree/Cargo.toml @@ -30,7 +30,6 @@ fs.workspace = true futures.workspace = true fuzzy.workspace = true git.workspace = true -git_hosting_providers.workspace = true gpui.workspace = true ignore.workspace = true language.workspace = true diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index e75bd59ede..8e5d276994 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -14,18 +14,13 @@ use futures::{ mpsc::{self, UnboundedSender}, oneshot, }, - future::join_all, select_biased, task::Poll, }; use fuzzy::CharBag; use git::{ - COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, GitHostingProviderRegistry, INDEX_LOCK, - LFS_DIR, - repository::{Branch, GitRepository, RepoPath, UpstreamTrackingStatus}, - status::{ - 
FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode, - }, + COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, INDEX_LOCK, LFS_DIR, + repository::RepoPath, status::GitSummary, }; use gpui::{ App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, @@ -62,7 +57,7 @@ use std::{ pin::Pin, sync::{ Arc, - atomic::{self, AtomicI32, AtomicUsize, Ordering::SeqCst}, + atomic::{AtomicUsize, Ordering::SeqCst}, }, time::{Duration, Instant}, }; @@ -159,7 +154,6 @@ pub struct Snapshot { entries_by_path: SumTree, entries_by_id: SumTree, always_included_entries: Vec>, - repositories: SumTree, /// A number that increases every time the worktree begins scanning /// a set of paths from the filesystem. This scanning could be caused @@ -174,223 +168,6 @@ pub struct Snapshot { completed_scan_id: usize, } -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct RepositoryEntry { - /// The git status entries for this repository. - /// Note that the paths on this repository are relative to the git work directory. - /// If the .git folder is external to Zed, these paths will be relative to that folder, - /// and this data structure might reference files external to this worktree. - /// - /// For example: - /// - /// my_root_folder/ <-- repository root - /// .git - /// my_sub_folder_1/ - /// project_root/ <-- Project root, Zed opened here - /// changed_file_1 <-- File with changes, in worktree - /// my_sub_folder_2/ - /// changed_file_2 <-- File with changes, out of worktree - /// ... 
- /// - /// With this setup, this field would contain 2 entries, like so: - /// - my_sub_folder_1/project_root/changed_file_1 - /// - my_sub_folder_2/changed_file_2 - pub statuses_by_path: SumTree, - pub work_directory_id: ProjectEntryId, - pub work_directory_abs_path: PathBuf, - pub worktree_scan_id: usize, - pub current_branch: Option, - pub current_merge_conflicts: TreeSet, -} - -impl RepositoryEntry { - pub fn relativize_abs_path(&self, abs_path: &Path) -> Option { - Some( - abs_path - .strip_prefix(&self.work_directory_abs_path) - .ok()? - .into(), - ) - } - - pub fn directory_contains_abs_path(&self, abs_path: impl AsRef) -> bool { - abs_path.as_ref().starts_with(&self.work_directory_abs_path) - } - - pub fn branch(&self) -> Option<&Branch> { - self.current_branch.as_ref() - } - - pub fn work_directory_id(&self) -> ProjectEntryId { - self.work_directory_id - } - - pub fn status(&self) -> impl Iterator + '_ { - self.statuses_by_path.iter().cloned() - } - - pub fn status_len(&self) -> usize { - self.statuses_by_path.summary().item_summary.count - } - - pub fn status_summary(&self) -> GitSummary { - self.statuses_by_path.summary().item_summary - } - - pub fn status_for_path(&self, path: &RepoPath) -> Option { - self.statuses_by_path - .get(&PathKey(path.0.clone()), &()) - .cloned() - } - - pub fn initial_update(&self, project_id: u64) -> proto::UpdateRepository { - proto::UpdateRepository { - branch_summary: self.current_branch.as_ref().map(branch_to_proto), - updated_statuses: self - .statuses_by_path - .iter() - .map(|entry| entry.to_proto()) - .collect(), - removed_statuses: Default::default(), - current_merge_conflicts: self - .current_merge_conflicts - .iter() - .map(|repo_path| repo_path.to_proto()) - .collect(), - project_id, - // This is semantically wrong---we want to move to having separate IDs for repositories. 
- // But for the moment, RepositoryEntry isn't set up to provide that at this level, so we - // shim it using the work directory's project entry ID. The pair of this + project ID will - // be globally unique. - id: self.work_directory_id().to_proto(), - abs_path: self.work_directory_abs_path.as_path().to_proto(), - entry_ids: vec![self.work_directory_id().to_proto()], - // This is also semantically wrong, and should be replaced once we separate git repo updates - // from worktree scans. - scan_id: self.worktree_scan_id as u64, - } - } - - pub fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository { - let mut updated_statuses: Vec = Vec::new(); - let mut removed_statuses: Vec = Vec::new(); - - let mut new_statuses = self.statuses_by_path.iter().peekable(); - let mut old_statuses = old.statuses_by_path.iter().peekable(); - - let mut current_new_entry = new_statuses.next(); - let mut current_old_entry = old_statuses.next(); - loop { - match (current_new_entry, current_old_entry) { - (Some(new_entry), Some(old_entry)) => { - match new_entry.repo_path.cmp(&old_entry.repo_path) { - Ordering::Less => { - updated_statuses.push(new_entry.to_proto()); - current_new_entry = new_statuses.next(); - } - Ordering::Equal => { - if new_entry.status != old_entry.status { - updated_statuses.push(new_entry.to_proto()); - } - current_old_entry = old_statuses.next(); - current_new_entry = new_statuses.next(); - } - Ordering::Greater => { - removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); - current_old_entry = old_statuses.next(); - } - } - } - (None, Some(old_entry)) => { - removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); - current_old_entry = old_statuses.next(); - } - (Some(new_entry), None) => { - updated_statuses.push(new_entry.to_proto()); - current_new_entry = new_statuses.next(); - } - (None, None) => break, - } - } - - proto::UpdateRepository { - branch_summary: self.current_branch.as_ref().map(branch_to_proto), - 
updated_statuses, - removed_statuses, - current_merge_conflicts: self - .current_merge_conflicts - .iter() - .map(|path| path.as_ref().to_proto()) - .collect(), - project_id, - id: self.work_directory_id.to_proto(), - abs_path: self.work_directory_abs_path.as_path().to_proto(), - entry_ids: vec![self.work_directory_id.to_proto()], - scan_id: self.worktree_scan_id as u64, - } - } -} - -pub fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch { - proto::Branch { - is_head: branch.is_head, - name: branch.name.to_string(), - unix_timestamp: branch - .most_recent_commit - .as_ref() - .map(|commit| commit.commit_timestamp as u64), - upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream { - ref_name: upstream.ref_name.to_string(), - tracking: upstream - .tracking - .status() - .map(|upstream| proto::UpstreamTracking { - ahead: upstream.ahead as u64, - behind: upstream.behind as u64, - }), - }), - most_recent_commit: branch - .most_recent_commit - .as_ref() - .map(|commit| proto::CommitSummary { - sha: commit.sha.to_string(), - subject: commit.subject.to_string(), - commit_timestamp: commit.commit_timestamp, - }), - } -} - -pub fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch { - git::repository::Branch { - is_head: proto.is_head, - name: proto.name.clone().into(), - upstream: proto - .upstream - .as_ref() - .map(|upstream| git::repository::Upstream { - ref_name: upstream.ref_name.to_string().into(), - tracking: upstream - .tracking - .as_ref() - .map(|tracking| { - git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus { - ahead: tracking.ahead as u32, - behind: tracking.behind as u32, - }) - }) - .unwrap_or(git::repository::UpstreamTracking::Gone), - }), - most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| { - git::repository::CommitSummary { - sha: commit.sha.to_string().into(), - subject: commit.subject.to_string().into(), - commit_timestamp: commit.commit_timestamp, - has_parent: true, - } 
- }), - } -} - /// This path corresponds to the 'content path' of a repository in relation /// to Zed's project root. /// In the majority of the cases, this is the folder that contains the .git folder. @@ -598,24 +375,20 @@ struct BackgroundScannerState { removed_entries: HashMap, changed_paths: Vec>, prev_snapshot: Snapshot, - git_hosting_provider_registry: Option>, - repository_scans: HashMap>, } #[derive(Debug, Clone)] -pub struct LocalRepositoryEntry { - pub(crate) work_directory_id: ProjectEntryId, - pub(crate) work_directory: WorkDirectory, - pub(crate) git_dir_scan_id: usize, - pub(crate) status_scan_id: usize, - pub(crate) repo_ptr: Arc, +struct LocalRepositoryEntry { + work_directory_id: ProjectEntryId, + work_directory: WorkDirectory, + work_directory_abs_path: Arc, + git_dir_scan_id: usize, + original_dot_git_abs_path: Arc, /// Absolute path to the actual .git folder. /// Note: if .git is a file, this points to the folder indicated by the .git file - pub(crate) dot_git_dir_abs_path: Arc, + dot_git_dir_abs_path: Arc, /// Absolute path to the .git file, if we're in a git worktree. 
- pub(crate) dot_git_worktree_abs_path: Option>, - pub current_merge_head_shas: Vec, - pub merge_message: Option, + dot_git_worktree_abs_path: Option>, } impl sum_tree::Item for LocalRepositoryEntry { @@ -637,11 +410,11 @@ impl KeyedItem for LocalRepositoryEntry { } } -impl LocalRepositoryEntry { - pub fn repo(&self) -> &Arc { - &self.repo_ptr - } -} +//impl LocalRepositoryEntry { +// pub fn repo(&self) -> &Arc { +// &self.repo_ptr +// } +//} impl Deref for LocalRepositoryEntry { type Target = WorkDirectory; @@ -1030,54 +803,6 @@ impl Worktree { } } - pub fn load_staged_file(&self, path: &Path, cx: &App) -> Task>> { - match self { - Worktree::Local(this) => { - let path = Arc::from(path); - let snapshot = this.snapshot(); - cx.spawn(async move |_cx| { - if let Some(repo) = snapshot.local_repo_containing_path(&path) { - if let Some(repo_path) = repo.relativize(&path).log_err() { - if let Some(git_repo) = - snapshot.git_repositories.get(&repo.work_directory_id) - { - return Ok(git_repo.repo_ptr.load_index_text(repo_path).await); - } - } - } - Err(anyhow!("No repository found for {path:?}")) - }) - } - Worktree::Remote(_) => { - Task::ready(Err(anyhow!("remote worktrees can't yet load staged files"))) - } - } - } - - pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task>> { - match self { - Worktree::Local(this) => { - let path = Arc::from(path); - let snapshot = this.snapshot(); - cx.spawn(async move |_cx| { - if let Some(repo) = snapshot.local_repo_containing_path(&path) { - if let Some(repo_path) = repo.relativize(&path).log_err() { - if let Some(git_repo) = - snapshot.git_repositories.get(&repo.work_directory_id) - { - return Ok(git_repo.repo_ptr.load_committed_text(repo_path).await); - } - } - } - Err(anyhow!("No repository found for {path:?}")) - }) - } - Worktree::Remote(_) => Task::ready(Err(anyhow!( - "remote worktrees can't yet load committed files" - ))), - } - } - pub fn load_binary_file( &self, path: &Path, @@ -1485,7 +1210,6 @@ impl 
LocalWorktree { let share_private_files = self.share_private_files; let next_entry_id = self.next_entry_id.clone(); let fs = self.fs.clone(); - let git_hosting_provider_registry = GitHostingProviderRegistry::try_global(cx); let settings = self.settings.clone(); let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); let background_scanner = cx.background_spawn({ @@ -1502,12 +1226,11 @@ impl LocalWorktree { fs, fs_case_sensitive, status_updates_tx: scan_states_tx, - scans_running: Arc::new(AtomicI32::new(0)), executor: background, scan_requests_rx, path_prefixes_to_scan_rx, next_entry_id, - state: Arc::new(Mutex::new(BackgroundScannerState { + state: Mutex::new(BackgroundScannerState { prev_snapshot: snapshot.snapshot.clone(), snapshot, scanned_dirs: Default::default(), @@ -1515,9 +1238,7 @@ impl LocalWorktree { paths_to_scan: Default::default(), removed_entries: Default::default(), changed_paths: Default::default(), - repository_scans: HashMap::default(), - git_hosting_provider_registry, - })), + }), phase: BackgroundScannerPhase::InitialScan, share_private_files, settings, @@ -1561,11 +1282,11 @@ impl LocalWorktree { fn set_snapshot( &mut self, - new_snapshot: LocalSnapshot, + mut new_snapshot: LocalSnapshot, entry_changes: UpdatedEntriesSet, cx: &mut Context, ) { - let repo_changes = self.changed_repos(&self.snapshot, &new_snapshot); + let repo_changes = self.changed_repos(&self.snapshot, &mut new_snapshot); self.snapshot = new_snapshot; if let Some(share) = self.update_observer.as_mut() { @@ -1586,81 +1307,78 @@ impl LocalWorktree { fn changed_repos( &self, old_snapshot: &LocalSnapshot, - new_snapshot: &LocalSnapshot, + new_snapshot: &mut LocalSnapshot, ) -> UpdatedGitRepositoriesSet { let mut changes = Vec::new(); let mut old_repos = old_snapshot.git_repositories.iter().peekable(); - let mut new_repos = new_snapshot.git_repositories.iter().peekable(); + let new_repos = new_snapshot.git_repositories.clone(); + let mut new_repos = 
new_repos.iter().peekable(); loop { match (new_repos.peek().map(clone), old_repos.peek().map(clone)) { (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => { match Ord::cmp(&new_entry_id, &old_entry_id) { Ordering::Less => { - if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) { - changes.push(( - entry.clone(), - GitRepositoryChange { - old_repository: None, - }, - )); - } + changes.push(UpdatedGitRepository { + work_directory_id: new_entry_id, + old_work_directory_abs_path: None, + new_work_directory_abs_path: Some( + new_repo.work_directory_abs_path.clone(), + ), + dot_git_abs_path: Some(new_repo.original_dot_git_abs_path.clone()), + }); new_repos.next(); } Ordering::Equal => { if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id - || new_repo.status_scan_id != old_repo.status_scan_id + || new_repo.work_directory_abs_path + != old_repo.work_directory_abs_path { - if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) { - let old_repo = - old_snapshot.repository_for_id(old_entry_id).cloned(); - changes.push(( - entry.clone(), - GitRepositoryChange { - old_repository: old_repo, - }, - )); - } + changes.push(UpdatedGitRepository { + work_directory_id: new_entry_id, + old_work_directory_abs_path: Some( + old_repo.work_directory_abs_path.clone(), + ), + new_work_directory_abs_path: Some( + new_repo.work_directory_abs_path.clone(), + ), + dot_git_abs_path: Some( + new_repo.original_dot_git_abs_path.clone(), + ), + }); } new_repos.next(); old_repos.next(); } Ordering::Greater => { - if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) { - let old_repo = - old_snapshot.repository_for_id(old_entry_id).cloned(); - changes.push(( - entry.clone(), - GitRepositoryChange { - old_repository: old_repo, - }, - )); - } + changes.push(UpdatedGitRepository { + work_directory_id: old_entry_id, + old_work_directory_abs_path: Some( + old_repo.work_directory_abs_path.clone(), + ), + new_work_directory_abs_path: None, + dot_git_abs_path: None, + 
}); old_repos.next(); } } } - (Some((entry_id, _)), None) => { - if let Some(entry) = new_snapshot.entry_for_id(entry_id) { - changes.push(( - entry.clone(), - GitRepositoryChange { - old_repository: None, - }, - )); - } + (Some((entry_id, repo)), None) => { + changes.push(UpdatedGitRepository { + work_directory_id: entry_id, + old_work_directory_abs_path: None, + new_work_directory_abs_path: Some(repo.work_directory_abs_path.clone()), + dot_git_abs_path: Some(repo.original_dot_git_abs_path.clone()), + }); new_repos.next(); } - (None, Some((entry_id, _))) => { - if let Some(entry) = old_snapshot.entry_for_id(entry_id) { - let old_repo = old_snapshot.repository_for_id(entry_id).cloned(); - changes.push(( - entry.clone(), - GitRepositoryChange { - old_repository: old_repo, - }, - )); - } + (None, Some((entry_id, repo))) => { + changes.push(UpdatedGitRepository { + work_directory_id: entry_id, + old_work_directory_abs_path: Some(repo.work_directory_abs_path.clone()), + new_work_directory_abs_path: None, + dot_git_abs_path: Some(repo.original_dot_git_abs_path.clone()), + }); old_repos.next(); } (None, None) => break, @@ -1696,10 +1414,6 @@ impl LocalWorktree { self.settings.clone() } - pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> { - self.git_repositories.get(&repo.work_directory_id) - } - fn load_binary_file( &self, path: &Path, @@ -2228,6 +1942,11 @@ impl LocalWorktree { rx } + #[cfg(feature = "test-support")] + pub fn manually_refresh_entries_for_paths(&self, paths: Vec>) -> barrier::Receiver { + self.refresh_entries_for_paths(paths) + } + pub fn add_path_prefix_to_scan(&self, path_prefix: Arc) -> barrier::Receiver { let (tx, rx) = barrier::channel(); self.path_prefixes_to_scan_tx @@ -2527,7 +2246,6 @@ impl Snapshot { always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), - repositories: Default::default(), scan_id: 1, completed_scan_id: 0, } @@ -2646,26 +2364,6 @@ 
impl Snapshot { Some(removed_entry.path) } - //#[cfg(any(test, feature = "test-support"))] - //pub fn status_for_file(&self, path: impl AsRef) -> Option { - // let path = path.as_ref(); - // self.repository_for_path(path).and_then(|repo| { - // let repo_path = repo.relativize(path).unwrap(); - // repo.statuses_by_path - // .get(&PathKey(repo_path.0), &()) - // .map(|entry| entry.status) - // }) - //} - - #[cfg(any(test, feature = "test-support"))] - pub fn status_for_file_abs_path(&self, abs_path: impl AsRef) -> Option { - let abs_path = abs_path.as_ref(); - let repo = self.repository_containing_abs_path(abs_path)?; - let repo_path = repo.relativize_abs_path(abs_path)?; - let status = repo.statuses_by_path.get(&PathKey(repo_path.0), &())?; - Some(status.status) - } - fn update_abs_path(&mut self, abs_path: SanitizedPath, root_name: String) { self.abs_path = abs_path; if root_name != self.root_name { @@ -2674,7 +2372,7 @@ impl Snapshot { } } - pub(crate) fn apply_remote_update( + fn apply_remote_update( &mut self, update: proto::UpdateWorktree, always_included_paths: &PathMatcher, @@ -2805,24 +2503,6 @@ impl Snapshot { self.traverse_from_offset(true, true, include_ignored, start) } - pub fn repositories(&self) -> &SumTree { - &self.repositories - } - - /// Get the repository whose work directory contains the given path. 
- fn repository_containing_abs_path(&self, abs_path: &Path) -> Option<&RepositoryEntry> { - self.repositories - .iter() - .filter(|repo| repo.directory_contains_abs_path(abs_path)) - .last() - } - - fn repository_for_id(&self, id: ProjectEntryId) -> Option<&RepositoryEntry> { - self.repositories - .iter() - .find(|repo| repo.work_directory_id == id) - } - pub fn paths(&self) -> impl Iterator> { let empty_path = Path::new(""); self.entries_by_path @@ -2905,20 +2585,13 @@ impl Snapshot { } impl LocalSnapshot { - pub fn local_repo_for_work_directory_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { + fn local_repo_for_work_directory_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { self.git_repositories .iter() .map(|(_, entry)| entry) .find(|entry| entry.work_directory.path_key() == PathKey(path.into())) } - pub fn local_repo_containing_path(&self, path: &Path) -> Option<&LocalRepositoryEntry> { - self.git_repositories - .values() - .filter(|local_repo| path.starts_with(&local_repo.path_key().0)) - .max_by_key(|local_repo| local_repo.path_key()) - } - fn build_update( &self, project_id: u64, @@ -3046,7 +2719,7 @@ impl LocalSnapshot { } #[cfg(test)] - pub(crate) fn expanded_entries(&self) -> impl Iterator { + fn expanded_entries(&self) -> impl Iterator { self.entries_by_path .cursor::<()>(&()) .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored)) @@ -3125,26 +2798,6 @@ impl LocalSnapshot { } } - #[cfg(test)] - fn check_git_invariants(&self) { - let dotgit_paths = self - .git_repositories - .iter() - .map(|repo| repo.1.dot_git_dir_abs_path.clone()) - .collect::>(); - let work_dir_paths = self - .repositories - .iter() - .map(|repo| repo.work_directory_abs_path.clone()) - .collect::>(); - assert_eq!(dotgit_paths.len(), work_dir_paths.len()); - assert_eq!(self.repositories.iter().count(), work_dir_paths.len()); - assert_eq!(self.git_repositories.iter().count(), work_dir_paths.len()); - for entry in 
self.repositories.iter() { - self.git_repositories.get(&entry.work_directory_id).unwrap(); - } - } - #[cfg(test)] pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> { let mut paths = Vec::new(); @@ -3288,7 +2941,7 @@ impl BackgroundScannerState { } fn remove_path(&mut self, path: &Path) { - log::info!("background scanner removing path {path:?}"); + log::debug!("background scanner removing path {path:?}"); let mut new_entries; let removed_entries; { @@ -3343,11 +2996,6 @@ impl BackgroundScannerState { self.snapshot .git_repositories .retain(|id, _| removed_ids.binary_search(id).is_err()); - self.snapshot.repositories.retain(&(), |repository| { - removed_ids - .binary_search(&repository.work_directory_id) - .is_err() - }); #[cfg(test)] self.snapshot.check_invariants(false); @@ -3358,7 +3006,7 @@ impl BackgroundScannerState { dot_git_path: Arc, fs: &dyn Fs, watcher: &dyn Watcher, - ) -> Option { + ) { let work_dir_path: Arc = match dot_git_path.parent() { Some(parent_dir) => { // Guard against repositories inside the repository metadata @@ -3366,7 +3014,7 @@ impl BackgroundScannerState { log::info!( "not building git repository for nested `.git` directory, `.git` path in the worktree: {dot_git_path:?}" ); - return None; + return; }; log::info!( "building git repository, `.git` path in the worktree: {dot_git_path:?}" @@ -3380,7 +3028,7 @@ impl BackgroundScannerState { log::info!( "not building git repository for the worktree itself, `.git` path in the worktree: {dot_git_path:?}" ); - return None; + return; } }; @@ -3391,7 +3039,7 @@ impl BackgroundScannerState { dot_git_path, fs, watcher, - ) + ); } fn insert_git_repository_for_path( @@ -3401,7 +3049,6 @@ impl BackgroundScannerState { fs: &dyn Fs, watcher: &dyn Watcher, ) -> Option { - // TODO canonicalize here log::info!("insert git repository for {dot_git_path:?}"); let work_dir_entry = self.snapshot.entry_for_path(work_directory.path_key().0)?; let work_directory_abs_path = self @@ 
-3421,6 +3068,7 @@ impl BackgroundScannerState { let dot_git_abs_path = self.snapshot.abs_path.as_path().join(&dot_git_path); + // TODO add these watchers without building a whole repository by parsing .git-with-indirection let t0 = Instant::now(); let repository = fs.open_repo(&dot_git_abs_path)?; log::info!("opened git repo for {dot_git_abs_path:?}"); @@ -3443,41 +3091,21 @@ impl BackgroundScannerState { // * `actual_dot_git_dir_abs_path` is the path to the actual .git directory. In git // documentation this is called the "commondir". watcher.add(&dot_git_abs_path).log_err()?; - Some(Arc::from(dot_git_abs_path)) + Some(Arc::from(dot_git_abs_path.as_path())) }; log::trace!("constructed libgit2 repo in {:?}", t0.elapsed()); - if let Some(git_hosting_provider_registry) = self.git_hosting_provider_registry.clone() { - git_hosting_providers::register_additional_providers( - git_hosting_provider_registry, - repository.clone(), - ); - } - let work_directory_id = work_dir_entry.id; - self.snapshot.repositories.insert_or_replace( - RepositoryEntry { - work_directory_id, - work_directory_abs_path, - current_branch: None, - statuses_by_path: Default::default(), - current_merge_conflicts: Default::default(), - worktree_scan_id: 0, - }, - &(), - ); let local_repository = LocalRepositoryEntry { work_directory_id, work_directory, git_dir_scan_id: 0, - status_scan_id: 0, - repo_ptr: repository.clone(), + original_dot_git_abs_path: dot_git_abs_path.as_path().into(), dot_git_dir_abs_path: actual_dot_git_dir_abs_path.into(), + work_directory_abs_path: work_directory_abs_path.as_path().into(), dot_git_worktree_abs_path, - current_merge_head_shas: Default::default(), - merge_message: None, }; self.snapshot @@ -3808,53 +3436,22 @@ pub enum PathChange { Loaded, } -#[derive(Debug)] -pub struct GitRepositoryChange { - /// The previous state of the repository, if it already existed. 
- pub old_repository: Option, +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct UpdatedGitRepository { + /// ID of the repository's working directory. + /// + /// For a repo that's above the worktree root, this is the ID of the worktree root, and hence not unique. + /// It's included here to aid the GitStore in detecting when a repository's working directory is renamed. + pub work_directory_id: ProjectEntryId, + pub old_work_directory_abs_path: Option>, + pub new_work_directory_abs_path: Option>, + /// For a normal git repository checkout, the absolute path to the .git directory. + /// For a worktree, the absolute path to the worktree's subdirectory inside the .git directory. + pub dot_git_abs_path: Option>, } pub type UpdatedEntriesSet = Arc<[(Arc, ProjectEntryId, PathChange)]>; -pub type UpdatedGitRepositoriesSet = Arc<[(Entry, GitRepositoryChange)]>; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct StatusEntry { - pub repo_path: RepoPath, - pub status: FileStatus, -} - -impl StatusEntry { - fn to_proto(&self) -> proto::StatusEntry { - let simple_status = match self.status { - FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32, - FileStatus::Unmerged { .. 
} => proto::GitStatus::Conflict as i32, - FileStatus::Tracked(TrackedStatus { - index_status, - worktree_status, - }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified { - worktree_status - } else { - index_status - }), - }; - - proto::StatusEntry { - repo_path: self.repo_path.as_ref().to_proto(), - simple_status, - status: Some(status_to_proto(self.status)), - } - } -} - -impl TryFrom for StatusEntry { - type Error = anyhow::Error; - - fn try_from(value: proto::StatusEntry) -> Result { - let repo_path = RepoPath(Arc::::from_proto(value.repo_path)); - let status = status_from_proto(value.simple_status, value.status)?; - Ok(Self { repo_path, status }) - } -} +pub type UpdatedGitRepositoriesSet = Arc<[UpdatedGitRepository]>; #[derive(Clone, Debug)] pub struct PathProgress<'a> { @@ -3863,8 +3460,8 @@ pub struct PathProgress<'a> { #[derive(Clone, Debug)] pub struct PathSummary { - max_path: Arc, - item_summary: S, + pub max_path: Arc, + pub item_summary: S, } impl Summary for PathSummary { @@ -3899,75 +3496,6 @@ impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathProgress<'a } } -#[derive(Clone, Debug)] -pub struct AbsPathSummary { - max_path: Arc, -} - -impl Summary for AbsPathSummary { - type Context = (); - - fn zero(_: &Self::Context) -> Self { - Self { - max_path: Path::new("").into(), - } - } - - fn add_summary(&mut self, rhs: &Self, _: &Self::Context) { - self.max_path = rhs.max_path.clone(); - } -} - -impl sum_tree::Item for RepositoryEntry { - type Summary = AbsPathSummary; - - fn summary(&self, _: &::Context) -> Self::Summary { - AbsPathSummary { - max_path: self.work_directory_abs_path.as_path().into(), - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct AbsPathKey(pub Arc); - -impl<'a> sum_tree::Dimension<'a, AbsPathSummary> for AbsPathKey { - fn zero(_: &()) -> Self { - Self(Path::new("").into()) - } - - fn add_summary(&mut self, summary: &'a AbsPathSummary, _: &()) { - self.0 = 
summary.max_path.clone(); - } -} - -impl sum_tree::KeyedItem for RepositoryEntry { - type Key = AbsPathKey; - - fn key(&self) -> Self::Key { - AbsPathKey(self.work_directory_abs_path.as_path().into()) - } -} - -impl sum_tree::Item for StatusEntry { - type Summary = PathSummary; - - fn summary(&self, _: &::Context) -> Self::Summary { - PathSummary { - max_path: self.repo_path.0.clone(), - item_summary: self.status.summary(), - } - } -} - -impl sum_tree::KeyedItem for StatusEntry { - type Key = PathKey; - - fn key(&self) -> Self::Key { - PathKey(self.repo_path.0.clone()) - } -} - impl<'a> sum_tree::Dimension<'a, PathSummary> for GitSummary { fn zero(_cx: &()) -> Self { Default::default() @@ -3978,6 +3506,14 @@ impl<'a> sum_tree::Dimension<'a, PathSummary> for GitSummary { } } +impl<'a> sum_tree::SeekTarget<'a, PathSummary, (TraversalProgress<'a>, GitSummary)> + for PathTarget<'_> +{ + fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitSummary), _: &()) -> Ordering { + self.cmp_path(&cursor_location.0.max_path) + } +} + impl<'a, S: Summary> sum_tree::Dimension<'a, PathSummary> for PathKey { fn zero(_: &S::Context) -> Self { Default::default() @@ -4204,11 +3740,10 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { } struct BackgroundScanner { - state: Arc>, + state: Mutex, fs: Arc, fs_case_sensitive: bool, status_updates_tx: UnboundedSender, - scans_running: Arc, executor: BackgroundExecutor, scan_requests_rx: channel::Receiver, path_prefixes_to_scan_rx: channel::Receiver, @@ -4322,8 +3857,7 @@ impl BackgroundScanner { state.snapshot.completed_scan_id = state.snapshot.scan_id; } - let scanning = self.scans_running.load(atomic::Ordering::Acquire) > 0; - self.send_status_update(scanning, SmallVec::new()); + self.send_status_update(false, SmallVec::new()); // Process any any FS events that occurred while performing the initial scan. 
// For these events, update events cannot be as precise, because we didn't @@ -4349,8 +3883,7 @@ impl BackgroundScanner { // these before handling changes reported by the filesystem. request = self.next_scan_request().fuse() => { let Ok(request) = request else { break }; - let scanning = self.scans_running.load(atomic::Ordering::Acquire) > 0; - if !self.process_scan_request(request, scanning).await { + if !self.process_scan_request(request, false).await { return; } } @@ -4372,8 +3905,7 @@ impl BackgroundScanner { self.process_events(vec![abs_path]).await; } } - let scanning = self.scans_running.load(atomic::Ordering::Acquire) > 0; - self.send_status_update(scanning, request.done); + self.send_status_update(false, request.done); } paths = fs_events_rx.next().fuse() => { @@ -4568,35 +4100,19 @@ impl BackgroundScanner { self.update_ignore_statuses(scan_job_tx).await; self.scan_dirs(false, scan_job_rx).await; - let status_update = if !dot_git_abs_paths.is_empty() { - Some(self.update_git_repositories(dot_git_abs_paths)) - } else { - None - }; + if !dot_git_abs_paths.is_empty() { + self.update_git_repositories(dot_git_abs_paths); + } - let phase = self.phase; - let status_update_tx = self.status_updates_tx.clone(); - let state = self.state.clone(); - let scans_running = self.scans_running.clone(); - self.executor - .spawn(async move { - if let Some(status_update) = status_update { - status_update.await; - } - - { - let mut state = state.lock(); - state.snapshot.completed_scan_id = state.snapshot.scan_id; - for (_, entry) in mem::take(&mut state.removed_entries) { - state.scanned_dirs.remove(&entry.id); - } - #[cfg(test)] - state.snapshot.check_git_invariants(); - } - let scanning = scans_running.load(atomic::Ordering::Acquire) > 0; - send_status_update_inner(phase, state, status_update_tx, scanning, SmallVec::new()); - }) - .detach(); + { + let mut state = self.state.lock(); + state.snapshot.completed_scan_id = state.snapshot.scan_id; + for (_, entry) in mem::take(&mut 
state.removed_entries) { + state.scanned_dirs.remove(&entry.id); + } + } + self.send_status_update(false, SmallVec::new()); + // send_status_update_inner(phase, state, status_update_tx, false, SmallVec::new()); } async fn forcibly_load_paths(&self, paths: &[Arc]) -> bool { @@ -4638,7 +4154,6 @@ impl BackgroundScanner { return; } - inc_scans_running(&self.scans_running); let progress_update_count = AtomicUsize::new(0); self.executor .scoped(|scope| { @@ -4695,18 +4210,32 @@ impl BackgroundScanner { } }) .await; - - dec_scans_running(&self.scans_running, 1); } fn send_status_update(&self, scanning: bool, barrier: SmallVec<[barrier::Sender; 1]>) -> bool { - send_status_update_inner( + let mut state = self.state.lock(); + if state.changed_paths.is_empty() && scanning { + return true; + } + + let new_snapshot = state.snapshot.clone(); + let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); + let changes = build_diff( self.phase, - self.state.clone(), - self.status_updates_tx.clone(), - scanning, - barrier, - ) + &old_snapshot, + &new_snapshot, + &state.changed_paths, + ); + state.changed_paths.clear(); + + self.status_updates_tx + .unbounded_send(ScanState::Updated { + snapshot: new_snapshot, + changes, + scanning, + barrier, + }) + .is_ok() } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { @@ -4749,26 +4278,18 @@ impl BackgroundScanner { swap_to_front(&mut child_paths, *GITIGNORE); swap_to_front(&mut child_paths, *DOT_GIT); - let mut git_status_update_jobs = Vec::new(); for child_abs_path in child_paths { let child_abs_path: Arc = child_abs_path.into(); let child_name = child_abs_path.file_name().unwrap(); let child_path: Arc = job.path.join(child_name).into(); if child_name == *DOT_GIT { - { - let mut state = self.state.lock(); - let repo = state.insert_git_repository( - child_path.clone(), - self.fs.as_ref(), - self.watcher.as_ref(), - ); - if let Some(local_repo) = repo { - inc_scans_running(&self.scans_running); - 
git_status_update_jobs - .push(self.schedule_git_statuses_update(&mut state, local_repo)); - } - } + let mut state = self.state.lock(); + state.insert_git_repository( + child_path.clone(), + self.fs.as_ref(), + self.watcher.as_ref(), + ); } else if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { Ok(ignore) => { @@ -4884,32 +4405,6 @@ impl BackgroundScanner { new_entries.push(child_entry); } - let task_state = self.state.clone(); - let phase = self.phase; - let status_updates_tx = self.status_updates_tx.clone(); - let scans_running = self.scans_running.clone(); - self.executor - .spawn(async move { - if !git_status_update_jobs.is_empty() { - let status_updates = join_all(git_status_update_jobs).await; - let status_updated = status_updates - .iter() - .any(|update_result| update_result.is_ok()); - dec_scans_running(&scans_running, status_updates.len() as i32); - if status_updated { - let scanning = scans_running.load(atomic::Ordering::Acquire) > 0; - send_status_update_inner( - phase, - task_state, - status_updates_tx, - scanning, - SmallVec::new(), - ); - } - } - }) - .detach(); - let mut state = self.state.lock(); // Identify any subdirectories that should not be scanned. @@ -4999,79 +4494,6 @@ impl BackgroundScanner { } } - // Group all relative paths by their git repository. 
- let mut paths_by_git_repo = HashMap::default(); - for (relative_path, abs_path) in relative_paths.iter().zip(&abs_paths) { - let repository_data = state - .snapshot - .local_repo_containing_path(relative_path) - .zip(state.snapshot.repository_containing_abs_path(abs_path)); - if let Some((local_repo, entry)) = repository_data { - if let Ok(repo_path) = local_repo.relativize(relative_path) { - paths_by_git_repo - .entry(local_repo.work_directory.clone()) - .or_insert_with(|| RepoPaths { - entry: entry.clone(), - repo: local_repo.repo_ptr.clone(), - repo_paths: Default::default(), - }) - .add_path(repo_path); - } - } - } - - for (_work_directory, mut paths) in paths_by_git_repo { - if let Ok(status) = paths.repo.status_blocking(&paths.repo_paths) { - let mut changed_path_statuses = Vec::new(); - let statuses = paths.entry.statuses_by_path.clone(); - let mut cursor = statuses.cursor::(&()); - - for (repo_path, status) in &*status.entries { - paths.remove_repo_path(repo_path); - if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) { - if &cursor.item().unwrap().status == status { - continue; - } - } - - changed_path_statuses.push(Edit::Insert(StatusEntry { - repo_path: repo_path.clone(), - status: *status, - })); - } - - let mut cursor = statuses.cursor::(&()); - for path in paths.repo_paths { - if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) { - changed_path_statuses.push(Edit::Remove(PathKey(path.0))); - } - } - - if !changed_path_statuses.is_empty() { - let work_directory_id = state.snapshot.repositories.update( - &AbsPathKey(paths.entry.work_directory_abs_path.as_path().into()), - &(), - move |repository_entry| { - repository_entry - .statuses_by_path - .edit(changed_path_statuses, &()); - - repository_entry.work_directory_id - }, - ); - - if let Some(work_directory_id) = work_directory_id { - state.snapshot.git_repositories.update( - &work_directory_id, - |local_repository_entry| { - local_repository_entry.status_scan_id += 1; 
- }, - ); - } - } - } - } - for (path, metadata) in relative_paths.iter().zip(metadata.into_iter()) { let abs_path: Arc = root_abs_path.as_path().join(path).into(); match metadata { @@ -5137,9 +4559,6 @@ impl BackgroundScanner { let id = local_repo.work_directory_id; log::debug!("remove repo path: {:?}", path); snapshot.git_repositories.remove(&id); - snapshot - .repositories - .retain(&(), |repo_entry| repo_entry.work_directory_id != id); return Some(()); } } @@ -5297,129 +4716,71 @@ impl BackgroundScanner { state.snapshot.entries_by_id.edit(entries_by_id_edits, &()); } - fn update_git_repositories(&self, dot_git_paths: Vec) -> Task<()> { + fn update_git_repositories(&self, dot_git_paths: Vec) { log::info!("reloading repositories: {dot_git_paths:?}"); - - let mut status_updates = Vec::new(); - { - let mut state = self.state.lock(); - let scan_id = state.snapshot.scan_id; - for dot_git_dir in dot_git_paths { - let existing_repository_entry = - state - .snapshot - .git_repositories - .iter() - .find_map(|(_, repo)| { - if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir - || repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir) - { - Some(repo.clone()) - } else { - None - } - }); - - let local_repository = match existing_repository_entry { - None => { - let Ok(relative) = dot_git_dir.strip_prefix(state.snapshot.abs_path()) - else { - return Task::ready(()); - }; - match state.insert_git_repository( - relative.into(), - self.fs.as_ref(), - self.watcher.as_ref(), - ) { - Some(output) => output, - None => continue, - } - } - Some(local_repository) => { - if local_repository.git_dir_scan_id == scan_id { - continue; - } - local_repository.repo_ptr.reload_index(); - - state.snapshot.git_repositories.update( - &local_repository.work_directory_id, - |entry| { - entry.git_dir_scan_id = scan_id; - }, - ); - if let Some(repo_entry) = state - .snapshot - .repository_for_id(local_repository.work_directory_id) + let mut state = self.state.lock(); + let scan_id = 
state.snapshot.scan_id; + for dot_git_dir in dot_git_paths { + let existing_repository_entry = + state + .snapshot + .git_repositories + .iter() + .find_map(|(_, repo)| { + if repo.dot_git_dir_abs_path.as_ref() == &dot_git_dir + || repo.dot_git_worktree_abs_path.as_deref() == Some(&dot_git_dir) { - let abs_path_key = - AbsPathKey(repo_entry.work_directory_abs_path.as_path().into()); - state - .snapshot - .repositories - .update(&abs_path_key, &(), |repo| repo.worktree_scan_id = scan_id); + Some(repo.clone()) + } else { + None } - - local_repository - } - }; - - inc_scans_running(&self.scans_running); - status_updates - .push(self.schedule_git_statuses_update(&mut state, local_repository)); - } - - // Remove any git repositories whose .git entry no longer exists. - let snapshot = &mut state.snapshot; - let mut ids_to_preserve = HashSet::default(); - for (&work_directory_id, entry) in snapshot.git_repositories.iter() { - let exists_in_snapshot = snapshot - .entry_for_id(work_directory_id) - .map_or(false, |entry| { - snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() }); - if exists_in_snapshot - || matches!( - smol::block_on(self.fs.metadata(&entry.dot_git_dir_abs_path)), - Ok(Some(_)) - ) - { - ids_to_preserve.insert(work_directory_id); + match existing_repository_entry { + None => { + let Ok(relative) = dot_git_dir.strip_prefix(state.snapshot.abs_path()) else { + return; + }; + state.insert_git_repository( + relative.into(), + self.fs.as_ref(), + self.watcher.as_ref(), + ); } - } - - snapshot - .git_repositories - .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); - snapshot.repositories.retain(&(), |entry| { - ids_to_preserve.contains(&entry.work_directory_id) - }); + Some(local_repository) => { + state.snapshot.git_repositories.update( + &local_repository.work_directory_id, + |entry| { + entry.git_dir_scan_id = scan_id; + }, + ); + } + }; } - let scans_running = self.scans_running.clone(); - self.executor.spawn(async move { - 
let updates_finished: Vec> = - join_all(status_updates).await; - let n = updates_finished.len(); - dec_scans_running(&scans_running, n as i32); - }) - } + // Remove any git repositories whose .git entry no longer exists. + let snapshot = &mut state.snapshot; + let mut ids_to_preserve = HashSet::default(); + for (&work_directory_id, entry) in snapshot.git_repositories.iter() { + let exists_in_snapshot = snapshot + .entry_for_id(work_directory_id) + .map_or(false, |entry| { + snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() + }); - /// Update the git statuses for a given batch of entries. - fn schedule_git_statuses_update( - &self, - state: &mut BackgroundScannerState, - local_repository: LocalRepositoryEntry, - ) -> oneshot::Receiver<()> { - let job_state = self.state.clone(); - let (tx, rx) = oneshot::channel(); + if exists_in_snapshot + || matches!( + smol::block_on(self.fs.metadata(&entry.dot_git_dir_abs_path)), + Ok(Some(_)) + ) + { + ids_to_preserve.insert(work_directory_id); + } + } - state.repository_scans.insert( - local_repository.work_directory.path_key(), - self.executor - .spawn(do_git_status_update(job_state, local_repository, tx)), - ); - rx + snapshot + .git_repositories + .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); } async fn progress_timer(&self, running: bool) { @@ -5449,161 +4810,6 @@ impl BackgroundScanner { } } -fn inc_scans_running(scans_running: &AtomicI32) { - scans_running.fetch_add(1, atomic::Ordering::Release); -} - -fn dec_scans_running(scans_running: &AtomicI32, by: i32) { - let old = scans_running.fetch_sub(by, atomic::Ordering::Release); - debug_assert!(old >= by); -} - -fn send_status_update_inner( - phase: BackgroundScannerPhase, - state: Arc>, - status_updates_tx: UnboundedSender, - scanning: bool, - barrier: SmallVec<[barrier::Sender; 1]>, -) -> bool { - let mut state = state.lock(); - if state.changed_paths.is_empty() && scanning { - return true; - } - - let new_snapshot = 
state.snapshot.clone(); - let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone()); - let changes = build_diff(phase, &old_snapshot, &new_snapshot, &state.changed_paths); - state.changed_paths.clear(); - - status_updates_tx - .unbounded_send(ScanState::Updated { - snapshot: new_snapshot, - changes, - scanning, - barrier, - }) - .is_ok() -} - -async fn update_branches( - state: &Mutex, - repository: &mut LocalRepositoryEntry, -) -> Result<()> { - let branches = repository.repo().branches().await?; - let snapshot = state.lock().snapshot.snapshot.clone(); - let mut repository = snapshot - .repositories - .iter() - .find(|repo_entry| repo_entry.work_directory_id == repository.work_directory_id) - .context("missing repository")? - .clone(); - repository.current_branch = branches.into_iter().find(|branch| branch.is_head); - - let mut state = state.lock(); - state - .snapshot - .repositories - .insert_or_replace(repository, &()); - - Ok(()) -} - -async fn do_git_status_update( - job_state: Arc>, - mut local_repository: LocalRepositoryEntry, - tx: oneshot::Sender<()>, -) { - let repository_name = local_repository.work_directory.display_name(); - log::trace!("updating git branches for repo {repository_name}"); - update_branches(&job_state, &mut local_repository) - .await - .log_err(); - let t0 = Instant::now(); - - log::trace!("updating git statuses for repo {repository_name}"); - let Some(statuses) = local_repository - .repo() - .status_blocking(&[git::WORK_DIRECTORY_REPO_PATH.clone()]) - .log_err() - else { - return; - }; - log::trace!( - "computed git statuses for repo {repository_name} in {:?}", - t0.elapsed() - ); - - let t0 = Instant::now(); - let mut changed_paths = Vec::new(); - let snapshot = job_state.lock().snapshot.snapshot.clone(); - - let Some(mut repository) = snapshot - .repository_for_id(local_repository.work_directory_id) - .context("tried to update git statuses for a repository that isn't in the snapshot") - .log_err() - 
.cloned() - else { - return; - }; - - let merge_head_shas = local_repository.repo().merge_head_shas(); - if merge_head_shas != local_repository.current_merge_head_shas { - mem::take(&mut repository.current_merge_conflicts); - } - - let mut new_entries_by_path = SumTree::new(&()); - for (repo_path, status) in statuses.entries.iter() { - let project_path = local_repository.work_directory.try_unrelativize(repo_path); - - new_entries_by_path.insert_or_replace( - StatusEntry { - repo_path: repo_path.clone(), - status: *status, - }, - &(), - ); - if status.is_conflicted() { - repository.current_merge_conflicts.insert(repo_path.clone()); - } - - if let Some(path) = project_path { - changed_paths.push(path); - } - } - - log::trace!("statuses: {:#?}", new_entries_by_path); - repository.statuses_by_path = new_entries_by_path; - let mut state = job_state.lock(); - state - .snapshot - .repositories - .insert_or_replace(repository, &()); - state - .snapshot - .git_repositories - .update(&local_repository.work_directory_id, |entry| { - entry.current_merge_head_shas = merge_head_shas; - entry.merge_message = - std::fs::read_to_string(local_repository.dot_git_dir_abs_path.join("MERGE_MSG")) - .ok() - .and_then(|merge_msg| Some(merge_msg.lines().next()?.to_owned())); - entry.status_scan_id += 1; - }); - - util::extend_sorted( - &mut state.changed_paths, - changed_paths, - usize::MAX, - Ord::cmp, - ); - - log::trace!( - "applied git status updates for repo {repository_name} in {:?}", - t0.elapsed(), - ); - tx.send(()).ok(); -} - fn build_diff( phase: BackgroundScannerPhase, old_snapshot: &Snapshot, @@ -5730,32 +4936,6 @@ fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { result } -#[derive(Debug)] -struct RepoPaths { - repo: Arc, - entry: RepositoryEntry, - // sorted - repo_paths: Vec, -} - -impl RepoPaths { - fn add_path(&mut self, repo_path: RepoPath) { - match self.repo_paths.binary_search(&repo_path) { - Ok(_) => {} - Err(ix) => self.repo_paths.insert(ix, 
repo_path), - } - } - - fn remove_repo_path(&mut self, repo_path: &RepoPath) { - match self.repo_paths.binary_search(&repo_path) { - Ok(ix) => { - self.repo_paths.remove(ix); - } - Err(_) => {} - } - } -} - #[derive(Debug)] struct ScanJob { abs_path: Arc, @@ -5850,8 +5030,11 @@ impl WorktreeModelHandle for Entity { let tree = self.clone(); let (fs, root_path, mut git_dir_scan_id) = self.update(cx, |tree, _| { let tree = tree.as_local().unwrap(); - let repository = tree.repositories.first().unwrap(); - let local_repo_entry = tree.get_local_repo(&repository).unwrap(); + let local_repo_entry = tree + .git_repositories + .values() + .min_by_key(|local_repo_entry| local_repo_entry.work_directory.clone()) + .unwrap(); ( tree.fs.clone(), local_repo_entry.dot_git_dir_abs_path.clone(), @@ -5860,11 +5043,12 @@ impl WorktreeModelHandle for Entity { }); let scan_id_increased = |tree: &mut Worktree, git_dir_scan_id: &mut usize| { - let repository = tree.repositories.first().unwrap(); + let tree = tree.as_local().unwrap(); + // let repository = tree.repositories.first().unwrap(); let local_repo_entry = tree - .as_local() - .unwrap() - .get_local_repo(&repository) + .git_repositories + .values() + .min_by_key(|local_repo_entry| local_repo_entry.work_directory.clone()) .unwrap(); if local_repo_entry.git_dir_scan_id > *git_dir_scan_id { @@ -6093,14 +5277,6 @@ impl<'a, S: Summary> SeekTarget<'a, PathSummary, TraversalProgress<'a>> for P } } -impl<'a> SeekTarget<'a, PathSummary, (TraversalProgress<'a>, GitSummary)> - for PathTarget<'_> -{ - fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitSummary), _: &()) -> Ordering { - self.cmp_path(&cursor_location.0.max_path) - } -} - #[derive(Debug)] enum TraversalTarget<'a> { Path(PathTarget<'a>), @@ -6229,138 +5405,6 @@ impl<'a> TryFrom<(&'a CharBag, &PathMatcher, proto::Entry)> for Entry { } } -fn status_from_proto( - simple_status: i32, - status: Option, -) -> anyhow::Result { - use proto::git_file_status::Variant; - - let 
Some(variant) = status.and_then(|status| status.variant) else { - let code = proto::GitStatus::from_i32(simple_status) - .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?; - let result = match code { - proto::GitStatus::Added => TrackedStatus { - worktree_status: StatusCode::Added, - index_status: StatusCode::Unmodified, - } - .into(), - proto::GitStatus::Modified => TrackedStatus { - worktree_status: StatusCode::Modified, - index_status: StatusCode::Unmodified, - } - .into(), - proto::GitStatus::Conflict => UnmergedStatus { - first_head: UnmergedStatusCode::Updated, - second_head: UnmergedStatusCode::Updated, - } - .into(), - proto::GitStatus::Deleted => TrackedStatus { - worktree_status: StatusCode::Deleted, - index_status: StatusCode::Unmodified, - } - .into(), - _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")), - }; - return Ok(result); - }; - - let result = match variant { - Variant::Untracked(_) => FileStatus::Untracked, - Variant::Ignored(_) => FileStatus::Ignored, - Variant::Unmerged(unmerged) => { - let [first_head, second_head] = - [unmerged.first_head, unmerged.second_head].map(|head| { - let code = proto::GitStatus::from_i32(head) - .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?; - let result = match code { - proto::GitStatus::Added => UnmergedStatusCode::Added, - proto::GitStatus::Updated => UnmergedStatusCode::Updated, - proto::GitStatus::Deleted => UnmergedStatusCode::Deleted, - _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")), - }; - Ok(result) - }); - let [first_head, second_head] = [first_head?, second_head?]; - UnmergedStatus { - first_head, - second_head, - } - .into() - } - Variant::Tracked(tracked) => { - let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status] - .map(|status| { - let code = proto::GitStatus::from_i32(status) - .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?; - let result = match code { - 
proto::GitStatus::Modified => StatusCode::Modified, - proto::GitStatus::TypeChanged => StatusCode::TypeChanged, - proto::GitStatus::Added => StatusCode::Added, - proto::GitStatus::Deleted => StatusCode::Deleted, - proto::GitStatus::Renamed => StatusCode::Renamed, - proto::GitStatus::Copied => StatusCode::Copied, - proto::GitStatus::Unmodified => StatusCode::Unmodified, - _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")), - }; - Ok(result) - }); - let [index_status, worktree_status] = [index_status?, worktree_status?]; - TrackedStatus { - index_status, - worktree_status, - } - .into() - } - }; - Ok(result) -} - -fn status_to_proto(status: FileStatus) -> proto::GitFileStatus { - use proto::git_file_status::{Tracked, Unmerged, Variant}; - - let variant = match status { - FileStatus::Untracked => Variant::Untracked(Default::default()), - FileStatus::Ignored => Variant::Ignored(Default::default()), - FileStatus::Unmerged(UnmergedStatus { - first_head, - second_head, - }) => Variant::Unmerged(Unmerged { - first_head: unmerged_status_to_proto(first_head), - second_head: unmerged_status_to_proto(second_head), - }), - FileStatus::Tracked(TrackedStatus { - index_status, - worktree_status, - }) => Variant::Tracked(Tracked { - index_status: tracked_status_to_proto(index_status), - worktree_status: tracked_status_to_proto(worktree_status), - }), - }; - proto::GitFileStatus { - variant: Some(variant), - } -} - -fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 { - match code { - UnmergedStatusCode::Added => proto::GitStatus::Added as _, - UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _, - UnmergedStatusCode::Updated => proto::GitStatus::Updated as _, - } -} - -fn tracked_status_to_proto(code: StatusCode) -> i32 { - match code { - StatusCode::Added => proto::GitStatus::Added as _, - StatusCode::Deleted => proto::GitStatus::Deleted as _, - StatusCode::Modified => proto::GitStatus::Modified as _, - StatusCode::Renamed => 
proto::GitStatus::Renamed as _, - StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _, - StatusCode::Copied => proto::GitStatus::Copied as _, - StatusCode::Unmodified => proto::GitStatus::Unmodified as _, - } -} - #[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct ProjectEntryId(usize); diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index a4bbdd8937..45ffc22892 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1,15 +1,10 @@ use crate::{ - Entry, EntryKind, Event, PathChange, StatusEntry, WorkDirectory, Worktree, WorktreeModelHandle, + Entry, EntryKind, Event, PathChange, WorkDirectory, Worktree, WorktreeModelHandle, worktree_settings::WorktreeSettings, }; use anyhow::Result; use fs::{FakeFs, Fs, RealFs, RemoveOptions}; -use git::{ - GITIGNORE, - repository::RepoPath, - status::{FileStatus, StatusCode, TrackedStatus}, -}; -use git2::RepositoryInitOptions; +use git::GITIGNORE; use gpui::{AppContext as _, BorrowAppContext, Context, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; @@ -685,183 +680,6 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { assert_eq!(read_dir_count_3 - read_dir_count_2, 2); } -#[gpui::test(iterations = 10)] -async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { - init_test(cx); - cx.update(|cx| { - cx.update_global::(|store, cx| { - store.update_user_settings::(cx, |project_settings| { - project_settings.file_scan_exclusions = Some(Vec::new()); - }); - }); - }); - let fs = FakeFs::new(cx.background_executor.clone()); - fs.insert_tree( - path!("/root"), - json!({ - ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", - "tree": { - ".git": {}, - ".gitignore": "ignored-dir\n", - "tracked-dir": { - "tracked-file1": "", - "ancestor-ignored-file1": "", - }, - "ignored-dir": { - "ignored-file1": "" - } - } - }), - ) - .await; - 
fs.set_head_and_index_for_repo( - path!("/root/tree/.git").as_ref(), - &[ - (".gitignore".into(), "ignored-dir\n".into()), - ("tracked-dir/tracked-file1".into(), "".into()), - ], - ); - - let tree = Worktree::local( - path!("/root/tree").as_ref(), - true, - fs.clone(), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - - tree.read_with(cx, |tree, _| { - tree.as_local() - .unwrap() - .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) - }) - .recv() - .await; - - cx.read(|cx| { - let tree = tree.read(cx); - assert_entry_git_state(tree, "tracked-dir/tracked-file1", None, false); - assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file1", None, false); - assert_entry_git_state(tree, "ignored-dir/ignored-file1", None, true); - }); - - fs.create_file( - path!("/root/tree/tracked-dir/tracked-file2").as_ref(), - Default::default(), - ) - .await - .unwrap(); - fs.set_index_for_repo( - path!("/root/tree/.git").as_ref(), - &[ - (".gitignore".into(), "ignored-dir\n".into()), - ("tracked-dir/tracked-file1".into(), "".into()), - ("tracked-dir/tracked-file2".into(), "".into()), - ], - ); - fs.create_file( - path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(), - Default::default(), - ) - .await - .unwrap(); - fs.create_file( - path!("/root/tree/ignored-dir/ignored-file2").as_ref(), - Default::default(), - ) - .await - .unwrap(); - - cx.executor().run_until_parked(); - cx.read(|cx| { - let tree = tree.read(cx); - assert_entry_git_state( - tree, - "tracked-dir/tracked-file2", - Some(StatusCode::Added), - false, - ); - assert_entry_git_state(tree, "tracked-dir/ancestor-ignored-file2", None, false); - assert_entry_git_state(tree, "ignored-dir/ignored-file2", None, true); - assert!(tree.entry_for_path(".git").unwrap().is_ignored); - }); -} - -#[gpui::test] -async fn test_update_gitignore(cx: &mut TestAppContext) { - init_test(cx); - let fs = 
FakeFs::new(cx.background_executor.clone()); - fs.insert_tree( - path!("/root"), - json!({ - ".git": {}, - ".gitignore": "*.txt\n", - "a.xml": "", - "b.txt": "Some text" - }), - ) - .await; - - fs.set_head_and_index_for_repo( - path!("/root/.git").as_ref(), - &[ - (".gitignore".into(), "*.txt\n".into()), - ("a.xml".into(), "".into()), - ], - ); - - let tree = Worktree::local( - path!("/root").as_ref(), - true, - fs.clone(), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - - tree.read_with(cx, |tree, _| { - tree.as_local() - .unwrap() - .refresh_entries_for_paths(vec![Path::new("").into()]) - }) - .recv() - .await; - - // One file is unmodified, the other is ignored. - cx.read(|cx| { - let tree = tree.read(cx); - assert_entry_git_state(tree, "a.xml", None, false); - assert_entry_git_state(tree, "b.txt", None, true); - }); - - // Change the gitignore, and stage the newly non-ignored file. - fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into()) - .await - .unwrap(); - fs.set_index_for_repo( - Path::new(path!("/root/.git")), - &[ - (".gitignore".into(), "*.txt\n".into()), - ("a.xml".into(), "".into()), - ("b.txt".into(), "Some text".into()), - ], - ); - - cx.executor().run_until_parked(); - cx.read(|cx| { - let tree = tree.read(cx); - assert_entry_git_state(tree, "a.xml", None, true); - assert_entry_git_state(tree, "b.txt", Some(StatusCode::Added), false); - }); -} - #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { init_test(cx); @@ -2106,655 +1924,6 @@ fn random_filename(rng: &mut impl Rng) -> String { .collect() } -// NOTE: -// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename -// a directory which some program has already open. -// This is a limitation of the Windows. 
-// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder -#[gpui::test] -#[cfg_attr(target_os = "windows", ignore)] -async fn test_rename_work_directory(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - let root = TempTree::new(json!({ - "projects": { - "project1": { - "a": "", - "b": "", - } - }, - - })); - let root_path = root.path(); - - let tree = Worktree::local( - root_path, - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - let repo = git_init(&root_path.join("projects/project1")); - git_add("a", &repo); - git_commit("init", &repo); - std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap(); - - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - - tree.flush_fs_events(cx).await; - - cx.read(|cx| { - let tree = tree.read(cx); - let repo = tree.repositories.iter().next().unwrap(); - assert_eq!( - repo.work_directory_abs_path, - root_path.join("projects/project1") - ); - assert_eq!( - repo.status_for_path(&"a".into()).map(|entry| entry.status), - Some(StatusCode::Modified.worktree()), - ); - assert_eq!( - repo.status_for_path(&"b".into()).map(|entry| entry.status), - Some(FileStatus::Untracked), - ); - }); - - std::fs::rename( - root_path.join("projects/project1"), - root_path.join("projects/project2"), - ) - .unwrap(); - tree.flush_fs_events(cx).await; - - cx.read(|cx| { - let tree = tree.read(cx); - let repo = tree.repositories.iter().next().unwrap(); - assert_eq!( - repo.work_directory_abs_path, - root_path.join("projects/project2") - ); - assert_eq!( - repo.status_for_path(&"a".into()).unwrap().status, - StatusCode::Modified.worktree(), - ); - assert_eq!( - repo.status_for_path(&"b".into()).unwrap().status, - FileStatus::Untracked, - ); - }); -} - -// NOTE: This test always fails on Windows, because on Windows, unlike on Unix, -// you can't rename a directory which some program has 
already open. This is a -// limitation of the Windows. See: -// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder -#[gpui::test] -#[cfg_attr(target_os = "windows", ignore)] -async fn test_file_status(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - const IGNORE_RULE: &str = "**/target"; - - let root = TempTree::new(json!({ - "project": { - "a.txt": "a", - "b.txt": "bb", - "c": { - "d": { - "e.txt": "eee" - } - }, - "f.txt": "ffff", - "target": { - "build_file": "???" - }, - ".gitignore": IGNORE_RULE - }, - - })); - - const A_TXT: &str = "a.txt"; - const B_TXT: &str = "b.txt"; - const E_TXT: &str = "c/d/e.txt"; - const F_TXT: &str = "f.txt"; - const DOTGITIGNORE: &str = ".gitignore"; - const BUILD_FILE: &str = "target/build_file"; - - // Set up git repository before creating the worktree. - let work_dir = root.path().join("project"); - let mut repo = git_init(work_dir.as_path()); - repo.add_ignore_rule(IGNORE_RULE).unwrap(); - git_add(A_TXT, &repo); - git_add(E_TXT, &repo); - git_add(DOTGITIGNORE, &repo); - git_commit("Initial commit", &repo); - - let tree = Worktree::local( - root.path(), - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - let root_path = root.path(); - - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - // Check that the right git state is observed on startup - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - assert_eq!( - repo_entry.work_directory_abs_path, - root_path.join("project") - ); - - assert_eq!( - repo_entry.status_for_path(&B_TXT.into()).unwrap().status, - FileStatus::Untracked, - ); - assert_eq!( - repo_entry.status_for_path(&F_TXT.into()).unwrap().status, - 
FileStatus::Untracked, - ); - }); - - // Modify a file in the working copy. - std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - // The worktree detects that the file's git status has changed. - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - assert_eq!( - repo_entry.status_for_path(&A_TXT.into()).unwrap().status, - StatusCode::Modified.worktree(), - ); - }); - - // Create a commit in the git repository. - git_add(A_TXT, &repo); - git_add(B_TXT, &repo); - git_commit("Committing modified and added", &repo); - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - // The worktree detects that the files' git status have changed. - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - assert_eq!( - repo_entry.status_for_path(&F_TXT.into()).unwrap().status, - FileStatus::Untracked, - ); - assert_eq!(repo_entry.status_for_path(&B_TXT.into()), None); - assert_eq!(repo_entry.status_for_path(&A_TXT.into()), None); - }); - - // Modify files in the working copy and perform git operations on other files. 
- git_reset(0, &repo); - git_remove_index(Path::new(B_TXT), &repo); - git_stash(&mut repo); - std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); - std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - // Check that more complex repo changes are tracked - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - - assert_eq!(repo_entry.status_for_path(&A_TXT.into()), None); - assert_eq!( - repo_entry.status_for_path(&B_TXT.into()).unwrap().status, - FileStatus::Untracked, - ); - assert_eq!( - repo_entry.status_for_path(&E_TXT.into()).unwrap().status, - StatusCode::Modified.worktree(), - ); - }); - - std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); - std::fs::remove_dir_all(work_dir.join("c")).unwrap(); - std::fs::write( - work_dir.join(DOTGITIGNORE), - [IGNORE_RULE, "f.txt"].join("\n"), - ) - .unwrap(); - - git_add(Path::new(DOTGITIGNORE), &repo); - git_commit("Committing modified git ignore", &repo); - - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - let mut renamed_dir_name = "first_directory/second_directory"; - const RENAMED_FILE: &str = "rf.txt"; - - std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); - std::fs::write( - work_dir.join(renamed_dir_name).join(RENAMED_FILE), - "new-contents", - ) - .unwrap(); - - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - assert_eq!( - repo_entry - .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()) - .unwrap() - .status, - FileStatus::Untracked, - ); - }); - - renamed_dir_name = "new_first_directory/second_directory"; - - 
std::fs::rename( - work_dir.join("first_directory"), - work_dir.join("new_first_directory"), - ) - .unwrap(); - - tree.flush_fs_events(cx).await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo_entry = snapshot.repositories.iter().next().unwrap(); - - assert_eq!( - repo_entry - .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()) - .unwrap() - .status, - FileStatus::Untracked, - ); - }); -} - -#[gpui::test] -async fn test_git_repository_status(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - - let root = TempTree::new(json!({ - "project": { - "a.txt": "a", // Modified - "b.txt": "bb", // Added - "c.txt": "ccc", // Unchanged - "d.txt": "dddd", // Deleted - }, - - })); - - // Set up git repository before creating the worktree. - let work_dir = root.path().join("project"); - let repo = git_init(work_dir.as_path()); - git_add("a.txt", &repo); - git_add("c.txt", &repo); - git_add("d.txt", &repo); - git_commit("Initial commit", &repo); - std::fs::remove_file(work_dir.join("d.txt")).unwrap(); - std::fs::write(work_dir.join("a.txt"), "aa").unwrap(); - - let tree = Worktree::local( - root.path(), - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - // Check that the right git state is observed on startup - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - let repo = snapshot.repositories.iter().next().unwrap(); - let entries = repo.status().collect::>(); - - assert_eq!( - entries, - [ - StatusEntry { - repo_path: "a.txt".into(), - status: StatusCode::Modified.worktree(), - }, - StatusEntry { - repo_path: "b.txt".into(), - status: FileStatus::Untracked, - }, - 
StatusEntry { - repo_path: "d.txt".into(), - status: StatusCode::Deleted.worktree(), - }, - ] - ); - }); - - std::fs::write(work_dir.join("c.txt"), "some changes").unwrap(); - - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - let repository = snapshot.repositories.iter().next().unwrap(); - let entries = repository.status().collect::>(); - - assert_eq!( - entries, - [ - StatusEntry { - repo_path: "a.txt".into(), - status: StatusCode::Modified.worktree(), - }, - StatusEntry { - repo_path: "b.txt".into(), - status: FileStatus::Untracked, - }, - StatusEntry { - repo_path: "c.txt".into(), - status: StatusCode::Modified.worktree(), - }, - StatusEntry { - repo_path: "d.txt".into(), - status: StatusCode::Deleted.worktree(), - }, - ] - ); - }); - - git_add("a.txt", &repo); - git_add("c.txt", &repo); - git_remove_index(Path::new("d.txt"), &repo); - git_commit("Another commit", &repo); - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - std::fs::remove_file(work_dir.join("a.txt")).unwrap(); - std::fs::remove_file(work_dir.join("b.txt")).unwrap(); - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - let repo = snapshot.repositories.iter().next().unwrap(); - let entries = repo.status().collect::>(); - - // Deleting an untracked entry, b.txt, should leave no status - // a.txt was tracked, and so should have a status - assert_eq!( - entries, - [StatusEntry { - repo_path: "a.txt".into(), - status: StatusCode::Deleted.worktree(), - }] - ); - }); -} - -#[gpui::test] -async fn test_git_status_postprocessing(cx: &mut TestAppContext) { - init_test(cx); - 
cx.executor().allow_parking(); - - let root = TempTree::new(json!({ - "project": { - "sub": {}, - "a.txt": "", - }, - })); - - let work_dir = root.path().join("project"); - let repo = git_init(work_dir.as_path()); - // a.txt exists in HEAD and the working copy but is deleted in the index. - git_add("a.txt", &repo); - git_commit("Initial commit", &repo); - git_remove_index("a.txt".as_ref(), &repo); - // `sub` is a nested git repository. - let _sub = git_init(&work_dir.join("sub")); - - let tree = Worktree::local( - root.path(), - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - tree.flush_fs_events(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - let repo = snapshot.repositories.iter().next().unwrap(); - let entries = repo.status().collect::>(); - - // `sub` doesn't appear in our computed statuses. - // a.txt appears with a combined `DA` status. - assert_eq!( - entries, - [StatusEntry { - repo_path: "a.txt".into(), - status: TrackedStatus { - index_status: StatusCode::Deleted, - worktree_status: StatusCode::Added - } - .into(), - }] - ) - }); -} - -#[gpui::test] -async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - - let root = TempTree::new(json!({ - "my-repo": { - // .git folder will go here - "a.txt": "a", - "sub-folder-1": { - "sub-folder-2": { - "c.txt": "cc", - "d": { - "e.txt": "eee" - } - }, - } - }, - - })); - - const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt"; - const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt"; - - // Set up git repository before creating the worktree. 
- let git_repo_work_dir = root.path().join("my-repo"); - let repo = git_init(git_repo_work_dir.as_path()); - git_add(C_TXT, &repo); - git_commit("Initial commit", &repo); - - // Open the worktree in subfolder - let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2"); - let tree = Worktree::local( - root.path().join(project_root), - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - tree.flush_fs_events(cx).await; - tree.flush_fs_events_in_root_git_repository(cx).await; - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - cx.executor().run_until_parked(); - - // Ensure that the git status is loaded correctly - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - assert_eq!(snapshot.repositories.iter().count(), 1); - let repo = snapshot.repositories.iter().next().unwrap(); - assert_eq!( - repo.work_directory_abs_path.canonicalize().unwrap(), - root.path().join("my-repo").canonicalize().unwrap() - ); - - assert_eq!(repo.status_for_path(&C_TXT.into()), None); - assert_eq!( - repo.status_for_path(&E_TXT.into()).unwrap().status, - FileStatus::Untracked - ); - }); - - // Now we simulate FS events, but ONLY in the .git folder that's outside - // of out project root. - // Meaning: we don't produce any FS events for files inside the project. 
- git_add(E_TXT, &repo); - git_commit("Second commit", &repo); - tree.flush_fs_events_in_root_git_repository(cx).await; - cx.executor().run_until_parked(); - - tree.read_with(cx, |tree, _cx| { - let snapshot = tree.snapshot(); - let repos = snapshot.repositories().iter().cloned().collect::>(); - assert_eq!(repos.len(), 1); - let repo_entry = repos.into_iter().next().unwrap(); - - assert!(snapshot.repositories.iter().next().is_some()); - - assert_eq!(repo_entry.status_for_path(&C_TXT.into()), None); - assert_eq!(repo_entry.status_for_path(&E_TXT.into()), None); - }); -} - -#[gpui::test] -async fn test_conflicted_cherry_pick(cx: &mut TestAppContext) { - init_test(cx); - cx.executor().allow_parking(); - - let root = TempTree::new(json!({ - "project": { - "a.txt": "a", - }, - })); - let root_path = root.path(); - - let tree = Worktree::local( - root_path, - true, - Arc::new(RealFs::new(None, cx.executor())), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - let repo = git_init(&root_path.join("project")); - git_add("a.txt", &repo); - git_commit("init", &repo); - - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - - tree.flush_fs_events(cx).await; - - git_branch("other-branch", &repo); - git_checkout("refs/heads/other-branch", &repo); - std::fs::write(root_path.join("project/a.txt"), "A").unwrap(); - git_add("a.txt", &repo); - git_commit("capitalize", &repo); - let commit = repo - .head() - .expect("Failed to get HEAD") - .peel_to_commit() - .expect("HEAD is not a commit"); - git_checkout("refs/heads/main", &repo); - std::fs::write(root_path.join("project/a.txt"), "b").unwrap(); - git_add("a.txt", &repo); - git_commit("improve letter", &repo); - git_cherry_pick(&commit, &repo); - std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD")) - .expect("No CHERRY_PICK_HEAD"); - pretty_assertions::assert_eq!( - git_status(&repo), - collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)]) 
- ); - tree.flush_fs_events(cx).await; - let conflicts = tree.update(cx, |tree, _| { - let entry = tree.repositories.first().expect("No git entry").clone(); - entry - .current_merge_conflicts - .iter() - .cloned() - .collect::>() - }); - pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]); - - git_add("a.txt", &repo); - // Attempt to manually simulate what `git cherry-pick --continue` would do. - git_commit("whatevs", &repo); - std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD")) - .expect("Failed to remove CHERRY_PICK_HEAD"); - pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default()); - tree.flush_fs_events(cx).await; - let conflicts = tree.update(cx, |tree, _| { - let entry = tree.repositories.first().expect("No git entry").clone(); - entry - .current_merge_conflicts - .iter() - .cloned() - .collect::>() - }); - pretty_assertions::assert_eq!(conflicts, []); -} - #[gpui::test] async fn test_private_single_file_worktree(cx: &mut TestAppContext) { init_test(cx); @@ -2815,110 +1984,6 @@ fn test_unrelativize() { ); } -#[track_caller] -fn git_init(path: &Path) -> git2::Repository { - let mut init_opts = RepositoryInitOptions::new(); - init_opts.initial_head("main"); - git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository") -} - -#[track_caller] -fn git_add>(path: P, repo: &git2::Repository) { - let path = path.as_ref(); - let mut index = repo.index().expect("Failed to get index"); - index.add_path(path).expect("Failed to add file"); - index.write().expect("Failed to write index"); -} - -#[track_caller] -fn git_remove_index(path: &Path, repo: &git2::Repository) { - let mut index = repo.index().expect("Failed to get index"); - index.remove_path(path).expect("Failed to add file"); - index.write().expect("Failed to write index"); -} - -#[track_caller] -fn git_commit(msg: &'static str, repo: &git2::Repository) { - use git2::Signature; - - let signature = Signature::now("test", 
"test@zed.dev").unwrap(); - let oid = repo.index().unwrap().write_tree().unwrap(); - let tree = repo.find_tree(oid).unwrap(); - if let Ok(head) = repo.head() { - let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); - - let parent_commit = parent_obj.as_commit().unwrap(); - - repo.commit( - Some("HEAD"), - &signature, - &signature, - msg, - &tree, - &[parent_commit], - ) - .expect("Failed to commit with parent"); - } else { - repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) - .expect("Failed to commit"); - } -} - -#[track_caller] -fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) { - repo.cherrypick(commit, None).expect("Failed to cherrypick"); -} - -#[track_caller] -fn git_stash(repo: &mut git2::Repository) { - use git2::Signature; - - let signature = Signature::now("test", "test@zed.dev").unwrap(); - repo.stash_save(&signature, "N/A", None) - .expect("Failed to stash"); -} - -#[track_caller] -fn git_reset(offset: usize, repo: &git2::Repository) { - let head = repo.head().expect("Couldn't get repo head"); - let object = head.peel(git2::ObjectType::Commit).unwrap(); - let commit = object.as_commit().unwrap(); - let new_head = commit - .parents() - .inspect(|parnet| { - parnet.message(); - }) - .nth(offset) - .expect("Not enough history"); - repo.reset(new_head.as_object(), git2::ResetType::Soft, None) - .expect("Could not reset"); -} - -#[track_caller] -fn git_branch(name: &str, repo: &git2::Repository) { - let head = repo - .head() - .expect("Couldn't get repo head") - .peel_to_commit() - .expect("HEAD is not a commit"); - repo.branch(name, &head, false).expect("Failed to commit"); -} - -#[track_caller] -fn git_checkout(name: &str, repo: &git2::Repository) { - repo.set_head(name).expect("Failed to set head"); - repo.checkout_head(None).expect("Failed to check out head"); -} - -#[track_caller] -fn git_status(repo: &git2::Repository) -> collections::HashMap { - repo.statuses(None) - .unwrap() - .iter() - .map(|status| 
(status.path().unwrap().to_string(), status.status())) - .collect() -} - #[track_caller] fn check_worktree_entries( tree: &Worktree, @@ -2974,34 +2039,3 @@ fn init_test(cx: &mut gpui::TestAppContext) { WorktreeSettings::register(cx); }); } - -#[track_caller] -fn assert_entry_git_state( - tree: &Worktree, - path: &str, - index_status: Option, - is_ignored: bool, -) { - let entry = tree.entry_for_path(path).expect("entry {path} not found"); - let repos = tree.repositories().iter().cloned().collect::>(); - assert_eq!(repos.len(), 1); - let repo_entry = repos.into_iter().next().unwrap(); - let status = repo_entry - .status_for_path(&path.into()) - .map(|entry| entry.status); - let expected = index_status.map(|index_status| { - TrackedStatus { - index_status, - worktree_status: StatusCode::Unmodified, - } - .into() - }); - assert_eq!( - status, expected, - "expected {path} to have git status: {expected:?}" - ); - assert_eq!( - entry.is_ignored, is_ignored, - "expected {path} to have is_ignored: {is_ignored}" - ); -}