Migrate most callers of git-related worktree APIs to use the GitStore (#27225)

This is a pure refactoring PR. It goes through all of the git-related APIs
exposed by the worktree crate and minimizes their use outside that crate,
migrating callers of those APIs to read from the GitStore instead. This
prepares for moving git repository state out of worktrees and making the
GitStore the new source of truth.
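
As a concrete illustration of the new read path, here is a minimal sketch assembled from the call sites changed in this diff. The wrapper function `buffer_git_status` is illustrative only and not part of this PR; the `GitStore` and `Repository` methods it calls are the ones exposed or made public below.

```rust
use gpui::{App, Entity};
use project::Project;
use text::BufferId;

// Hypothetical helper: resolve a buffer's git file status through the
// project's GitStore instead of walking a worktree snapshot.
fn buffer_git_status(
    project: &Entity<Project>,
    buffer_id: BufferId,
    cx: &App,
) -> Option<git::status::FileStatus> {
    let git_store = project.read(cx).git_store().read(cx);
    // Find the repository that owns this buffer, plus the repo-relative path.
    let (repo, repo_path) = git_store.repository_and_path_for_buffer_id(buffer_id, cx)?;
    // Per-path status (like branch info) now lives on the Repository entity.
    Some(repo.read(cx).status_for_path(&repo_path)?.status)
}
```

Branch lookups follow the same route: enumerate `GitStore::repositories()` (or use `Project::active_repository`) and read `Repository::branch()`/`current_branch()`, rather than querying the worktree store.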

Other drive-by changes:

- `project::git` is now `project::git_store`, for consistency with the
other project stores
- the project panel's test module has been split into its own file

Release Notes:

- N/A

---------

Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Cole Miller 2025-03-21 00:10:17 -04:00 committed by GitHub
parent 9134630841
commit cf7d639fbc
26 changed files with 6480 additions and 6429 deletions

Cargo.lock (generated)

@@ -10590,6 +10590,7 @@ dependencies = [
  "smol",
  "snippet",
  "snippet_provider",
+ "sum_tree",
  "task",
  "tempfile",
  "terminal",

@@ -17,7 +17,7 @@ use language_model::{
     LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
     Role, StopReason, TokenUsage,
 };
-use project::git::GitStoreCheckpoint;
+use project::git_store::{GitStore, GitStoreCheckpoint};
 use project::{Project, Worktree};
 use prompt_store::{
     AssistantSystemPromptContext, PromptBuilder, RulesFile, WorktreeInfoForSystemPrompt,
@@ -1219,10 +1219,11 @@ impl Thread {
         project: Entity<Project>,
         cx: &mut Context<Self>,
     ) -> Task<Arc<ProjectSnapshot>> {
+        let git_store = project.read(cx).git_store().clone();
         let worktree_snapshots: Vec<_> = project
             .read(cx)
             .visible_worktrees(cx)
-            .map(|worktree| Self::worktree_snapshot(worktree, cx))
+            .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx))
             .collect();
         cx.spawn(async move |_, cx| {
@@ -1251,7 +1252,11 @@ impl Thread {
         })
     }
-    fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> {
+    fn worktree_snapshot(
+        worktree: Entity<project::Worktree>,
+        git_store: Entity<GitStore>,
+        cx: &App,
+    ) -> Task<WorktreeSnapshot> {
         cx.spawn(async move |cx| {
             // Get worktree path and snapshot
             let worktree_info = cx.update(|app_cx| {
@@ -1268,29 +1273,30 @@ impl Thread {
                 };
             };
-            // Extract git information
-            let git_state = match snapshot.repositories().first() {
-                None => None,
-                Some(repo_entry) => {
-                    // Get branch information
-                    let current_branch = repo_entry.branch().map(|branch| branch.name.to_string());
-                    // Get repository info
-                    let repo_result = worktree.read_with(cx, |worktree, _cx| {
-                        if let project::Worktree::Local(local_worktree) = &worktree {
-                            local_worktree.get_local_repo(repo_entry).map(|local_repo| {
-                                let repo = local_repo.repo();
-                                (repo.remote_url("origin"), repo.head_sha(), repo.clone())
-                            })
-                        } else {
-                            None
-                        }
-                    });
-                    match repo_result {
-                        Ok(Some((remote_url, head_sha, repository))) => {
+            let repo_info = git_store
+                .update(cx, |git_store, cx| {
+                    git_store
+                        .repositories()
+                        .values()
+                        .find(|repo| repo.read(cx).worktree_id == snapshot.id())
+                        .and_then(|repo| {
+                            let repo = repo.read(cx);
+                            Some((repo.branch().cloned(), repo.local_repository()?))
+                        })
+                })
+                .ok()
+                .flatten();
+            // Extract git information
+            let git_state = match repo_info {
+                None => None,
+                Some((branch, repo)) => {
+                    let current_branch = branch.map(|branch| branch.name.to_string());
+                    let remote_url = repo.remote_url("origin");
+                    let head_sha = repo.head_sha();
                     // Get diff asynchronously
-                    let diff = repository
+                    let diff = repo
                         .diff(git::repository::DiffType::HeadToWorktree, cx.clone())
                         .await
                         .ok();
@@ -1302,9 +1308,6 @@ impl Thread {
                         diff,
                     })
                 }
-                        Err(_) | Ok(None) => None,
-                    }
-                }
             };
             WorktreeSnapshot {

@@ -532,13 +532,15 @@
                 id: worktree.id().to_proto(),
                 scan_id: worktree.completed_scan_id() as u64,
             });
-            for repository in worktree.repositories().iter() {
-                repositories.push(proto::RejoinRepository {
-                    id: repository.work_directory_id().to_proto(),
-                    scan_id: worktree.completed_scan_id() as u64,
-                });
-            }
-        }
+        }
+        for (entry_id, repository) in project.repositories(cx) {
+            let repository = repository.read(cx);
+            repositories.push(proto::RejoinRepository {
+                id: entry_id.to_proto(),
+                scan_id: repository.completed_scan_id as u64,
+            });
+        }
         rejoined_projects.push(proto::RejoinProject {
             id: project_id,
             worktrees,

@@ -2895,9 +2895,10 @@ async fn test_git_branch_name(
     let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
     assert_eq!(worktrees.len(), 1);
     let worktree = worktrees[0].clone();
-    let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
+    let snapshot = worktree.read(cx).snapshot();
+    let repo = snapshot.repositories().first().unwrap();
     assert_eq!(
-        root_entry.branch().map(|branch| branch.name.to_string()),
+        repo.branch().map(|branch| branch.name.to_string()),
         branch_name
     );
 }
@@ -6771,7 +6772,7 @@ async fn test_remote_git_branches(
         .map(ToString::to_string)
         .collect::<HashSet<_>>();
-    let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
+    let (project_a, _) = client_a.build_local_project("/project", cx_a).await;
     let project_id = active_call_a
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
@@ -6784,8 +6785,6 @@ async fn test_remote_git_branches(
     let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap());
-    let root_path = ProjectPath::root_path(worktree_id);
     let branches_b = cx_b
         .update(|cx| repo_b.update(cx, |repository, _| repository.branches()))
         .await
@@ -6810,11 +6809,15 @@ async fn test_remote_git_branches(
     let host_branch = cx_a.update(|cx| {
         project_a.update(cx, |project, cx| {
-            project.worktree_store().update(cx, |worktree_store, cx| {
-                worktree_store
-                    .current_branch(root_path.clone(), cx)
-                    .unwrap()
-            })
+            project
+                .repositories(cx)
+                .values()
+                .next()
+                .unwrap()
+                .read(cx)
+                .current_branch()
+                .unwrap()
+                .clone()
         })
     });
@@ -6843,9 +6846,15 @@ async fn test_remote_git_branches(
     let host_branch = cx_a.update(|cx| {
         project_a.update(cx, |project, cx| {
-            project.worktree_store().update(cx, |worktree_store, cx| {
-                worktree_store.current_branch(root_path, cx).unwrap()
-            })
+            project
+                .repositories(cx)
+                .values()
+                .next()
+                .unwrap()
+                .read(cx)
+                .current_branch()
+                .unwrap()
+                .clone()
         })
     });

@@ -258,7 +258,7 @@ async fn test_ssh_collaboration_git_branches(
     });
     let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
-    let (project_a, worktree_id) = client_a
+    let (project_a, _) = client_a
         .build_ssh_project("/project", client_ssh, cx_a)
         .await;
@@ -277,7 +277,6 @@ async fn test_ssh_collaboration_git_branches(
     executor.run_until_parked();
     let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap());
-    let root_path = ProjectPath::root_path(worktree_id);
     let branches_b = cx_b
         .update(|cx| repo_b.read(cx).branches())
@@ -303,12 +302,16 @@ async fn test_ssh_collaboration_git_branches(
     let server_branch = server_cx.update(|cx| {
         headless_project.update(cx, |headless_project, cx| {
-            headless_project
-                .worktree_store
-                .update(cx, |worktree_store, cx| {
-                    worktree_store
-                        .current_branch(root_path.clone(), cx)
-                        .unwrap()
-                })
+            headless_project.git_store.update(cx, |git_store, cx| {
+                git_store
+                    .repositories()
+                    .values()
+                    .next()
+                    .unwrap()
+                    .read(cx)
+                    .current_branch()
+                    .unwrap()
+                    .clone()
+            })
         })
     });
@@ -338,10 +341,16 @@ async fn test_ssh_collaboration_git_branches(
     let server_branch = server_cx.update(|cx| {
         headless_project.update(cx, |headless_project, cx| {
-            headless_project
-                .worktree_store
-                .update(cx, |worktree_store, cx| {
-                    worktree_store.current_branch(root_path, cx).unwrap()
-                })
+            headless_project.git_store.update(cx, |git_store, cx| {
+                git_store
+                    .repositories()
+                    .values()
+                    .next()
+                    .unwrap()
+                    .read(cx)
+                    .current_branch()
+                    .unwrap()
+                    .clone()
+            })
         })
     });

@@ -629,18 +629,20 @@ impl Item for Editor {
         self.buffer()
             .read(cx)
            .as_singleton()
-            .and_then(|buffer| buffer.read(cx).project_path(cx))
-            .and_then(|path| {
+            .and_then(|buffer| {
+                let buffer = buffer.read(cx);
+                let path = buffer.project_path(cx)?;
+                let buffer_id = buffer.remote_id();
                 let project = self.project.as_ref()?.read(cx);
                 let entry = project.entry_for_path(&path, cx)?;
-                let git_status = project
-                    .worktree_for_id(path.worktree_id, cx)?
+                let (repo, repo_path) = project
+                    .git_store()
                     .read(cx)
-                    .snapshot()
-                    .status_for_file(path.path)?;
+                    .repository_and_path_for_buffer_id(buffer_id, cx)?;
+                let status = repo.read(cx).status_for_path(&repo_path)?.status;
                 Some(entry_git_aware_label_color(
-                    git_status.summary(),
+                    status.summary(),
                     entry.is_ignored,
                     params.selected,
                 ))

@@ -137,9 +137,9 @@ pub fn deploy_context_menu(
         menu
     } else {
         // Don't show the context menu if there isn't a project associated with this editor
-        if editor.project.is_none() {
+        let Some(project) = editor.project.clone() else {
             return;
-        }
+        };
         let display_map = editor.selections.display_map(cx);
         let buffer = &editor.snapshot(window, cx).buffer_snapshot;
@@ -159,10 +159,13 @@ pub fn deploy_context_menu(
             .all::<PointUtf16>(cx)
             .into_iter()
             .any(|s| !s.is_empty());
-        let has_git_repo = editor.project.as_ref().map_or(false, |project| {
-            project.update(cx, |project, cx| {
-                project.get_first_worktree_root_repo(cx).is_some()
-            })
+        let has_git_repo = anchor.buffer_id.is_some_and(|buffer_id| {
+            project
+                .read(cx)
+                .git_store()
+                .read(cx)
+                .repository_and_path_for_buffer_id(buffer_id, cx)
+                .is_some()
         });
         ui::ContextMenu::build(window, cx, |menu, _window, _cx| {

@@ -8,7 +8,7 @@ use gpui::{
     SharedString, Styled, Subscription, Task, Window,
 };
 use picker::{Picker, PickerDelegate, PickerEditorPosition};
-use project::git::Repository;
+use project::git_store::Repository;
 use std::sync::Arc;
 use time::OffsetDateTime;
 use time_format::format_local_timestamp;

@@ -46,7 +46,7 @@ use panel::{
     panel_icon_button, PanelHeader,
 };
 use project::{
-    git::{GitEvent, Repository},
+    git_store::{GitEvent, Repository},
     Fs, Project, ProjectPath,
 };
 use serde::{Deserialize, Serialize};

@@ -23,7 +23,7 @@ use gpui::{
 use language::{Anchor, Buffer, Capability, OffsetRangeExt};
 use multi_buffer::{MultiBuffer, PathKey};
 use project::{
-    git::{GitEvent, GitStore},
+    git_store::{GitEvent, GitStore},
     Project, ProjectPath,
 };
 use std::any::{Any, TypeId};

@@ -4,7 +4,7 @@ use gpui::{
 use itertools::Itertools;
 use picker::{Picker, PickerDelegate};
 use project::{
-    git::{GitStore, Repository},
+    git_store::{GitStore, Repository},
     Project,
 };
 use std::sync::Arc;

@@ -40,7 +40,7 @@ use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem};
 use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrevious};
 use outline_panel_settings::{OutlinePanelDockPosition, OutlinePanelSettings, ShowIndentGuides};
-use project::{File, Fs, Project, ProjectItem};
+use project::{File, Fs, GitEntry, GitTraversal, Project, ProjectItem};
 use search::{BufferSearchBar, ProjectSearchView};
 use serde::{Deserialize, Serialize};
 use settings::{Settings, SettingsStore};
@@ -60,7 +60,7 @@ use workspace::{
     },
     OpenInTerminal, WeakItemHandle, Workspace,
 };
-use worktree::{Entry, GitEntry, ProjectEntryId, WorktreeId};
+use worktree::{Entry, ProjectEntryId, WorktreeId};
 actions!(
     outline_panel,
@@ -2566,6 +2566,7 @@ impl OutlinePanel {
         let mut root_entries = HashSet::default();
         let mut new_excerpts = HashMap::<BufferId, HashMap<ExcerptId, Excerpt>>::default();
         let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| {
+            let git_store = outline_panel.project.read(cx).git_store().clone();
             new_collapsed_entries = outline_panel.collapsed_entries.clone();
             new_unfolded_dirs = outline_panel.unfolded_dirs.clone();
             let multi_buffer_snapshot = active_multi_buffer.read(cx).snapshot(cx);
@@ -2579,9 +2580,17 @@ impl OutlinePanel {
                 let is_new = new_entries.contains(&excerpt_id)
                     || !outline_panel.excerpts.contains_key(&buffer_id);
                 let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx);
+                let status = git_store
+                    .read(cx)
+                    .repository_and_path_for_buffer_id(buffer_id, cx)
+                    .and_then(|(repo, path)| {
+                        Some(repo.read(cx).status_for_path(&path)?.status)
+                    });
                 buffer_excerpts
                     .entry(buffer_id)
-                    .or_insert_with(|| (is_new, is_folded, Vec::new(), entry_id, worktree))
+                    .or_insert_with(|| {
+                        (is_new, is_folded, Vec::new(), entry_id, worktree, status)
+                    })
                     .2
                     .push(excerpt_id);
@@ -2631,7 +2640,7 @@ impl OutlinePanel {
             >::default();
         let mut external_excerpts = HashMap::default();
-        for (buffer_id, (is_new, is_folded, excerpts, entry_id, worktree)) in
+        for (buffer_id, (is_new, is_folded, excerpts, entry_id, worktree, status)) in
             buffer_excerpts
         {
             if is_folded {
@@ -2665,15 +2674,18 @@ impl OutlinePanel {
                 match entry_id.and_then(|id| worktree.entry_for_id(id)).cloned() {
                     Some(entry) => {
                         let entry = GitEntry {
-                            git_summary: worktree
-                                .status_for_file(&entry.path)
+                            git_summary: status
                                 .map(|status| status.summary())
                                 .unwrap_or_default(),
                             entry,
                         };
-                        let mut traversal = worktree
-                            .traverse_from_path(true, true, true, entry.path.as_ref())
-                            .with_git_statuses();
+                        let mut traversal =
+                            GitTraversal::new(worktree.traverse_from_path(
+                                true,
+                                true,
+                                true,
+                                entry.path.as_ref(),
+                            ));
                         let mut entries_to_add = HashMap::default();
                         worktree_excerpts

@@ -73,6 +73,7 @@ shlex.workspace = true
 smol.workspace = true
 snippet.workspace = true
 snippet_provider.workspace = true
+sum_tree.workspace = true
 task.workspace = true
 tempfile.workspace = true
 terminal.workspace = true

@@ -4,13 +4,11 @@ use crate::{
     worktree_store::{WorktreeStore, WorktreeStoreEvent},
     ProjectItem as _, ProjectPath,
 };
-use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
-use anyhow::{anyhow, bail, Context as _, Result};
+use anyhow::{anyhow, Context as _, Result};
 use client::Client;
 use collections::{hash_map, HashMap, HashSet};
 use fs::Fs;
 use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
-use git::{blame::Blame, repository::RepoPath};
 use gpui::{
     App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
 };
@@ -25,16 +23,8 @@ use rpc::{
     proto::{self, ToProto},
     AnyProtoClient, ErrorExt as _, TypedEnvelope,
 };
-use serde::Deserialize;
 use smol::channel::Receiver;
-use std::{
-    io,
-    ops::Range,
-    path::{Path, PathBuf},
-    pin::pin,
-    sync::Arc,
-    time::Instant,
-};
+use std::{io, path::Path, pin::pin, sync::Arc, time::Instant};
 use text::BufferId;
 use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
 use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
@@ -750,9 +740,7 @@ impl BufferStore {
         client.add_entity_message_handler(Self::handle_buffer_saved);
         client.add_entity_message_handler(Self::handle_update_buffer_file);
         client.add_entity_request_handler(Self::handle_save_buffer);
-        client.add_entity_request_handler(Self::handle_blame_buffer);
         client.add_entity_request_handler(Self::handle_reload_buffers);
-        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
     }
     /// Creates a buffer store, optionally retaining its buffers.
@@ -938,172 +926,6 @@ impl BufferStore {
         })
     }
pub fn blame_buffer(
&self,
buffer: &Entity<Buffer>,
version: Option<clock::Global>,
cx: &App,
) -> Task<Result<Option<Blame>>> {
let buffer = buffer.read(cx);
let Some(file) = File::from_dyn(buffer.file()) else {
return Task::ready(Err(anyhow!("buffer has no file")));
};
match file.worktree.clone().read(cx) {
Worktree::Local(worktree) => {
let worktree = worktree.snapshot();
let blame_params = maybe!({
let local_repo = match worktree.local_repo_for_path(&file.path) {
Some(repo_for_path) => repo_for_path,
None => return Ok(None),
};
let relative_path = local_repo
.relativize(&file.path)
.context("failed to relativize buffer path")?;
let repo = local_repo.repo().clone();
let content = match version {
Some(version) => buffer.rope_for_version(&version).clone(),
None => buffer.as_rope().clone(),
};
anyhow::Ok(Some((repo, relative_path, content)))
});
cx.spawn(async move |cx| {
let Some((repo, relative_path, content)) = blame_params? else {
return Ok(None);
};
repo.blame(relative_path.clone(), content, cx)
.await
.with_context(|| format!("Failed to blame {:?}", relative_path.0))
.map(Some)
})
}
Worktree::Remote(worktree) => {
let buffer_id = buffer.remote_id();
let version = buffer.version();
let project_id = worktree.project_id();
let client = worktree.client();
cx.spawn(async move |_| {
let response = client
.request(proto::BlameBuffer {
project_id,
buffer_id: buffer_id.into(),
version: serialize_version(&version),
})
.await?;
Ok(deserialize_blame_buffer_response(response))
})
}
}
}
pub fn get_permalink_to_line(
&self,
buffer: &Entity<Buffer>,
selection: Range<u32>,
cx: &App,
) -> Task<Result<url::Url>> {
let buffer = buffer.read(cx);
let Some(file) = File::from_dyn(buffer.file()) else {
return Task::ready(Err(anyhow!("buffer has no file")));
};
match file.worktree.read(cx) {
Worktree::Local(worktree) => {
let worktree_path = worktree.abs_path().clone();
let Some((repo_entry, repo)) =
worktree.repository_for_path(file.path()).and_then(|entry| {
let repo = worktree.get_local_repo(&entry)?.repo().clone();
Some((entry, repo))
})
else {
// If we're not in a Git repo, check whether this is a Rust source
// file in the Cargo registry (presumably opened with go-to-definition
// from a normal Rust file). If so, we can put together a permalink
// using crate metadata.
if buffer
.language()
.is_none_or(|lang| lang.name() != "Rust".into())
{
return Task::ready(Err(anyhow!("no permalink available")));
}
let file_path = worktree_path.join(file.path());
return cx.spawn(async move |cx| {
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
.map_err(|_| anyhow!("no permalink available"))
});
};
let path = match repo_entry.relativize(file.path()) {
Ok(RepoPath(path)) => path,
Err(e) => return Task::ready(Err(e)),
};
let remote = repo_entry
.branch()
.and_then(|b| b.upstream.as_ref())
.and_then(|b| b.remote_name())
.unwrap_or("origin")
.to_string();
cx.spawn(async move |cx| {
let origin_url = repo
.remote_url(&remote)
.ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
let sha = repo
.head_sha()
.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
let (provider, remote) =
parse_git_remote_url(provider_registry, &origin_url)
.ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
let path = path
.to_str()
.ok_or_else(|| anyhow!("failed to convert path to string"))?;
Ok(provider.build_permalink(
remote,
BuildPermalinkParams {
sha: &sha,
path,
selection: Some(selection),
},
))
})
}
Worktree::Remote(worktree) => {
let buffer_id = buffer.remote_id();
let project_id = worktree.project_id();
let client = worktree.client();
cx.spawn(async move |_| {
let response = client
.request(proto::GetPermalinkToLine {
project_id,
buffer_id: buffer_id.into(),
selection: Some(proto::Range {
start: selection.start as u64,
end: selection.end as u64,
}),
})
.await?;
url::Url::parse(&response.permalink).context("failed to parse permalink")
})
}
}
}
     fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
         let buffer = buffer_entity.read(cx);
         let remote_id = buffer.remote_id();
@@ -1662,52 +1484,6 @@ impl BufferStore {
         })
     }
pub async fn handle_blame_buffer(
this: Entity<Self>,
envelope: TypedEnvelope<proto::BlameBuffer>,
mut cx: AsyncApp,
) -> Result<proto::BlameBufferResponse> {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let version = deserialize_version(&envelope.payload.version);
let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(version.clone())
})?
.await?;
let blame = this
.update(&mut cx, |this, cx| {
this.blame_buffer(&buffer, Some(version), cx)
})?
.await?;
Ok(serialize_blame_buffer_response(blame))
}
pub async fn handle_get_permalink_to_line(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GetPermalinkToLine>,
mut cx: AsyncApp,
) -> Result<proto::GetPermalinkToLineResponse> {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
// let version = deserialize_version(&envelope.payload.version);
let selection = {
let proto_selection = envelope
.payload
.selection
.context("no selection to get permalink for defined")?;
proto_selection.start as u32..proto_selection.end as u32
};
let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
let permalink = this
.update(&mut cx, |this, cx| {
this.get_permalink_to_line(&buffer, selection, cx)
})?
.await?;
Ok(proto::GetPermalinkToLineResponse {
permalink: permalink.to_string(),
})
}
     pub fn reload_buffers(
         &self,
         buffers: HashSet<Entity<Buffer>>,
@@ -1930,139 +1706,3 @@ fn is_not_found_error(error: &anyhow::Error) -> bool {
         .downcast_ref::<io::Error>()
         .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
 }
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
let Some(blame) = blame else {
return proto::BlameBufferResponse {
blame_response: None,
};
};
let entries = blame
.entries
.into_iter()
.map(|entry| proto::BlameEntry {
sha: entry.sha.as_bytes().into(),
start_line: entry.range.start,
end_line: entry.range.end,
original_line_number: entry.original_line_number,
author: entry.author.clone(),
author_mail: entry.author_mail.clone(),
author_time: entry.author_time,
author_tz: entry.author_tz.clone(),
committer: entry.committer_name.clone(),
committer_mail: entry.committer_email.clone(),
committer_time: entry.committer_time,
committer_tz: entry.committer_tz.clone(),
summary: entry.summary.clone(),
previous: entry.previous.clone(),
filename: entry.filename.clone(),
})
.collect::<Vec<_>>();
let messages = blame
.messages
.into_iter()
.map(|(oid, message)| proto::CommitMessage {
oid: oid.as_bytes().into(),
message,
})
.collect::<Vec<_>>();
proto::BlameBufferResponse {
blame_response: Some(proto::blame_buffer_response::BlameResponse {
entries,
messages,
remote_url: blame.remote_url,
}),
}
}
fn deserialize_blame_buffer_response(
response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
let response = response.blame_response?;
let entries = response
.entries
.into_iter()
.filter_map(|entry| {
Some(git::blame::BlameEntry {
sha: git::Oid::from_bytes(&entry.sha).ok()?,
range: entry.start_line..entry.end_line,
original_line_number: entry.original_line_number,
committer_name: entry.committer,
committer_time: entry.committer_time,
committer_tz: entry.committer_tz,
committer_email: entry.committer_mail,
author: entry.author,
author_mail: entry.author_mail,
author_time: entry.author_time,
author_tz: entry.author_tz,
summary: entry.summary,
previous: entry.previous,
filename: entry.filename,
})
})
.collect::<Vec<_>>();
let messages = response
.messages
.into_iter()
.filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
.collect::<HashMap<_, _>>();
Some(Blame {
entries,
messages,
remote_url: response.remote_url,
})
}
fn get_permalink_in_rust_registry_src(
provider_registry: Arc<GitHostingProviderRegistry>,
path: PathBuf,
selection: Range<u32>,
) -> Result<url::Url> {
#[derive(Deserialize)]
struct CargoVcsGit {
sha1: String,
}
#[derive(Deserialize)]
struct CargoVcsInfo {
git: CargoVcsGit,
path_in_vcs: String,
}
#[derive(Deserialize)]
struct CargoPackage {
repository: String,
}
#[derive(Deserialize)]
struct CargoToml {
package: CargoPackage,
}
let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
Some((dir, json))
}) else {
bail!("No .cargo_vcs_info.json found in parent directories")
};
let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
.ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
let permalink = provider.build_permalink(
remote,
BuildPermalinkParams {
sha: &cargo_vcs_info.git.sha1,
path: &path.to_string_lossy(),
selection: Some(selection),
},
);
Ok(permalink)
}

@@ -88,18 +88,18 @@ impl Manager {
             projects.insert(project_id, handle.clone());
             let mut worktrees = Vec::new();
             let mut repositories = Vec::new();
+            for (id, repository) in project.repositories(cx) {
+                repositories.push(proto::RejoinRepository {
+                    id: id.to_proto(),
+                    scan_id: repository.read(cx).completed_scan_id as u64,
+                });
+            }
             for worktree in project.worktrees(cx) {
                 let worktree = worktree.read(cx);
                 worktrees.push(proto::RejoinWorktree {
                     id: worktree.id().to_proto(),
                     scan_id: worktree.completed_scan_id() as u64,
                 });
-                for repository in worktree.repositories().iter() {
-                    repositories.push(proto::RejoinRepository {
-                        id: repository.work_directory_id().to_proto(),
-                        scan_id: worktree.completed_scan_id() as u64,
-                    });
-                }
             }
             Some(proto::RejoinProject {
                 id: project_id,

@@ -1,9 +1,11 @@
+pub mod git_traversal;
 use crate::{
     buffer_store::{BufferStore, BufferStoreEvent},
     worktree_store::{WorktreeStore, WorktreeStoreEvent},
     Project, ProjectEnvironment, ProjectItem, ProjectPath,
 };
-use anyhow::{anyhow, Context as _, Result};
+use anyhow::{anyhow, bail, Context as _, Result};
 use askpass::{AskPassDelegate, AskPassSession};
 use buffer_diff::{BufferDiff, BufferDiffEvent};
 use client::ProjectId;
@@ -14,28 +16,35 @@ use futures::{
     future::{self, OptionFuture, Shared},
     FutureExt as _, StreamExt as _,
 };
-use git::repository::{DiffType, GitRepositoryCheckpoint};
 use git::{
+    blame::Blame,
+    parse_git_remote_url,
     repository::{
-        Branch, CommitDetails, GitRepository, PushOptions, Remote, RemoteCommandOutput, RepoPath,
-        ResetMode,
+        Branch, CommitDetails, DiffType, GitRepository, GitRepositoryCheckpoint, PushOptions,
+        Remote, RemoteCommandOutput, RepoPath, ResetMode,
     },
     status::FileStatus,
+    BuildPermalinkParams, GitHostingProviderRegistry,
 };
 use gpui::{
     App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
     WeakEntity,
 };
-use language::{Buffer, BufferEvent, Language, LanguageRegistry};
+use language::{
+    proto::{deserialize_version, serialize_version},
+    Buffer, BufferEvent, Language, LanguageRegistry,
+};
 use parking_lot::Mutex;
 use rpc::{
     proto::{self, git_reset, ToProto, SSH_PROJECT_ID},
     AnyProtoClient, TypedEnvelope,
 };
+use serde::Deserialize;
 use settings::WorktreeId;
 use std::{
     collections::{hash_map, VecDeque},
     future::Future,
+    ops::Range,
     path::{Path, PathBuf},
     sync::Arc,
 };
@@ -49,6 +58,7 @@ use worktree::{
 pub struct GitStore {
     state: GitStoreState,
     buffer_store: Entity<BufferStore>,
+    _worktree_store: Entity<WorktreeStore>,
     repositories: HashMap<ProjectEntryId, Entity<Repository>>,
     active_repo_id: Option<ProjectEntryId>,
     #[allow(clippy::type_complexity)]
@@ -131,15 +141,16 @@ pub struct Repository {
     pub dot_git_abs_path: PathBuf,
     pub worktree_abs_path: Arc<Path>,
     pub is_from_single_file_worktree: bool,
-    pub git_repo: GitRepo,
     pub merge_message: Option<String>,
+    pub completed_scan_id: usize,
+    git_repo: RepositoryState,
     job_sender: mpsc::UnboundedSender<GitJob>,
     askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
     latest_askpass_id: u64,
 }
 #[derive(Clone)]
-pub enum GitRepo {
+enum RepositoryState {
     Local(Arc<dyn GitRepository>),
     Remote {
         project_id: ProjectId,
@@ -179,7 +190,7 @@ impl GitStore {
         cx: &mut Context<Self>,
     ) -> Self {
         Self::new(
-            worktree_store,
+            worktree_store.clone(),
             buffer_store,
             GitStoreState::Local {
                 downstream_client: None,
@@ -198,7 +209,7 @@ impl GitStore {
         cx: &mut Context<Self>,
     ) -> Self {
         Self::new(
-            worktree_store,
+            worktree_store.clone(),
             buffer_store,
             GitStoreState::Remote {
                 upstream_client,
@@ -216,7 +227,7 @@ impl GitStore {
         cx: &mut Context<Self>,
     ) -> Self {
         Self::new(
-            worktree_store,
+            worktree_store.clone(),
             buffer_store,
             GitStoreState::Ssh {
                 upstream_client,
@@ -229,20 +240,21 @@ impl GitStore {
     }
     fn new(
-        worktree_store: &Entity<WorktreeStore>,
+        worktree_store: Entity<WorktreeStore>,
         buffer_store: Entity<BufferStore>,
         state: GitStoreState,
         cx: &mut Context<Self>,
     ) -> Self {
         let update_sender = Self::spawn_git_worker(cx);
         let _subscriptions = [
-            cx.subscribe(worktree_store, Self::on_worktree_store_event),
+            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
             cx.subscribe(&buffer_store, Self::on_buffer_store_event),
         ];
         GitStore {
             state,
             buffer_store,
+            _worktree_store: worktree_store,
             repositories: HashMap::default(),
             active_repo_id: None,
             update_sender,
@@ -276,6 +288,8 @@ impl GitStore {
         client.add_entity_request_handler(Self::handle_open_unstaged_diff);
         client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
         client.add_entity_message_handler(Self::handle_update_diff_bases);
+        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
+        client.add_entity_request_handler(Self::handle_blame_buffer);
     }
     pub fn is_local(&self) -> bool {
@@ -511,6 +525,20 @@ impl GitStore {
         diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
     }
+    pub fn project_path_git_status(
+        &self,
+        project_path: &ProjectPath,
+        cx: &App,
+    ) -> Option<FileStatus> {
+        let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
+        Some(
+            repo.read(cx)
+                .repository_entry
+                .status_for_path(&repo_path)?
+                .status,
+        )
+    }
     pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
         let mut dot_git_abs_paths = Vec::new();
         let mut checkpoints = Vec::new();
@@ -552,6 +580,172 @@ impl GitStore {
         })
     }
pub fn blame_buffer(
&self,
buffer: &Entity<Buffer>,
version: Option<clock::Global>,
cx: &App,
) -> Task<Result<Option<Blame>>> {
let buffer = buffer.read(cx);
let Some(file) = File::from_dyn(buffer.file()) else {
return Task::ready(Err(anyhow!("buffer has no file")));
};
match file.worktree.clone().read(cx) {
Worktree::Local(worktree) => {
let worktree = worktree.snapshot();
let blame_params = maybe!({
let local_repo = match worktree.local_repo_for_path(&file.path) {
Some(repo_for_path) => repo_for_path,
None => return Ok(None),
};
let relative_path = local_repo
.relativize(&file.path)
.context("failed to relativize buffer path")?;
let repo = local_repo.repo().clone();
let content = match version {
Some(version) => buffer.rope_for_version(&version).clone(),
None => buffer.as_rope().clone(),
};
anyhow::Ok(Some((repo, relative_path, content)))
});
cx.spawn(async move |cx| {
let Some((repo, relative_path, content)) = blame_params? else {
return Ok(None);
};
repo.blame(relative_path.clone(), content, cx)
.await
.with_context(|| format!("Failed to blame {:?}", relative_path.0))
.map(Some)
})
}
Worktree::Remote(worktree) => {
let buffer_id = buffer.remote_id();
let version = buffer.version();
let project_id = worktree.project_id();
let client = worktree.client();
cx.spawn(async move |_| {
let response = client
.request(proto::BlameBuffer {
project_id,
buffer_id: buffer_id.into(),
version: serialize_version(&version),
})
.await?;
Ok(deserialize_blame_buffer_response(response))
})
}
}
}
pub fn get_permalink_to_line(
&self,
buffer: &Entity<Buffer>,
selection: Range<u32>,
cx: &App,
) -> Task<Result<url::Url>> {
let buffer = buffer.read(cx);
let Some(file) = File::from_dyn(buffer.file()) else {
return Task::ready(Err(anyhow!("buffer has no file")));
};
match file.worktree.read(cx) {
Worktree::Local(worktree) => {
let worktree_path = worktree.abs_path().clone();
let Some((repo_entry, repo)) =
worktree.repository_for_path(&file.path).and_then(|entry| {
let repo = worktree.get_local_repo(&entry)?.repo().clone();
Some((entry, repo))
})
else {
// If we're not in a Git repo, check whether this is a Rust source
// file in the Cargo registry (presumably opened with go-to-definition
// from a normal Rust file). If so, we can put together a permalink
// using crate metadata.
if buffer
.language()
.is_none_or(|lang| lang.name() != "Rust".into())
{
return Task::ready(Err(anyhow!("no permalink available")));
}
let file_path = worktree_path.join(&file.path);
return cx.spawn(async move |cx| {
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
.map_err(|_| anyhow!("no permalink available"))
});
};
let path = match repo_entry.relativize(&file.path) {
Ok(RepoPath(path)) => path,
Err(e) => return Task::ready(Err(e)),
};
let remote = repo_entry
.branch()
.and_then(|b| b.upstream.as_ref())
.and_then(|b| b.remote_name())
.unwrap_or("origin")
.to_string();
cx.spawn(async move |cx| {
let origin_url = repo
.remote_url(&remote)
.ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
let sha = repo
.head_sha()
.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
let (provider, remote) =
parse_git_remote_url(provider_registry, &origin_url)
.ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
let path = path
.to_str()
.ok_or_else(|| anyhow!("failed to convert path to string"))?;
Ok(provider.build_permalink(
remote,
BuildPermalinkParams {
sha: &sha,
path,
selection: Some(selection),
},
))
})
}
Worktree::Remote(worktree) => {
let buffer_id = buffer.remote_id();
let project_id = worktree.project_id();
let client = worktree.client();
cx.spawn(async move |_| {
let response = client
.request(proto::GetPermalinkToLine {
project_id,
buffer_id: buffer_id.into(),
selection: Some(proto::Range {
start: selection.start as u64,
end: selection.end as u64,
}),
})
.await?;
url::Url::parse(&response.permalink).context("failed to parse permalink")
})
}
}
}
     fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
         match &self.state {
             GitStoreState::Local {
@@ -611,12 +805,12 @@ impl GitStore {
             .and_then(|local_worktree| local_worktree.get_local_repo(repo_entry))
             .map(|local_repo| {
                 (
-                    GitRepo::Local(local_repo.repo().clone()),
+                    RepositoryState::Local(local_repo.repo().clone()),
                     local_repo.merge_message.clone(),
                 )
             })
             .or_else(|| {
-                let git_repo = GitRepo::Remote {
+                let git_repo = RepositoryState::Remote {
                     project_id: self.project_id()?,
                     client: self
                         .upstream_client()
@@ -642,8 +836,9 @@ impl GitStore {
                 let existing_repo = existing_repo.clone();
                 existing_repo.update(cx, |existing_repo, _| {
                     existing_repo.repository_entry = repo_entry.clone();
-                    if matches!(git_repo, GitRepo::Local { .. }) {
+                    if matches!(git_repo, RepositoryState::Local { .. }) {
                         existing_repo.merge_message = merge_message;
+                        existing_repo.completed_scan_id = worktree.completed_scan_id();
                     }
                 });
                 existing_repo
@@ -666,6 +861,7 @@ impl GitStore {
                     job_sender: self.update_sender.clone(),
                     merge_message,
                     commit_message_buffer: None,
+                    completed_scan_id: worktree.completed_scan_id(),
                 })
             };
             new_repositories.insert(repo_entry.work_directory_id(), repo);
@@ -992,13 +1188,21 @@ impl GitStore {
         Some(status.status)
     }
-    fn repository_and_path_for_buffer_id(
+    pub fn repository_and_path_for_buffer_id(
         &self,
         buffer_id: BufferId,
         cx: &App,
     ) -> Option<(Entity<Repository>, RepoPath)> {
         let buffer = self.buffer_store.read(cx).get(buffer_id)?;
-        let path = buffer.read(cx).project_path(cx)?;
+        let project_path = buffer.read(cx).project_path(cx)?;
+        self.repository_and_path_for_project_path(&project_path, cx)
+    }
+    pub fn repository_and_path_for_project_path(
+        &self,
+        path: &ProjectPath,
+        cx: &App,
+    ) -> Option<(Entity<Repository>, RepoPath)> {
         let mut result: Option<(Entity<Repository>, RepoPath)> = None;
         for repo_handle in self.repositories.values() {
             let repo = repo_handle.read(cx);
@@ -1572,7 +1776,7 @@ impl GitStore {
         Ok(proto::GitDiffResponse { diff })
     }
-    pub async fn handle_open_unstaged_diff(
+    async fn handle_open_unstaged_diff(
         this: Entity<Self>,
         request: TypedEnvelope<proto::OpenUnstagedDiff>,
         mut cx: AsyncApp,
@@ -1596,7 +1800,7 @@ impl GitStore {
         Ok(proto::OpenUnstagedDiffResponse { staged_text })
     }
-    pub async fn handle_open_uncommitted_diff(
+    async fn handle_open_uncommitted_diff(
         this: Entity<Self>,
         request: TypedEnvelope<proto::OpenUncommittedDiff>,
         mut cx: AsyncApp,
@@ -1657,7 +1861,7 @@ impl GitStore {
         })
     }
-    pub async fn handle_update_diff_bases(
+    async fn handle_update_diff_bases(
         this: Entity<Self>,
         request: TypedEnvelope<proto::UpdateDiffBases>,
         mut cx: AsyncApp,
@@ -1675,6 +1879,56 @@ impl GitStore {
         })
     }
async fn handle_blame_buffer(
this: Entity<Self>,
envelope: TypedEnvelope<proto::BlameBuffer>,
mut cx: AsyncApp,
) -> Result<proto::BlameBufferResponse> {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let version = deserialize_version(&envelope.payload.version);
let buffer = this.read_with(&cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
})??;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(version.clone())
})?
.await?;
let blame = this
.update(&mut cx, |this, cx| {
this.blame_buffer(&buffer, Some(version), cx)
})?
.await?;
Ok(serialize_blame_buffer_response(blame))
}
async fn handle_get_permalink_to_line(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GetPermalinkToLine>,
mut cx: AsyncApp,
) -> Result<proto::GetPermalinkToLineResponse> {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
// let version = deserialize_version(&envelope.payload.version);
let selection = {
let proto_selection = envelope
.payload
.selection
.context("no selection to get permalink for defined")?;
proto_selection.start as u32..proto_selection.end as u32
};
let buffer = this.read_with(&cx, |this, cx| {
this.buffer_store.read(cx).get_existing(buffer_id)
})??;
let permalink = this
.update(&mut cx, |this, cx| {
this.get_permalink_to_line(&buffer, selection, cx)
})?
.await?;
Ok(proto::GetPermalinkToLineResponse {
permalink: permalink.to_string(),
})
}
fn repository_for_request( fn repository_for_request(
this: &Entity<Self>, this: &Entity<Self>,
worktree_id: WorktreeId, worktree_id: WorktreeId,
@ -2052,9 +2306,13 @@ impl Repository {
self.repository_entry.branch() self.repository_entry.branch()
} }
pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
self.repository_entry.status_for_path(path)
}
fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R> fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
where where
F: FnOnce(GitRepo, AsyncApp) -> Fut + 'static, F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
Fut: Future<Output = R> + 'static, Fut: Future<Output = R> + 'static,
R: Send + 'static, R: Send + 'static,
{ {
@ -2063,7 +2321,7 @@ impl Repository {
fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R> fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
where where
F: FnOnce(GitRepo, AsyncApp) -> Fut + 'static, F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
Fut: Future<Output = R> + 'static, Fut: Future<Output = R> + 'static,
R: Send + 'static, R: Send + 'static,
{ {
@ -2178,6 +2436,13 @@ impl Repository {
self.repository_entry.relativize(path).log_err() self.repository_entry.relativize(path).log_err()
} }
pub fn local_repository(&self) -> Option<Arc<dyn GitRepository>> {
match &self.git_repo {
RepositoryState::Local(git_repository) => Some(git_repository.clone()),
RepositoryState::Remote { .. } => None,
}
}
pub fn open_commit_buffer( pub fn open_commit_buffer(
&mut self, &mut self,
languages: Option<Arc<LanguageRegistry>>, languages: Option<Arc<LanguageRegistry>>,
@ -2188,7 +2453,7 @@ impl Repository {
return Task::ready(Ok(buffer)); return Task::ready(Ok(buffer));
} }
if let GitRepo::Remote { if let RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2262,8 +2527,8 @@ impl Repository {
self.send_job(|git_repo, _| async move { self.send_job(|git_repo, _| async move {
match git_repo { match git_repo {
GitRepo::Local(repo) => repo.checkout_files(commit, paths, env.await).await, RepositoryState::Local(repo) => repo.checkout_files(commit, paths, env.await).await,
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2298,11 +2563,11 @@ impl Repository {
let env = self.worktree_environment(cx); let env = self.worktree_environment(cx);
self.send_job(|git_repo, _| async move { self.send_job(|git_repo, _| async move {
match git_repo { match git_repo {
GitRepo::Local(git_repo) => { RepositoryState::Local(git_repo) => {
let env = env.await; let env = env.await;
git_repo.reset(commit, reset_mode, env).await git_repo.reset(commit, reset_mode, env).await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2330,8 +2595,8 @@ impl Repository {
pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> { pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
self.send_job(|git_repo, cx| async move { self.send_job(|git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(git_repository) => git_repository.show(commit, cx).await, RepositoryState::Local(git_repository) => git_repository.show(commit, cx).await,
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2402,8 +2667,8 @@ impl Repository {
this.update(cx, |this, _| { this.update(cx, |this, _| {
this.send_job(|git_repo, cx| async move { this.send_job(|git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(repo) => repo.stage_paths(entries, env, cx).await, RepositoryState::Local(repo) => repo.stage_paths(entries, env, cx).await,
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2473,8 +2738,8 @@ impl Repository {
this.update(cx, |this, _| { this.update(cx, |this, _| {
this.send_job(|git_repo, cx| async move { this.send_job(|git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(repo) => repo.unstage_paths(entries, env, cx).await, RepositoryState::Local(repo) => repo.unstage_paths(entries, env, cx).await,
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2556,11 +2821,11 @@ impl Repository {
let env = self.worktree_environment(cx); let env = self.worktree_environment(cx);
self.send_job(|git_repo, cx| async move { self.send_job(|git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(repo) => { RepositoryState::Local(repo) => {
let env = env.await; let env = env.await;
repo.commit(message, name_and_email, env, cx).await repo.commit(message, name_and_email, env, cx).await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2597,12 +2862,12 @@ impl Repository {
self.send_job(move |git_repo, cx| async move { self.send_job(move |git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
let askpass = AskPassSession::new(&executor, askpass).await?; let askpass = AskPassSession::new(&executor, askpass).await?;
let env = env.await; let env = env.await;
git_repository.fetch(askpass, env, cx).await git_repository.fetch(askpass, env, cx).await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2648,7 +2913,7 @@ impl Repository {
self.send_job(move |git_repo, cx| async move { self.send_job(move |git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
let env = env.await; let env = env.await;
let askpass = AskPassSession::new(&executor, askpass).await?; let askpass = AskPassSession::new(&executor, askpass).await?;
git_repository git_repository
@ -2662,7 +2927,7 @@ impl Repository {
) )
.await .await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2712,14 +2977,14 @@ impl Repository {
self.send_job(move |git_repo, cx| async move { self.send_job(move |git_repo, cx| async move {
match git_repo { match git_repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
let askpass = AskPassSession::new(&executor, askpass).await?; let askpass = AskPassSession::new(&executor, askpass).await?;
let env = env.await; let env = env.await;
git_repository git_repository
.pull(branch.to_string(), remote.to_string(), askpass, env, cx) .pull(branch.to_string(), remote.to_string(), askpass, env, cx)
.await .await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2763,8 +3028,10 @@ impl Repository {
Some(GitJobKey::WriteIndex(path.clone())), Some(GitJobKey::WriteIndex(path.clone())),
|git_repo, cx| async { |git_repo, cx| async {
match git_repo { match git_repo {
GitRepo::Local(repo) => repo.set_index_text(path, content, env.await, cx).await, RepositoryState::Local(repo) => {
GitRepo::Remote { repo.set_index_text(path, content, env.await, cx).await
}
RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2792,8 +3059,10 @@ impl Repository {
) -> oneshot::Receiver<Result<Vec<Remote>>> { ) -> oneshot::Receiver<Result<Vec<Remote>>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => git_repository.get_remotes(branch_name, cx).await, RepositoryState::Local(git_repository) => {
GitRepo::Remote { git_repository.get_remotes(branch_name, cx).await
}
RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2822,15 +3091,19 @@ impl Repository {
}) })
} }
pub fn branch(&self) -> Option<&Branch> {
self.repository_entry.branch()
}
pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> { pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
let git_repository = git_repository.clone(); let git_repository = git_repository.clone();
cx.background_spawn(async move { git_repository.branches().await }) cx.background_spawn(async move { git_repository.branches().await })
.await .await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2859,8 +3132,8 @@ impl Repository {
pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> { pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => git_repository.diff(diff_type, cx).await, RepositoryState::Local(git_repository) => git_repository.diff(diff_type, cx).await,
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2892,10 +3165,10 @@ impl Repository {
pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> { pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
git_repository.create_branch(branch_name, cx).await git_repository.create_branch(branch_name, cx).await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2919,10 +3192,10 @@ impl Repository {
pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> { pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
git_repository.change_branch(branch_name, cx).await git_repository.change_branch(branch_name, cx).await
} }
GitRepo::Remote { RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2946,8 +3219,10 @@ impl Repository {
pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> { pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => git_repository.check_for_pushed_commit(cx).await, RepositoryState::Local(git_repository) => {
GitRepo::Remote { git_repository.check_for_pushed_commit(cx).await
}
RepositoryState::Remote {
project_id, project_id,
client, client,
worktree_id, worktree_id,
@ -2972,8 +3247,8 @@ impl Repository {
pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> { pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
self.send_job(|repo, cx| async move { self.send_job(|repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => git_repository.checkpoint(cx).await, RepositoryState::Local(git_repository) => git_repository.checkpoint(cx).await,
GitRepo::Remote { .. } => Err(anyhow!("not implemented yet")), RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
} }
}) })
} }
@ -2984,11 +3259,147 @@ impl Repository {
) -> oneshot::Receiver<Result<()>> { ) -> oneshot::Receiver<Result<()>> {
self.send_job(move |repo, cx| async move { self.send_job(move |repo, cx| async move {
match repo { match repo {
GitRepo::Local(git_repository) => { RepositoryState::Local(git_repository) => {
git_repository.restore_checkpoint(checkpoint, cx).await git_repository.restore_checkpoint(checkpoint, cx).await
} }
GitRepo::Remote { .. } => Err(anyhow!("not implemented yet")), RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
} }
}) })
} }
} }
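// Illustrative, simplified sketch (not part of this commit): every job method on
// `Repository` above follows the same dispatch shape, running the operation directly
// against the local git backend or forwarding a proto request to the host when the
// repository belongs to a remote project. All names below are hypothetical stand-ins,
// not the real `RepositoryState` machinery.
#[allow(dead_code)]
mod repository_dispatch_sketch {
    enum State {
        Local { repo_name: String },
        Remote { project_id: u64 },
    }

    fn run_job(state: &State, operation: &str) -> String {
        match state {
            State::Local { repo_name } => {
                // Local: call into the git backend directly.
                format!("run `{operation}` against local repository {repo_name}")
            }
            State::Remote { project_id } => {
                // Remote: forward the request to the host over the collab connection.
                format!("send `{operation}` request for project {project_id} to the host")
            }
        }
    }
}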
fn get_permalink_in_rust_registry_src(
provider_registry: Arc<GitHostingProviderRegistry>,
path: PathBuf,
selection: Range<u32>,
) -> Result<url::Url> {
#[derive(Deserialize)]
struct CargoVcsGit {
sha1: String,
}
#[derive(Deserialize)]
struct CargoVcsInfo {
git: CargoVcsGit,
path_in_vcs: String,
}
#[derive(Deserialize)]
struct CargoPackage {
repository: String,
}
#[derive(Deserialize)]
struct CargoToml {
package: CargoPackage,
}
let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
Some((dir, json))
}) else {
bail!("No .cargo_vcs_info.json found in parent directories")
};
let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
.ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
let permalink = provider.build_permalink(
remote,
BuildPermalinkParams {
sha: &cargo_vcs_info.git.sha1,
path: &path.to_string_lossy(),
selection: Some(selection),
},
);
Ok(permalink)
}
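// Illustrative sketch (not part of this commit): the `.cargo_vcs_info.json` shape that
// `get_permalink_in_rust_registry_src` expects. The structs mirror the private ones
// defined inside the function above, and the literal values are made up for
// demonstration.
#[cfg(test)]
mod cargo_vcs_info_shape_sketch {
    #[test]
    fn parses_expected_shape() {
        #[derive(serde::Deserialize)]
        struct CargoVcsGit {
            sha1: String,
        }
        #[derive(serde::Deserialize)]
        struct CargoVcsInfo {
            git: CargoVcsGit,
            path_in_vcs: String,
        }

        let json = r#"{
            "git": { "sha1": "0000000000000000000000000000000000000000" },
            "path_in_vcs": "crates/example-crate"
        }"#;
        let info: CargoVcsInfo = serde_json::from_str(json).unwrap();
        assert_eq!(info.path_in_vcs, "crates/example-crate");
        assert_eq!(info.git.sha1.len(), 40);
    }
}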
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
let Some(blame) = blame else {
return proto::BlameBufferResponse {
blame_response: None,
};
};
let entries = blame
.entries
.into_iter()
.map(|entry| proto::BlameEntry {
sha: entry.sha.as_bytes().into(),
start_line: entry.range.start,
end_line: entry.range.end,
original_line_number: entry.original_line_number,
author: entry.author.clone(),
author_mail: entry.author_mail.clone(),
author_time: entry.author_time,
author_tz: entry.author_tz.clone(),
committer: entry.committer_name.clone(),
committer_mail: entry.committer_email.clone(),
committer_time: entry.committer_time,
committer_tz: entry.committer_tz.clone(),
summary: entry.summary.clone(),
previous: entry.previous.clone(),
filename: entry.filename.clone(),
})
.collect::<Vec<_>>();
let messages = blame
.messages
.into_iter()
.map(|(oid, message)| proto::CommitMessage {
oid: oid.as_bytes().into(),
message,
})
.collect::<Vec<_>>();
proto::BlameBufferResponse {
blame_response: Some(proto::blame_buffer_response::BlameResponse {
entries,
messages,
remote_url: blame.remote_url,
}),
}
}
fn deserialize_blame_buffer_response(
response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
let response = response.blame_response?;
let entries = response
.entries
.into_iter()
.filter_map(|entry| {
Some(git::blame::BlameEntry {
sha: git::Oid::from_bytes(&entry.sha).ok()?,
range: entry.start_line..entry.end_line,
original_line_number: entry.original_line_number,
committer_name: entry.committer,
committer_time: entry.committer_time,
committer_tz: entry.committer_tz,
committer_email: entry.committer_mail,
author: entry.author,
author_mail: entry.author_mail,
author_time: entry.author_time,
author_tz: entry.author_tz,
summary: entry.summary,
previous: entry.previous,
filename: entry.filename,
})
})
.collect::<Vec<_>>();
let messages = response
.messages
.into_iter()
.filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
.collect::<HashMap<_, _>>();
Some(Blame {
entries,
messages,
remote_url: response.remote_url,
})
}
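// Illustrative sketch (not part of this commit): when no blame is available, the proto
// round trip above preserves that absence — `serialize_blame_buffer_response(None)`
// produces an empty response, and deserializing it yields `None` again.
#[cfg(test)]
mod blame_proto_round_trip_sketch {
    use super::*;

    #[test]
    fn absent_blame_round_trips_to_none() {
        let response = serialize_blame_buffer_response(None);
        assert!(response.blame_response.is_none());
        assert!(deserialize_blame_buffer_response(response).is_none());
    }
}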


@ -0,0 +1,767 @@
use git::status::GitSummary;
use std::{ops::Deref, path::Path};
use sum_tree::Cursor;
use text::Bias;
use worktree::{Entry, PathProgress, PathTarget, RepositoryEntry, StatusEntry, Traversal};
/// Walks the worktree entries and their associated git statuses.
pub struct GitTraversal<'a> {
traversal: Traversal<'a>,
current_entry_summary: Option<GitSummary>,
repo_location: Option<(
&'a RepositoryEntry,
Cursor<'a, StatusEntry, PathProgress<'a>>,
)>,
}
impl<'a> GitTraversal<'a> {
pub fn new(traversal: Traversal<'a>) -> GitTraversal<'a> {
let mut this = GitTraversal {
traversal,
current_entry_summary: None,
repo_location: None,
};
this.synchronize_statuses(true);
this
}
fn synchronize_statuses(&mut self, reset: bool) {
self.current_entry_summary = None;
let Some(entry) = self.entry() else {
return;
};
let Some(repo) = self.traversal.snapshot().repository_for_path(&entry.path) else {
self.repo_location = None;
return;
};
// Update our state if we changed repositories.
if reset
|| self
.repo_location
.as_ref()
.map(|(prev_repo, _)| &prev_repo.work_directory)
!= Some(&repo.work_directory)
{
self.repo_location = Some((repo, repo.statuses_by_path.cursor::<PathProgress>(&())));
}
let Some((repo, statuses)) = &mut self.repo_location else {
return;
};
let repo_path = repo.relativize(&entry.path).unwrap();
if entry.is_dir() {
let mut statuses = statuses.clone();
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
let summary =
statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
self.current_entry_summary = Some(summary);
} else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
// TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else {
self.current_entry_summary = Some(GitSummary::UNCHANGED);
}
}
}
pub fn advance(&mut self) -> bool {
self.advance_by(1)
}
pub fn advance_by(&mut self, count: usize) -> bool {
let found = self.traversal.advance_by(count);
self.synchronize_statuses(false);
found
}
pub fn advance_to_sibling(&mut self) -> bool {
let found = self.traversal.advance_to_sibling();
self.synchronize_statuses(false);
found
}
pub fn back_to_parent(&mut self) -> bool {
let found = self.traversal.back_to_parent();
self.synchronize_statuses(true);
found
}
pub fn start_offset(&self) -> usize {
self.traversal.start_offset()
}
pub fn end_offset(&self) -> usize {
self.traversal.end_offset()
}
pub fn entry(&self) -> Option<GitEntryRef<'a>> {
let entry = self.traversal.entry()?;
let git_summary = self.current_entry_summary.unwrap_or(GitSummary::UNCHANGED);
Some(GitEntryRef { entry, git_summary })
}
}
impl<'a> Iterator for GitTraversal<'a> {
type Item = GitEntryRef<'a>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.entry() {
self.advance();
Some(item)
} else {
None
}
}
}
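// Illustrative usage sketch (not part of the original file): walk a worktree snapshot's
// files and directories together with their aggregated git statuses, the way the tests
// below drive `GitTraversal` directly.
#[allow(dead_code)]
fn print_statuses_sketch(snapshot: &worktree::Snapshot) {
    // Arguments: (include_files, include_dirs, include_ignored, start_path).
    let traversal =
        GitTraversal::new(snapshot.traverse_from_path(true, true, false, std::path::Path::new("")));
    for entry in traversal {
        // `GitEntryRef` derefs to `worktree::Entry`, so `path` comes from the entry itself.
        println!("{:?}: {:?}", entry.path, entry.git_summary);
    }
}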
pub struct ChildEntriesGitIter<'a> {
parent_path: &'a Path,
traversal: GitTraversal<'a>,
}
impl<'a> ChildEntriesGitIter<'a> {
pub fn new(snapshot: &'a worktree::Snapshot, parent_path: &'a Path) -> Self {
let mut traversal =
GitTraversal::new(snapshot.traverse_from_path(true, true, true, parent_path));
traversal.advance();
ChildEntriesGitIter {
parent_path,
traversal,
}
}
}
impl<'a> Iterator for ChildEntriesGitIter<'a> {
type Item = GitEntryRef<'a>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.traversal.entry() {
if item.path.starts_with(self.parent_path) {
self.traversal.advance_to_sibling();
return Some(item);
}
}
None
}
}
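// Illustrative usage sketch (not part of the original file): list a directory's
// immediate children along with their git summaries, e.g. for rendering a folder's
// contents in a panel.
#[allow(dead_code)]
fn child_statuses_sketch(
    snapshot: &worktree::Snapshot,
    parent: &std::path::Path,
) -> Vec<(std::path::PathBuf, GitSummary)> {
    ChildEntriesGitIter::new(snapshot, parent)
        .map(|child| (child.path.to_path_buf(), child.git_summary))
        .collect()
}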
#[derive(Debug, Clone, Copy)]
pub struct GitEntryRef<'a> {
pub entry: &'a Entry,
pub git_summary: GitSummary,
}
impl GitEntryRef<'_> {
pub fn to_owned(&self) -> GitEntry {
GitEntry {
entry: self.entry.clone(),
git_summary: self.git_summary,
}
}
}
impl Deref for GitEntryRef<'_> {
type Target = Entry;
fn deref(&self) -> &Self::Target {
&self.entry
}
}
impl AsRef<Entry> for GitEntryRef<'_> {
fn as_ref(&self) -> &Entry {
self.entry
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GitEntry {
pub entry: Entry,
pub git_summary: GitSummary,
}
impl GitEntry {
pub fn to_ref(&self) -> GitEntryRef {
GitEntryRef {
entry: &self.entry,
git_summary: self.git_summary,
}
}
}
impl Deref for GitEntry {
type Target = Entry;
fn deref(&self) -> &Self::Target {
&self.entry
}
}
impl AsRef<Entry> for GitEntry {
fn as_ref(&self) -> &Entry {
&self.entry
}
}
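// Illustrative sketch (not part of the original file): because `GitEntry` and
// `GitEntryRef` deref to `worktree::Entry`, ordinary entry data and the attached git
// summary can be read side by side without any unwrapping.
#[allow(dead_code)]
fn describe_entry_sketch(entry: &GitEntry) -> String {
    format!(
        "{:?} (dir: {}, status: {:?})",
        entry.path,       // from the inner `Entry`, via `Deref`
        entry.is_dir(),   // also from the inner `Entry`
        entry.git_summary // the extra field carried by `GitEntry`
    )
}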
#[cfg(test)]
mod tests {
use std::time::Duration;
use super::*;
use fs::FakeFs;
use git::status::{FileStatus, StatusCode, TrackedSummary, UnmergedStatus, UnmergedStatusCode};
use gpui::TestAppContext;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use util::path;
use worktree::{Worktree, WorktreeSettings};
const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
second_head: UnmergedStatusCode::Updated,
});
const ADDED: GitSummary = GitSummary {
index: TrackedSummary::ADDED,
count: 1,
..GitSummary::UNCHANGED
};
const MODIFIED: GitSummary = GitSummary {
index: TrackedSummary::MODIFIED,
count: 1,
..GitSummary::UNCHANGED
};
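// Illustrative note (not part of the original tests): `GitSummary` values aggregate
// with `+`, which is why the expected directory summaries below are written as sums
// such as `MODIFIED + ADDED`. The assertion assumes addition is componentwise (in
// particular, that `count` fields are summed), matching how directory rollups are
// checked in the tests that follow.
#[test]
fn git_summary_addition_sketch() {
    let summary = MODIFIED + ADDED;
    assert_eq!(summary.count, 2);
}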
#[gpui::test]
async fn test_git_traversal_with_one_repo(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar",
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z.txt": "sneaky..."
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[
(Path::new("x2.txt"), StatusCode::Modified.index()),
(Path::new("z.txt"), StatusCode::Added.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/x/y/.git")),
&[(Path::new("y1.txt"), CONFLICT)],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
let mut traversal =
GitTraversal::new(snapshot.traverse_from_path(true, false, true, Path::new("x")));
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt"));
assert_eq!(entry.git_summary, MODIFIED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt"));
assert_eq!(entry.git_summary, GitSummary::CONFLICT);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/z.txt"));
assert_eq!(entry.git_summary, ADDED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt"));
assert_eq!(entry.git_summary, ADDED);
}
#[gpui::test]
async fn test_git_traversal_with_nested_repos(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar",
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z.txt": "sneaky..."
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[
(Path::new("x2.txt"), StatusCode::Modified.index()),
(Path::new("z.txt"), StatusCode::Added.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/x/y/.git")),
&[(Path::new("y1.txt"), CONFLICT)],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
// Sanity check the propagation for x/y and z
check_git_statuses(
&snapshot,
&[
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("z"), ADDED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), ADDED),
],
);
// Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
check_git_statuses(
&snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
],
);
// Sanity check everything around it
check_git_statuses(
&snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
(Path::new("x/x2.txt"), MODIFIED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
(Path::new("x/z.txt"), ADDED),
],
);
// Test the other fundamental case, transitioning from git repository to non-git repository
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
],
);
// And all together now
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
(Path::new("x/x2.txt"), MODIFIED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
(Path::new("x/z.txt"), ADDED),
(Path::new("z"), ADDED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), ADDED),
],
);
}
#[gpui::test]
async fn test_git_traversal_simple(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
".git": {},
"a": {
"b": {
"c1.txt": "",
"c2.txt": "",
},
"d": {
"e1.txt": "",
"e2.txt": "",
"e3.txt": "",
}
},
"f": {
"no-status.txt": ""
},
"g": {
"h1.txt": "",
"h2.txt": ""
},
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/.git")),
&[
(Path::new("a/b/c1.txt"), StatusCode::Added.index()),
(Path::new("a/d/e2.txt"), StatusCode::Modified.index()),
(Path::new("g/h2.txt"), CONFLICT),
],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + MODIFIED + ADDED),
(Path::new("g"), GitSummary::CONFLICT),
(Path::new("g/h2.txt"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + ADDED + MODIFIED),
(Path::new("a"), ADDED + MODIFIED),
(Path::new("a/b"), ADDED),
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d"), MODIFIED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f"), GitSummary::UNCHANGED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
(Path::new("g"), GitSummary::CONFLICT),
(Path::new("g/h2.txt"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("a/b"), ADDED),
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d"), MODIFIED),
(Path::new("a/d/e1.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f"), GitSummary::UNCHANGED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
(Path::new("g"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e1.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
],
);
}
#[gpui::test]
async fn test_git_traversal_with_repos_under_project(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar"
},
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[(Path::new("x1.txt"), StatusCode::Added.index())],
);
fs.set_status_for_repo(
Path::new(path!("/root/y/.git")),
&[
(Path::new("y1.txt"), CONFLICT),
(Path::new("y2.txt"), StatusCode::Modified.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Modified.index())],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
check_git_statuses(
&snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
&snapshot,
&[
(Path::new("y"), GitSummary::CONFLICT + MODIFIED),
(Path::new("y/y1.txt"), GitSummary::CONFLICT),
(Path::new("y/y2.txt"), MODIFIED),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("z"), MODIFIED),
(Path::new("z/z2.txt"), MODIFIED),
],
);
check_git_statuses(
&snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
&snapshot,
&[
(Path::new("x"), ADDED),
(Path::new("x/x1.txt"), ADDED),
(Path::new("x/x2.txt"), GitSummary::UNCHANGED),
(Path::new("y"), GitSummary::CONFLICT + MODIFIED),
(Path::new("y/y1.txt"), GitSummary::CONFLICT),
(Path::new("y/y2.txt"), MODIFIED),
(Path::new("z"), MODIFIED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), MODIFIED),
],
);
}
fn init_test(cx: &mut gpui::TestAppContext) {
if std::env::var("RUST_LOG").is_ok() {
env_logger::try_init().ok();
}
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
WorktreeSettings::register(cx);
});
}
#[gpui::test]
async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) {
init_test(cx);
// Create a worktree with a git directory.
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
".git": {},
"a.txt": "",
"b": {
"c.txt": "",
},
}),
)
.await;
fs.set_head_and_index_for_repo(
path!("/root/.git").as_ref(),
&[("a.txt".into(), "".into()), ("b/c.txt".into(), "".into())],
);
cx.run_until_parked();
let tree = Worktree::local(
path!("/root").as_ref(),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let (old_entry_ids, old_mtimes) = tree.read_with(cx, |tree, _| {
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
)
});
// Regression test: after the directory is scanned, touch the git repo's
// working directory, bumping its mtime. That directory keeps its project
// entry id after the directories are re-scanned.
fs.touch_path(path!("/root")).await;
cx.executor().run_until_parked();
let (new_entry_ids, new_mtimes) = tree.read_with(cx, |tree, _| {
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
)
});
assert_eq!(new_entry_ids, old_entry_ids);
assert_ne!(new_mtimes, old_mtimes);
// Regression test: changes to the git repository should still be
// detected.
fs.set_head_for_repo(
path!("/root/.git").as_ref(),
&[
("a.txt".into(), "".into()),
("b/c.txt".into(), "something-else".into()),
],
);
cx.executor().run_until_parked();
cx.executor().advance_clock(Duration::from_secs(1));
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
check_git_statuses(
&snapshot,
&[
(Path::new(""), MODIFIED),
(Path::new("a.txt"), GitSummary::UNCHANGED),
(Path::new("b/c.txt"), MODIFIED),
],
);
}
#[track_caller]
fn check_git_statuses(
snapshot: &worktree::Snapshot,
expected_statuses: &[(&Path, GitSummary)],
) {
let mut traversal =
GitTraversal::new(snapshot.traverse_from_path(true, true, false, "".as_ref()));
let found_statuses = expected_statuses
.iter()
.map(|&(path, _)| {
let git_entry = traversal
.find(|git_entry| &*git_entry.path == path)
.unwrap_or_else(|| panic!("Traversal has no entry for {path:?}"));
(path, git_entry.git_summary)
})
.collect::<Vec<_>>();
assert_eq!(found_statuses, expected_statuses);
}
}


@ -3,7 +3,7 @@ mod color_extractor;
pub mod connection_manager; pub mod connection_manager;
pub mod debounced_delay; pub mod debounced_delay;
pub mod debugger; pub mod debugger;
pub mod git; pub mod git_store;
pub mod image_store; pub mod image_store;
pub mod lsp_command; pub mod lsp_command;
pub mod lsp_store; pub mod lsp_store;
@ -24,11 +24,12 @@ mod direnv;
mod environment; mod environment;
use buffer_diff::BufferDiff; use buffer_diff::BufferDiff;
pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent}; pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent};
use git::Repository; use git_store::Repository;
pub mod search_history; pub mod search_history;
mod yarn; mod yarn;
use crate::git::GitStore; use crate::git_store::GitStore;
pub use git_store::git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal};
use anyhow::{anyhow, Context as _, Result}; use anyhow::{anyhow, Context as _, Result};
use buffer_store::{BufferStore, BufferStoreEvent}; use buffer_store::{BufferStore, BufferStoreEvent};
@ -55,7 +56,7 @@ use futures::{
pub use image_store::{ImageItem, ImageStore}; pub use image_store::{ImageItem, ImageStore};
use image_store::{ImageItemEvent, ImageStoreEvent}; use image_store::{ImageItemEvent, ImageStoreEvent};
use ::git::{blame::Blame, repository::GitRepository, status::FileStatus}; use ::git::{blame::Blame, status::FileStatus};
use gpui::{ use gpui::{
AnyEntity, App, AppContext, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Hsla, AnyEntity, App, AppContext, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Hsla,
SharedString, Task, WeakEntity, Window, SharedString, Task, WeakEntity, Window,
@ -1768,8 +1769,9 @@ impl Project {
project_path: &ProjectPath, project_path: &ProjectPath,
cx: &App, cx: &App,
) -> Option<FileStatus> { ) -> Option<FileStatus> {
self.worktree_for_id(project_path.worktree_id, cx) self.git_store
.and_then(|worktree| worktree.read(cx).status_for_file(&project_path.path)) .read(cx)
.project_path_git_status(project_path, cx)
} }
pub fn visibility_for_paths( pub fn visibility_for_paths(
@ -4049,19 +4051,13 @@ impl Project {
) )
} }
pub fn get_first_worktree_root_repo(&self, cx: &App) -> Option<Arc<dyn GitRepository>> {
let worktree = self.visible_worktrees(cx).next()?.read(cx).as_local()?;
let root_entry = worktree.root_git_entry()?;
worktree.get_local_repo(&root_entry)?.repo().clone().into()
}
pub fn blame_buffer( pub fn blame_buffer(
&self, &self,
buffer: &Entity<Buffer>, buffer: &Entity<Buffer>,
version: Option<clock::Global>, version: Option<clock::Global>,
cx: &App, cx: &App,
) -> Task<Result<Option<Blame>>> { ) -> Task<Result<Option<Blame>>> {
self.buffer_store.read(cx).blame_buffer(buffer, version, cx) self.git_store.read(cx).blame_buffer(buffer, version, cx)
} }
pub fn get_permalink_to_line( pub fn get_permalink_to_line(
@ -4070,7 +4066,7 @@ impl Project {
selection: Range<u32>, selection: Range<u32>,
cx: &App, cx: &App,
) -> Task<Result<url::Url>> { ) -> Task<Result<url::Url>> {
self.buffer_store self.git_store
.read(cx) .read(cx)
.get_permalink_to_line(buffer, selection, cx) .get_permalink_to_line(buffer, selection, cx)
} }


@ -12,7 +12,6 @@ use futures::{
future::{BoxFuture, Shared}, future::{BoxFuture, Shared},
FutureExt, SinkExt, FutureExt, SinkExt,
}; };
use git::repository::Branch;
use gpui::{ use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity, App, AppContext as _, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity,
}; };
@ -134,14 +133,6 @@ impl WorktreeStore {
.find(|worktree| worktree.read(cx).id() == id) .find(|worktree| worktree.read(cx).id() == id)
} }
pub fn current_branch(&self, repository: ProjectPath, cx: &App) -> Option<Branch> {
self.worktree_for_id(repository.worktree_id, cx)?
.read(cx)
.git_entry(repository.path)?
.branch()
.cloned()
}
pub fn worktree_for_entry( pub fn worktree_for_entry(
&self, &self,
entry_id: ProjectEntryId, entry_id: ProjectEntryId,

File diff suppressed because it is too large.
 
File diff suppressed because it is too large.


@ -10,7 +10,7 @@ use node_runtime::NodeRuntime;
use project::{ use project::{
buffer_store::{BufferStore, BufferStoreEvent}, buffer_store::{BufferStore, BufferStoreEvent},
debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore}, debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
git::GitStore, git_store::GitStore,
project_settings::SettingsObserver, project_settings::SettingsObserver,
search::SearchQuery, search::SearchQuery,
task_store::TaskStore, task_store::TaskStore,


@ -1336,15 +1336,12 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
fs.insert_branches(Path::new(path!("/code/project1/.git")), &branches); fs.insert_branches(Path::new(path!("/code/project1/.git")), &branches);
let (worktree, _) = project let (_worktree, _) = project
.update(cx, |project, cx| { .update(cx, |project, cx| {
project.find_or_create_worktree(path!("/code/project1"), true, cx) project.find_or_create_worktree(path!("/code/project1"), true, cx)
}) })
.await .await
.unwrap(); .unwrap();
let worktree_id = cx.update(|cx| worktree.read(cx).id());
let root_path = ProjectPath::root_path(worktree_id);
// Give the worktree a bit of time to index the file system // Give the worktree a bit of time to index the file system
cx.run_until_parked(); cx.run_until_parked();
@ -1374,12 +1371,16 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
let server_branch = server_cx.update(|cx| { let server_branch = server_cx.update(|cx| {
headless_project.update(cx, |headless_project, cx| { headless_project.update(cx, |headless_project, cx| {
headless_project headless_project.git_store.update(cx, |git_store, cx| {
.worktree_store git_store
.update(cx, |worktree_store, cx| { .repositories()
worktree_store .values()
.current_branch(root_path.clone(), cx) .next()
.unwrap() .unwrap()
.read(cx)
.current_branch()
.unwrap()
.clone()
}) })
}) })
}); });
@ -1409,10 +1410,16 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
let server_branch = server_cx.update(|cx| { let server_branch = server_cx.update(|cx| {
headless_project.update(cx, |headless_project, cx| { headless_project.update(cx, |headless_project, cx| {
headless_project headless_project.git_store.update(cx, |git_store, cx| {
.worktree_store git_store
.update(cx, |worktree_store, cx| { .repositories()
worktree_store.current_branch(root_path, cx).unwrap() .values()
.next()
.unwrap()
.read(cx)
.current_branch()
.unwrap()
.clone()
}) })
}) })
}); });

View file

@ -513,21 +513,10 @@ impl TitleBar {
} }
pub fn render_project_branch(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> { pub fn render_project_branch(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> {
let entry = { let repository = self.project.read(cx).active_repository(cx)?;
let mut names_and_branches =
self.project.read(cx).visible_worktrees(cx).map(|worktree| {
let worktree = worktree.read(cx);
worktree.root_git_entry()
});
names_and_branches.next().flatten()
};
let workspace = self.workspace.upgrade()?; let workspace = self.workspace.upgrade()?;
let branch_name = entry let branch_name = repository.read(cx).current_branch()?.name.clone();
.as_ref() let branch_name = util::truncate_and_trailoff(&branch_name, MAX_BRANCH_NAME_LENGTH);
.and_then(|entry| entry.branch())
.map(|branch| branch.name.clone())
.map(|branch| util::truncate_and_trailoff(&branch, MAX_BRANCH_NAME_LENGTH))?;
Some( Some(
Button::new("project_branch_trigger", branch_name) Button::new("project_branch_trigger", branch_name)
.color(Color::Muted) .color(Color::Muted)


@ -66,9 +66,7 @@ use std::{
}, },
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use sum_tree::{ use sum_tree::{Bias, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit};
Bias, Cursor, Edit, KeyedItem, SeekTarget, SumTree, Summary, TreeMap, TreeSet, Unit,
};
use text::{LineEnding, Rope}; use text::{LineEnding, Rope};
use util::{ use util::{
paths::{home_dir, PathMatcher, SanitizedPath}, paths::{home_dir, PathMatcher, SanitizedPath},
@ -197,7 +195,7 @@ pub struct RepositoryEntry {
/// With this setup, this field would contain 2 entries, like so: /// With this setup, this field would contain 2 entries, like so:
/// - my_sub_folder_1/project_root/changed_file_1 /// - my_sub_folder_1/project_root/changed_file_1
/// - my_sub_folder_2/changed_file_2 /// - my_sub_folder_2/changed_file_2
pub(crate) statuses_by_path: SumTree<StatusEntry>, pub statuses_by_path: SumTree<StatusEntry>,
work_directory_id: ProjectEntryId, work_directory_id: ProjectEntryId,
pub work_directory: WorkDirectory, pub work_directory: WorkDirectory,
work_directory_abs_path: PathBuf, work_directory_abs_path: PathBuf,
@ -2700,6 +2698,7 @@ impl Snapshot {
Some(removed_entry.path) Some(removed_entry.path)
} }
#[cfg(any(test, feature = "test-support"))]
pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> { pub fn status_for_file(&self, path: impl AsRef<Path>) -> Option<FileStatus> {
let path = path.as_ref(); let path = path.as_ref();
self.repository_for_path(path).and_then(|repo| { self.repository_for_path(path).and_then(|repo| {
@ -2955,19 +2954,12 @@ impl Snapshot {
self.traverse_from_offset(true, true, include_ignored, start) self.traverse_from_offset(true, true, include_ignored, start)
} }
#[cfg(any(feature = "test-support", test))]
pub fn git_status(&self, work_dir: &Path) -> Option<Vec<StatusEntry>> {
self.repositories
.get(&PathKey(work_dir.into()), &())
.map(|repo| repo.status().collect())
}
pub fn repositories(&self) -> &SumTree<RepositoryEntry> { pub fn repositories(&self) -> &SumTree<RepositoryEntry> {
&self.repositories &self.repositories
} }
/// Get the repository whose work directory corresponds to the given path. /// Get the repository whose work directory corresponds to the given path.
pub(crate) fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> { fn repository(&self, work_directory: PathKey) -> Option<RepositoryEntry> {
self.repositories.get(&work_directory, &()).cloned() self.repositories.get(&work_directory, &()).cloned()
} }
@ -2982,13 +2974,14 @@ impl Snapshot {
/// Given an ordered iterator of entries, returns an iterator of those entries, /// Given an ordered iterator of entries, returns an iterator of those entries,
/// along with their containing git repository. /// along with their containing git repository.
#[cfg(test)]
#[track_caller] #[track_caller]
pub fn entries_with_repositories<'a>( fn entries_with_repositories<'a>(
&'a self, &'a self,
entries: impl 'a + Iterator<Item = &'a Entry>, entries: impl 'a + Iterator<Item = &'a Entry>,
) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> { ) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
let mut containing_repos = Vec::<&RepositoryEntry>::new(); let mut containing_repos = Vec::<&RepositoryEntry>::new();
let mut repositories = self.repositories().iter().peekable(); let mut repositories = self.repositories.iter().peekable();
entries.map(move |entry| { entries.map(move |entry| {
while let Some(repository) = containing_repos.last() { while let Some(repository) = containing_repos.last() {
if repository.directory_contains(&entry.path) { if repository.directory_contains(&entry.path) {
@ -3062,22 +3055,6 @@ impl Snapshot {
&self.root_name &self.root_name
} }
pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
self.repositories
.get(&PathKey(Path::new("").into()), &())
.map(|entry| entry.to_owned())
}
pub fn git_entry(&self, work_directory_path: Arc<Path>) -> Option<RepositoryEntry> {
self.repositories
.get(&PathKey(work_directory_path), &())
.map(|entry| entry.to_owned())
}
pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
self.repositories.iter()
}
pub fn scan_id(&self) -> usize { pub fn scan_id(&self) -> usize {
self.scan_id self.scan_id
} }
@ -4087,8 +4064,8 @@ impl TryFrom<proto::StatusEntry> for StatusEntry {
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct PathProgress<'a> { pub struct PathProgress<'a> {
max_path: &'a Path, pub max_path: &'a Path,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -6036,8 +6013,8 @@ impl WorktreeModelHandle for Entity<Worktree> {
let tree = self.clone(); let tree = self.clone();
let (fs, root_path, mut git_dir_scan_id) = self.update(cx, |tree, _| { let (fs, root_path, mut git_dir_scan_id) = self.update(cx, |tree, _| {
let tree = tree.as_local().unwrap(); let tree = tree.as_local().unwrap();
let root_entry = tree.root_git_entry().unwrap(); let repository = tree.repositories.first().unwrap();
let local_repo_entry = tree.get_local_repo(&root_entry).unwrap(); let local_repo_entry = tree.get_local_repo(&repository).unwrap();
( (
tree.fs.clone(), tree.fs.clone(),
local_repo_entry.dot_git_dir_abs_path.clone(), local_repo_entry.dot_git_dir_abs_path.clone(),
@ -6046,11 +6023,11 @@ impl WorktreeModelHandle for Entity<Worktree> {
}); });
let scan_id_increased = |tree: &mut Worktree, git_dir_scan_id: &mut usize| { let scan_id_increased = |tree: &mut Worktree, git_dir_scan_id: &mut usize| {
let root_entry = tree.root_git_entry().unwrap(); let repository = tree.repositories.first().unwrap();
let local_repo_entry = tree let local_repo_entry = tree
.as_local() .as_local()
.unwrap() .unwrap()
.get_local_repo(&root_entry) .get_local_repo(&repository)
.unwrap(); .unwrap();
if local_repo_entry.git_dir_scan_id > *git_dir_scan_id { if local_repo_entry.git_dir_scan_id > *git_dir_scan_id {
@ -6139,171 +6116,6 @@ impl Default for TraversalProgress<'_> {
} }
} }
#[derive(Debug, Clone, Copy)]
pub struct GitEntryRef<'a> {
pub entry: &'a Entry,
pub git_summary: GitSummary,
}
impl GitEntryRef<'_> {
pub fn to_owned(&self) -> GitEntry {
GitEntry {
entry: self.entry.clone(),
git_summary: self.git_summary,
}
}
}
impl Deref for GitEntryRef<'_> {
type Target = Entry;
fn deref(&self) -> &Self::Target {
&self.entry
}
}
impl AsRef<Entry> for GitEntryRef<'_> {
fn as_ref(&self) -> &Entry {
self.entry
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GitEntry {
pub entry: Entry,
pub git_summary: GitSummary,
}
impl GitEntry {
pub fn to_ref(&self) -> GitEntryRef {
GitEntryRef {
entry: &self.entry,
git_summary: self.git_summary,
}
}
}
impl Deref for GitEntry {
type Target = Entry;
fn deref(&self) -> &Self::Target {
&self.entry
}
}
impl AsRef<Entry> for GitEntry {
fn as_ref(&self) -> &Entry {
&self.entry
}
}
/// Walks the worktree entries and their associated git statuses.
pub struct GitTraversal<'a> {
traversal: Traversal<'a>,
current_entry_summary: Option<GitSummary>,
repo_location: Option<(
&'a RepositoryEntry,
Cursor<'a, StatusEntry, PathProgress<'a>>,
)>,
}
impl<'a> GitTraversal<'a> {
fn synchronize_statuses(&mut self, reset: bool) {
self.current_entry_summary = None;
let Some(entry) = self.traversal.cursor.item() else {
return;
};
let Some(repo) = self.traversal.snapshot.repository_for_path(&entry.path) else {
self.repo_location = None;
return;
};
// Update our state if we changed repositories.
if reset
|| self
.repo_location
.as_ref()
.map(|(prev_repo, _)| &prev_repo.work_directory)
!= Some(&repo.work_directory)
{
self.repo_location = Some((repo, repo.statuses_by_path.cursor::<PathProgress>(&())));
}
let Some((repo, statuses)) = &mut self.repo_location else {
return;
};
let repo_path = repo.relativize(&entry.path).unwrap();
if entry.is_dir() {
let mut statuses = statuses.clone();
statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &());
let summary =
statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left, &());
self.current_entry_summary = Some(summary);
} else if entry.is_file() {
// For a file entry, park the cursor on the corresponding status
if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left, &()) {
// TODO: Investigate statuses.item() being None here.
self.current_entry_summary = statuses.item().map(|item| item.status.into());
} else {
self.current_entry_summary = Some(GitSummary::UNCHANGED);
}
}
}
pub fn advance(&mut self) -> bool {
self.advance_by(1)
}
pub fn advance_by(&mut self, count: usize) -> bool {
let found = self.traversal.advance_by(count);
self.synchronize_statuses(false);
found
}
pub fn advance_to_sibling(&mut self) -> bool {
let found = self.traversal.advance_to_sibling();
self.synchronize_statuses(false);
found
}
pub fn back_to_parent(&mut self) -> bool {
let found = self.traversal.back_to_parent();
self.synchronize_statuses(true);
found
}
pub fn start_offset(&self) -> usize {
self.traversal.start_offset()
}
pub fn end_offset(&self) -> usize {
self.traversal.end_offset()
}
pub fn entry(&self) -> Option<GitEntryRef<'a>> {
let entry = self.traversal.cursor.item()?;
let git_summary = self.current_entry_summary.unwrap_or(GitSummary::UNCHANGED);
Some(GitEntryRef { entry, git_summary })
}
}
impl<'a> Iterator for GitTraversal<'a> {
type Item = GitEntryRef<'a>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.entry() {
self.advance();
Some(item)
} else {
None
}
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct Traversal<'a> { pub struct Traversal<'a> {
snapshot: &'a Snapshot, snapshot: &'a Snapshot,
@ -6336,16 +6148,6 @@ impl<'a> Traversal<'a> {
traversal traversal
} }
pub fn with_git_statuses(self) -> GitTraversal<'a> {
let mut this = GitTraversal {
traversal: self,
current_entry_summary: None,
repo_location: None,
};
this.synchronize_statuses(true);
this
}
pub fn advance(&mut self) -> bool { pub fn advance(&mut self) -> bool {
self.advance_by(1) self.advance_by(1)
} }
@ -6391,6 +6193,10 @@ impl<'a> Traversal<'a> {
self.cursor.item() self.cursor.item()
} }
pub fn snapshot(&self) -> &'a Snapshot {
self.snapshot
}
pub fn start_offset(&self) -> usize { pub fn start_offset(&self) -> usize {
self.cursor self.cursor
.start() .start()
@ -6418,7 +6224,7 @@ impl<'a> Iterator for Traversal<'a> {
} }
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
enum PathTarget<'a> { pub enum PathTarget<'a> {
Path(&'a Path), Path(&'a Path),
Successor(&'a Path), Successor(&'a Path),
} }
@ -6517,20 +6323,6 @@ pub struct ChildEntriesIter<'a> {
traversal: Traversal<'a>, traversal: Traversal<'a>,
} }
impl<'a> ChildEntriesIter<'a> {
pub fn with_git_statuses(self) -> ChildEntriesGitIter<'a> {
ChildEntriesGitIter {
parent_path: self.parent_path,
traversal: self.traversal.with_git_statuses(),
}
}
}
pub struct ChildEntriesGitIter<'a> {
parent_path: &'a Path,
traversal: GitTraversal<'a>,
}
impl<'a> Iterator for ChildEntriesIter<'a> { impl<'a> Iterator for ChildEntriesIter<'a> {
type Item = &'a Entry; type Item = &'a Entry;
@ -6545,20 +6337,6 @@ impl<'a> Iterator for ChildEntriesIter<'a> {
} }
} }
impl<'a> Iterator for ChildEntriesGitIter<'a> {
type Item = GitEntryRef<'a>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.traversal.entry() {
if item.path.starts_with(self.parent_path) {
self.traversal.advance_to_sibling();
return Some(item);
}
}
None
}
}
impl<'a> From<&'a Entry> for proto::Entry { impl<'a> From<&'a Entry> for proto::Entry {
fn from(entry: &'a Entry) -> Self { fn from(entry: &'a Entry) -> Self {
Self { Self {


@ -1,15 +1,12 @@
use crate::{ use crate::{
worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, Snapshot, worktree_settings::WorktreeSettings, Entry, EntryKind, Event, PathChange, WorkDirectory,
WorkDirectory, Worktree, WorktreeModelHandle, Worktree, WorktreeModelHandle,
}; };
use anyhow::Result; use anyhow::Result;
use fs::{FakeFs, Fs, RealFs, RemoveOptions}; use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::{ use git::{
repository::RepoPath, repository::RepoPath,
status::{ status::{FileStatus, StatusCode, TrackedStatus},
FileStatus, GitSummary, StatusCode, TrackedStatus, TrackedSummary, UnmergedStatus,
UnmergedStatusCode,
},
GITIGNORE, GITIGNORE,
}; };
use git2::RepositoryInitOptions; use git2::RepositoryInitOptions;
@ -27,7 +24,6 @@ use std::{
mem, mem,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
time::Duration,
}; };
use util::{path, test::TempTree, ResultExt}; use util::{path, test::TempTree, ResultExt};
@ -1472,86 +1468,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
); );
} }
#[gpui::test]
async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) {
init_test(cx);
// Create a worktree with a git directory.
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
".git": {},
"a.txt": "",
"b": {
"c.txt": "",
},
}),
)
.await;
fs.set_head_and_index_for_repo(
path!("/root/.git").as_ref(),
&[("a.txt".into(), "".into()), ("b/c.txt".into(), "".into())],
);
cx.run_until_parked();
let tree = Worktree::local(
path!("/root").as_ref(),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.executor().run_until_parked();
let (old_entry_ids, old_mtimes) = tree.read_with(cx, |tree, _| {
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
)
});
// Regression test: after the directory is scanned, touch the git repo's
// working directory, bumping its mtime. That directory keeps its project
// entry id after the directories are re-scanned.
fs.touch_path(path!("/root")).await;
cx.executor().run_until_parked();
let (new_entry_ids, new_mtimes) = tree.read_with(cx, |tree, _| {
(
tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
)
});
assert_eq!(new_entry_ids, old_entry_ids);
assert_ne!(new_mtimes, old_mtimes);
// Regression test: changes to the git repository should still be
// detected.
fs.set_head_for_repo(
path!("/root/.git").as_ref(),
&[
("a.txt".into(), "".into()),
("b/c.txt".into(), "something-else".into()),
],
);
cx.executor().run_until_parked();
cx.executor().advance_clock(Duration::from_secs(1));
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
check_git_statuses(
&snapshot,
&[
(Path::new(""), MODIFIED),
(Path::new("a.txt"), GitSummary::UNCHANGED),
(Path::new("b/c.txt"), MODIFIED),
],
);
}
#[gpui::test] #[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);
@ -2196,11 +2112,6 @@ fn random_filename(rng: &mut impl Rng) -> String {
.collect() .collect()
} }
const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
second_head: UnmergedStatusCode::Updated,
});
// NOTE: // NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename // This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open. // a directory which some program has already open.
@ -2244,7 +2155,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
cx.read(|cx| { cx.read(|cx| {
let tree = tree.read(cx); let tree = tree.read(cx);
let repo = tree.repositories().iter().next().unwrap(); let repo = tree.repositories.iter().next().unwrap();
assert_eq!( assert_eq!(
repo.work_directory, repo.work_directory,
WorkDirectory::in_project("projects/project1") WorkDirectory::in_project("projects/project1")
@ -2268,7 +2179,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
cx.read(|cx| { cx.read(|cx| {
let tree = tree.read(cx); let tree = tree.read(cx);
let repo = tree.repositories().iter().next().unwrap(); let repo = tree.repositories.iter().next().unwrap();
assert_eq!( assert_eq!(
repo.work_directory, repo.work_directory,
WorkDirectory::in_project("projects/project2") WorkDirectory::in_project("projects/project2")
@ -2529,8 +2440,8 @@ async fn test_file_status(cx: &mut TestAppContext) {
// Check that the right git state is observed on startup // Check that the right git state is observed on startup
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
assert_eq!(snapshot.repositories().iter().count(), 1); assert_eq!(snapshot.repositories.iter().count(), 1);
let repo_entry = snapshot.repositories().iter().next().unwrap(); let repo_entry = snapshot.repositories.iter().next().unwrap();
assert_eq!( assert_eq!(
repo_entry.work_directory, repo_entry.work_directory,
WorkDirectory::in_project("project") WorkDirectory::in_project("project")
@ -2705,7 +2616,7 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
// Check that the right git state is observed on startup // Check that the right git state is observed on startup
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
let repo = snapshot.repositories().iter().next().unwrap(); let repo = snapshot.repositories.iter().next().unwrap();
let entries = repo.status().collect::<Vec<_>>(); let entries = repo.status().collect::<Vec<_>>();
assert_eq!(entries.len(), 3); assert_eq!(entries.len(), 3);
@ -2727,7 +2638,7 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
let repository = snapshot.repositories().iter().next().unwrap(); let repository = snapshot.repositories.iter().next().unwrap();
let entries = repository.status().collect::<Vec<_>>(); let entries = repository.status().collect::<Vec<_>>();
std::assert_eq!(entries.len(), 4, "entries: {entries:?}"); std::assert_eq!(entries.len(), 4, "entries: {entries:?}");
@ -2760,7 +2671,7 @@ async fn test_git_repository_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
let repo = snapshot.repositories().iter().next().unwrap(); let repo = snapshot.repositories.iter().next().unwrap();
let entries = repo.status().collect::<Vec<_>>(); let entries = repo.status().collect::<Vec<_>>();
// Deleting an untracked entry, b.txt, should leave no status // Deleting an untracked entry, b.txt, should leave no status
@ -2814,7 +2725,7 @@ async fn test_git_status_postprocessing(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
let repo = snapshot.repositories().iter().next().unwrap(); let repo = snapshot.repositories.iter().next().unwrap();
let entries = repo.status().collect::<Vec<_>>(); let entries = repo.status().collect::<Vec<_>>();
// `sub` doesn't appear in our computed statuses. // `sub` doesn't appear in our computed statuses.
@ -2883,8 +2794,8 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
// Ensure that the git status is loaded correctly // Ensure that the git status is loaded correctly
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
assert_eq!(snapshot.repositories().iter().count(), 1); assert_eq!(snapshot.repositories.iter().count(), 1);
let repo = snapshot.repositories().iter().next().unwrap(); let repo = snapshot.repositories.iter().next().unwrap();
assert_eq!( assert_eq!(
repo.work_directory.canonicalize(), repo.work_directory.canonicalize(),
WorkDirectory::AboveProject { WorkDirectory::AboveProject {
@ -2913,442 +2824,13 @@ async fn test_repository_subfolder_git_status(cx: &mut TestAppContext) {
tree.read_with(cx, |tree, _cx| { tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot(); let snapshot = tree.snapshot();
assert!(snapshot.repositories().iter().next().is_some()); assert!(snapshot.repositories.iter().next().is_some());
assert_eq!(snapshot.status_for_file("c.txt"), None); assert_eq!(snapshot.status_for_file("c.txt"), None);
assert_eq!(snapshot.status_for_file("d/e.txt"), None); assert_eq!(snapshot.status_for_file("d/e.txt"), None);
}); });
} }
#[gpui::test]
async fn test_traverse_with_git_status(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar",
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z.txt": "sneaky..."
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[
(Path::new("x2.txt"), StatusCode::Modified.index()),
(Path::new("z.txt"), StatusCode::Added.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/x/y/.git")),
&[(Path::new("y1.txt"), CONFLICT)],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
let mut traversal = snapshot
.traverse_from_path(true, false, true, Path::new("x"))
.with_git_statuses();
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/x1.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/x2.txt"));
assert_eq!(entry.git_summary, MODIFIED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/y/y1.txt"));
assert_eq!(entry.git_summary, GitSummary::CONFLICT);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/y/y2.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("x/z.txt"));
assert_eq!(entry.git_summary, ADDED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("z/z1.txt"));
assert_eq!(entry.git_summary, GitSummary::UNCHANGED);
let entry = traversal.next().unwrap();
assert_eq!(entry.path.as_ref(), Path::new("z/z2.txt"));
assert_eq!(entry.git_summary, ADDED);
}
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
".git": {},
"a": {
"b": {
"c1.txt": "",
"c2.txt": "",
},
"d": {
"e1.txt": "",
"e2.txt": "",
"e3.txt": "",
}
},
"f": {
"no-status.txt": ""
},
"g": {
"h1.txt": "",
"h2.txt": ""
},
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/.git")),
&[
(Path::new("a/b/c1.txt"), StatusCode::Added.index()),
(Path::new("a/d/e2.txt"), StatusCode::Modified.index()),
(Path::new("g/h2.txt"), CONFLICT),
],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + MODIFIED + ADDED),
(Path::new("g"), GitSummary::CONFLICT),
(Path::new("g/h2.txt"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::CONFLICT + ADDED + MODIFIED),
(Path::new("a"), ADDED + MODIFIED),
(Path::new("a/b"), ADDED),
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d"), MODIFIED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f"), GitSummary::UNCHANGED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
(Path::new("g"), GitSummary::CONFLICT),
(Path::new("g/h2.txt"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("a/b"), ADDED),
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d"), MODIFIED),
(Path::new("a/d/e1.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f"), GitSummary::UNCHANGED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
(Path::new("g"), GitSummary::CONFLICT),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("a/b/c1.txt"), ADDED),
(Path::new("a/b/c2.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e1.txt"), GitSummary::UNCHANGED),
(Path::new("a/d/e2.txt"), MODIFIED),
(Path::new("f/no-status.txt"), GitSummary::UNCHANGED),
],
);
}
#[gpui::test]
async fn test_propagate_statuses_for_repos_under_project(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar"
},
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[(Path::new("x1.txt"), StatusCode::Added.index())],
);
fs.set_status_for_repo(
Path::new(path!("/root/y/.git")),
&[
(Path::new("y1.txt"), CONFLICT),
(Path::new("y2.txt"), StatusCode::Modified.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Modified.index())],
);
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
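// Statuses should stay scoped to the repository that owns them rather than leaking into
// sibling subtrees.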
check_git_statuses(
&snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
&snapshot,
&[
(Path::new("y"), GitSummary::CONFLICT + MODIFIED),
(Path::new("y/y1.txt"), GitSummary::CONFLICT),
(Path::new("y/y2.txt"), MODIFIED),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("z"), MODIFIED),
(Path::new("z/z2.txt"), MODIFIED),
],
);
check_git_statuses(
&snapshot,
&[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)],
);
check_git_statuses(
&snapshot,
&[
(Path::new("x"), ADDED),
(Path::new("x/x1.txt"), ADDED),
(Path::new("x/x2.txt"), GitSummary::UNCHANGED),
(Path::new("y"), GitSummary::CONFLICT + MODIFIED),
(Path::new("y/y1.txt"), GitSummary::CONFLICT),
(Path::new("y/y2.txt"), MODIFIED),
(Path::new("z"), MODIFIED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), MODIFIED),
],
);
}

#[gpui::test]
async fn test_propagate_statuses_for_nested_repos(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
path!("/root"),
json!({
"x": {
".git": {},
"x1.txt": "foo",
"x2.txt": "bar",
"y": {
".git": {},
"y1.txt": "baz",
"y2.txt": "qux"
},
"z.txt": "sneaky..."
},
"z": {
".git": {},
"z1.txt": "quux",
"z2.txt": "quuux"
}
}),
)
.await;
fs.set_status_for_repo(
Path::new(path!("/root/x/.git")),
&[
(Path::new("x2.txt"), StatusCode::Modified.index()),
(Path::new("z.txt"), StatusCode::Added.index()),
],
);
fs.set_status_for_repo(
Path::new(path!("/root/x/y/.git")),
&[(Path::new("y1.txt"), CONFLICT)],
);
fs.set_status_for_repo(
Path::new(path!("/root/z/.git")),
&[(Path::new("z2.txt"), StatusCode::Added.index())],
);
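// x's repository tracks x2.txt and the top-level z.txt, while the nested repository at x/y
// and the sibling repository at z carry their own entries.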
let tree = Worktree::local(
Path::new(path!("/root")),
true,
fs.clone(),
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
cx.executor().run_until_parked();
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
// Sanity check the propagation for x/y and z
check_git_statuses(
&snapshot,
&[
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
],
);
check_git_statuses(
&snapshot,
&[
(Path::new("z"), ADDED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), ADDED),
],
);
// Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
check_git_statuses(
&snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
],
);
// Sanity check everything around it
check_git_statuses(
&snapshot,
&[
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
(Path::new("x/x2.txt"), MODIFIED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
(Path::new("x/z.txt"), ADDED),
],
);
// Test the other fundamental case, transitioning from git repository to non-git repository
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
],
);
// And all together now
check_git_statuses(
&snapshot,
&[
(Path::new(""), GitSummary::UNCHANGED),
(Path::new("x"), MODIFIED + ADDED),
(Path::new("x/x1.txt"), GitSummary::UNCHANGED),
(Path::new("x/x2.txt"), MODIFIED),
(Path::new("x/y"), GitSummary::CONFLICT),
(Path::new("x/y/y1.txt"), GitSummary::CONFLICT),
(Path::new("x/y/y2.txt"), GitSummary::UNCHANGED),
(Path::new("x/z.txt"), ADDED),
(Path::new("z"), ADDED),
(Path::new("z/z1.txt"), GitSummary::UNCHANGED),
(Path::new("z/z2.txt"), ADDED),
],
);
}

#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut TestAppContext) {
    init_test(cx);

@@ -3403,7 +2885,7 @@ async fn test_conflicted_cherry_pick(cx: &mut TestAppContext) {
    );
    tree.flush_fs_events(cx).await;
    let conflicts = tree.update(cx, |tree, _| {
-        let entry = tree.git_entries().nth(0).expect("No git entry").clone();
+        let entry = tree.repositories.first().expect("No git entry").clone();
        entry
            .current_merge_conflicts
            .iter()

@@ -3420,7 +2902,7 @@ async fn test_conflicted_cherry_pick(cx: &mut TestAppContext) {
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = tree.update(cx, |tree, _| {
-        let entry = tree.git_entries().nth(0).expect("No git entry").clone();
+        let entry = tree.repositories.first().expect("No git entry").clone();
        entry
            .current_merge_conflicts
            .iter()

@@ -3490,34 +2972,6 @@ fn test_unrelativize() {
    );
}
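
// Walks the snapshot's git-status traversal and asserts that each listed path reports the
// expected GitSummary.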
#[track_caller]
fn check_git_statuses(snapshot: &Snapshot, expected_statuses: &[(&Path, GitSummary)]) {
let mut traversal = snapshot
.traverse_from_path(true, true, false, "".as_ref())
.with_git_statuses();
let found_statuses = expected_statuses
.iter()
.map(|&(path, _)| {
let git_entry = traversal
.find(|git_entry| &*git_entry.path == path)
.unwrap_or_else(|| panic!("Traversal has no entry for {path:?}"));
(path, git_entry.git_summary)
})
.collect::<Vec<_>>();
assert_eq!(found_statuses, expected_statuses);
}
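
// Single-entry summaries used as shorthand in the assertions above; GitSummary values combine
// with `+`, so e.g. GitSummary::CONFLICT + MODIFIED + ADDED describes a directory containing
// one conflicted, one modified, and one added entry.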
const ADDED: GitSummary = GitSummary {
index: TrackedSummary::ADDED,
count: 1,
..GitSummary::UNCHANGED
};

const MODIFIED: GitSummary = GitSummary {
index: TrackedSummary::MODIFIED,
count: 1,
..GitSummary::UNCHANGED
};

#[track_caller]
fn git_init(path: &Path) -> git2::Repository {
    let mut init_opts = RepositoryInitOptions::new();