Make FakeGitRepository behave more like a real git repository (#26961)

This PR reworks the `FakeGitRepository` type that we use for testing git
interactions, to make it more realistic. In particular, the `status`
method now derives the Git status from the differences between HEAD, the
index, and the working copy. This way, if you modify a file in the
`FakeFs`, the Git repository's `status` method will reflect that
modification.

Release Notes:

- N/A

---------

Co-authored-by: Junkui Zhang <364772080@qq.com>
This commit is contained in:
Max Brunsfeld 2025-03-19 09:04:27 -07:00 committed by GitHub
parent 5f398071b2
commit 74a39c7263
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 790 additions and 679 deletions

View file

@ -18,8 +18,8 @@ async-trait.workspace = true
collections.workspace = true
futures.workspace = true
git.workspace = true
git2.workspace = true
gpui.workspace = true
ignore.workspace = true
libc.workspace = true
log.workspace = true
parking_lot.workspace = true

View file

@ -0,0 +1,411 @@
use crate::FakeFs;
use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use futures::future::{self, BoxFuture};
use git::{
blame::Blame,
repository::{
AskPassSession, Branch, CommitDetails, GitRepository, PushOptions, Remote, RepoPath,
ResetMode,
},
status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
use gpui::{AsyncApp, BackgroundExecutor};
use ignore::gitignore::GitignoreBuilder;
use rope::Rope;
use smol::future::FutureExt as _;
use std::{path::PathBuf, sync::Arc};
/// An in-memory stand-in for a real git repository, used in tests.
/// It holds no git data itself: all repository state lives inside the
/// owning `FakeFs`, keyed by `dot_git_path`, so every clone of this
/// handle observes the same state.
#[derive(Clone)]
pub struct FakeGitRepository {
    // Filesystem that owns the repository state (accessed via `FakeFs::with_git_state`).
    pub(crate) fs: Arc<FakeFs>,
    // Used to simulate random scheduling delays in the async methods.
    pub(crate) executor: BackgroundExecutor,
    // Path of this repository's `.git` directory inside the fake filesystem.
    pub(crate) dot_git_path: PathBuf,
}
/// The mutable state backing a `FakeGitRepository`, stored in the `FakeFs`
/// entry for the repository's `.git` directory.
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
    // Path of the `.git` directory this state belongs to.
    pub path: PathBuf,
    // Channel on which git-change events are emitted.
    pub event_emitter: smol::channel::Sender<PathBuf>,
    // Paths with simulated merge conflicts; these are reported by `status`
    // ahead of any other classification.
    pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
    // File contents at HEAD, keyed by repo-relative path.
    pub head_contents: HashMap<RepoPath, String>,
    // File contents in the index (staging area), keyed by repo-relative path.
    pub index_contents: HashMap<RepoPath, String>,
    // Canned blame results returned by `blame`.
    pub blames: HashMap<RepoPath, Blame>,
    // Currently checked-out branch, if any.
    pub current_branch_name: Option<String>,
    // Set of all branch names known to the repository.
    pub branches: HashSet<String>,
    // When set, `set_index_text` fails with this message (for error-path tests).
    pub simulated_index_write_error_message: Option<String>,
}
impl FakeGitRepositoryState {
pub fn new(path: PathBuf, event_emitter: smol::channel::Sender<PathBuf>) -> Self {
FakeGitRepositoryState {
path,
event_emitter,
head_contents: Default::default(),
index_contents: Default::default(),
unmerged_paths: Default::default(),
blames: Default::default(),
current_branch_name: Default::default(),
branches: Default::default(),
simulated_index_write_error_message: Default::default(),
}
}
}
impl FakeGitRepository {
    /// Run `f` synchronously against this repository's shared state,
    /// without emitting a git event. Panics (inside `FakeFs::with_git_state`)
    /// if `dot_git_path` is missing or is not a directory in the fake fs.
    fn with_state<F, T>(&self, f: F) -> T
    where
        F: FnOnce(&mut FakeGitRepositoryState) -> T,
    {
        self.fs.with_git_state(&self.dot_git_path, false, f)
    }

    /// Like `with_state`, but returns a boxed future that first awaits a
    /// simulated random delay (to shake out ordering assumptions in tests).
    /// `write` controls whether a git event is emitted after `f` runs.
    fn with_state_async<F, T>(&self, write: bool, f: F) -> BoxFuture<T>
    where
        F: 'static + Send + FnOnce(&mut FakeGitRepositoryState) -> T,
        T: Send,
    {
        // Clone everything the future needs so it can be 'static.
        let fs = self.fs.clone();
        let executor = self.executor.clone();
        let dot_git_path = self.dot_git_path.clone();
        async move {
            executor.simulate_random_delay().await;
            fs.with_git_state(&dot_git_path, write, f)
        }
        .boxed()
    }
}
impl GitRepository for FakeGitRepository {
    /// No-op: the fake reads its state directly, so there is no on-disk
    /// index to reload.
    fn reload_index(&self) {}

    /// Return the staged (index) contents of `path`, if any.
    fn load_index_text(&self, path: RepoPath, _cx: AsyncApp) -> BoxFuture<Option<String>> {
        self.with_state_async(false, move |state| {
            state.index_contents.get(path.as_ref()).cloned()
        })
    }

    /// Return the contents of `path` at HEAD, if any.
    fn load_committed_text(&self, path: RepoPath, _cx: AsyncApp) -> BoxFuture<Option<String>> {
        self.with_state_async(false, move |state| {
            state.head_contents.get(path.as_ref()).cloned()
        })
    }

    /// Set (or, with `None`, remove) the index contents of `path`.
    /// Fails with `simulated_index_write_error_message` when that is set,
    /// allowing tests to exercise index-write error paths. Emits a git event.
    fn set_index_text(
        &self,
        path: RepoPath,
        content: Option<String>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<anyhow::Result<()>> {
        self.with_state_async(true, move |state| {
            if let Some(message) = state.simulated_index_write_error_message.clone() {
                return Err(anyhow!("{}", message));
            } else if let Some(content) = content {
                state.index_contents.insert(path, content);
            } else {
                state.index_contents.remove(&path);
            }
            Ok(())
        })
    }

    /// The fake repository has no remotes.
    fn remote_url(&self, _name: &str) -> Option<String> {
        None
    }

    /// The fake repository does not model commit SHAs.
    fn head_sha(&self) -> Option<String> {
        None
    }

    /// The fake repository does not model in-progress merges at the
    /// commit level, so there are never any merge heads.
    fn merge_head_shas(&self) -> Vec<String> {
        vec![]
    }

    /// Not supported by the fake repository; panics if called.
    fn show(&self, _commit: String, _cx: AsyncApp) -> BoxFuture<Result<CommitDetails>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn reset(
        &self,
        _commit: String,
        _mode: ResetMode,
        _env: HashMap<String, String>,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn checkout_files(
        &self,
        _commit: String,
        _paths: Vec<RepoPath>,
        _env: HashMap<String, String>,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    /// Path of the repository's `.git` directory.
    fn path(&self) -> PathBuf {
        self.with_state(|state| state.path.clone())
    }

    /// Same as [`Self::path`]; the fake makes no distinction between a
    /// repository and its "main" repository (no worktrees/submodules).
    fn main_repository_path(&self) -> PathBuf {
        self.path()
    }

    /// Compute the repository status by diffing HEAD, the index, and the
    /// working copy (the files currently present in the `FakeFs`), instead
    /// of returning canned statuses. Only paths starting with one of
    /// `path_prefixes` are reported (an empty prefix matches every path).
    fn status(&self, path_prefixes: &[RepoPath]) -> Result<GitStatus> {
        let workdir_path = self.dot_git_path.parent().unwrap();
        // Load gitignores
        // One compiled gitignore per ancestor directory of the work dir
        // that contains a `.gitignore` file.
        let ignores = workdir_path
            .ancestors()
            .filter_map(|dir| {
                let ignore_path = dir.join(".gitignore");
                let content = self.fs.read_file_sync(ignore_path).ok()?;
                let content = String::from_utf8(content).ok()?;
                let mut builder = GitignoreBuilder::new(dir);
                for line in content.lines() {
                    // If any line fails to parse, the whole file is skipped.
                    builder.add_line(Some(dir.into()), line).ok()?;
                }
                builder.build().ok()
            })
            .collect::<Vec<_>>();
        // Load working copy files.
        // Map of repo-relative path -> (contents, is_ignored).
        let git_files: HashMap<RepoPath, (String, bool)> = self
            .fs
            .files()
            .iter()
            .filter_map(|path| {
                let repo_path = path.strip_prefix(workdir_path).ok()?;
                let mut is_ignored = false;
                // Ignores are checked from the work dir outward; a whitelist
                // match stops the search, leaving any closer ignore in effect.
                for ignore in &ignores {
                    match ignore.matched_path_or_any_parents(path, false) {
                        ignore::Match::None => {}
                        ignore::Match::Ignore(_) => is_ignored = true,
                        ignore::Match::Whitelist(_) => break,
                    }
                }
                let content = self
                    .fs
                    .read_file_sync(path)
                    .ok()
                    .map(|content| String::from_utf8(content).unwrap())?;
                Some((repo_path.into(), (content, is_ignored)))
            })
            .collect();
        self.with_state(|state| {
            let mut entries = Vec::new();
            // Consider every path mentioned in HEAD, the index, or the
            // working copy (deduplicated via the HashSet).
            let paths = state
                .head_contents
                .keys()
                .chain(state.index_contents.keys())
                .chain(git_files.keys())
                .collect::<HashSet<_>>();
            for path in paths {
                if !path_prefixes.iter().any(|prefix| path.starts_with(prefix)) {
                    continue;
                }
                let head = state.head_contents.get(path);
                let index = state.index_contents.get(path);
                let unmerged = state.unmerged_paths.get(path);
                let fs = git_files.get(path);
                // Derive the status from the presence and equality of the
                // path's contents in HEAD / index / working copy.
                let status = match (unmerged, head, index, fs) {
                    // Simulated merge conflicts win over everything else.
                    (Some(unmerged), _, _, _) => FileStatus::Unmerged(*unmerged),
                    // Present everywhere: modified where contents differ.
                    (_, Some(head), Some(index), Some((fs, _))) => {
                        FileStatus::Tracked(TrackedStatus {
                            index_status: if head == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                            worktree_status: if fs == index {
                                StatusCode::Unmodified
                            } else {
                                StatusCode::Modified
                            },
                        })
                    }
                    // In HEAD and index, but missing on disk: deleted in the
                    // working copy.
                    (_, Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: if head == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                        worktree_status: StatusCode::Deleted,
                    }),
                    // In HEAD and on disk, but removed from the index.
                    (_, Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Added,
                    }),
                    // Only in HEAD: deleted in both index and working copy.
                    (_, Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Deleted,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Newly staged (not in HEAD).
                    (_, None, Some(index), Some((fs, _))) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: if fs == index {
                            StatusCode::Unmodified
                        } else {
                            StatusCode::Modified
                        },
                    }),
                    // Staged, then deleted from the working copy.
                    (_, None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Added,
                        worktree_status: StatusCode::Deleted,
                    }),
                    // Only on disk: untracked, unless gitignored.
                    (_, None, None, Some((_, is_ignored))) => {
                        if *is_ignored {
                            continue;
                        }
                        FileStatus::Untracked
                    }
                    // Every path came from one of the three maps above.
                    (_, None, None, None) => {
                        unreachable!();
                    }
                };
                // Fully unmodified entries are omitted, matching git's
                // behavior of not listing clean files.
                if status
                    != FileStatus::Tracked(TrackedStatus {
                        index_status: StatusCode::Unmodified,
                        worktree_status: StatusCode::Unmodified,
                    })
                {
                    entries.push((path.clone(), status));
                }
            }
            // Deterministic output order, sorted by path.
            entries.sort_by(|a, b| a.0.cmp(&b.0));
            Ok(GitStatus {
                entries: entries.into(),
            })
        })
    }

    /// List all branches, marking the current one (if any) as HEAD.
    /// Commit and upstream details are not modeled and are always `None`.
    fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
        self.with_state_async(false, move |state| {
            let current_branch = &state.current_branch_name;
            Ok(state
                .branches
                .iter()
                .map(|branch_name| Branch {
                    is_head: Some(branch_name) == current_branch.as_ref(),
                    name: branch_name.into(),
                    most_recent_commit: None,
                    upstream: None,
                })
                .collect())
        })
    }

    /// Switch the current branch to `name`. Note that the branch is not
    /// required to exist in `branches`. Emits a git event.
    fn change_branch(&self, name: String, _cx: AsyncApp) -> BoxFuture<Result<()>> {
        self.with_state_async(true, |state| {
            state.current_branch_name = Some(name);
            Ok(())
        })
    }

    /// Add `name` to the set of branches (without checking it out).
    /// Emits a git event.
    fn create_branch(&self, name: String, _: AsyncApp) -> BoxFuture<Result<()>> {
        self.with_state_async(true, move |state| {
            state.branches.insert(name.to_owned());
            Ok(())
        })
    }

    /// Return the canned blame previously stored for `path`, or an error
    /// if none was set. The file contents are ignored.
    fn blame(
        &self,
        path: RepoPath,
        _content: Rope,
        _cx: &mut AsyncApp,
    ) -> BoxFuture<Result<git::blame::Blame>> {
        self.with_state_async(false, move |state| {
            state
                .blames
                .get(&path)
                .with_context(|| format!("failed to get blame for {:?}", path.0))
                .cloned()
        })
    }

    /// Not supported by the fake repository; panics if called.
    fn stage_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn unstage_paths(
        &self,
        _paths: Vec<RepoPath>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn commit(
        &self,
        _message: gpui::SharedString,
        _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<()>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn push(
        &self,
        _branch: String,
        _remote: String,
        _options: Option<PushOptions>,
        _askpass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn pull(
        &self,
        _branch: String,
        _remote: String,
        _askpass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn fetch(
        &self,
        _askpass: AskPassSession,
        _env: HashMap<String, String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<git::repository::RemoteCommandOutput>> {
        unimplemented!()
    }

    /// Not supported by the fake repository; panics if called.
    fn get_remotes(
        &self,
        _branch: Option<String>,
        _cx: AsyncApp,
    ) -> BoxFuture<Result<Vec<Remote>>> {
        unimplemented!()
    }

    /// Always resolves to an empty list: the fake repository never has
    /// pushed commits to report.
    fn check_for_pushed_commit(
        &self,
        _cx: gpui::AsyncApp,
    ) -> BoxFuture<Result<Vec<gpui::SharedString>>> {
        future::ready(Ok(Vec::new())).boxed()
    }

    /// Not supported by the fake repository; panics if called.
    fn diff(
        &self,
        _diff: git::repository::DiffType,
        _cx: gpui::AsyncApp,
    ) -> BoxFuture<Result<String>> {
        unimplemented!()
    }
}

View file

@ -5,36 +5,23 @@ mod mac_watcher;
pub mod fs_watcher;
use anyhow::{anyhow, Context as _, Result};
#[cfg(any(test, feature = "test-support"))]
use collections::HashMap;
#[cfg(any(test, feature = "test-support"))]
use git::status::StatusCode;
#[cfg(any(test, feature = "test-support"))]
use git::status::TrackedStatus;
#[cfg(any(test, feature = "test-support"))]
use git::{repository::RepoPath, status::FileStatus};
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash;
use gpui::App;
use gpui::Global;
use gpui::ReadGlobal as _;
use std::borrow::Cow;
#[cfg(any(test, feature = "test-support"))]
use std::collections::HashSet;
#[cfg(unix)]
use std::os::fd::AsFd;
#[cfg(unix)]
use std::os::fd::AsRawFd;
use util::command::new_std_command;
#[cfg(unix)]
use std::os::unix::fs::MetadataExt;
use std::os::fd::{AsFd, AsRawFd};
#[cfg(unix)]
use std::os::unix::fs::FileTypeExt;
use std::os::unix::fs::{FileTypeExt, MetadataExt};
use async_tar::Archive;
use futures::{future::BoxFuture, AsyncRead, Stream, StreamExt};
use git::repository::{GitRepository, RealGitRepository};
use gpui::{App, Global, ReadGlobal};
use rope::Rope;
use serde::{Deserialize, Serialize};
use smol::io::AsyncWriteExt;
@ -47,12 +34,18 @@ use std::{
};
use tempfile::{NamedTempFile, TempDir};
use text::LineEnding;
use util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
mod fake_git_repo;
#[cfg(any(test, feature = "test-support"))]
use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
use git::FakeGitRepositoryState;
use fake_git_repo::FakeGitRepositoryState;
#[cfg(any(test, feature = "test-support"))]
use git::{
repository::RepoPath,
status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
#[cfg(any(test, feature = "test-support"))]
use parking_lot::Mutex;
#[cfg(any(test, feature = "test-support"))]
@ -708,7 +701,7 @@ impl Fs for RealFs {
Arc<dyn Watcher>,
) {
use parking_lot::Mutex;
use util::paths::SanitizedPath;
use util::{paths::SanitizedPath, ResultExt as _};
let (tx, rx) = smol::channel::unbounded();
let pending_paths: Arc<Mutex<Vec<PathEvent>>> = Default::default();
@ -758,14 +751,10 @@ impl Fs for RealFs {
}
fn open_repo(&self, dotgit_path: &Path) -> Option<Arc<dyn GitRepository>> {
// with libgit2, we can open git repo from an existing work dir
// https://libgit2.org/docs/reference/main/repository/git_repository_open.html
let workdir_root = dotgit_path.parent()?;
let repo = git2::Repository::open(workdir_root).log_err()?;
Some(Arc::new(RealGitRepository::new(
repo,
dotgit_path,
self.git_binary_path.clone(),
)))
)?))
}
fn git_init(&self, abs_work_directory_path: &Path, fallback_branch_name: String) -> Result<()> {
@ -885,7 +874,7 @@ enum FakeFsEntry {
mtime: MTime,
len: u64,
entries: BTreeMap<String, Arc<Mutex<FakeFsEntry>>>,
git_repo_state: Option<Arc<Mutex<git::FakeGitRepositoryState>>>,
git_repo_state: Option<Arc<Mutex<FakeGitRepositoryState>>>,
},
Symlink {
target: PathBuf,
@ -1254,9 +1243,9 @@ impl FakeFs {
.boxed()
}
pub fn with_git_state<F>(&self, dot_git: &Path, emit_git_event: bool, f: F)
pub fn with_git_state<T, F>(&self, dot_git: &Path, emit_git_event: bool, f: F) -> T
where
F: FnOnce(&mut FakeGitRepositoryState),
F: FnOnce(&mut FakeGitRepositoryState) -> T,
{
let mut state = self.state.lock();
let entry = state.read_path(dot_git).unwrap();
@ -1271,11 +1260,13 @@ impl FakeFs {
});
let mut repo_state = repo_state.lock();
f(&mut repo_state);
let result = f(&mut repo_state);
if emit_git_event {
state.emit_event([(dot_git, None)]);
}
result
} else {
panic!("not a directory");
}
@ -1302,6 +1293,21 @@ impl FakeFs {
})
}
/// Replace the set of unmerged (conflicted) paths for the repository at
/// `dot_git`, discarding any previously recorded unmerged entries.
/// Emits a git event so that observers are notified of the change.
pub fn set_unmerged_paths_for_repo(
    &self,
    dot_git: &Path,
    unmerged_state: &[(RepoPath, UnmergedStatus)],
) {
    self.with_git_state(dot_git, true, |state| {
        state.unmerged_paths.clear();
        state.unmerged_paths.extend(
            unmerged_state
                .iter()
                .map(|(path, content)| (path.clone(), *content)),
        );
    });
}
pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) {
self.with_git_state(dot_git, true, |state| {
state.index_contents.clear();
@ -1346,80 +1352,20 @@ impl FakeFs {
},
));
});
self.recalculate_git_status(dot_git);
}
pub fn recalculate_git_status(&self, dot_git: &Path) {
let git_files: HashMap<_, _> = self
.files()
.iter()
.filter_map(|path| {
let repo_path =
RepoPath::new(path.strip_prefix(dot_git.parent().unwrap()).ok()?.into());
let content = self
.read_file_sync(path)
.ok()
.map(|content| String::from_utf8(content).unwrap());
Some((repo_path, content?))
})
.collect();
self.with_git_state(dot_git, false, |state| {
state.statuses.clear();
let mut paths: HashSet<_> = state.head_contents.keys().collect();
paths.extend(state.index_contents.keys());
paths.extend(git_files.keys());
for path in paths {
let head = state.head_contents.get(path);
let index = state.index_contents.get(path);
let fs = git_files.get(path);
let status = match (head, index, fs) {
(Some(head), Some(index), Some(fs)) => FileStatus::Tracked(TrackedStatus {
index_status: if head == index {
StatusCode::Unmodified
} else {
StatusCode::Modified
},
worktree_status: if fs == index {
StatusCode::Unmodified
} else {
StatusCode::Modified
},
}),
(Some(head), Some(index), None) => FileStatus::Tracked(TrackedStatus {
index_status: if head == index {
StatusCode::Unmodified
} else {
StatusCode::Modified
},
worktree_status: StatusCode::Deleted,
}),
(Some(_), None, Some(_)) => FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Deleted,
worktree_status: StatusCode::Added,
}),
(Some(_), None, None) => FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Deleted,
worktree_status: StatusCode::Deleted,
}),
(None, Some(index), Some(fs)) => FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Added,
worktree_status: if fs == index {
StatusCode::Unmodified
} else {
StatusCode::Modified
},
}),
(None, Some(_), None) => FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Added,
worktree_status: StatusCode::Deleted,
}),
(None, None, Some(_)) => FileStatus::Untracked,
(None, None, None) => {
unreachable!();
}
};
state.statuses.insert(path.clone(), status);
}
pub fn set_head_and_index_for_repo(
&self,
dot_git: &Path,
contents_by_path: &[(RepoPath, String)],
) {
self.with_git_state(dot_git, true, |state| {
state.head_contents.clear();
state.index_contents.clear();
state.head_contents.extend(contents_by_path.iter().cloned());
state
.index_contents
.extend(contents_by_path.iter().cloned());
});
}
@ -1430,38 +1376,85 @@ impl FakeFs {
});
}
pub fn set_status_for_repo_via_working_copy_change(
&self,
dot_git: &Path,
statuses: &[(&Path, FileStatus)],
) {
self.with_git_state(dot_git, false, |state| {
state.statuses.clear();
state.statuses.extend(
statuses
.iter()
.map(|(path, content)| ((**path).into(), *content)),
);
});
self.state.lock().emit_event(
statuses
.iter()
.map(|(path, _)| (dot_git.parent().unwrap().join(path), None)),
);
}
pub fn set_status_for_repo_via_git_operation(
&self,
dot_git: &Path,
statuses: &[(&Path, FileStatus)],
) {
/// Put the given git repository into a state with the given status,
/// by mutating the head, index, and unmerged state.
pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, FileStatus)]) {
let workdir_path = dot_git.parent().unwrap();
let workdir_contents = self.files_with_contents(&workdir_path);
self.with_git_state(dot_git, true, |state| {
state.statuses.clear();
state.statuses.extend(
statuses
state.index_contents.clear();
state.head_contents.clear();
state.unmerged_paths.clear();
for (path, content) in workdir_contents {
let repo_path: RepoPath = path.strip_prefix(&workdir_path).unwrap().into();
let status = statuses
.iter()
.map(|(path, content)| ((**path).into(), *content)),
);
.find_map(|(p, status)| (**p == *repo_path.0).then_some(status));
let mut content = String::from_utf8_lossy(&content).to_string();
let mut index_content = None;
let mut head_content = None;
match status {
None => {
index_content = Some(content.clone());
head_content = Some(content);
}
Some(FileStatus::Untracked | FileStatus::Ignored) => {}
Some(FileStatus::Unmerged(unmerged_status)) => {
state
.unmerged_paths
.insert(repo_path.clone(), *unmerged_status);
content.push_str(" (unmerged)");
index_content = Some(content.clone());
head_content = Some(content);
}
Some(FileStatus::Tracked(TrackedStatus {
index_status,
worktree_status,
})) => {
match worktree_status {
StatusCode::Modified => {
let mut content = content.clone();
content.push_str(" (modified in working copy)");
index_content = Some(content);
}
StatusCode::TypeChanged | StatusCode::Unmodified => {
index_content = Some(content.clone());
}
StatusCode::Added => {}
StatusCode::Deleted | StatusCode::Renamed | StatusCode::Copied => {
panic!("cannot create these statuses for an existing file");
}
};
match index_status {
StatusCode::Modified => {
let mut content = index_content.clone().expect(
"file cannot be both modified in index and created in working copy",
);
content.push_str(" (modified in index)");
head_content = Some(content);
}
StatusCode::TypeChanged | StatusCode::Unmodified => {
head_content = Some(index_content.clone().expect("file cannot be both unmodified in index and created in working copy"));
}
StatusCode::Added => {}
StatusCode::Deleted => {
head_content = Some("".into());
}
StatusCode::Renamed | StatusCode::Copied => {
panic!("cannot create these statuses for an existing file");
}
};
}
};
if let Some(content) = index_content {
state.index_contents.insert(repo_path.clone(), content);
}
if let Some(content) = head_content {
state.head_contents.insert(repo_path.clone(), content);
}
}
});
}
@ -1541,6 +1534,32 @@ impl FakeFs {
result
}
/// Return the path and raw contents of every regular file under `prefix`,
/// found by a breadth-first traversal from the filesystem root. Symlinks
/// are skipped (not followed); directories are recursed into regardless of
/// `prefix`, since the prefix filter is applied per file.
pub fn files_with_contents(&self, prefix: &Path) -> Vec<(PathBuf, Vec<u8>)> {
    let mut result = Vec::new();
    let mut queue = collections::VecDeque::new();
    // Start at the fake fs root ("/" in platform-normalized form).
    queue.push_back((
        PathBuf::from(util::path!("/")),
        self.state.lock().root.clone(),
    ));
    while let Some((path, entry)) = queue.pop_front() {
        let e = entry.lock();
        match &*e {
            FakeFsEntry::File { content, .. } => {
                if path.starts_with(prefix) {
                    result.push((path, content.clone()));
                }
            }
            FakeFsEntry::Dir { entries, .. } => {
                for (name, entry) in entries {
                    queue.push_back((path.join(name), entry.clone()));
                }
            }
            FakeFsEntry::Symlink { .. } => {}
        }
    }
    result
}
/// How many `read_dir` calls have been issued.
pub fn read_dir_call_count(&self) -> usize {
self.state.lock().read_dir_call_count
@ -2087,15 +2106,17 @@ impl Fs for FakeFs {
let entry = state.read_path(abs_dot_git).unwrap();
let mut entry = entry.lock();
if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
let state = git_repo_state
.get_or_insert_with(|| {
Arc::new(Mutex::new(FakeGitRepositoryState::new(
abs_dot_git.to_path_buf(),
state.git_event_tx.clone(),
)))
})
.clone();
Some(git::FakeGitRepository::open(state))
git_repo_state.get_or_insert_with(|| {
Arc::new(Mutex::new(FakeGitRepositoryState::new(
abs_dot_git.to_path_buf(),
state.git_event_tx.clone(),
)))
});
Some(Arc::new(fake_git_repo::FakeGitRepository {
fs: self.this.upgrade().unwrap(),
executor: self.executor.clone(),
dot_git_path: abs_dot_git.to_path_buf(),
}))
} else {
None
}