Update proto names to reflect new status info

Mikayla Maki, 2023-05-15 13:40:55 -07:00
parent 1e4ab6cd75
commit 307dd2b83e
5 changed files with 88 additions and 92 deletions
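
In short, this commit drops the redundant worktree_ prefix from the git status names: proto::RepositoryEntry's removed_worktree_repo_paths and updated_worktree_statuses become removed_repo_paths and updated_statuses, the GitRepository trait methods worktree_statuses and worktree_status become statuses and status, RepositoryEntry::worktree_statuses becomes statuses, and reload_repo_for_path is renamed to reload_repo_for_file_path. The resulting message, as defined in the .proto hunk below, is:

message RepositoryEntry {
    uint64 work_directory_id = 1;
    optional string branch = 2;
    repeated string removed_repo_paths = 3;
    repeated StatusEntry updated_statuses = 4;
}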


@@ -1569,8 +1569,8 @@ impl Database {
                 worktree.updated_repositories.push(proto::RepositoryEntry {
                     work_directory_id: db_repository.work_directory_id as u64,
                     branch: db_repository.branch,
-                    removed_worktree_repo_paths: Default::default(),
-                    updated_worktree_statuses: Default::default(),
+                    removed_repo_paths: Default::default(),
+                    updated_statuses: Default::default(),
                 });
             }
         }
@@ -1607,15 +1607,13 @@ impl Database {
                 let db_status_entry = db_status_entry?;
                 if db_status_entry.is_deleted {
                     repository
-                        .removed_worktree_repo_paths
+                        .removed_repo_paths
                         .push(db_status_entry.repo_path);
                 } else {
-                    repository
-                        .updated_worktree_statuses
-                        .push(proto::StatusEntry {
-                            repo_path: db_status_entry.repo_path,
-                            status: db_status_entry.status as i32,
-                        });
+                    repository.updated_statuses.push(proto::StatusEntry {
+                        repo_path: db_status_entry.repo_path,
+                        status: db_status_entry.status as i32,
+                    });
                 }
             }
         }
@@ -2444,12 +2442,10 @@ impl Database {
             .await?;
 
         for repository in update.updated_repositories.iter() {
-            if !repository.updated_worktree_statuses.is_empty() {
+            if !repository.updated_statuses.is_empty() {
                 worktree_repository_statuses::Entity::insert_many(
-                    repository
-                        .updated_worktree_statuses
-                        .iter()
-                        .map(|status_entry| worktree_repository_statuses::ActiveModel {
+                    repository.updated_statuses.iter().map(|status_entry| {
+                        worktree_repository_statuses::ActiveModel {
                             project_id: ActiveValue::set(project_id),
                             worktree_id: ActiveValue::set(worktree_id),
                             work_directory_id: ActiveValue::set(
@@ -2459,7 +2455,8 @@ impl Database {
                             status: ActiveValue::set(status_entry.status as i64),
                             scan_id: ActiveValue::set(update.scan_id as i64),
                             is_deleted: ActiveValue::set(false),
-                        }),
+                        }
+                    }),
                 )
                 .on_conflict(
                     OnConflict::columns([
@@ -2479,7 +2476,7 @@ impl Database {
                 .await?;
             }
 
-            if !repository.removed_worktree_repo_paths.is_empty() {
+            if !repository.removed_repo_paths.is_empty() {
                 worktree_repository_statuses::Entity::update_many()
                     .filter(
                         worktree_repository_statuses::Column::ProjectId
@@ -2492,14 +2489,9 @@ impl Database {
                             worktree_repository_statuses::Column::WorkDirectoryId
                                 .eq(repository.work_directory_id as i64),
                         )
-                        .and(
-                            worktree_repository_statuses::Column::RepoPath.is_in(
-                                repository
-                                    .removed_worktree_repo_paths
-                                    .iter()
-                                    .map(String::as_str),
-                            ),
-                        ),
+                        .and(worktree_repository_statuses::Column::RepoPath.is_in(
+                            repository.removed_repo_paths.iter().map(String::as_str),
+                        )),
                     )
                     .set(worktree_repository_statuses::ActiveModel {
                         is_deleted: ActiveValue::Set(true),
@@ -2765,8 +2757,8 @@ impl Database {
                     proto::RepositoryEntry {
                         work_directory_id: db_repository_entry.work_directory_id as u64,
                         branch: db_repository_entry.branch,
-                        removed_worktree_repo_paths: Default::default(),
-                        updated_worktree_statuses: Default::default(),
+                        removed_repo_paths: Default::default(),
+                        updated_statuses: Default::default(),
                     },
                 );
             }
@@ -2791,12 +2783,10 @@ impl Database {
                     .repository_entries
                     .get_mut(&(db_status_entry.work_directory_id as u64))
                 {
-                    repository_entry
-                        .updated_worktree_statuses
-                        .push(proto::StatusEntry {
-                            repo_path: db_status_entry.repo_path,
-                            status: db_status_entry.status as i32,
-                        });
+                    repository_entry.updated_statuses.push(proto::StatusEntry {
+                        repo_path: db_status_entry.repo_path,
+                        status: db_status_entry.status as i32,
+                    });
                 }
             }
         }


@@ -22,9 +22,9 @@ pub trait GitRepository: Send {
     fn branch_name(&self) -> Option<String>;
 
-    fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
+    fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
 
-    fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus>;
+    fn status(&self, path: &RepoPath) -> Option<GitFileStatus>;
 }
 
 impl std::fmt::Debug for dyn GitRepository {
@@ -71,7 +71,7 @@ impl GitRepository for LibGitRepository {
         Some(branch.to_string())
     }
 
-    fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
+    fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
         let statuses = self.statuses(None).log_err()?;
 
         let mut map = TreeMap::default();
@@ -91,7 +91,7 @@ impl GitRepository for LibGitRepository {
         Some(map)
     }
 
-    fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+    fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
         let status = self.status_file(path).log_err()?;
         read_status(status)
     }
@@ -100,7 +100,12 @@ impl GitRepository for LibGitRepository {
 fn read_status(status: git2::Status) -> Option<GitFileStatus> {
     if status.contains(git2::Status::CONFLICTED) {
         Some(GitFileStatus::Conflict)
-    } else if status.intersects(git2::Status::WT_MODIFIED | git2::Status::WT_RENAMED | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_RENAMED) {
+    } else if status.intersects(
+        git2::Status::WT_MODIFIED
+            | git2::Status::WT_RENAMED
+            | git2::Status::INDEX_MODIFIED
+            | git2::Status::INDEX_RENAMED,
+    ) {
         Some(GitFileStatus::Modified)
     } else if status.intersects(git2::Status::WT_NEW | git2::Status::INDEX_NEW) {
         Some(GitFileStatus::Added)
@@ -141,7 +146,7 @@ impl GitRepository for FakeGitRepository {
         state.branch_name.clone()
     }
 
-    fn worktree_statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
+    fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>> {
         let state = self.state.lock();
         let mut map = TreeMap::default();
         for (repo_path, status) in state.worktree_statuses.iter() {
@@ -150,7 +155,7 @@ impl GitRepository for FakeGitRepository {
         Some(map)
     }
 
-    fn worktree_status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+    fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
         let state = self.state.lock();
         state.worktree_statuses.get(path).cloned()
     }


@@ -143,7 +143,7 @@ impl Snapshot {
 pub struct RepositoryEntry {
     pub(crate) work_directory: WorkDirectoryEntry,
     pub(crate) branch: Option<Arc<str>>,
-    pub(crate) worktree_statuses: TreeMap<RepoPath, GitFileStatus>,
+    pub(crate) statuses: TreeMap<RepoPath, GitFileStatus>,
 }
 
 fn read_git_status(git_status: i32) -> Option<GitFileStatus> {
@@ -176,7 +176,7 @@ impl RepositoryEntry {
     pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
         self.work_directory
             .relativize(snapshot, path)
-            .and_then(|repo_path| self.worktree_statuses.get(&repo_path))
+            .and_then(|repo_path| self.statuses.get(&repo_path))
             .cloned()
     }
 
@@ -184,7 +184,7 @@ impl RepositoryEntry {
         self.work_directory
             .relativize(snapshot, path)
             .and_then(|repo_path| {
-                self.worktree_statuses
+                self.statuses
                     .iter_from(&repo_path)
                     .take_while(|(key, _)| key.starts_with(&repo_path))
                     .map(|(path, status)| {
@@ -203,8 +203,8 @@ impl RepositoryEntry {
         let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
         let mut removed_statuses: Vec<String> = Vec::new();
 
-        let mut self_statuses = self.worktree_statuses.iter().peekable();
-        let mut other_statuses = other.worktree_statuses.iter().peekable();
+        let mut self_statuses = self.statuses.iter().peekable();
+        let mut other_statuses = other.statuses.iter().peekable();
         loop {
             match (self_statuses.peek(), other_statuses.peek()) {
                 (Some((self_repo_path, self_status)), Some((other_repo_path, other_status))) => {
@@ -243,8 +243,8 @@ impl RepositoryEntry {
         proto::RepositoryEntry {
             work_directory_id: self.work_directory_id().to_proto(),
             branch: self.branch.as_ref().map(|str| str.to_string()),
-            removed_worktree_repo_paths: removed_statuses,
-            updated_worktree_statuses: updated_statuses,
+            removed_repo_paths: removed_statuses,
+            updated_statuses: updated_statuses,
         }
     }
 }
@@ -269,12 +269,12 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry {
         proto::RepositoryEntry {
             work_directory_id: value.work_directory.to_proto(),
             branch: value.branch.as_ref().map(|str| str.to_string()),
-            updated_worktree_statuses: value
-                .worktree_statuses
+            updated_statuses: value
+                .statuses
                 .iter()
                 .map(|(repo_path, status)| make_status_entry(repo_path, status))
                 .collect(),
-            removed_worktree_repo_paths: Default::default(),
+            removed_repo_paths: Default::default(),
         }
     }
 }
@@ -1540,7 +1540,7 @@ impl Snapshot {
             if let Some(entry) = self.entry_for_id(*work_directory_entry) {
                 let mut statuses = TreeMap::default();
-                for status_entry in repository.updated_worktree_statuses {
+                for status_entry in repository.updated_statuses {
                     let Some(git_file_status) = read_git_status(status_entry.status) else {
                         continue;
                     };
@@ -1553,11 +1553,11 @@ impl Snapshot {
                 if self.repository_entries.get(&work_directory).is_some() {
                     self.repository_entries.update(&work_directory, |repo| {
                         repo.branch = repository.branch.map(Into::into);
-                        repo.worktree_statuses.insert_tree(statuses);
+                        repo.statuses.insert_tree(statuses);
 
-                        for repo_path in repository.removed_worktree_repo_paths {
+                        for repo_path in repository.removed_repo_paths {
                             let repo_path = RepoPath::new(repo_path.into());
-                            repo.worktree_statuses.remove(&repo_path);
+                            repo.statuses.remove(&repo_path);
                         }
                     });
                 } else {
@@ -1566,7 +1566,7 @@ impl Snapshot {
                         RepositoryEntry {
                             work_directory: work_directory_entry,
                             branch: repository.branch.map(Into::into),
-                            worktree_statuses: statuses,
+                            statuses,
                         },
                     )
                 }
@@ -1982,7 +1982,7 @@ impl LocalSnapshot {
                 RepositoryEntry {
                     work_directory: work_dir_id.into(),
                     branch: repo_lock.branch_name().map(Into::into),
-                    worktree_statuses: repo_lock.worktree_statuses().unwrap_or_default(),
+                    statuses: repo_lock.statuses().unwrap_or_default(),
                 },
             );
             drop(repo_lock);
@@ -2681,6 +2681,8 @@ impl BackgroundScanner {
         self.update_ignore_statuses().await;
 
+        //
+
         let mut snapshot = self.snapshot.lock();
         let mut git_repositories = mem::take(&mut snapshot.git_repositories);
@@ -2993,7 +2995,7 @@ impl BackgroundScanner {
                 fs_entry.is_ignored = ignore_stack.is_all();
                 snapshot.insert_entry(fs_entry, self.fs.as_ref());
 
-                self.reload_repo_for_path(&path, &mut snapshot, self.fs.as_ref());
+                self.reload_repo_for_file_path(&path, &mut snapshot, self.fs.as_ref());
 
                 if let Some(scan_queue_tx) = &scan_queue_tx {
                     let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
@@ -3042,7 +3044,7 @@ impl BackgroundScanner {
                 snapshot.repository_entries.update(&work_dir, |entry| {
                     entry
-                        .worktree_statuses
+                        .statuses
                         .remove_range(&repo_path, &RepoPathDescendants(&repo_path))
                 });
             }
 
@@ -3050,7 +3052,7 @@ impl BackgroundScanner {
         Some(())
     }
 
-    fn reload_repo_for_path(
+    fn reload_repo_for_file_path(
         &self,
         path: &Path,
         snapshot: &mut LocalSnapshot,
@@ -3084,7 +3086,7 @@ impl BackgroundScanner {
                 let repo = repo_ptr.lock();
                 repo.reload_index();
                 let branch = repo.branch_name();
-                let statuses = repo.worktree_statuses().unwrap_or_default();
+                let statuses = repo.statuses().unwrap_or_default();
 
                 snapshot.git_repositories.update(&entry_id, |entry| {
                     entry.scan_id = scan_id;
@@ -3093,7 +3095,7 @@ impl BackgroundScanner {
                 snapshot.repository_entries.update(&work_dir, |entry| {
                     entry.branch = branch.map(Into::into);
-                    entry.worktree_statuses = statuses;
+                    entry.statuses = statuses;
                 });
             } else {
                 if snapshot
@@ -3118,7 +3120,7 @@ impl BackgroundScanner {
                     }
 
                     let git_ptr = local_repo.repo_ptr.lock();
-                    git_ptr.worktree_status(&repo_path)
+                    git_ptr.status(&repo_path)
                 };
 
                 let work_dir = repo.work_directory(snapshot)?;
@@ -3130,9 +3132,9 @@ impl BackgroundScanner {
                 snapshot.repository_entries.update(&work_dir, |entry| {
                     if let Some(status) = status {
-                        entry.worktree_statuses.insert(repo_path, status);
+                        entry.statuses.insert(repo_path, status);
                     } else {
-                        entry.worktree_statuses.remove(&repo_path);
+                        entry.statuses.remove(&repo_path);
                     }
                 });
             }
 
@@ -4089,17 +4091,17 @@ mod tests {
            let (dir, repo) = snapshot.repository_entries.iter().next().unwrap();
            assert_eq!(dir.0.as_ref(), Path::new("project"));
 
-            assert_eq!(repo.worktree_statuses.iter().count(), 3);
+            assert_eq!(repo.statuses.iter().count(), 3);
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(A_TXT).into()),
+                repo.statuses.get(&Path::new(A_TXT).into()),
                Some(&GitFileStatus::Modified)
            );
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(B_TXT).into()),
+                repo.statuses.get(&Path::new(B_TXT).into()),
                Some(&GitFileStatus::Added)
            );
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(F_TXT).into()),
+                repo.statuses.get(&Path::new(F_TXT).into()),
                Some(&GitFileStatus::Added)
            );
        });
@@ -4114,11 +4116,11 @@ mod tests {
            let snapshot = tree.snapshot();
            let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
 
-            assert_eq!(repo.worktree_statuses.iter().count(), 1);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
+            assert_eq!(repo.statuses.iter().count(), 1);
+            assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
+            assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None);
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(F_TXT).into()),
+                repo.statuses.get(&Path::new(F_TXT).into()),
                Some(&GitFileStatus::Added)
            );
        });
@@ -4135,18 +4137,18 @@ mod tests {
            let snapshot = tree.snapshot();
            let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
 
-            assert_eq!(repo.worktree_statuses.iter().count(), 3);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
+            assert_eq!(repo.statuses.iter().count(), 3);
+            assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(B_TXT).into()),
+                repo.statuses.get(&Path::new(B_TXT).into()),
                Some(&GitFileStatus::Added)
            );
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(E_TXT).into()),
+                repo.statuses.get(&Path::new(E_TXT).into()),
                Some(&GitFileStatus::Modified)
            );
            assert_eq!(
-                repo.worktree_statuses.get(&Path::new(F_TXT).into()),
+                repo.statuses.get(&Path::new(F_TXT).into()),
                Some(&GitFileStatus::Added)
            );
        });
@@ -4169,11 +4171,11 @@ mod tests {
            let snapshot = tree.snapshot();
            let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
 
-            assert_eq!(repo.worktree_statuses.iter().count(), 0);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(A_TXT).into()), None);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(B_TXT).into()), None);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(E_TXT).into()), None);
-            assert_eq!(repo.worktree_statuses.get(&Path::new(F_TXT).into()), None);
+            assert_eq!(repo.statuses.iter().count(), 0);
+            assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
+            assert_eq!(repo.statuses.get(&Path::new(B_TXT).into()), None);
+            assert_eq!(repo.statuses.get(&Path::new(E_TXT).into()), None);
+            assert_eq!(repo.statuses.get(&Path::new(F_TXT).into()), None);
        });
    }


@@ -986,8 +986,8 @@ message Entry {
 message RepositoryEntry {
     uint64 work_directory_id = 1;
     optional string branch = 2;
-    repeated string removed_worktree_repo_paths = 3;
-    repeated StatusEntry updated_worktree_statuses = 4;
+    repeated string removed_repo_paths = 3;
+    repeated StatusEntry updated_statuses = 4;
 }
 
 message StatusEntry {


@@ -509,8 +509,8 @@ pub fn split_worktree_update(
                 updated_repositories.push(RepositoryEntry {
                     work_directory_id: repo.work_directory_id,
                     branch: repo.branch.clone(),
-                    removed_worktree_repo_paths: Default::default(),
-                    updated_worktree_statuses: Default::default(),
+                    removed_repo_paths: Default::default(),
+                    updated_statuses: Default::default(),
                 });
                 break;
             }
@@ -535,26 +535,25 @@ pub fn split_worktree_update(
                 {
                     let updated_statuses_chunk_size = cmp::min(
                         message.updated_repositories[repository_index]
-                            .updated_worktree_statuses
+                            .updated_statuses
                             .len(),
                         max_chunk_size - total_statuses,
                     );
 
                     let updated_statuses: Vec<_> = message.updated_repositories[repository_index]
-                        .updated_worktree_statuses
+                        .updated_statuses
                         .drain(..updated_statuses_chunk_size)
                         .collect();
 
                     total_statuses += updated_statuses.len();
 
                     let done_this_repo = message.updated_repositories[repository_index]
-                        .updated_worktree_statuses
+                        .updated_statuses
                         .is_empty();
 
                     let removed_repo_paths = if done_this_repo {
                         mem::take(
-                            &mut message.updated_repositories[repository_index]
-                                .removed_worktree_repo_paths,
+                            &mut message.updated_repositories[repository_index].removed_repo_paths,
                         )
                     } else {
                         Default::default()
@@ -566,8 +565,8 @@ pub fn split_worktree_update(
                         branch: message.updated_repositories[repository_index]
                             .branch
                             .clone(),
-                        updated_worktree_statuses: updated_statuses,
-                        removed_worktree_repo_paths: removed_repo_paths,
+                        updated_statuses,
+                        removed_repo_paths,
                     });
 
                     if done_this_repo {