Separate repository state synchronization from worktree synchronization (#27140)

This PR updates our DB schemas and wire protocol to separate the
synchronization of git statuses and other repository state from the
synchronization of worktrees. This paves the way for moving the code
that executes git status updates out of the `worktree` crate and onto
the new `GitStore`. That end goal is motivated by two (related) points:

- Disentangling git status updates from the worktree's
`BackgroundScanner` will allow us to implement a simpler concurrency
story for those updates, hopefully fixing some known but elusive bugs
(upstream state not updating after push; statuses getting out of sync in
remote projects).
- By moving git repository state to the project-scoped `GitStore`, we
can get rid of the duplication that currently happens when two worktrees
are associated with the same git repository.

Co-authored-by: Max <max@zed.dev>

Release Notes:

- N/A

---------

Co-authored-by: Max <max@zed.dev>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
This commit is contained in:
Cole Miller 2025-03-20 18:07:03 -04:00 committed by GitHub
parent 700af63c45
commit bc1c0a2297
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 1147 additions and 535 deletions

View file

@ -9,6 +9,7 @@ use anyhow::anyhow;
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
use project_repository_statuses::StatusKind;
use rand::{prelude::StdRng, Rng, SeedableRng};
use rpc::ExtensionProvides;
use rpc::{
@ -36,7 +37,6 @@ use std::{
};
use time::PrimitiveDateTime;
use tokio::sync::{Mutex, OwnedMutexGuard};
use worktree_repository_statuses::StatusKind;
use worktree_settings_file::LocalSettingsKind;
#[cfg(test)]
@ -658,6 +658,8 @@ pub struct RejoinedProject {
pub old_connection_id: ConnectionId,
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: Vec<RejoinedWorktree>,
pub updated_repositories: Vec<proto::UpdateRepository>,
pub removed_repositories: Vec<u64>,
pub language_servers: Vec<proto::LanguageServer>,
}
@ -726,6 +728,7 @@ pub struct Project {
pub role: ChannelRole,
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub repositories: Vec<proto::UpdateRepository>,
pub language_servers: Vec<proto::LanguageServer>,
}
@ -760,7 +763,7 @@ pub struct Worktree {
pub root_name: String,
pub visible: bool,
pub entries: Vec<proto::Entry>,
pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
pub legacy_repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub settings_files: Vec<WorktreeSettingsFile>,
pub scan_id: u64,
@ -810,7 +813,7 @@ impl LocalSettingsKind {
}
fn db_status_to_proto(
entry: worktree_repository_statuses::Model,
entry: project_repository_statuses::Model,
) -> anyhow::Result<proto::StatusEntry> {
use proto::git_file_status::{Tracked, Unmerged, Variant};

View file

@ -324,119 +324,135 @@ impl Database {
.await?;
}
if !update.updated_repositories.is_empty() {
worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
|repository| {
worktree_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
branch: ActiveValue::set(repository.branch.clone()),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
repository
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
)),
}
},
))
.on_conflict(
OnConflict::columns([
worktree_repository::Column::ProjectId,
worktree_repository::Column::WorktreeId,
worktree_repository::Column::WorkDirectoryId,
])
.update_columns([
worktree_repository::Column::ScanId,
worktree_repository::Column::Branch,
worktree_repository::Column::BranchSummary,
worktree_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
// Backward-compatibility for old Zed clients.
//
// Remove this block when Zed 1.80 stable has been out for a week.
{
if !update.updated_repositories.is_empty() {
project_repository::Entity::insert_many(
update.updated_repositories.iter().map(|repository| {
project_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
legacy_worktree_id: ActiveValue::set(Some(worktree_id)),
id: ActiveValue::set(repository.work_directory_id as i64),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
repository
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&repository.current_merge_conflicts)
.unwrap(),
)),
let has_any_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.updated_statuses.is_empty());
if has_any_statuses {
worktree_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
worktree_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
// Old clients do not use abs path or entry ids.
abs_path: ActiveValue::set(String::new()),
entry_ids: ActiveValue::set("[]".into()),
}
}),
)
.on_conflict(
OnConflict::columns([
worktree_repository_statuses::Column::ProjectId,
worktree_repository_statuses::Column::WorktreeId,
worktree_repository_statuses::Column::WorkDirectoryId,
worktree_repository_statuses::Column::RepoPath,
project_repository::Column::ProjectId,
project_repository::Column::Id,
])
.update_columns([
worktree_repository_statuses::Column::ScanId,
worktree_repository_statuses::Column::StatusKind,
worktree_repository_statuses::Column::FirstStatus,
worktree_repository_statuses::Column::SecondStatus,
project_repository::Column::ScanId,
project_repository::Column::BranchSummary,
project_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
let has_any_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.updated_statuses.is_empty());
if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}
for repo in &update.updated_repositories {
if !repo.removed_statuses.is_empty() {
project_repository_statuses::Entity::update_many()
.filter(
project_repository_statuses::Column::ProjectId
.eq(project_id)
.and(
project_repository_statuses::Column::RepositoryId
.eq(repo.work_directory_id),
)
.and(
project_repository_statuses::Column::RepoPath
.is_in(repo.removed_statuses.iter()),
),
)
.set(project_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;
}
}
}
let has_any_removed_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.removed_statuses.is_empty());
if has_any_removed_statuses {
worktree_repository_statuses::Entity::update_many()
if !update.removed_repositories.is_empty() {
project_repository::Entity::update_many()
.filter(
worktree_repository_statuses::Column::ProjectId
project_repository::Column::ProjectId
.eq(project_id)
.and(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree_id),
)
.and(
worktree_repository_statuses::Column::RepoPath.is_in(
update.updated_repositories.iter().flat_map(|repository| {
repository.removed_statuses.iter()
}),
),
),
.and(project_repository::Column::LegacyWorktreeId.eq(worktree_id))
.and(project_repository::Column::Id.is_in(
update.removed_repositories.iter().map(|id| *id as i64),
)),
)
.set(worktree_repository_statuses::ActiveModel {
.set(project_repository::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
@ -446,18 +462,109 @@ impl Database {
}
}
if !update.removed_repositories.is_empty() {
worktree_repository::Entity::update_many()
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
Ok(connection_ids)
})
.await
}
pub async fn update_repository(
&self,
update: &proto::UpdateRepository,
_connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let repository_id = update.id as i64;
self.project_transaction(project_id, |tx| async move {
project_repository::Entity::insert(project_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
id: ActiveValue::set(repository_id),
legacy_worktree_id: ActiveValue::set(None),
abs_path: ActiveValue::set(update.abs_path.clone()),
entry_ids: ActiveValue::Set(serde_json::to_string(&update.entry_ids).unwrap()),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
update
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&update.current_merge_conflicts).unwrap(),
)),
})
.on_conflict(
OnConflict::columns([
project_repository::Column::ProjectId,
project_repository::Column::Id,
])
.update_columns([
project_repository::Column::ScanId,
project_repository::Column::BranchSummary,
project_repository::Column::EntryIds,
project_repository::Column::AbsPath,
project_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
let has_any_statuses = !update.updated_statuses.is_empty();
if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(repository_id),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
}),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}
let has_any_removed_statuses = !update.removed_statuses.is_empty();
if has_any_removed_statuses {
project_repository_statuses::Entity::update_many()
.filter(
worktree_repository::Column::ProjectId
project_repository_statuses::Column::ProjectId
.eq(project_id)
.and(worktree_repository::Column::WorktreeId.eq(worktree_id))
.and(
worktree_repository::Column::WorkDirectoryId
.is_in(update.removed_repositories.iter().map(|id| *id as i64)),
project_repository_statuses::Column::RepositoryId.eq(repository_id),
)
.and(
project_repository_statuses::Column::RepoPath
.is_in(update.removed_statuses.iter()),
),
)
.set(worktree_repository::ActiveModel {
.set(project_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
@ -472,6 +579,34 @@ impl Database {
.await
}
/// Soft-deletes a repository row for the given project so that guests
/// (including ones that later rejoin) can observe the removal.
///
/// Returns the guest connection ids that should be notified, wrapped in the
/// project transaction guard.
///
/// Note: `proto::RemoveRepository` carries no scan id, so unlike
/// `update_repository` this does not bump `scan_id` on the row.
pub async fn remove_repository(
    &self,
    remove: &proto::RemoveRepository,
    _connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
    let project_id = ProjectId::from_proto(remove.project_id);
    let repository_id = remove.id as i64;
    self.project_transaction(project_id, |tx| async move {
        // Mark the row deleted rather than removing it, so rejoin logic can
        // report it in `removed_repositories`.
        project_repository::Entity::update_many()
            .filter(
                project_repository::Column::ProjectId
                    .eq(project_id)
                    .and(project_repository::Column::Id.eq(repository_id)),
            )
            .set(project_repository::ActiveModel {
                is_deleted: ActiveValue::Set(true),
                ..Default::default()
            })
            .exec(&*tx)
            .await?;
        let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
        Ok(connection_ids)
    })
    .await
}
/// Updates the diagnostic summary for the given connection.
pub async fn update_diagnostic_summary(
&self,
@ -703,11 +838,11 @@ impl Database {
root_name: db_worktree.root_name,
visible: db_worktree.visible,
entries: Default::default(),
repository_entries: Default::default(),
diagnostic_summaries: Default::default(),
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
legacy_repository_entries: Default::default(),
},
)
})
@ -750,65 +885,77 @@ impl Database {
}
// Populate repository entries.
let mut repositories = Vec::new();
{
let db_repository_entries = worktree_repository::Entity::find()
let db_repository_entries = project_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::IsDeleted.eq(false)),
.add(project_repository::Column::ProjectId.eq(project.id))
.add(project_repository::Column::IsDeleted.eq(false)),
)
.all(tx)
.await?;
for db_repository_entry in db_repository_entries {
if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
{
let mut repository_statuses = worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(worktree_repository_statuses::Column::ProjectId.eq(project.id))
.add(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree.id),
)
.add(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(db_repository_entry.work_directory_id),
)
.add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
)
.stream(tx)
.await?;
let mut updated_statuses = Vec::new();
while let Some(status_entry) = repository_statuses.next().await {
let status_entry: worktree_repository_statuses::Model = status_entry?;
updated_statuses.push(db_status_to_proto(status_entry)?);
let mut repository_statuses = project_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(project_repository_statuses::Column::ProjectId.eq(project.id))
.add(
project_repository_statuses::Column::RepositoryId
.eq(db_repository_entry.id),
)
.add(project_repository_statuses::Column::IsDeleted.eq(false)),
)
.stream(tx)
.await?;
let mut updated_statuses = Vec::new();
while let Some(status_entry) = repository_statuses.next().await {
let status_entry = status_entry?;
updated_statuses.push(db_status_to_proto(status_entry)?);
}
let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository_entry
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
let entry_ids = serde_json::from_str(&db_repository_entry.entry_ids)
.context("failed to deserialize repository's entry ids")?;
if let Some(worktree_id) = db_repository_entry.legacy_worktree_id {
if let Some(worktree) = worktrees.get_mut(&(worktree_id as u64)) {
worktree.legacy_repository_entries.insert(
db_repository_entry.id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.id as u64,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
}
let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository_entry
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
} else {
repositories.push(proto::UpdateRepository {
project_id: db_repository_entry.project_id.0 as u64,
id: db_repository_entry.id as u64,
abs_path: db_repository_entry.abs_path,
entry_ids,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
scan_id: db_repository_entry.scan_id as u64,
});
}
}
}
@ -871,6 +1018,7 @@ impl Database {
})
.collect(),
worktrees,
repositories,
language_servers: language_servers
.into_iter()
.map(|language_server| proto::LanguageServer {

View file

@ -1,3 +1,5 @@
use anyhow::Context as _;
use super::*;
impl Database {
@ -606,6 +608,11 @@ impl Database {
let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(tx).await?;
let db_repos = project
.find_related(project_repository::Entity)
.all(tx)
.await?;
for db_worktree in db_worktrees {
let mut worktree = RejoinedWorktree {
id: db_worktree.id as u64,
@ -673,96 +680,112 @@ impl Database {
}
}
// Repository Entries
{
let repository_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
worktrees.push(worktree);
}
let mut removed_repositories = Vec::new();
let mut updated_repositories = Vec::new();
for db_repo in db_repos {
let rejoined_repository = rejoined_project
.repositories
.iter()
.find(|repo| repo.id == db_repo.id as u64);
let repository_filter = if let Some(rejoined_repository) = rejoined_repository {
project_repository::Column::ScanId.gt(rejoined_repository.scan_id)
} else {
project_repository::Column::IsDeleted.eq(false)
};
let db_repositories = project_repository::Entity::find()
.filter(
Condition::all()
.add(project_repository::Column::ProjectId.eq(project.id))
.add(repository_filter),
)
.all(tx)
.await?;
for db_repository in db_repositories.into_iter() {
if db_repository.is_deleted {
removed_repositories.push(db_repository.id as u64);
} else {
worktree_repository::Column::IsDeleted.eq(false)
};
let db_repositories = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
.add(repository_entry_filter),
)
.all(tx)
.await?;
for db_repository in db_repositories.into_iter() {
if db_repository.is_deleted {
worktree
.removed_repositories
.push(db_repository.work_directory_id as u64);
let status_entry_filter = if let Some(rejoined_repository) = rejoined_repository
{
project_repository_statuses::Column::ScanId.gt(rejoined_repository.scan_id)
} else {
let status_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree
{
worktree_repository_statuses::Column::ScanId
.gt(rejoined_worktree.scan_id)
project_repository_statuses::Column::IsDeleted.eq(false)
};
let mut db_statuses = project_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(project_repository_statuses::Column::ProjectId.eq(project.id))
.add(
project_repository_statuses::Column::RepositoryId
.eq(db_repository.id),
)
.add(status_entry_filter),
)
.stream(tx)
.await?;
let mut removed_statuses = Vec::new();
let mut updated_statuses = Vec::new();
while let Some(db_status) = db_statuses.next().await {
let db_status: project_repository_statuses::Model = db_status?;
if db_status.is_deleted {
removed_statuses.push(db_status.repo_path);
} else {
worktree_repository_statuses::Column::IsDeleted.eq(false)
};
let mut db_statuses = worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(
worktree_repository_statuses::Column::ProjectId
.eq(project.id),
)
.add(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree.id),
)
.add(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(db_repository.work_directory_id),
)
.add(status_entry_filter),
)
.stream(tx)
.await?;
let mut removed_statuses = Vec::new();
let mut updated_statuses = Vec::new();
while let Some(db_status) = db_statuses.next().await {
let db_status: worktree_repository_statuses::Model = db_status?;
if db_status.is_deleted {
removed_statuses.push(db_status.repo_path);
} else {
updated_statuses.push(db_status_to_proto(db_status)?);
}
updated_statuses.push(db_status_to_proto(db_status)?);
}
}
let current_merge_conflicts = db_repository
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let current_merge_conflicts = db_repository
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
let entry_ids = serde_json::from_str(&db_repository.entry_ids)
.context("failed to deserialize repository's entry ids")?;
if let Some(legacy_worktree_id) = db_repository.legacy_worktree_id {
if let Some(worktree) = worktrees
.iter_mut()
.find(|worktree| worktree.id as i64 == legacy_worktree_id)
{
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.id as u64,
updated_statuses,
removed_statuses,
current_merge_conflicts,
branch_summary,
});
}
} else {
updated_repositories.push(proto::UpdateRepository {
entry_ids,
updated_statuses,
removed_statuses,
current_merge_conflicts,
branch_summary,
project_id: project_id.to_proto(),
id: db_repository.id as u64,
abs_path: db_repository.abs_path,
scan_id: db_repository.scan_id as u64,
});
}
}
}
worktrees.push(worktree);
}
let language_servers = project
@ -832,6 +855,8 @@ impl Database {
id: project_id,
old_connection_id,
collaborators,
updated_repositories,
removed_repositories,
worktrees,
language_servers,
}))

View file

@ -26,6 +26,8 @@ pub mod observed_channel_messages;
pub mod processed_stripe_event;
pub mod project;
pub mod project_collaborator;
pub mod project_repository;
pub mod project_repository_statuses;
pub mod rate_buckets;
pub mod room;
pub mod room_participant;
@ -36,6 +38,4 @@ pub mod user_feature;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;
pub mod worktree_repository;
pub mod worktree_repository_statuses;
pub mod worktree_settings_file;

View file

@ -45,6 +45,8 @@ pub enum Relation {
Room,
#[sea_orm(has_many = "super::worktree::Entity")]
Worktrees,
#[sea_orm(has_many = "super::project_repository::Entity")]
Repositories,
#[sea_orm(has_many = "super::project_collaborator::Entity")]
Collaborators,
#[sea_orm(has_many = "super::language_server::Entity")]
@ -69,6 +71,12 @@ impl Related<super::worktree::Entity> for Entity {
}
}
impl Related<super::project_repository::Entity> for Entity {
fn to() -> RelationDef {
Relation::Repositories.def()
}
}
impl Related<super::project_collaborator::Entity> for Entity {
fn to() -> RelationDef {
Relation::Collaborators.def()

View file

@ -2,16 +2,17 @@ use crate::db::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_repositories")]
#[sea_orm(table_name = "project_repositories")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub worktree_id: i64,
#[sea_orm(primary_key)]
pub work_directory_id: i64,
pub id: i64,
pub abs_path: String,
pub legacy_worktree_id: Option<i64>,
// JSON array containing 1 or more integer project entry ids
pub entry_ids: String,
pub scan_id: i64,
pub branch: Option<String>,
pub is_deleted: bool,
// JSON array typed string
pub current_merge_conflicts: Option<String>,
@ -20,6 +21,19 @@ pub struct Model {
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
// Relations for the `project_repositories` table: each repository row
// belongs to exactly one project.
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

// Allows `find_related(project_repository::Entity)` style queries from a
// project to its repositories.
impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

View file

@ -2,14 +2,12 @@ use crate::db::ProjectId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_repository_statuses")]
#[sea_orm(table_name = "project_repository_statuses")]
pub struct Model {
#[sea_orm(primary_key)]
pub project_id: ProjectId,
#[sea_orm(primary_key)]
pub worktree_id: i64,
#[sea_orm(primary_key)]
pub work_directory_id: i64,
pub repository_id: i64,
#[sea_orm(primary_key)]
pub repo_path: String,
/// Old single-code status field, no longer used but kept here to mirror the DB schema.

View file

@ -37,6 +37,7 @@ use core::fmt::{self, Debug, Formatter};
use http_client::HttpClient;
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
use reqwest_client::ReqwestClient;
use rpc::proto::split_repository_update;
use sha2::Digest;
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
@ -291,6 +292,8 @@ impl Server {
.add_message_handler(leave_project)
.add_request_handler(update_project)
.add_request_handler(update_worktree)
.add_request_handler(update_repository)
.add_request_handler(remove_repository)
.add_message_handler(start_language_server)
.add_message_handler(update_language_server)
.add_message_handler(update_diagnostic_summary)
@ -1464,7 +1467,7 @@ fn notify_rejoined_projects(
removed_repositories: worktree.removed_repositories,
};
for update in proto::split_worktree_update(message) {
session.peer.send(session.connection_id, update.clone())?;
session.peer.send(session.connection_id, update)?;
}
// Stream this worktree's diagnostics.
@ -1493,21 +1496,23 @@ fn notify_rejoined_projects(
}
}
for language_server in &project.language_servers {
for repository in mem::take(&mut project.updated_repositories) {
for update in split_repository_update(repository) {
session.peer.send(session.connection_id, update)?;
}
}
for id in mem::take(&mut project.removed_repositories) {
session.peer.send(
session.connection_id,
proto::UpdateLanguageServer {
proto::RemoveRepository {
project_id: project.id.to_proto(),
language_server_id: language_server.id,
variant: Some(
proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
proto::LspDiskBasedDiagnosticsUpdated {},
),
),
id,
},
)?;
}
}
Ok(())
}
@ -1893,7 +1898,7 @@ fn join_project_internal(
removed_entries: Default::default(),
scan_id: worktree.scan_id,
is_last_update: worktree.scan_id == worktree.completed_scan_id,
updated_repositories: worktree.repository_entries.into_values().collect(),
updated_repositories: worktree.legacy_repository_entries.into_values().collect(),
removed_repositories: Default::default(),
};
for update in proto::split_worktree_update(message) {
@ -1926,6 +1931,12 @@ fn join_project_internal(
}
}
for repository in mem::take(&mut project.repositories) {
for update in split_repository_update(repository) {
session.peer.send(session.connection_id, update)?;
}
}
for language_server in &project.language_servers {
session.peer.send(
session.connection_id,
@ -2018,6 +2029,54 @@ async fn update_worktree(
Ok(())
}
/// RPC handler for `UpdateRepository`: persists the repository state to the
/// database, then relays the update to every guest in the project other than
/// the sender, and finally acks the request.
async fn update_repository(
    request: proto::UpdateRepository,
    response: Response<proto::UpdateRepository>,
    session: Session,
) -> Result<()> {
    let guests = session
        .db()
        .await
        .update_repository(&request, session.connection_id)
        .await?;
    // Fan the update out to all guest connections except the originator.
    broadcast(
        Some(session.connection_id),
        guests.iter().copied(),
        |recipient| {
            session
                .peer
                .forward_send(session.connection_id, recipient, request.clone())
        },
    );
    response.send(proto::Ack {})?;
    Ok(())
}
/// RPC handler for `RemoveRepository`: marks the repository deleted in the
/// database, relays the removal to the project's other guests, and acks.
async fn remove_repository(
    request: proto::RemoveRepository,
    response: Response<proto::RemoveRepository>,
    session: Session,
) -> Result<()> {
    let guests = session
        .db()
        .await
        .remove_repository(&request, session.connection_id)
        .await?;
    // Forward the removal to everyone in the project except the sender.
    broadcast(
        Some(session.connection_id),
        guests.iter().copied(),
        |recipient| {
            session
                .peer
                .forward_send(session.connection_id, recipient, request.clone())
        },
    );
    response.send(proto::Ack {})?;
    Ok(())
}
/// Updates other participants with changes to the diagnostics
async fn update_diagnostic_summary(
message: proto::UpdateDiagnosticSummary,

View file

@ -2847,7 +2847,7 @@ async fn test_git_diff_base_change(
});
}
#[gpui::test]
#[gpui::test(iterations = 10)]
async fn test_git_branch_name(
executor: BackgroundExecutor,
cx_a: &mut TestAppContext,