Separate repository state synchronization from worktree synchronization (#27140)
This PR updates our DB schemas and wire protocol to separate the synchronization of git statuses and other repository state from the synchronization of worktrees. This paves the way for moving the code that executes git status updates out of the `worktree` crate and onto the new `GitStore`.

That end goal is motivated by two (related) points:

- Disentangling git status updates from the worktree's `BackgroundScanner` will allow us to implement a simpler concurrency story for those updates, hopefully fixing some known but elusive bugs (upstream state not updating after push; statuses getting out of sync in remote projects).
- By moving git repository state to the project-scoped `GitStore`, we can get rid of the duplication that currently happens when two worktrees are associated with the same git repository.

Release Notes:

- N/A

---------

Co-authored-by: Max <max@zed.dev>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
This commit is contained in:
parent 700af63c45
commit bc1c0a2297
21 changed files with 1147 additions and 535 deletions
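The excerpt below (from the collab server's database queries) adds dedicated `update_repository` and `remove_repository` handlers driven by the new repository-scoped wire messages. As a rough sketch of their shape, here are the `proto::UpdateRepository` and `proto::RemoveRepository` fields those handlers read, written out as plain Rust structs; the real definitions live in Zed's protobuf schema, and the concrete types (and the `Branch`/`StatusEntry` placeholders) are assumptions for illustration only.

```rust
// A minimal sketch, NOT Zed's actual protobuf-generated types: field names
// come from the `update_repository` / `remove_repository` handlers in this
// diff, while the concrete types and the placeholder structs are assumptions.

/// Hypothetical placeholder for the branch summary message.
pub struct Branch {
    pub name: String,
}

/// Hypothetical placeholder for a per-path status entry.
pub struct StatusEntry {
    pub repo_path: String,
    pub status: u32,
}

/// Repository state now travels independently of any worktree.
pub struct UpdateRepository {
    pub project_id: u64,
    pub id: u64,                        // repository id, keyed together with project_id
    pub abs_path: String,               // repository work directory
    pub entry_ids: Vec<u64>,            // project entries covered by this repository
    pub branch_summary: Option<Branch>, // stored as JSON in the branch_summary column
    pub updated_statuses: Vec<StatusEntry>,
    pub removed_statuses: Vec<String>,  // repo paths whose status entries were removed
    pub current_merge_conflicts: Vec<String>,
    pub scan_id: u64,
}

/// Removes a repository for the whole project.
pub struct RemoveRepository {
    pub project_id: u64,
    pub id: u64,
}
```

The key design point visible in the handlers: a repository row is now keyed by `(project_id, id)` rather than `(project_id, worktree_id, work_directory_id)`, with a `legacy_worktree_id` column kept only for backward compatibility with old clients.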
@@ -324,119 +324,135 @@ impl Database {
.await?;
}

if !update.updated_repositories.is_empty() {
worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
|repository| {
worktree_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
branch: ActiveValue::set(repository.branch.clone()),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
repository
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
)),
}
},
))
.on_conflict(
OnConflict::columns([
worktree_repository::Column::ProjectId,
worktree_repository::Column::WorktreeId,
worktree_repository::Column::WorkDirectoryId,
])
.update_columns([
worktree_repository::Column::ScanId,
worktree_repository::Column::Branch,
worktree_repository::Column::BranchSummary,
worktree_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
// Backward-compatibility for old Zed clients.
//
// Remove this block when Zed 1.80 stable has been out for a week.
{
if !update.updated_repositories.is_empty() {
project_repository::Entity::insert_many(
update.updated_repositories.iter().map(|repository| {
project_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
legacy_worktree_id: ActiveValue::set(Some(worktree_id)),
id: ActiveValue::set(repository.work_directory_id as i64),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
repository
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&repository.current_merge_conflicts)
.unwrap(),
)),

let has_any_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.updated_statuses.is_empty());

if has_any_statuses {
worktree_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
worktree_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
// Old clients do not use abs path or entry ids.
abs_path: ActiveValue::set(String::new()),
entry_ids: ActiveValue::set("[]".into()),
}
}),
)
.on_conflict(
OnConflict::columns([
worktree_repository_statuses::Column::ProjectId,
worktree_repository_statuses::Column::WorktreeId,
worktree_repository_statuses::Column::WorkDirectoryId,
worktree_repository_statuses::Column::RepoPath,
project_repository::Column::ProjectId,
project_repository::Column::Id,
])
.update_columns([
worktree_repository_statuses::Column::ScanId,
worktree_repository_statuses::Column::StatusKind,
worktree_repository_statuses::Column::FirstStatus,
worktree_repository_statuses::Column::SecondStatus,
project_repository::Column::ScanId,
project_repository::Column::BranchSummary,
project_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;

let has_any_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.updated_statuses.is_empty());

if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}

for repo in &update.updated_repositories {
if !repo.removed_statuses.is_empty() {
project_repository_statuses::Entity::update_many()
.filter(
project_repository_statuses::Column::ProjectId
.eq(project_id)
.and(
project_repository_statuses::Column::RepositoryId
.eq(repo.work_directory_id),
)
.and(
project_repository_statuses::Column::RepoPath
.is_in(repo.removed_statuses.iter()),
),
)
.set(project_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;
}
}
}

let has_any_removed_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.removed_statuses.is_empty());

if has_any_removed_statuses {
worktree_repository_statuses::Entity::update_many()
if !update.removed_repositories.is_empty() {
project_repository::Entity::update_many()
.filter(
worktree_repository_statuses::Column::ProjectId
project_repository::Column::ProjectId
.eq(project_id)
.and(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree_id),
)
.and(
worktree_repository_statuses::Column::RepoPath.is_in(
update.updated_repositories.iter().flat_map(|repository| {
repository.removed_statuses.iter()
}),
),
),
.and(project_repository::Column::LegacyWorktreeId.eq(worktree_id))
.and(project_repository::Column::Id.is_in(
update.removed_repositories.iter().map(|id| *id as i64),
)),
)
.set(worktree_repository_statuses::ActiveModel {
.set(project_repository::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
@@ -446,18 +462,109 @@ impl Database {
}
}

if !update.removed_repositories.is_empty() {
worktree_repository::Entity::update_many()
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
Ok(connection_ids)
})
.await
}

pub async fn update_repository(
&self,
update: &proto::UpdateRepository,
_connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let repository_id = update.id as i64;
self.project_transaction(project_id, |tx| async move {
project_repository::Entity::insert(project_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
id: ActiveValue::set(repository_id),
legacy_worktree_id: ActiveValue::set(None),
abs_path: ActiveValue::set(update.abs_path.clone()),
entry_ids: ActiveValue::Set(serde_json::to_string(&update.entry_ids).unwrap()),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
update
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&update.current_merge_conflicts).unwrap(),
)),
})
.on_conflict(
OnConflict::columns([
project_repository::Column::ProjectId,
project_repository::Column::Id,
])
.update_columns([
project_repository::Column::ScanId,
project_repository::Column::BranchSummary,
project_repository::Column::EntryIds,
project_repository::Column::AbsPath,
project_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;

let has_any_statuses = !update.updated_statuses.is_empty();

if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(repository_id),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
}),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}

let has_any_removed_statuses = !update.removed_statuses.is_empty();

if has_any_removed_statuses {
project_repository_statuses::Entity::update_many()
.filter(
worktree_repository::Column::ProjectId
project_repository_statuses::Column::ProjectId
.eq(project_id)
.and(worktree_repository::Column::WorktreeId.eq(worktree_id))
.and(
worktree_repository::Column::WorkDirectoryId
.is_in(update.removed_repositories.iter().map(|id| *id as i64)),
project_repository_statuses::Column::RepositoryId.eq(repository_id),
)
.and(
project_repository_statuses::Column::RepoPath
.is_in(update.removed_statuses.iter()),
),
)
.set(worktree_repository::ActiveModel {
.set(project_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
@@ -472,6 +579,34 @@ impl Database {
.await
}

pub async fn remove_repository(
&self,
remove: &proto::RemoveRepository,
_connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(remove.project_id);
let repository_id = remove.id as i64;
self.project_transaction(project_id, |tx| async move {
project_repository::Entity::update_many()
.filter(
project_repository::Column::ProjectId
.eq(project_id)
.and(project_repository::Column::Id.eq(repository_id)),
)
.set(project_repository::ActiveModel {
is_deleted: ActiveValue::Set(true),
// scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;

let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
Ok(connection_ids)
})
.await
}

/// Updates the diagnostic summary for the given connection.
pub async fn update_diagnostic_summary(
&self,
@@ -703,11 +838,11 @@ impl Database {
root_name: db_worktree.root_name,
visible: db_worktree.visible,
entries: Default::default(),
repository_entries: Default::default(),
diagnostic_summaries: Default::default(),
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
legacy_repository_entries: Default::default(),
},
)
})
@@ -750,65 +885,77 @@ impl Database {
}

// Populate repository entries.
let mut repositories = Vec::new();
{
let db_repository_entries = worktree_repository::Entity::find()
let db_repository_entries = project_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::IsDeleted.eq(false)),
.add(project_repository::Column::ProjectId.eq(project.id))
.add(project_repository::Column::IsDeleted.eq(false)),
)
.all(tx)
.await?;
for db_repository_entry in db_repository_entries {
if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
{
let mut repository_statuses = worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(worktree_repository_statuses::Column::ProjectId.eq(project.id))
.add(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree.id),
)
.add(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(db_repository_entry.work_directory_id),
)
.add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
)
.stream(tx)
.await?;
let mut updated_statuses = Vec::new();
while let Some(status_entry) = repository_statuses.next().await {
let status_entry: worktree_repository_statuses::Model = status_entry?;
updated_statuses.push(db_status_to_proto(status_entry)?);
let mut repository_statuses = project_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(project_repository_statuses::Column::ProjectId.eq(project.id))
.add(
project_repository_statuses::Column::RepositoryId
.eq(db_repository_entry.id),
)
.add(project_repository_statuses::Column::IsDeleted.eq(false)),
)
.stream(tx)
.await?;
let mut updated_statuses = Vec::new();
while let Some(status_entry) = repository_statuses.next().await {
let status_entry = status_entry?;
updated_statuses.push(db_status_to_proto(status_entry)?);
}

let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();

let branch_summary = db_repository_entry
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();

let entry_ids = serde_json::from_str(&db_repository_entry.entry_ids)
.context("failed to deserialize repository's entry ids")?;

if let Some(worktree_id) = db_repository_entry.legacy_worktree_id {
if let Some(worktree) = worktrees.get_mut(&(worktree_id as u64)) {
worktree.legacy_repository_entries.insert(
db_repository_entry.id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.id as u64,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
}

let current_merge_conflicts = db_repository_entry
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();

let branch_summary = db_repository_entry
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();

worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
} else {
repositories.push(proto::UpdateRepository {
project_id: db_repository_entry.project_id.0 as u64,
id: db_repository_entry.id as u64,
abs_path: db_repository_entry.abs_path,
entry_ids,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
scan_id: db_repository_entry.scan_id as u64,
});
}
}
}
@@ -871,6 +1018,7 @@ impl Database {
})
.collect(),
worktrees,
repositories,
language_servers: language_servers
.into_iter()
.map(|language_server| proto::LanguageServer {