Switch to attaching git statuses to their associated entries (#2571)
This rewrites and simplifies the git status system by attaching the git status to each individual entry. It also improves the git testing infrastructure to cover more cases and to more accurately reflect how file events actually occur, fixes several other bugs in the worktree and the buffer, and stops randomly generated test actions from happening inside a `.git` folder. Hopefully, we can undo that last restriction soon, once our randomized testing is more robust.

Release Notes:

- Will require a DB migration

TODO:

- [x] Pass randomized tests
- [x] Get ready for merging
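For context on what "attaching" means here: each worktree entry now carries its own status, and a directory's status is derived by merging its children's statuses, rather than the worktree holding a separate per-repository status map. Below is a minimal sketch of that shape (the `Entry` struct is an illustrative stand-in, not the exact worktree type; the merge rules mirror the `GitFileStatus::merge` helper added in this change):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GitFileStatus {
    Added,
    Modified,
    Conflict,
}

/// Simplified stand-in for a worktree entry: the git status now lives on the
/// entry itself instead of in a separate per-repository status map.
pub struct Entry {
    pub path: std::path::PathBuf,
    pub git_status: Option<GitFileStatus>,
}

impl GitFileStatus {
    /// Combine two child statuses into the status shown on a parent directory:
    /// a conflict wins over a modification, which wins over an addition.
    pub fn merge(this: Option<GitFileStatus>, other: Option<GitFileStatus>) -> Option<GitFileStatus> {
        match (this, other) {
            (Some(GitFileStatus::Conflict), _) | (_, Some(GitFileStatus::Conflict)) => {
                Some(GitFileStatus::Conflict)
            }
            (Some(GitFileStatus::Modified), _) | (_, Some(GitFileStatus::Modified)) => {
                Some(GitFileStatus::Modified)
            }
            (Some(GitFileStatus::Added), _) | (_, Some(GitFileStatus::Added)) => {
                Some(GitFileStatus::Added)
            }
            _ => None,
        }
    }
}

fn main() {
    // An individual file entry carries its status directly.
    let entry = Entry {
        path: std::path::PathBuf::from("src/main.rs"),
        git_status: Some(GitFileStatus::Modified),
    };
    assert_eq!(entry.git_status, Some(GitFileStatus::Modified));

    // A directory containing an added file and a modified file rolls up as modified.
    let children = [Some(GitFileStatus::Added), Some(GitFileStatus::Modified)];
    let dir_status = children
        .into_iter()
        .fold(None, |acc, child| GitFileStatus::merge(acc, child));
    assert_eq!(dir_status, Some(GitFileStatus::Modified));
}
```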
commit 8216d26a7a
17 changed files with 720 additions and 582 deletions
Cargo.lock (generated)
@@ -2440,6 +2440,7 @@ dependencies = [
  "parking_lot 0.11.2",
  "regex",
  "rope",
+ "rpc",
  "serde",
  "serde_derive",
  "serde_json",
@@ -66,6 +66,7 @@ impl<'a> AddAssign<&'a Local> for Local {
     }
 }
 
+/// A vector clock
 #[derive(Clone, Default, Hash, Eq, PartialEq)]
 pub struct Global(SmallVec<[u32; 8]>);
 
@@ -76,6 +76,7 @@ CREATE TABLE "worktree_entries" (
     "is_symlink" BOOL NOT NULL,
     "is_ignored" BOOL NOT NULL,
     "is_deleted" BOOL NOT NULL,
+    "git_status" INTEGER,
     PRIMARY KEY(project_id, worktree_id, id),
     FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
 );
@@ -96,22 +97,6 @@ CREATE TABLE "worktree_repositories" (
 CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
 CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
 
-CREATE TABLE "worktree_repository_statuses" (
-    "project_id" INTEGER NOT NULL,
-    "worktree_id" INTEGER NOT NULL,
-    "work_directory_id" INTEGER NOT NULL,
-    "repo_path" VARCHAR NOT NULL,
-    "status" INTEGER NOT NULL,
-    "scan_id" INTEGER NOT NULL,
-    "is_deleted" BOOL NOT NULL,
-    PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
-    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
-    FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_repository_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
-CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
-CREATE INDEX "index_worktree_repository_statuses_on_project_id_and_worktree_id_and_work_directory_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
-
 CREATE TABLE "worktree_settings_files" (
     "project_id" INTEGER NOT NULL,
     "worktree_id" INTEGER NOT NULL,
@@ -0,0 +1,4 @@
+DROP TABLE "worktree_repository_statuses";
+
+ALTER TABLE "worktree_entries"
+ADD "git_status" INT8;
@@ -1539,6 +1539,7 @@ impl Database {
             }),
             is_symlink: db_entry.is_symlink,
             is_ignored: db_entry.is_ignored,
+            git_status: db_entry.git_status.map(|status| status as i32),
         });
     }
 }
@@ -1573,54 +1574,6 @@ impl Database {
             worktree.updated_repositories.push(proto::RepositoryEntry {
                 work_directory_id: db_repository.work_directory_id as u64,
                 branch: db_repository.branch,
-                removed_repo_paths: Default::default(),
-                updated_statuses: Default::default(),
-            });
-        }
-    }
-}
-
-// Repository Status Entries
-for repository in worktree.updated_repositories.iter_mut() {
-    let repository_status_entry_filter =
-        if let Some(rejoined_worktree) = rejoined_worktree {
-            worktree_repository_statuses::Column::ScanId
-                .gt(rejoined_worktree.scan_id)
-        } else {
-            worktree_repository_statuses::Column::IsDeleted.eq(false)
-        };
-
-    let mut db_repository_statuses =
-        worktree_repository_statuses::Entity::find()
-            .filter(
-                Condition::all()
-                    .add(
-                        worktree_repository_statuses::Column::ProjectId
-                            .eq(project.id),
-                    )
-                    .add(
-                        worktree_repository_statuses::Column::WorktreeId
-                            .eq(worktree.id),
-                    )
-                    .add(
-                        worktree_repository_statuses::Column::WorkDirectoryId
-                            .eq(repository.work_directory_id),
-                    )
-                    .add(repository_status_entry_filter),
-            )
-            .stream(&*tx)
-            .await?;
-
-    while let Some(db_status_entry) = db_repository_statuses.next().await {
-        let db_status_entry = db_status_entry?;
-        if db_status_entry.is_deleted {
-            repository
-                .removed_repo_paths
-                .push(db_status_entry.repo_path);
-        } else {
-            repository.updated_statuses.push(proto::StatusEntry {
-                repo_path: db_status_entry.repo_path,
-                status: db_status_entry.status as i32,
             });
         }
     }
@@ -2396,6 +2349,7 @@ impl Database {
     mtime_nanos: ActiveValue::set(mtime.nanos as i32),
     is_symlink: ActiveValue::set(entry.is_symlink),
     is_ignored: ActiveValue::set(entry.is_ignored),
+    git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
     is_deleted: ActiveValue::set(false),
     scan_id: ActiveValue::set(update.scan_id as i64),
 }
@@ -2414,6 +2368,7 @@ impl Database {
     worktree_entry::Column::MtimeNanos,
     worktree_entry::Column::IsSymlink,
     worktree_entry::Column::IsIgnored,
+    worktree_entry::Column::GitStatus,
     worktree_entry::Column::ScanId,
 ])
 .to_owned(),
@@ -2467,68 +2422,6 @@ impl Database {
             )
             .exec(&*tx)
             .await?;
-
-        for repository in update.updated_repositories.iter() {
-            if !repository.updated_statuses.is_empty() {
-                worktree_repository_statuses::Entity::insert_many(
-                    repository.updated_statuses.iter().map(|status_entry| {
-                        worktree_repository_statuses::ActiveModel {
-                            project_id: ActiveValue::set(project_id),
-                            worktree_id: ActiveValue::set(worktree_id),
-                            work_directory_id: ActiveValue::set(
-                                repository.work_directory_id as i64,
-                            ),
-                            repo_path: ActiveValue::set(status_entry.repo_path.clone()),
-                            status: ActiveValue::set(status_entry.status as i64),
-                            scan_id: ActiveValue::set(update.scan_id as i64),
-                            is_deleted: ActiveValue::set(false),
-                        }
-                    }),
-                )
-                .on_conflict(
-                    OnConflict::columns([
-                        worktree_repository_statuses::Column::ProjectId,
-                        worktree_repository_statuses::Column::WorktreeId,
-                        worktree_repository_statuses::Column::WorkDirectoryId,
-                        worktree_repository_statuses::Column::RepoPath,
-                    ])
-                    .update_columns([
-                        worktree_repository_statuses::Column::ScanId,
-                        worktree_repository_statuses::Column::Status,
-                        worktree_repository_statuses::Column::IsDeleted,
-                    ])
-                    .to_owned(),
-                )
-                .exec(&*tx)
-                .await?;
-            }
-
-            if !repository.removed_repo_paths.is_empty() {
-                worktree_repository_statuses::Entity::update_many()
-                    .filter(
-                        worktree_repository_statuses::Column::ProjectId
-                            .eq(project_id)
-                            .and(
-                                worktree_repository_statuses::Column::WorktreeId
-                                    .eq(worktree_id),
-                            )
-                            .and(
-                                worktree_repository_statuses::Column::WorkDirectoryId
-                                    .eq(repository.work_directory_id as i64),
-                            )
-                            .and(worktree_repository_statuses::Column::RepoPath.is_in(
-                                repository.removed_repo_paths.iter().map(String::as_str),
-                            )),
-                    )
-                    .set(worktree_repository_statuses::ActiveModel {
-                        is_deleted: ActiveValue::Set(true),
-                        scan_id: ActiveValue::Set(update.scan_id as i64),
-                        ..Default::default()
-                    })
-                    .exec(&*tx)
-                    .await?;
-            }
-        }
     }
 
     if !update.removed_repositories.is_empty() {
@@ -2812,6 +2705,7 @@ impl Database {
             }),
             is_symlink: db_entry.is_symlink,
             is_ignored: db_entry.is_ignored,
+            git_status: db_entry.git_status.map(|status| status as i32),
         });
     }
 }
@@ -2837,41 +2731,12 @@ impl Database {
                 proto::RepositoryEntry {
                     work_directory_id: db_repository_entry.work_directory_id as u64,
                     branch: db_repository_entry.branch,
-                    removed_repo_paths: Default::default(),
-                    updated_statuses: Default::default(),
                 },
             );
         }
     }
 }
 
-{
-    let mut db_status_entries = worktree_repository_statuses::Entity::find()
-        .filter(
-            Condition::all()
-                .add(worktree_repository_statuses::Column::ProjectId.eq(project_id))
-                .add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
-        )
-        .stream(&*tx)
-        .await?;
-
-    while let Some(db_status_entry) = db_status_entries.next().await {
-        let db_status_entry = db_status_entry?;
-        if let Some(worktree) = worktrees.get_mut(&(db_status_entry.worktree_id as u64))
-        {
-            if let Some(repository_entry) = worktree
-                .repository_entries
-                .get_mut(&(db_status_entry.work_directory_id as u64))
-            {
-                repository_entry.updated_statuses.push(proto::StatusEntry {
-                    repo_path: db_status_entry.repo_path,
-                    status: db_status_entry.status as i32,
-                });
-            }
-        }
-    }
-}
-
 // Populate worktree diagnostic summaries.
 {
     let mut db_summaries = worktree_diagnostic_summary::Entity::find()
@@ -15,6 +15,7 @@ pub struct Model {
     pub inode: i64,
     pub mtime_seconds: i64,
     pub mtime_nanos: i32,
+    pub git_status: Option<i64>,
     pub is_symlink: bool,
     pub is_ignored: bool,
     pub is_deleted: bool,
@@ -2415,14 +2415,10 @@ async fn test_git_diff_base_change(
     "
     .unindent();
 
-    client_a
-        .fs
-        .as_fake()
-        .set_index_for_repo(
-            Path::new("/dir/.git"),
-            &[(Path::new("a.txt"), diff_base.clone())],
-        )
-        .await;
+    client_a.fs.as_fake().set_index_for_repo(
+        Path::new("/dir/.git"),
+        &[(Path::new("a.txt"), diff_base.clone())],
+    );
 
     // Create the buffer
     let buffer_local_a = project_local
@@ -2464,14 +2460,10 @@ async fn test_git_diff_base_change(
         );
     });
 
-    client_a
-        .fs
-        .as_fake()
-        .set_index_for_repo(
-            Path::new("/dir/.git"),
-            &[(Path::new("a.txt"), new_diff_base.clone())],
-        )
-        .await;
+    client_a.fs.as_fake().set_index_for_repo(
+        Path::new("/dir/.git"),
+        &[(Path::new("a.txt"), new_diff_base.clone())],
+    );
 
     // Wait for buffer_local_a to receive it
     deterministic.run_until_parked();
@@ -2513,14 +2505,10 @@ async fn test_git_diff_base_change(
     "
     .unindent();
 
-    client_a
-        .fs
-        .as_fake()
-        .set_index_for_repo(
-            Path::new("/dir/sub/.git"),
-            &[(Path::new("b.txt"), diff_base.clone())],
-        )
-        .await;
+    client_a.fs.as_fake().set_index_for_repo(
+        Path::new("/dir/sub/.git"),
+        &[(Path::new("b.txt"), diff_base.clone())],
+    );
 
     // Create the buffer
     let buffer_local_b = project_local
@@ -2562,14 +2550,10 @@ async fn test_git_diff_base_change(
         );
     });
 
-    client_a
-        .fs
-        .as_fake()
-        .set_index_for_repo(
-            Path::new("/dir/sub/.git"),
-            &[(Path::new("b.txt"), new_diff_base.clone())],
-        )
-        .await;
+    client_a.fs.as_fake().set_index_for_repo(
+        Path::new("/dir/sub/.git"),
+        &[(Path::new("b.txt"), new_diff_base.clone())],
+    );
 
     // Wait for buffer_local_b to receive it
     deterministic.run_until_parked();
@@ -2646,8 +2630,7 @@ async fn test_git_branch_name(
     client_a
         .fs
         .as_fake()
-        .set_branch_name(Path::new("/dir/.git"), Some("branch-1"))
-        .await;
+        .set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
 
     // Wait for it to catch up to the new branch
     deterministic.run_until_parked();
@@ -2673,8 +2656,7 @@ async fn test_git_branch_name(
     client_a
         .fs
         .as_fake()
-        .set_branch_name(Path::new("/dir/.git"), Some("branch-2"))
-        .await;
+        .set_branch_name(Path::new("/dir/.git"), Some("branch-2"));
 
     // Wait for buffer_local_a to receive it
     deterministic.run_until_parked();
@@ -2726,17 +2708,13 @@ async fn test_git_status_sync(
     const A_TXT: &'static str = "a.txt";
     const B_TXT: &'static str = "b.txt";
 
-    client_a
-        .fs
-        .as_fake()
-        .set_status_for_repo(
-            Path::new("/dir/.git"),
-            &[
-                (&Path::new(A_TXT), GitFileStatus::Added),
-                (&Path::new(B_TXT), GitFileStatus::Added),
-            ],
-        )
-        .await;
+    client_a.fs.as_fake().set_status_for_repo_via_git_operation(
+        Path::new("/dir/.git"),
+        &[
+            (&Path::new(A_TXT), GitFileStatus::Added),
+            (&Path::new(B_TXT), GitFileStatus::Added),
+        ],
+    );
 
     let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
     let project_id = active_call_a
@@ -2763,8 +2741,7 @@ async fn test_git_status_sync(
     assert_eq!(worktrees.len(), 1);
     let worktree = worktrees[0].clone();
     let snapshot = worktree.read(cx).snapshot();
-    let root_entry = snapshot.root_git_entry().unwrap();
-    assert_eq!(root_entry.status_for_file(&snapshot, file), status);
+    assert_eq!(snapshot.status_for_file(file), status);
 }
 
 // Smoke test status reading
@@ -2780,14 +2757,13 @@ async fn test_git_status_sync(
     client_a
         .fs
         .as_fake()
-        .set_status_for_repo(
+        .set_status_for_repo_via_working_copy_change(
             Path::new("/dir/.git"),
             &[
                 (&Path::new(A_TXT), GitFileStatus::Modified),
                 (&Path::new(B_TXT), GitFileStatus::Modified),
             ],
-        )
-        .await;
+        );
 
     // Wait for buffer_local_a to receive it
     deterministic.run_until_parked();
@@ -422,7 +422,7 @@ async fn apply_client_operation(
     );
 
     ensure_project_shared(&project, client, cx).await;
-    if !client.fs.paths().contains(&new_root_path) {
+    if !client.fs.paths(false).contains(&new_root_path) {
         client.fs.create_dir(&new_root_path).await.unwrap();
     }
     project
@@ -628,12 +628,13 @@ async fn apply_client_operation(
 
     ensure_project_shared(&project, client, cx).await;
     let requested_version = buffer.read_with(cx, |buffer, _| buffer.version());
-    let save = project.update(cx, |project, cx| project.save_buffer(buffer, cx));
-    let save = cx.background().spawn(async move {
-        let (saved_version, _, _) = save
-            .await
+    let save = project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
+    let save = cx.spawn(|cx| async move {
+        save.await
             .map_err(|err| anyhow!("save request failed: {:?}", err))?;
-        assert!(saved_version.observed_all(&requested_version));
+        assert!(buffer
+            .read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
+            .observed_all(&requested_version));
         anyhow::Ok(())
     });
     if detach {
@@ -743,7 +744,7 @@ async fn apply_client_operation(
 } => {
     if !client
         .fs
-        .directories()
+        .directories(false)
         .contains(&path.parent().unwrap().to_owned())
     {
         return Err(TestError::Inapplicable);
@@ -770,10 +771,16 @@ async fn apply_client_operation(
     repo_path,
     contents,
 } => {
-    if !client.fs.directories().contains(&repo_path) {
+    if !client.fs.directories(false).contains(&repo_path) {
         return Err(TestError::Inapplicable);
     }
 
+    for (path, _) in contents.iter() {
+        if !client.fs.files().contains(&repo_path.join(path)) {
+            return Err(TestError::Inapplicable);
+        }
+    }
+
     log::info!(
         "{}: writing git index for repo {:?}: {:?}",
         client.username,
@@ -789,13 +796,13 @@ async fn apply_client_operation(
     if client.fs.metadata(&dot_git_dir).await?.is_none() {
         client.fs.create_dir(&dot_git_dir).await?;
     }
-    client.fs.set_index_for_repo(&dot_git_dir, &contents).await;
+    client.fs.set_index_for_repo(&dot_git_dir, &contents);
 }
 GitOperation::WriteGitBranch {
     repo_path,
     new_branch,
 } => {
-    if !client.fs.directories().contains(&repo_path) {
+    if !client.fs.directories(false).contains(&repo_path) {
         return Err(TestError::Inapplicable);
     }
 
@@ -810,15 +817,21 @@ async fn apply_client_operation(
     if client.fs.metadata(&dot_git_dir).await?.is_none() {
         client.fs.create_dir(&dot_git_dir).await?;
     }
-    client.fs.set_branch_name(&dot_git_dir, new_branch).await;
+    client.fs.set_branch_name(&dot_git_dir, new_branch);
 }
 GitOperation::WriteGitStatuses {
     repo_path,
     statuses,
+    git_operation,
 } => {
-    if !client.fs.directories().contains(&repo_path) {
+    if !client.fs.directories(false).contains(&repo_path) {
         return Err(TestError::Inapplicable);
     }
+    for (path, _) in statuses.iter() {
+        if !client.fs.files().contains(&repo_path.join(path)) {
+            return Err(TestError::Inapplicable);
+        }
+    }
 
     log::info!(
         "{}: writing git statuses for repo {:?}: {:?}",
@@ -838,10 +851,16 @@ async fn apply_client_operation(
         client.fs.create_dir(&dot_git_dir).await?;
     }
 
-    client
-        .fs
-        .set_status_for_repo(&dot_git_dir, statuses.as_slice())
-        .await;
+    if git_operation {
+        client
+            .fs
+            .set_status_for_repo_via_git_operation(&dot_git_dir, statuses.as_slice());
+    } else {
+        client.fs.set_status_for_repo_via_working_copy_change(
+            &dot_git_dir,
+            statuses.as_slice(),
+        );
+    }
 }
 },
 }
@@ -913,9 +932,10 @@ fn check_consistency_between_clients(clients: &[(Rc<TestClient>, TestAppContext)
     assert_eq!(
         guest_snapshot.entries(false).collect::<Vec<_>>(),
         host_snapshot.entries(false).collect::<Vec<_>>(),
-        "{} has different snapshot than the host for worktree {:?} and project {:?}",
+        "{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}",
         client.username,
         host_snapshot.abs_path(),
+        id,
         guest_project.remote_id(),
     );
     assert_eq!(guest_snapshot.repositories().collect::<Vec<_>>(), host_snapshot.repositories().collect::<Vec<_>>(),
@@ -1230,6 +1250,7 @@ enum GitOperation {
     WriteGitStatuses {
         repo_path: PathBuf,
         statuses: Vec<(PathBuf, GitFileStatus)>,
+        git_operation: bool,
     },
 }
 
@@ -1575,7 +1596,7 @@ impl TestPlan {
         .choose(&mut self.rng)
         .cloned() else { continue };
     let project_root_name = root_name_for_project(&project, cx);
-    let mut paths = client.fs.paths();
+    let mut paths = client.fs.paths(false);
     paths.remove(0);
     let new_root_path = if paths.is_empty() || self.rng.gen() {
         Path::new("/").join(&self.next_root_dir_name(user_id))
@@ -1755,7 +1776,7 @@ impl TestPlan {
     let is_dir = self.rng.gen::<bool>();
     let content;
     let mut path;
-    let dir_paths = client.fs.directories();
+    let dir_paths = client.fs.directories(false);
 
     if is_dir {
         content = String::new();
@@ -1809,7 +1830,7 @@ impl TestPlan {
 
     let repo_path = client
         .fs
-        .directories()
+        .directories(false)
         .choose(&mut self.rng)
         .unwrap()
         .clone();
@@ -1855,9 +1876,12 @@ impl TestPlan {
     })
     .collect::<Vec<_>>();
 
+    let git_operation = self.rng.gen::<bool>();
+
     GitOperation::WriteGitStatuses {
         repo_path,
         statuses,
+        git_operation,
     }
 }
 _ => unreachable!(),
@@ -14,6 +14,8 @@ lsp = { path = "../lsp" }
 rope = { path = "../rope" }
 util = { path = "../util" }
 sum_tree = { path = "../sum_tree" }
+rpc = { path = "../rpc" }
+
 anyhow.workspace = true
 async-trait.workspace = true
 futures.workspace = true
@@ -29,6 +29,8 @@ use collections::{btree_map, BTreeMap};
 #[cfg(any(test, feature = "test-support"))]
 use repository::{FakeGitRepositoryState, GitFileStatus};
 #[cfg(any(test, feature = "test-support"))]
+use std::ffi::OsStr;
+#[cfg(any(test, feature = "test-support"))]
 use std::sync::Weak;
 
 lazy_static! {
@@ -501,6 +503,11 @@ impl FakeFsState {
     }
 }
 
+#[cfg(any(test, feature = "test-support"))]
+lazy_static! {
+    pub static ref FS_DOT_GIT: &'static OsStr = OsStr::new(".git");
+}
+
 #[cfg(any(test, feature = "test-support"))]
 impl FakeFs {
     pub fn new(executor: Arc<gpui::executor::Background>) -> Arc<Self> {
@@ -619,7 +626,7 @@ impl FakeFs {
         .boxed()
     }
 
-    pub fn with_git_state<F>(&self, dot_git: &Path, f: F)
+    pub fn with_git_state<F>(&self, dot_git: &Path, emit_git_event: bool, f: F)
     where
         F: FnOnce(&mut FakeGitRepositoryState),
     {
@@ -633,18 +640,22 @@ impl FakeFs {
 
             f(&mut repo_state);
 
-            state.emit_event([dot_git]);
+            if emit_git_event {
+                state.emit_event([dot_git]);
+            }
         } else {
             panic!("not a directory");
         }
     }
 
-    pub async fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
-        self.with_git_state(dot_git, |state| state.branch_name = branch.map(Into::into))
+    pub fn set_branch_name(&self, dot_git: &Path, branch: Option<impl Into<String>>) {
+        self.with_git_state(dot_git, true, |state| {
+            state.branch_name = branch.map(Into::into)
+        })
     }
 
-    pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
-        self.with_git_state(dot_git, |state| {
+    pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+        self.with_git_state(dot_git, true, |state| {
             state.index_contents.clear();
             state.index_contents.extend(
                 head_state
@@ -654,8 +665,32 @@ impl FakeFs {
         });
     }
 
-    pub async fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, GitFileStatus)]) {
-        self.with_git_state(dot_git, |state| {
+    pub fn set_status_for_repo_via_working_copy_change(
+        &self,
+        dot_git: &Path,
+        statuses: &[(&Path, GitFileStatus)],
+    ) {
+        self.with_git_state(dot_git, false, |state| {
+            state.worktree_statuses.clear();
+            state.worktree_statuses.extend(
+                statuses
+                    .iter()
+                    .map(|(path, content)| ((**path).into(), content.clone())),
+            );
+        });
+        self.state.lock().emit_event(
+            statuses
+                .iter()
+                .map(|(path, _)| dot_git.parent().unwrap().join(path)),
+        );
+    }
+
+    pub fn set_status_for_repo_via_git_operation(
+        &self,
+        dot_git: &Path,
+        statuses: &[(&Path, GitFileStatus)],
+    ) {
+        self.with_git_state(dot_git, true, |state| {
             state.worktree_statuses.clear();
             state.worktree_statuses.extend(
                 statuses
@@ -665,7 +700,7 @@ impl FakeFs {
         });
     }
 
-    pub fn paths(&self) -> Vec<PathBuf> {
+    pub fn paths(&self, include_dot_git: bool) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();
         queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
@@ -675,12 +710,18 @@ impl FakeFs {
                     queue.push_back((path.join(name), entry.clone()));
                 }
             }
-            result.push(path);
+            if include_dot_git
+                || !path
+                    .components()
+                    .any(|component| component.as_os_str() == *FS_DOT_GIT)
+            {
+                result.push(path);
+            }
         }
         result
     }
 
-    pub fn directories(&self) -> Vec<PathBuf> {
+    pub fn directories(&self, include_dot_git: bool) -> Vec<PathBuf> {
         let mut result = Vec::new();
         let mut queue = collections::VecDeque::new();
         queue.push_back((PathBuf::from("/"), self.state.lock().root.clone()));
@@ -689,7 +730,13 @@ impl FakeFs {
                 for (name, entry) in entries {
                     queue.push_back((path.join(name), entry.clone()));
                 }
-                result.push(path);
+                if include_dot_git
+                    || !path
+                        .components()
+                        .any(|component| component.as_os_str() == *FS_DOT_GIT)
+                {
+                    result.push(path);
+                }
             }
         }
         result
@@ -1,6 +1,8 @@
 use anyhow::Result;
 use collections::HashMap;
+use git2::ErrorCode;
 use parking_lot::Mutex;
+use rpc::proto;
 use serde_derive::{Deserialize, Serialize};
 use std::{
     cmp::Ordering,
@@ -24,7 +26,7 @@ pub trait GitRepository: Send {
 
     fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;
 
-    fn status(&self, path: &RepoPath) -> Option<GitFileStatus>;
+    fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>;
 }
 
 impl std::fmt::Debug for dyn GitRepository {
@@ -91,9 +93,18 @@ impl GitRepository for LibGitRepository {
         Some(map)
     }
 
-    fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
-        let status = self.status_file(path).log_err()?;
-        read_status(status)
+    fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
+        let status = self.status_file(path);
+        match status {
+            Ok(status) => Ok(read_status(status)),
+            Err(e) => {
+                if e.code() == ErrorCode::NotFound {
+                    Ok(None)
+                } else {
+                    Err(e.into())
+                }
+            }
+        }
     }
 }
 
@@ -155,9 +166,9 @@ impl GitRepository for FakeGitRepository {
         Some(map)
     }
 
-    fn status(&self, path: &RepoPath) -> Option<GitFileStatus> {
+    fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>> {
         let state = self.state.lock();
-        state.worktree_statuses.get(path).cloned()
+        Ok(state.worktree_statuses.get(path).cloned())
     }
 }
 
@@ -197,8 +208,51 @@ pub enum GitFileStatus {
     Conflict,
 }
 
+impl GitFileStatus {
+    pub fn merge(
+        this: Option<GitFileStatus>,
+        other: Option<GitFileStatus>,
+        prefer_other: bool,
+    ) -> Option<GitFileStatus> {
+        if prefer_other {
+            return other;
+        } else {
+            match (this, other) {
+                (Some(GitFileStatus::Conflict), _) | (_, Some(GitFileStatus::Conflict)) => {
+                    Some(GitFileStatus::Conflict)
+                }
+                (Some(GitFileStatus::Modified), _) | (_, Some(GitFileStatus::Modified)) => {
+                    Some(GitFileStatus::Modified)
+                }
+                (Some(GitFileStatus::Added), _) | (_, Some(GitFileStatus::Added)) => {
+                    Some(GitFileStatus::Added)
+                }
+                _ => None,
+            }
+        }
+    }
+
+    pub fn from_proto(git_status: Option<i32>) -> Option<GitFileStatus> {
+        git_status.and_then(|status| {
+            proto::GitStatus::from_i32(status).map(|status| match status {
+                proto::GitStatus::Added => GitFileStatus::Added,
+                proto::GitStatus::Modified => GitFileStatus::Modified,
+                proto::GitStatus::Conflict => GitFileStatus::Conflict,
+            })
+        })
+    }
+
+    pub fn to_proto(self) -> i32 {
+        match self {
+            GitFileStatus::Added => proto::GitStatus::Added as i32,
+            GitFileStatus::Modified => proto::GitStatus::Modified as i32,
+            GitFileStatus::Conflict => proto::GitStatus::Conflict as i32,
+        }
+    }
+}
+
 #[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(PathBuf);
+pub struct RepoPath(pub PathBuf);
 
 impl RepoPath {
     pub fn new(path: PathBuf) -> Self {
@@ -37,8 +37,8 @@ use language::{
     range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
     Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
     Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
-    PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
-    Transaction, Unclipped,
+    PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
+    Unclipped,
 };
 use log::error;
 use lsp::{
@@ -69,7 +69,7 @@ use std::{
         atomic::{AtomicUsize, Ordering::SeqCst},
         Arc,
     },
-    time::{Duration, Instant, SystemTime},
+    time::{Duration, Instant},
 };
 use terminals::Terminals;
 use util::{
@@ -1617,7 +1617,7 @@ impl Project {
         &self,
         buffer: ModelHandle<Buffer>,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
+    ) -> Task<Result<()>> {
         let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
             return Task::ready(Err(anyhow!("buffer doesn't have a file")));
         };
@@ -5157,9 +5157,9 @@ impl Project {
         return None;
     }
     let path = &project_path.path;
-    changed_repos.iter().find(|(work_dir, change)| {
-        path.starts_with(work_dir) && change.git_dir_changed
-    })?;
+    changed_repos
+        .iter()
+        .find(|(work_dir, _)| path.starts_with(work_dir))?;
     let receiver = receiver.clone();
     let path = path.clone();
     Some(async move {
@@ -5182,9 +5182,9 @@ impl Project {
         return None;
     }
     let path = file.path();
-    changed_repos.iter().find(|(work_dir, change)| {
-        path.starts_with(work_dir) && change.git_dir_changed
-    })?;
+    changed_repos
+        .iter()
+        .find(|(work_dir, _)| path.starts_with(work_dir))?;
     Some((buffer, path.clone()))
 })
 .collect::<Vec<_>>();
@@ -5985,16 +5985,15 @@ impl Project {
         .await?;
     let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
 
-    let (saved_version, fingerprint, mtime) = this
-        .update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
+    this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
         .await?;
-    Ok(proto::BufferSaved {
+    Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
         project_id,
         buffer_id,
-        version: serialize_version(&saved_version),
-        mtime: Some(mtime.into()),
-        fingerprint: language::proto::serialize_fingerprint(fingerprint),
-    })
+        version: serialize_version(buffer.saved_version()),
+        mtime: Some(buffer.saved_mtime().into()),
+        fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
+    }))
 }
 
 async fn handle_reload_buffers(
File diff suppressed because it is too large
@@ -1002,6 +1002,7 @@ impl ProjectPanel {
             mtime: entry.mtime,
             is_symlink: false,
             is_ignored: false,
+            git_status: entry.git_status,
         });
     }
     if expanded_dir_ids.binary_search(&entry.id).is_err()
@@ -1011,6 +1012,9 @@ impl ProjectPanel {
         }
         entry_iter.advance();
     }
+
+    snapshot.propagate_git_statuses(&mut visible_worktree_entries);
+
     visible_worktree_entries.sort_by(|entry_a, entry_b| {
         let mut components_a = entry_a.path.components().peekable();
         let mut components_b = entry_b.path.components().peekable();
@@ -1108,14 +1112,8 @@ impl ProjectPanel {
         .unwrap_or(&[]);
 
         let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
-        for (entry, repo) in
-            snapshot.entries_with_repositories(visible_worktree_entries[entry_range].iter())
-        {
-            let status = (git_status_setting
-                && entry.path.parent().is_some()
-                && !entry.is_ignored)
-                .then(|| repo.and_then(|repo| repo.status_for_path(&snapshot, &entry.path)))
-                .flatten();
+        for entry in visible_worktree_entries[entry_range].iter() {
+            let status = git_status_setting.then(|| entry.git_status).flatten();
 
             let mut details = EntryDetails {
                 filename: entry
@@ -1005,13 +1005,12 @@ message Entry {
     Timestamp mtime = 5;
     bool is_symlink = 6;
     bool is_ignored = 7;
+    optional GitStatus git_status = 8;
 }
 
 message RepositoryEntry {
     uint64 work_directory_id = 1;
     optional string branch = 2;
-    repeated string removed_repo_paths = 3;
-    repeated StatusEntry updated_statuses = 4;
 }
 
 message StatusEntry {
@@ -480,6 +480,11 @@ impl<T: Item> SumTree<T> {
             } => child_trees.last().unwrap().rightmost_leaf(),
         }
     }
+
+    #[cfg(debug_assertions)]
+    pub fn _debug_entries(&self) -> Vec<&T> {
+        self.iter().collect::<Vec<_>>()
+    }
 }
 
 impl<T: Item + PartialEq> PartialEq for SumTree<T> {
@@ -57,7 +57,7 @@ pub fn post_inc<T: From<u8> + AddAssign<T> + Copy>(value: &mut T) -> T {
 }
 
 /// Extend a sorted vector with a sorted sequence of items, maintaining the vector's sort order and
-/// enforcing a maximum length. Sort the items according to the given callback. Before calling this,
+/// enforcing a maximum length. This also de-duplicates items. Sort the items according to the given callback. Before calling this,
 /// both `vec` and `new_items` should already be sorted according to the `cmp` comparator.
 pub fn extend_sorted<T, I, F>(vec: &mut Vec<T>, new_items: I, limit: usize, mut cmp: F)
 where