git: Compute and synchronize diffs from HEAD (#23626)
This PR builds on #21258 to make it possible to use HEAD as a diff base. The buffer store is extended to support holding multiple change sets, and collab gains support for synchronizing the committed text of files when any collaborator requires it.

Not implemented in this PR:
- Exposing the diff from HEAD to the user
- Decorating the diff from HEAD with information about which hunks are staged

`test_random_multibuffer` now fails first at `SEED=13277`, similar to the previous high-water mark, but with various bugs in the multibuffer logic now shaken out.

Release Notes:

- N/A

---------

Co-authored-by: Max <max@zed.dev>
Co-authored-by: Ben <ben@zed.dev>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Conrad <conrad@zed.dev>
This commit is contained in: parent 871f98bc4d, commit 5704b50fb1
29 changed files with 2799 additions and 603 deletions
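For orientation before the diff: a minimal sketch of how the two kinds of change sets introduced here might be opened from a project, based on the `open_unstaged_changes` and `open_uncommitted_changes` calls exercised in the tests below. The surrounding setup (project, worktree, executor) and the exact signatures are assumptions inferred from the test code in this diff, not a definitive API reference.

```rust
// Hypothetical usage sketch, assuming a test-style `project`, `worktree_id`,
// and GPUI `cx` are already in scope (as in the collab tests in this diff).
let buffer = project
    .update(cx, |project, cx| {
        project.open_buffer((worktree_id, "a.txt"), cx)
    })
    .await
    .unwrap();

// Diff of the working copy against the index (staged text).
let unstaged_changes = project
    .update(cx, |project, cx| {
        project.open_unstaged_changes(buffer.clone(), cx)
    })
    .await
    .unwrap();

// Diff of the working copy against HEAD (committed text), the case this PR adds.
let uncommitted_changes = project
    .update(cx, |project, cx| {
        project.open_uncommitted_changes(buffer.clone(), cx)
    })
    .await
    .unwrap();

unstaged_changes.read_with(cx, |change_set, _| {
    // Base text here is the file's contents in the git index.
    assert!(change_set.base_text_string().is_some());
});
uncommitted_changes.read_with(cx, |change_set, _| {
    // Base text here is the file's contents at HEAD.
    assert!(change_set.base_text_string().is_some());
});
```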
@@ -309,7 +309,8 @@ impl Server {
 .add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
 .add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
 .add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
-.add_request_handler(forward_read_only_project_request::<proto::GetStagedText>)
+.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedChanges>)
+.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedChanges>)
 .add_request_handler(
 forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
 )
@@ -348,7 +349,7 @@ impl Server {
 .add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
 .add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
 .add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
-.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBase>)
+.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
 .add_request_handler(get_users)
 .add_request_handler(fuzzy_search_users)
 .add_request_handler(request_contact)
@@ -1991,10 +1991,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
 .collect(),
 remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
 };
-client_a.fs().set_blame_for_repo(
-Path::new("/my-repo/.git"),
-vec![(Path::new("file.txt"), blame)],
-);
+client_a
+.fs()
+.set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);

 let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
 let project_id = active_call_a
@@ -2558,13 +2558,27 @@ async fn test_git_diff_base_change(

 let project_remote = client_b.join_remote_project(project_id, cx_b).await;

-let diff_base = "
+let staged_text = "
 one
 three
 "
 .unindent();

-let new_diff_base = "
+let committed_text = "
+one
+TWO
+three
+"
+.unindent();
+
+let new_committed_text = "
+one
+TWO_HUNDRED
+three
+"
+.unindent();
+
+let new_staged_text = "
 one
 two
 "
@@ -2572,7 +2586,11 @@ async fn test_git_diff_base_change(

 client_a.fs().set_index_for_repo(
 Path::new("/dir/.git"),
-&[(Path::new("a.txt"), diff_base.clone())],
+&[("a.txt".into(), staged_text.clone())],
+);
+client_a.fs().set_head_for_repo(
+Path::new("/dir/.git"),
+&[("a.txt".into(), committed_text.clone())],
 );

 // Create the buffer
@@ -2580,7 +2598,7 @@ async fn test_git_diff_base_change(
 .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
 .await
 .unwrap();
-let change_set_local_a = project_local
+let local_unstaged_changes_a = project_local
 .update(cx_a, |p, cx| {
 p.open_unstaged_changes(buffer_local_a.clone(), cx)
 })
@@ -2589,16 +2607,16 @@ async fn test_git_diff_base_change(

 // Wait for it to catch up to the new diff
 executor.run_until_parked();
-change_set_local_a.read_with(cx_a, |change_set, cx| {
+local_unstaged_changes_a.read_with(cx_a, |change_set, cx| {
 let buffer = buffer_local_a.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(diff_base.as_str())
+Some(staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&diff_base,
+&change_set.base_text_string().unwrap(),
 &[(1..2, "", "two\n")],
 );
 });
@@ -2608,7 +2626,7 @@ async fn test_git_diff_base_change(
 .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
 .await
 .unwrap();
-let change_set_remote_a = project_remote
+let remote_unstaged_changes_a = project_remote
 .update(cx_b, |p, cx| {
 p.open_unstaged_changes(buffer_remote_a.clone(), cx)
 })
@@ -2617,64 +2635,104 @@ async fn test_git_diff_base_change(

 // Wait remote buffer to catch up to the new diff
 executor.run_until_parked();
-change_set_remote_a.read_with(cx_b, |change_set, cx| {
+remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| {
 let buffer = buffer_remote_a.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(diff_base.as_str())
+Some(staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&diff_base,
+&change_set.base_text_string().unwrap(),
 &[(1..2, "", "two\n")],
 );
 });

-// Update the staged text of the open buffer
+// Open uncommitted changes on the guest, without opening them on the host first
+let remote_uncommitted_changes_a = project_remote
+.update(cx_b, |p, cx| {
+p.open_uncommitted_changes(buffer_remote_a.clone(), cx)
+})
+.await
+.unwrap();
+executor.run_until_parked();
+remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| {
+let buffer = buffer_remote_a.read(cx);
+assert_eq!(
+change_set.base_text_string().as_deref(),
+Some(committed_text.as_str())
+);
+git::diff::assert_hunks(
+change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+buffer,
+&change_set.base_text_string().unwrap(),
+&[(1..2, "TWO\n", "two\n")],
+);
+});
+
+// Update the index text of the open buffer
 client_a.fs().set_index_for_repo(
 Path::new("/dir/.git"),
-&[(Path::new("a.txt"), new_diff_base.clone())],
+&[("a.txt".into(), new_staged_text.clone())],
+);
+client_a.fs().set_head_for_repo(
+Path::new("/dir/.git"),
+&[("a.txt".into(), new_committed_text.clone())],
 );

 // Wait for buffer_local_a to receive it
 executor.run_until_parked();
-change_set_local_a.read_with(cx_a, |change_set, cx| {
+local_unstaged_changes_a.read_with(cx_a, |change_set, cx| {
 let buffer = buffer_local_a.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(new_diff_base.as_str())
+Some(new_staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&new_diff_base,
+&change_set.base_text_string().unwrap(),
 &[(2..3, "", "three\n")],
 );
 });

-change_set_remote_a.read_with(cx_b, |change_set, cx| {
+remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| {
 let buffer = buffer_remote_a.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(new_diff_base.as_str())
+Some(new_staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&new_diff_base,
+&change_set.base_text_string().unwrap(),
 &[(2..3, "", "three\n")],
 );
 });

+remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| {
+let buffer = buffer_remote_a.read(cx);
+assert_eq!(
+change_set.base_text_string().as_deref(),
+Some(new_committed_text.as_str())
+);
+git::diff::assert_hunks(
+change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
+buffer,
+&change_set.base_text_string().unwrap(),
+&[(1..2, "TWO_HUNDRED\n", "two\n")],
+);
+});
+
 // Nested git dir
-let diff_base = "
+let staged_text = "
 one
 three
 "
 .unindent();

-let new_diff_base = "
+let new_staged_text = "
 one
 two
 "
@@ -2682,7 +2740,7 @@ async fn test_git_diff_base_change(

 client_a.fs().set_index_for_repo(
 Path::new("/dir/sub/.git"),
-&[(Path::new("b.txt"), diff_base.clone())],
+&[("b.txt".into(), staged_text.clone())],
 );

 // Create the buffer
@@ -2690,7 +2748,7 @@ async fn test_git_diff_base_change(
 .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
 .await
 .unwrap();
-let change_set_local_b = project_local
+let local_unstaged_changes_b = project_local
 .update(cx_a, |p, cx| {
 p.open_unstaged_changes(buffer_local_b.clone(), cx)
 })
@@ -2699,16 +2757,16 @@ async fn test_git_diff_base_change(

 // Wait for it to catch up to the new diff
 executor.run_until_parked();
-change_set_local_b.read_with(cx_a, |change_set, cx| {
+local_unstaged_changes_b.read_with(cx_a, |change_set, cx| {
 let buffer = buffer_local_b.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(diff_base.as_str())
+Some(staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&diff_base,
+&change_set.base_text_string().unwrap(),
 &[(1..2, "", "two\n")],
 );
 });
@@ -2718,7 +2776,7 @@ async fn test_git_diff_base_change(
 .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
 .await
 .unwrap();
-let change_set_remote_b = project_remote
+let remote_unstaged_changes_b = project_remote
 .update(cx_b, |p, cx| {
 p.open_unstaged_changes(buffer_remote_b.clone(), cx)
 })
@@ -2726,52 +2784,52 @@ async fn test_git_diff_base_change(
 .unwrap();

 executor.run_until_parked();
-change_set_remote_b.read_with(cx_b, |change_set, cx| {
+remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| {
 let buffer = buffer_remote_b.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(diff_base.as_str())
+Some(staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&diff_base,
+&staged_text,
 &[(1..2, "", "two\n")],
 );
 });

-// Update the staged text
+// Updatet the staged text
 client_a.fs().set_index_for_repo(
 Path::new("/dir/sub/.git"),
-&[(Path::new("b.txt"), new_diff_base.clone())],
+&[("b.txt".into(), new_staged_text.clone())],
 );

 // Wait for buffer_local_b to receive it
 executor.run_until_parked();
-change_set_local_b.read_with(cx_a, |change_set, cx| {
+local_unstaged_changes_b.read_with(cx_a, |change_set, cx| {
 let buffer = buffer_local_b.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(new_diff_base.as_str())
+Some(new_staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&new_diff_base,
+&new_staged_text,
 &[(2..3, "", "three\n")],
 );
 });

-change_set_remote_b.read_with(cx_b, |change_set, cx| {
+remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| {
 let buffer = buffer_remote_b.read(cx);
 assert_eq!(
 change_set.base_text_string().as_deref(),
-Some(new_diff_base.as_str())
+Some(new_staged_text.as_str())
 );
 git::diff::assert_hunks(
 change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
 buffer,
-&new_diff_base,
+&new_staged_text,
 &[(2..3, "", "three\n")],
 );
 });
@@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest {

 let dot_git_dir = repo_path.join(".git");
 let contents = contents
-.iter()
-.map(|(path, contents)| (path.as_path(), contents.clone()))
+.into_iter()
+.map(|(path, contents)| (path.into(), contents))
 .collect::<Vec<_>>();
 if client.fs().metadata(&dot_git_dir).await?.is_none() {
 client.fs().create_dir(&dot_git_dir).await?;
@@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest {
 project
 .buffer_store()
 .read(cx)
-.get_unstaged_changes(host_buffer.read(cx).remote_id())
+.get_unstaged_changes(host_buffer.read(cx).remote_id(), cx)
 .unwrap()
 .read(cx)
 .base_text_string()
@@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest {
 project
 .buffer_store()
 .read(cx)
-.get_unstaged_changes(guest_buffer.read(cx).remote_id())
+.get_unstaged_changes(guest_buffer.read(cx).remote_id(), cx)
 .unwrap()
 .read(cx)
 .base_text_string()
@@ -157,7 +157,7 @@ impl DiagnosticIndicator {
 (buffer, cursor_position)
 });
 let new_diagnostic = buffer
-.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
+.diagnostics_in_range::<usize>(cursor_position..cursor_position)
 .filter(|entry| !entry.range.is_empty())
 .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
 .map(|entry| entry.diagnostic);
@@ -979,6 +979,7 @@ impl<'a> Iterator for WrapRows<'a> {

 Some(if soft_wrapped {
 RowInfo {
+buffer_id: None,
 buffer_row: None,
 multibuffer_row: None,
 diff_status,
@@ -10137,12 +10137,12 @@ impl Editor {
 let mut diagnostics;
 if direction == Direction::Prev {
 diagnostics = buffer
-.diagnostics_in_range::<_, usize>(0..search_start)
+.diagnostics_in_range::<usize>(0..search_start)
 .collect::<Vec<_>>();
 diagnostics.reverse();
 } else {
 diagnostics = buffer
-.diagnostics_in_range::<_, usize>(search_start..buffer.len())
+.diagnostics_in_range::<usize>(search_start..buffer.len())
 .collect::<Vec<_>>();
 };
 let group = diagnostics
@@ -11333,8 +11333,9 @@ impl Editor {
 if let Some(active_diagnostics) = self.active_diagnostics.as_mut() {
 let buffer = self.buffer.read(cx).snapshot(cx);
 let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
+let primary_range_end = active_diagnostics.primary_range.end.to_offset(&buffer);
 let is_valid = buffer
-.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
+.diagnostics_in_range::<usize>(primary_range_start..primary_range_end)
 .any(|entry| {
 entry.diagnostic.is_primary
 && !entry.range.is_empty()
@@ -12431,8 +12431,8 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
 (buffer_2.clone(), base_text_2),
 (buffer_3.clone(), base_text_3),
 ] {
-let change_set = cx
-.new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx));
+let change_set =
+cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx));
 editor
 .buffer
 .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
@@ -13125,9 +13125,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) {
 (buffer_2.clone(), file_2_old),
 (buffer_3.clone(), file_3_old),
 ] {
-let change_set = cx.new(|cx| {
-BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
-});
+let change_set =
+cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx));
 editor
 .buffer
 .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
@@ -13212,7 +13211,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext) {
 init_test(cx, |_| {});

 let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n";
-let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n";
+let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n";

 let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx));
 let multi_buffer = cx.new(|cx| {
@@ -13225,7 +13224,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext) {
 primary: None,
 },
 ExcerptRange {
-context: Point::new(5, 0)..Point::new(7, 0),
+context: Point::new(4, 0)..Point::new(7, 0),
+primary: None,
+},
+ExcerptRange {
+context: Point::new(9, 0)..Point::new(10, 0),
 primary: None,
 },
 ],
@@ -13239,8 +13242,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext) {
 });
 editor
 .update(cx, |editor, _window, cx| {
-let change_set =
-cx.new(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx));
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base, &buffer, cx));
 editor
 .buffer
 .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx))
@@ -13255,14 +13257,22 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext) {
 });
 cx.executor().run_until_parked();

+// When the start of a hunk coincides with the start of its excerpt,
+// the hunk is expanded. When the start of a a hunk is earlier than
+// the start of its excerpt, the hunk is not expanded.
 cx.assert_state_with_diff(
 "
 ˇaaa
 - bbb
 + BBB

+- ddd
+- eee
++ DDD
 + EEE
 fff

+iii
 "
 .unindent(),
 );
@@ -13500,8 +13510,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {

 cx.set_state(indoc! { "
 one
-TWO
-ˇthree
+ˇTWO
+three
 four
 five
 "});
@@ -13514,15 +13524,14 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
 indoc! { "
 one
 - two
-+ TWO
-ˇthree
++ ˇTWO
+three
 four
 five
 "}
 .to_string(),
 );
 cx.update_editor(|editor, window, cx| {
-editor.move_up(&Default::default(), window, cx);
 editor.move_up(&Default::default(), window, cx);
 editor.toggle_selected_diff_hunks(&Default::default(), window, cx);
 });
@@ -14402,12 +14411,8 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContext) {

 editor.buffer().update(cx, |multibuffer, cx| {
 let buffer = multibuffer.as_singleton().unwrap();
-let change_set = cx.new(|cx| {
-let mut change_set = BufferChangeSet::new(&buffer, cx);
-let _ =
-change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx);
-change_set
-});
+let change_set =
+cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));

 multibuffer.set_all_diff_hunks_expanded(cx);
 multibuffer.add_change_set(change_set, cx);
@@ -5295,7 +5295,7 @@ impl EditorElement {
 if scrollbar_settings.diagnostics != ScrollbarDiagnostics::None {
 let diagnostics = snapshot
 .buffer_snapshot
-.diagnostics_in_range::<_, Point>(Point::zero()..max_point)
+.diagnostics_in_range::<Point>(Point::zero()..max_point)
 // Don't show diagnostics the user doesn't care about
 .filter(|diagnostic| {
 match (
@@ -697,7 +697,7 @@ mod tests {
 fs.set_blame_for_repo(
 Path::new("/my-repo/.git"),
 vec![(
-Path::new("file.txt"),
+"file.txt".into(),
 Blame {
 entries: vec![
 blame_entry("1b1b1b", 0..1),
@@ -809,7 +809,7 @@ mod tests {
 fs.set_blame_for_repo(
 Path::new("/my-repo/.git"),
 vec![(
-Path::new("file.txt"),
+"file.txt".into(),
 Blame {
 entries: vec![blame_entry("1b1b1b", 0..4)],
 ..Default::default()
@@ -958,7 +958,7 @@ mod tests {
 fs.set_blame_for_repo(
 Path::new("/my-repo/.git"),
 vec![(
-Path::new("file.txt"),
+"file.txt".into(),
 Blame {
 entries: blame_entries,
 ..Default::default()
@@ -1000,7 +1000,7 @@ mod tests {
 fs.set_blame_for_repo(
 Path::new("/my-repo/.git"),
 vec![(
-Path::new("file.txt"),
+"file.txt".into(),
 Blame {
 entries: blame_entries,
 ..Default::default()
crates/editor/src/git/project_diff.rs: new file, 1296 lines (diff suppressed because it is too large)
@@ -279,9 +279,10 @@ fn show_hover(
 delay.await;
 }

+let offset = anchor.to_offset(&snapshot.buffer_snapshot);
 let local_diagnostic = snapshot
 .buffer_snapshot
-.diagnostics_in_range::<_, usize>(anchor..anchor)
+.diagnostics_in_range::<usize>(offset..offset)
 // Find the entry with the most specific range
 .min_by_key(|entry| entry.range.len());

@@ -111,11 +111,7 @@ impl ProposedChangesEditor {
 .read(cx)
 .change_set_for(buffer.remote_id())?;
 Some(change_set.update(cx, |change_set, cx| {
-change_set.set_base_text(
-base_buffer.read(cx).text(),
-buffer,
-cx,
-)
+change_set.set_base_text(base_buffer.clone(), buffer, cx)
 }))
 })
 .collect::<Vec<_>>()
@@ -192,7 +188,7 @@ impl ProposedChangesEditor {
 new_change_sets.push(cx.new(|cx| {
 let mut change_set = BufferChangeSet::new(&branch_buffer, cx);
 let _ = change_set.set_base_text(
-location.buffer.read(cx).text(),
+location.buffer.clone(),
 branch_buffer.read(cx).text_snapshot(),
 cx,
 );
@@ -292,7 +292,7 @@ impl EditorTestContext {
 let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
 fs.set_index_for_repo(
 &Self::root_path().join(".git"),
-&[(path.as_ref(), diff_base.to_string())],
+&[(path.into(), diff_base.to_string())],
 );
 self.cx.run_until_parked();
 }
@@ -5,9 +5,9 @@ mod mac_watcher;
 pub mod fs_watcher;

 use anyhow::{anyhow, Context as _, Result};
-#[cfg(any(test, feature = "test-support"))]
-use git::status::FileStatus;
 use git::GitHostingProviderRegistry;
+#[cfg(any(test, feature = "test-support"))]
+use git::{repository::RepoPath, status::FileStatus};

 #[cfg(any(target_os = "linux", target_os = "freebsd"))]
 use ashpd::desktop::trash;
@@ -1270,25 +1270,32 @@ impl FakeFs {
 })
 }

-pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) {
 self.with_git_state(dot_git, true, |state| {
 state.index_contents.clear();
 state.index_contents.extend(
-head_state
+index_state
 .iter()
-.map(|(path, content)| (path.to_path_buf(), content.clone())),
+.map(|(path, content)| (path.clone(), content.clone())),
 );
 });
 }

-pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) {
+pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) {
+self.with_git_state(dot_git, true, |state| {
+state.head_contents.clear();
+state.head_contents.extend(
+head_state
+.iter()
+.map(|(path, content)| (path.clone(), content.clone())),
+);
+});
+}
+
+pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
 self.with_git_state(dot_git, true, |state| {
 state.blames.clear();
-state.blames.extend(
-blames
-.into_iter()
-.map(|(path, blame)| (path.to_path_buf(), blame)),
-);
+state.blames.extend(blames);
 });
 }

@@ -74,31 +74,34 @@ impl BufferDiff {
 }
 }

-pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self {
+pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self {
 let mut tree = SumTree::new(buffer);

-let buffer_text = buffer.as_rope().to_string();
-let patch = Self::diff(diff_base, &buffer_text);
+if let Some(diff_base) = diff_base {
+let buffer_text = buffer.as_rope().to_string();
+let patch = Self::diff(diff_base, &buffer_text);

 // A common case in Zed is that the empty buffer is represented as just a newline,
 // but if we just compute a naive diff you get a "preserved" line in the middle,
 // which is a bit odd.
 if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 {
 tree.push(
 InternalDiffHunk {
 buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
 diff_base_byte_range: 0..diff_base.len() - 1,
 },
 buffer,
 );
 return Self { tree };
 }

 if let Some(patch) = patch {
 let mut divergence = 0;
 for hunk_index in 0..patch.num_hunks() {
-let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
-tree.push(hunk, buffer);
+let hunk =
+Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
+tree.push(hunk, buffer);
+}
 }
 }

@@ -125,11 +128,14 @@ impl BufferDiff {
 range: Range<Anchor>,
 buffer: &'a BufferSnapshot,
 ) -> impl 'a + Iterator<Item = DiffHunk> {
+let range = range.to_offset(buffer);
+
 let mut cursor = self
 .tree
 .filter::<_, DiffHunkSummary>(buffer, move |summary| {
-let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
-let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
+let summary_range = summary.buffer_range.to_offset(buffer);
+let before_start = summary_range.end < range.start;
+let after_end = summary_range.start > range.end;
 !before_start && !after_end
 });

@@ -151,21 +157,25 @@ impl BufferDiff {
 });

 let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
-iter::from_fn(move || {
+iter::from_fn(move || loop {
 let (start_point, (start_anchor, start_base)) = summaries.next()?;
 let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?;

+if !start_anchor.is_valid(buffer) {
+continue;
+}
+
 if end_point.column > 0 {
 end_point.row += 1;
 end_point.column = 0;
 end_anchor = buffer.anchor_before(end_point);
 }

-Some(DiffHunk {
+return Some(DiffHunk {
 row_range: start_point.row..end_point.row,
 diff_base_byte_range: start_base..end_base,
 buffer_range: start_anchor..end_anchor,
-})
+});
 })
 }

@@ -270,7 +280,7 @@ impl BufferDiff {
 }

 pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) {
-*self = Self::build(&diff_base.to_string(), buffer);
+*self = Self::build(Some(&diff_base.to_string()), buffer);
 }

 #[cfg(test)]
@@ -536,7 +546,7 @@ mod tests {
 let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1);

 let empty_diff = BufferDiff::new(&buffer);
-let diff_1 = BufferDiff::build(&base_text, &buffer);
+let diff_1 = BufferDiff::build(Some(&base_text), &buffer);
 let range = diff_1.compare(&empty_diff, &buffer).unwrap();
 assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0));

@@ -554,7 +564,7 @@ mod tests {
 "
 .unindent(),
 );
-let diff_2 = BufferDiff::build(&base_text, &buffer);
+let diff_2 = BufferDiff::build(Some(&base_text), &buffer);
 assert_eq!(None, diff_2.compare(&diff_1, &buffer));

 // Edit turns a deletion hunk into a modification.
@@ -571,7 +581,7 @@ mod tests {
 "
 .unindent(),
 );
-let diff_3 = BufferDiff::build(&base_text, &buffer);
+let diff_3 = BufferDiff::build(Some(&base_text), &buffer);
 let range = diff_3.compare(&diff_2, &buffer).unwrap();
 assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0));

@@ -588,7 +598,7 @@ mod tests {
 "
 .unindent(),
 );
-let diff_4 = BufferDiff::build(&base_text, &buffer);
+let diff_4 = BufferDiff::build(Some(&base_text), &buffer);
 let range = diff_4.compare(&diff_3, &buffer).unwrap();
 assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0));

@@ -606,7 +616,7 @@ mod tests {
 "
 .unindent(),
 );
-let diff_5 = BufferDiff::build(&base_text, &buffer);
+let diff_5 = BufferDiff::build(Some(&base_text), &buffer);
 let range = diff_5.compare(&diff_4, &buffer).unwrap();
 assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0));

@@ -624,7 +634,7 @@ mod tests {
 "
 .unindent(),
 );
-let diff_6 = BufferDiff::build(&base_text, &buffer);
+let diff_6 = BufferDiff::build(Some(&base_text), &buffer);
 let range = diff_6.compare(&diff_5, &buffer).unwrap();
 assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0));
 }
@@ -29,9 +29,15 @@ pub struct Branch {
 pub trait GitRepository: Send + Sync {
 fn reload_index(&self);

-/// Loads a git repository entry's contents.
+/// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path.
+///
 /// Note that for symlink entries, this will return the contents of the symlink, not the target.
-fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
+fn load_index_text(&self, path: &RepoPath) -> Option<String>;
+
+/// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path.
+///
+/// Note that for symlink entries, this will return the contents of the symlink, not the target.
+fn load_committed_text(&self, path: &RepoPath) -> Option<String>;

 /// Returns the URL of the remote with the given name.
 fn remote_url(&self, name: &str) -> Option<String>;
@@ -106,15 +112,15 @@ impl GitRepository for RealGitRepository {
 repo.path().into()
 }

-fn load_index_text(&self, relative_file_path: &Path) -> Option<String> {
-fn logic(repo: &git2::Repository, relative_file_path: &Path) -> Result<Option<String>> {
+fn load_index_text(&self, path: &RepoPath) -> Option<String> {
+fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
 const STAGE_NORMAL: i32 = 0;
 let index = repo.index()?;

 // This check is required because index.get_path() unwraps internally :(
-check_path_to_repo_path_errors(relative_file_path)?;
+check_path_to_repo_path_errors(path)?;

-let oid = match index.get_path(relative_file_path, STAGE_NORMAL) {
+let oid = match index.get_path(path, STAGE_NORMAL) {
 Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
 _ => return Ok(None),
 };
@@ -123,13 +129,22 @@ impl GitRepository for RealGitRepository {
 Ok(Some(String::from_utf8(content)?))
 }

-match logic(&self.repository.lock(), relative_file_path) {
+match logic(&self.repository.lock(), path) {
 Ok(value) => return value,
-Err(err) => log::error!("Error loading head text: {:?}", err),
+Err(err) => log::error!("Error loading index text: {:?}", err),
 }
 None
 }

+fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
+let repo = self.repository.lock();
+let head = repo.head().ok()?.peel_to_tree().log_err()?;
+let oid = head.get_path(path).ok()?.id();
+let content = repo.find_blob(oid).log_err()?.content().to_owned();
+let content = String::from_utf8(content).log_err()?;
+Some(content)
+}
+
 fn remote_url(&self, name: &str) -> Option<String> {
 let repo = self.repository.lock();
 let remote = repo.find_remote(name).ok()?;
@@ -325,8 +340,9 @@ pub struct FakeGitRepository {
 pub struct FakeGitRepositoryState {
 pub dot_git_dir: PathBuf,
 pub event_emitter: smol::channel::Sender<PathBuf>,
-pub index_contents: HashMap<PathBuf, String>,
-pub blames: HashMap<PathBuf, Blame>,
+pub head_contents: HashMap<RepoPath, String>,
+pub index_contents: HashMap<RepoPath, String>,
+pub blames: HashMap<RepoPath, Blame>,
 pub statuses: HashMap<RepoPath, FileStatus>,
 pub current_branch_name: Option<String>,
 pub branches: HashSet<String>,
@@ -343,6 +359,7 @@ impl FakeGitRepositoryState {
 FakeGitRepositoryState {
 dot_git_dir,
 event_emitter,
+head_contents: Default::default(),
 index_contents: Default::default(),
 blames: Default::default(),
 statuses: Default::default(),
@@ -355,9 +372,14 @@
 impl GitRepository for FakeGitRepository {
 fn reload_index(&self) {}

-fn load_index_text(&self, path: &Path) -> Option<String> {
+fn load_index_text(&self, path: &RepoPath) -> Option<String> {
 let state = self.state.lock();
-state.index_contents.get(path).cloned()
+state.index_contents.get(path.as_ref()).cloned()
+}
+
+fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
+let state = self.state.lock();
+state.head_contents.get(path.as_ref()).cloned()
 }

 fn remote_url(&self, _name: &str) -> Option<String> {
@@ -529,6 +551,12 @@ impl From<&Path> for RepoPath {
 }
 }

+impl From<Arc<Path>> for RepoPath {
+fn from(value: Arc<Path>) -> Self {
+RepoPath(value)
+}
+}
+
 impl From<PathBuf> for RepoPath {
 fn from(value: PathBuf) -> Self {
 RepoPath::new(value)
@@ -1001,6 +1001,34 @@ impl Buffer {
 }
 }

+#[cfg(any(test, feature = "test-support"))]
+pub fn build_snapshot_sync(
+text: Rope,
+language: Option<Arc<Language>>,
+language_registry: Option<Arc<LanguageRegistry>>,
+cx: &mut App,
+) -> BufferSnapshot {
+let entity_id = cx.reserve_entity::<Self>().entity_id();
+let buffer_id = entity_id.as_non_zero_u64().into();
+let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
+let mut syntax = SyntaxMap::new(&text).snapshot();
+if let Some(language) = language.clone() {
+let text = text.clone();
+let language = language.clone();
+let language_registry = language_registry.clone();
+syntax.reparse(&text, language_registry, language);
+}
+BufferSnapshot {
+text,
+syntax,
+file: None,
+diagnostics: Default::default(),
+remote_selections: Default::default(),
+language,
+non_text_state_update_count: 0,
+}
+}
+
 /// Retrieve a snapshot of the buffer's current state. This is computationally
 /// cheap, and allows reading from the buffer on a background thread.
 pub fn snapshot(&self) -> BufferSnapshot {
@ -28,7 +28,7 @@ use smol::future::yield_now;
|
||||||
use std::{
|
use std::{
|
||||||
any::type_name,
|
any::type_name,
|
||||||
borrow::Cow,
|
borrow::Cow,
|
||||||
cell::{Ref, RefCell, RefMut},
|
cell::{Ref, RefCell},
|
||||||
cmp, fmt,
|
cmp, fmt,
|
||||||
future::Future,
|
future::Future,
|
||||||
io,
|
io,
|
||||||
|
@ -290,6 +290,7 @@ impl ExcerptBoundary {
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
||||||
pub struct RowInfo {
|
pub struct RowInfo {
|
||||||
|
pub buffer_id: Option<BufferId>,
|
||||||
pub buffer_row: Option<u32>,
|
pub buffer_row: Option<u32>,
|
||||||
pub multibuffer_row: Option<MultiBufferRow>,
|
pub multibuffer_row: Option<MultiBufferRow>,
|
||||||
pub diff_status: Option<git::diff::DiffHunkStatus>,
|
pub diff_status: Option<git::diff::DiffHunkStatus>,
|
||||||
|
@ -1742,7 +1743,7 @@ impl MultiBuffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
self.sync_diff_transforms(
|
self.sync_diff_transforms(
|
||||||
snapshot,
|
&mut snapshot,
|
||||||
vec![Edit {
|
vec![Edit {
|
||||||
old: edit_start..edit_start,
|
old: edit_start..edit_start,
|
||||||
new: edit_start..edit_end,
|
new: edit_start..edit_end,
|
||||||
|
@ -1775,7 +1776,7 @@ impl MultiBuffer {
|
||||||
snapshot.has_conflict = false;
|
snapshot.has_conflict = false;
|
||||||
|
|
||||||
self.sync_diff_transforms(
|
self.sync_diff_transforms(
|
||||||
snapshot,
|
&mut snapshot,
|
||||||
vec![Edit {
|
vec![Edit {
|
||||||
old: start..prev_len,
|
old: start..prev_len,
|
||||||
new: start..start,
|
new: start..start,
|
||||||
|
@ -2053,7 +2054,7 @@ impl MultiBuffer {
 snapshot.trailing_excerpt_update_count += 1;
 }

-self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
 cx.emit(Event::Edited {
 singleton_buffer_edited: false,
 edited_buffer: None,
@ -2218,7 +2219,7 @@ impl MultiBuffer {
 }

 self.sync_diff_transforms(
-snapshot,
+&mut snapshot,
 excerpt_edits,
 DiffChangeKind::DiffUpdated {
 base_changed: base_text_changed,
@ -2388,7 +2389,7 @@ impl MultiBuffer {
 cx: &mut Context<Self>,
 ) {
 self.sync(cx);
-let snapshot = self.snapshot.borrow_mut();
+let mut snapshot = self.snapshot.borrow_mut();
 let mut excerpt_edits = Vec::new();
 for range in ranges.iter() {
 let end_excerpt_id = range.end.excerpt_id;
@ -2422,7 +2423,7 @@ impl MultiBuffer {
 }

 self.sync_diff_transforms(
-snapshot,
+&mut snapshot,
 excerpt_edits,
 DiffChangeKind::ExpandOrCollapseHunks { expand },
 );
@ -2491,7 +2492,7 @@ impl MultiBuffer {
 drop(cursor);
 snapshot.excerpts = new_excerpts;

-self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
 cx.emit(Event::Edited {
 singleton_buffer_edited: false,
 edited_buffer: None,
@ -2592,7 +2593,7 @@ impl MultiBuffer {
 drop(cursor);
 snapshot.excerpts = new_excerpts;

-self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
 cx.emit(Event::Edited {
 singleton_buffer_edited: false,
 edited_buffer: None,
@ -2705,12 +2706,12 @@ impl MultiBuffer {
 drop(cursor);
 snapshot.excerpts = new_excerpts;

-self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
+self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
 }

 fn sync_diff_transforms(
 &self,
-mut snapshot: RefMut<MultiBufferSnapshot>,
+snapshot: &mut MultiBufferSnapshot,
 excerpt_edits: Vec<text::Edit<ExcerptOffset>>,
 change_kind: DiffChangeKind,
 ) {
@ -2791,11 +2792,23 @@ impl MultiBuffer {
 if excerpt_edits.peek().map_or(true, |next_edit| {
 next_edit.old.start >= old_diff_transforms.end(&()).0
 }) {
+let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
+&& match old_diff_transforms.item() {
+Some(DiffTransform::BufferContent {
+inserted_hunk_anchor: Some(hunk_anchor),
+..
+}) => excerpts
+.item()
+.is_some_and(|excerpt| hunk_anchor.1.is_valid(&excerpt.buffer)),
+_ => true,
+};

 let mut excerpt_offset = edit.new.end;
-if old_diff_transforms.start().0 < edit.old.end {
+if !keep_next_old_transform {
 excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end;
 old_diff_transforms.next(&());
 }

 old_expanded_hunks.clear();
 self.push_buffer_content_transform(
 &snapshot,
@ -2894,12 +2907,14 @@ impl MultiBuffer {
 buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end);

 for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) {
+let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);

 let hunk_anchor = (excerpt.id, hunk.buffer_range.start);
-if !hunk_anchor.1.is_valid(buffer) {
+if hunk_buffer_range.start < excerpt_buffer_start {
+log::trace!("skipping hunk that starts before excerpt");
 continue;
 }

-let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
 let hunk_excerpt_start = excerpt_start
 + ExcerptOffset::new(
 hunk_buffer_range.start.saturating_sub(excerpt_buffer_start),
@ -2941,8 +2956,9 @@ impl MultiBuffer {
 if should_expand_hunk {
 did_expand_hunks = true;
 log::trace!(
-"expanding hunk {:?}",
+"expanding hunk {:?}, excerpt:{:?}",
 hunk_excerpt_start.value..hunk_excerpt_end.value,
+excerpt.id
 );

 if !hunk.diff_base_byte_range.is_empty()
@ -3389,12 +3405,12 @@ impl MultiBufferSnapshot {
 self.diff_hunks_in_range(Anchor::min()..Anchor::max())
 }

-pub fn diff_hunks_in_range<T: ToOffset>(
+pub fn diff_hunks_in_range<T: ToPoint>(
 &self,
 range: Range<T>,
 ) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
-let range = range.start.to_offset(self)..range.end.to_offset(self);
-self.lift_buffer_metadata(range.clone(), move |buffer, buffer_range| {
+let query_range = range.start.to_point(self)..range.end.to_point(self);
+self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| {
 let diff = self.diffs.get(&buffer.remote_id())?;
 let buffer_start = buffer.anchor_before(buffer_range.start);
 let buffer_end = buffer.anchor_after(buffer_range.end);
@ -3409,19 +3425,25 @@ impl MultiBufferSnapshot {
 }),
 )
 })
-.map(|(range, hunk, excerpt)| {
+.filter_map(move |(range, hunk, excerpt)| {
+if range.start != range.end
+&& range.end == query_range.start
+&& !hunk.row_range.is_empty()
+{
+return None;
+}
 let end_row = if range.end.column == 0 {
 range.end.row
 } else {
 range.end.row + 1
 };
-MultiBufferDiffHunk {
+Some(MultiBufferDiffHunk {
 row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row),
 buffer_id: excerpt.buffer_id,
 excerpt_id: excerpt.id,
 buffer_range: hunk.buffer_range.clone(),
 diff_base_byte_range: hunk.diff_base_byte_range.clone(),
-}
+})
 })
 }

@ -3560,8 +3582,8 @@ impl MultiBufferSnapshot {
 /// multi-buffer coordinates.
 fn lift_buffer_metadata<'a, D, M, I>(
 &'a self,
-range: Range<usize>,
+query_range: Range<D>,
-get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<usize>) -> Option<I>,
+get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<D>) -> Option<I>,
 ) -> impl Iterator<Item = (Range<D>, M, &'a Excerpt)> + 'a
 where
 I: Iterator<Item = (Range<D>, M)> + 'a,
@ -3569,18 +3591,19 @@ impl MultiBufferSnapshot {
 {
 let max_position = D::from_text_summary(&self.text_summary());
 let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None;
-let mut cursor = self.cursor::<DimensionPair<usize, D>>();
+let mut cursor = self.cursor::<D>();

 // Find the excerpt and buffer offset where the given range ends.
-cursor.seek(&DimensionPair {
-key: range.end,
-value: None,
-});
+cursor.seek(&query_range.end);
 let mut range_end = None;
 while let Some(region) = cursor.region() {
 if region.is_main_buffer {
-let mut buffer_end = region.buffer_range.start.key;
+let mut buffer_end = region.buffer_range.start;
-let overshoot = range.end.saturating_sub(region.range.start.key);
+let overshoot = if query_range.end > region.range.start {
+query_range.end - region.range.start
+} else {
+D::default()
+};
 buffer_end.add_assign(&overshoot);
 range_end = Some((region.excerpt.id, buffer_end));
 break;
@ -3588,13 +3611,10 @@ impl MultiBufferSnapshot {
 cursor.next();
 }

-cursor.seek(&DimensionPair {
-key: range.start,
-value: None,
-});
+cursor.seek(&query_range.start);

 if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) {
-if region.range.start.key > 0 {
+if region.range.start > D::zero(&()) {
 cursor.prev()
 }
 }
@ -3613,14 +3633,18 @@ impl MultiBufferSnapshot {
 // and retrieve the metadata for the resulting range.
 else {
 let region = cursor.region()?;
-let buffer_start = if region.is_main_buffer {
-let start_overshoot = range.start.saturating_sub(region.range.start.key);
-(region.buffer_range.start.key + start_overshoot)
-.min(region.buffer_range.end.key)
+let mut buffer_start;
+if region.is_main_buffer {
+buffer_start = region.buffer_range.start;
+if query_range.start > region.range.start {
+let overshoot = query_range.start - region.range.start;
+buffer_start.add_assign(&overshoot);
+}
+buffer_start = buffer_start.min(region.buffer_range.end);
 } else {
-cursor.main_buffer_position()?.key
+buffer_start = cursor.main_buffer_position()?;
 };
-let mut buffer_end = excerpt.range.context.end.to_offset(&excerpt.buffer);
+let mut buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
 if let Some((end_excerpt_id, end_buffer_offset)) = range_end {
 if excerpt.id == end_excerpt_id {
 buffer_end = buffer_end.min(end_buffer_offset);
@ -3637,53 +3661,56 @@ impl MultiBufferSnapshot {
 };

 // Visit each metadata item.
-if let Some((range, metadata)) = metadata_iter.and_then(Iterator::next) {
+if let Some((metadata_buffer_range, metadata)) = metadata_iter.and_then(Iterator::next)
+{
 // Find the multibuffer regions that contain the start and end of
 // the metadata item's range.
-if range.start > D::default() {
+if metadata_buffer_range.start > D::default() {
 while let Some(region) = cursor.region() {
-if !region.is_main_buffer
-|| region.buffer.remote_id() == excerpt.buffer_id
-&& region.buffer_range.end.value.unwrap() < range.start
+if region.is_main_buffer
+&& (region.buffer_range.end >= metadata_buffer_range.start
+|| cursor.is_at_end_of_excerpt())
 {
-cursor.next();
-} else {
 break;
 }
+cursor.next();
 }
 }
 let start_region = cursor.region()?;
 while let Some(region) = cursor.region() {
-if !region.is_main_buffer
-|| region.buffer.remote_id() == excerpt.buffer_id
-&& region.buffer_range.end.value.unwrap() <= range.end
+if region.is_main_buffer
+&& (region.buffer_range.end > metadata_buffer_range.end
+|| cursor.is_at_end_of_excerpt())
 {
-cursor.next();
-} else {
 break;
 }
+cursor.next();
 }
-let end_region = cursor
-.region()
-.filter(|region| region.buffer.remote_id() == excerpt.buffer_id);
+let end_region = cursor.region();

 // Convert the metadata item's range into multibuffer coordinates.
-let mut start = start_region.range.start.value.unwrap();
+let mut start_position = start_region.range.start;
-let region_buffer_start = start_region.buffer_range.start.value.unwrap();
+let region_buffer_start = start_region.buffer_range.start;
-if start_region.is_main_buffer && range.start > region_buffer_start {
+if start_region.is_main_buffer && metadata_buffer_range.start > region_buffer_start
-start.add_assign(&(range.start - region_buffer_start));
+{
-}
+start_position.add_assign(&(metadata_buffer_range.start - region_buffer_start));
-let mut end = max_position;
+start_position = start_position.min(start_region.range.end);
-if let Some(end_region) = end_region {
-end = end_region.range.start.value.unwrap();
-debug_assert!(end_region.is_main_buffer);
-let region_buffer_start = end_region.buffer_range.start.value.unwrap();
-if range.end > region_buffer_start {
-end.add_assign(&(range.end - region_buffer_start));
-}
-}
 }

-return Some((start..end, metadata, excerpt));
+let mut end_position = max_position;
+if let Some(end_region) = &end_region {
+end_position = end_region.range.start;
+debug_assert!(end_region.is_main_buffer);
+let region_buffer_start = end_region.buffer_range.start;
+if metadata_buffer_range.end > region_buffer_start {
+end_position.add_assign(&(metadata_buffer_range.end - region_buffer_start));
+}
+end_position = end_position.min(end_region.range.end);
+}

+if start_position <= query_range.end && end_position >= query_range.start {
+return Some((start_position..end_position, metadata, excerpt));
+}
 }
 // When there are no more metadata items for this excerpt, move to the next excerpt.
 else {
@ -4509,7 +4536,16 @@ impl MultiBufferSnapshot {
 }

 let excerpt_start_position = D::from_text_summary(&cursor.start().text);
-if let Some(excerpt) = cursor.item().filter(|excerpt| excerpt.id == excerpt_id) {
+if let Some(excerpt) = cursor.item() {
+if excerpt.id != excerpt_id {
+let position = self.resolve_summary_for_anchor(
+&Anchor::min(),
+excerpt_start_position,
+&mut diff_transforms_cursor,
+);
+summaries.extend(excerpt_anchors.map(|_| position));
+continue;
+}
 let excerpt_buffer_start =
 excerpt.range.context.start.summary::<D>(&excerpt.buffer);
 let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
@ -5525,7 +5561,7 @@ impl MultiBufferSnapshot {
 buffer_id: BufferId,
 group_id: usize,
 ) -> impl Iterator<Item = DiagnosticEntry<Point>> + '_ {
-self.lift_buffer_metadata(0..self.len(), move |buffer, _| {
+self.lift_buffer_metadata(Point::zero()..self.max_point(), move |buffer, _| {
 if buffer.remote_id() != buffer_id {
 return None;
 };
@ -5538,15 +5574,19 @@ impl MultiBufferSnapshot {
 .map(|(range, diagnostic, _)| DiagnosticEntry { diagnostic, range })
 }

-pub fn diagnostics_in_range<'a, T, O>(
+pub fn diagnostics_in_range<'a, T>(
 &'a self,
 range: Range<T>,
-) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
+) -> impl Iterator<Item = DiagnosticEntry<T>> + 'a
 where
-T: 'a + ToOffset,
-O: 'a + text::FromAnchor + Copy + TextDimension + Ord + Sub<O, Output = O> + fmt::Debug,
+T: 'a
++ text::ToOffset
++ text::FromAnchor
++ TextDimension
++ Ord
++ Sub<T, Output = T>
++ fmt::Debug,
 {
-let range = range.start.to_offset(self)..range.end.to_offset(self);
 self.lift_buffer_metadata(range, move |buffer, buffer_range| {
 Some(
 buffer
@ -6036,6 +6076,24 @@ where
 self.cached_region.clone()
 }

+fn is_at_end_of_excerpt(&mut self) -> bool {
+if self.diff_transforms.end(&()).1 < self.excerpts.end(&()) {
+return false;
+} else if self.diff_transforms.end(&()).1 > self.excerpts.end(&())
+|| self.diff_transforms.item().is_none()
+{
+return true;
+}

+self.diff_transforms.next(&());
+let next_transform = self.diff_transforms.item();
+self.diff_transforms.prev(&());

+next_transform.map_or(true, |next_transform| {
+matches!(next_transform, DiffTransform::BufferContent { .. })
+})
+}

 fn main_buffer_position(&self) -> Option<D> {
 let excerpt = self.excerpts.item()?;
 let buffer = &excerpt.buffer;
@ -6879,6 +6937,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
 if self.is_empty && self.point.row == 0 {
 self.point += Point::new(1, 0);
 return Some(RowInfo {
+buffer_id: None,
 buffer_row: Some(0),
 multibuffer_row: Some(MultiBufferRow(0)),
 diff_status: None,
@ -6906,6 +6965,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
 .to_point(&last_excerpt.buffer)
 .row;
 return Some(RowInfo {
+buffer_id: Some(last_excerpt.buffer_id),
 buffer_row: Some(last_row),
 multibuffer_row: Some(multibuffer_row),
 diff_status: None,
@ -6919,6 +6979,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
 let overshoot = self.point - region.range.start;
 let buffer_point = region.buffer_range.start + overshoot;
 let result = Some(RowInfo {
+buffer_id: Some(region.buffer.remote_id()),
 buffer_row: Some(buffer_point.row),
 multibuffer_row: Some(MultiBufferRow(self.point.row)),
 diff_status: if region.is_inserted_hunk && self.point < region.range.end {

@ -19,12 +19,14 @@ fn init_logger() {
 #[gpui::test]
 fn test_empty_singleton(cx: &mut App) {
 let buffer = cx.new(|cx| Buffer::local("", cx));
+let buffer_id = buffer.read(cx).remote_id();
 let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
 let snapshot = multibuffer.read(cx).snapshot(cx);
 assert_eq!(snapshot.text(), "");
 assert_eq!(
 snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
 [RowInfo {
+buffer_id: Some(buffer_id),
 buffer_row: Some(0),
 multibuffer_row: Some(MultiBufferRow(0)),
 diff_status: None
@ -359,13 +361,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
 let base_text = "one\ntwo\nthree\n";
 let text = "one\nthree\n";
 let buffer = cx.new(|cx| Buffer::local(text, cx));
-let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
-let change_set = cx.new(|cx| {
-let mut change_set = BufferChangeSet::new(&buffer, cx);
-let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
-change_set
-});
-cx.run_until_parked();
 let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
 multibuffer.update(cx, |multibuffer, cx| {
 multibuffer.add_change_set(change_set, cx)
@ -382,7 +378,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
 let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
 let actual_text = snapshot.text();
 let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
-let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default());
+let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default(), None);
 pretty_assertions::assert_eq!(
 actual_diff,
 indoc! {
@ -409,13 +405,7 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
 let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n";
 let text = "one\nfour\nseven\n";
 let buffer = cx.new(|cx| Buffer::local(text, cx));
-let change_set = cx.new(|cx| {
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
-let mut change_set = BufferChangeSet::new(&buffer, cx);
-let snapshot = buffer.read(cx).snapshot();
-let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
-change_set
-});
-cx.run_until_parked();
 let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
 let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
 (multibuffer.snapshot(cx), multibuffer.subscribe())
@ -508,13 +498,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) {
 let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n";
 let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n";
 let buffer = cx.new(|cx| Buffer::local(text, cx));
-let change_set = cx.new(|cx| {
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx));
-let mut change_set = BufferChangeSet::new(&buffer, cx);
-let snapshot = buffer.read(cx).text_snapshot();
-let _ = change_set.set_base_text(base_text.into(), snapshot, cx);
-change_set
-});
-cx.run_until_parked();
 let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));

 let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
@ -995,12 +979,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
 let buffer = cx.new(|cx| Buffer::local("", cx));
 let base_text = "a\nb\nc";

-let change_set = cx.new(|cx| {
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
-let snapshot = buffer.read(cx).snapshot();
-let mut change_set = BufferChangeSet::new(&buffer, cx);
-let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
-change_set
-});
 multibuffer.update(cx, |multibuffer, cx| {
 multibuffer.set_all_diff_hunks_expanded(cx);
 multibuffer.add_change_set(change_set.clone(), cx);
@ -1040,7 +1019,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
 buffer.update(cx, |buffer, cx| {
 buffer.edit([(0..0, "a\nb\nc")], None, cx);
 change_set.update(cx, |change_set, cx| {
-let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
+change_set.recalculate_diff_sync(buffer.snapshot().text, cx);
 });
 assert_eq!(buffer.text(), "a\nb\nc")
 });
@ -1052,7 +1031,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
 buffer.update(cx, |buffer, cx| {
 buffer.undo(cx);
 change_set.update(cx, |change_set, cx| {
-let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
+change_set.recalculate_diff_sync(buffer.snapshot().text, cx);
 });
 assert_eq!(buffer.text(), "")
 });
@ -1294,8 +1273,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
 );

 let buffer = cx.new(|cx| Buffer::local(text, cx));
-let change_set =
-cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
 cx.run_until_parked();

 let multibuffer = cx.new(|cx| {
@ -1485,8 +1463,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
 assert_line_indents(&snapshot);

 // Recalculate the diff, changing the first diff hunk.
-let _ = change_set.update(cx, |change_set, cx| {
+change_set.update(cx, |change_set, cx| {
-change_set.recalculate_diff(buffer.read(cx).text_snapshot(), cx)
+change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx);
 });
 cx.run_until_parked();
 assert_new_snapshot(
@ -1538,8 +1516,7 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
 );

 let buffer = cx.new(|cx| Buffer::local(text, cx));
-let change_set =
-cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
+let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
 cx.run_until_parked();

 let multibuffer = cx.new(|cx| {
@ -1840,10 +1817,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {

 let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx));
 let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx));
-let change_set_1 =
-cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1.to_string(), &buffer_1, cx));
-let change_set_2 =
-cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2.to_string(), &buffer_2, cx));
+let change_set_1 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1, &buffer_1, cx));
+let change_set_2 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2, &buffer_2, cx));
 cx.run_until_parked();

 let multibuffer = cx.new(|cx| {
@ -2028,6 +2003,7 @@ struct ReferenceMultibuffer {
 change_sets: HashMap<BufferId, Entity<BufferChangeSet>>,
 }

+#[derive(Debug)]
 struct ReferenceExcerpt {
 id: ExcerptId,
 buffer: Entity<Buffer>,
@ -2037,6 +2013,7 @@ struct ReferenceExcerpt {

 #[derive(Debug)]
 struct ReferenceRegion {
+buffer_id: Option<BufferId>,
 range: Range<usize>,
 buffer_start: Option<Point>,
 status: Option<DiffHunkStatus>,
@ -2117,37 +2094,26 @@ impl ReferenceMultibuffer {
 };
 let diff = change_set.read(cx).diff_to_buffer.clone();
 let excerpt_range = excerpt.range.to_offset(&buffer);
-if excerpt_range.is_empty() {
-return;
-}
 for hunk in diff.hunks_intersecting_range(range, &buffer) {
 let hunk_range = hunk.buffer_range.to_offset(&buffer);
-let hunk_precedes_excerpt = hunk
-.buffer_range
-.end
-.cmp(&excerpt.range.start, &buffer)
-.is_lt();
-let hunk_follows_excerpt = hunk
-.buffer_range
-.start
-.cmp(&excerpt.range.end, &buffer)
-.is_ge();
-if hunk_precedes_excerpt || hunk_follows_excerpt {
+if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end {
 continue;
 }

 if let Err(ix) = excerpt
 .expanded_diff_hunks
 .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer))
 {
 log::info!(
-"expanding diff hunk {:?}. excerpt: {:?}",
+"expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}",
 hunk_range,
+excerpt_id,
 excerpt_range
 );
 excerpt
 .expanded_diff_hunks
 .insert(ix, hunk.buffer_range.start);
+} else {
+log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}");
 }
 }
 }
@ -2170,17 +2136,12 @@ impl ReferenceMultibuffer {
 .peekable();

 while let Some(hunk) = hunks.next() {
-if !hunk.buffer_range.start.is_valid(&buffer) {
-continue;
-}

 // Ignore hunks that are outside the excerpt range.
 let mut hunk_range = hunk.buffer_range.to_offset(buffer);

 hunk_range.end = hunk_range.end.min(buffer_range.end);
-if hunk_range.start > buffer_range.end
-|| hunk_range.end < buffer_range.start
-|| buffer_range.is_empty()
-{
+if hunk_range.start > buffer_range.end || hunk_range.start < buffer_range.start {
+log::trace!("skipping hunk outside excerpt range");
 continue;
 }

@ -2188,6 +2149,12 @@ impl ReferenceMultibuffer {
 expanded_anchor.to_offset(&buffer).max(buffer_range.start)
 == hunk_range.start.max(buffer_range.start)
 }) {
+log::trace!("skipping a hunk that's not marked as expanded");
+continue;
+}

+if !hunk.buffer_range.start.is_valid(&buffer) {
+log::trace!("skipping hunk with deleted start: {:?}", hunk.row_range);
 continue;
 }

@ -2196,6 +2163,7 @@ impl ReferenceMultibuffer {
 let len = text.len();
 text.extend(buffer.text_for_range(offset..hunk_range.start));
 regions.push(ReferenceRegion {
+buffer_id: Some(buffer.remote_id()),
 range: len..text.len(),
 buffer_start: Some(buffer.offset_to_point(offset)),
 status: None,
@ -2212,6 +2180,7 @@ impl ReferenceMultibuffer {
 let len = text.len();
 text.push_str(&base_text);
 regions.push(ReferenceRegion {
+buffer_id: Some(base_buffer.remote_id()),
 range: len..text.len(),
 buffer_start: Some(
 base_buffer.offset_to_point(hunk.diff_base_byte_range.start),
@ -2228,6 +2197,7 @@ impl ReferenceMultibuffer {
 let len = text.len();
 text.extend(buffer.text_for_range(offset..hunk_range.end));
 regions.push(ReferenceRegion {
+buffer_id: Some(buffer.remote_id()),
 range: len..text.len(),
 buffer_start: Some(buffer.offset_to_point(offset)),
 status: Some(DiffHunkStatus::Added),
@ -2241,6 +2211,7 @@ impl ReferenceMultibuffer {
 text.extend(buffer.text_for_range(offset..buffer_range.end));
 text.push('\n');
 regions.push(ReferenceRegion {
+buffer_id: Some(buffer.remote_id()),
 range: len..text.len(),
 buffer_start: Some(buffer.offset_to_point(offset)),
 status: None,
@ -2250,6 +2221,7 @@ impl ReferenceMultibuffer {
 // Remove final trailing newline.
 if self.excerpts.is_empty() {
 regions.push(ReferenceRegion {
+buffer_id: None,
 range: 0..1,
 buffer_start: Some(Point::new(0, 0)),
 status: None,
@ -2273,6 +2245,7 @@ impl ReferenceMultibuffer {
 + text[region.range.start..ix].matches('\n').count() as u32
 });
 RowInfo {
+buffer_id: region.buffer_id,
 diff_status: region.status,
 buffer_row,
 multibuffer_row: Some(MultiBufferRow(
@ -2348,6 +2321,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 buffer.update(cx, |buf, cx| {
 let edit_count = rng.gen_range(1..5);
 buf.randomly_edit(&mut rng, edit_count, cx);
+log::info!("buffer text:\n{}", buf.text());
 needs_diff_calculation = true;
 });
 cx.update(|cx| reference.diffs_updated(cx));
@ -2440,7 +2414,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap()
 ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap();

-log::info!("expanding diff hunks for excerpt {:?}", excerpt_ix);
+log::info!(
+"expanding diff hunks in range {:?} (excerpt id {:?}) index {excerpt_ix:?})",
+range.to_offset(&snapshot),
+excerpt.id
+);
 reference.expand_diff_hunks(excerpt.id, start..end, cx);
 multibuffer.expand_diff_hunks(vec![range], cx);
 });
@ -2457,7 +2435,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 "recalculating diff for buffer {:?}",
 snapshot.remote_id(),
 );
-change_set.recalculate_diff(snapshot.text, cx)
+change_set.recalculate_diff_sync(snapshot.text, cx);
 });
 }
 reference.diffs_updated(cx);
@ -2471,14 +2449,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 .collect::<String>();

 let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx));
-let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx));
-change_set
-.update(cx, |change_set, cx| {
-let snapshot = buffer.read(cx).snapshot();
-change_set.set_base_text(base_text, snapshot.text, cx)
-})
-.await
-.unwrap();
+let change_set =
+cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx));

 multibuffer.update(cx, |multibuffer, cx| {
 reference.add_change_set(change_set.clone(), cx);
@ -2553,12 +2525,28 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 .filter_map(|b| if b.next.is_some() { Some(b.row) } else { None })
 .collect::<HashSet<_>>();
 let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
-let actual_diff = format_diff(&actual_text, &actual_row_infos, &actual_boundary_rows);

 let (expected_text, expected_row_infos, expected_boundary_rows) =
 cx.update(|cx| reference.expected_content(cx));
-let expected_diff =
-format_diff(&expected_text, &expected_row_infos, &expected_boundary_rows);
+let has_diff = actual_row_infos
+.iter()
+.any(|info| info.diff_status.is_some())
+|| expected_row_infos
+.iter()
+.any(|info| info.diff_status.is_some());
+let actual_diff = format_diff(
+&actual_text,
+&actual_row_infos,
+&actual_boundary_rows,
+Some(has_diff),
+);
+let expected_diff = format_diff(
+&expected_text,
+&expected_row_infos,
+&expected_boundary_rows,
+Some(has_diff),
+);

 log::info!("Multibuffer content:\n{}", actual_diff);

@ -2569,8 +2557,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
 actual_text.split('\n').count()
 );
 pretty_assertions::assert_eq!(actual_diff, expected_diff);
-pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
 pretty_assertions::assert_eq!(actual_text, expected_text);
+pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);

 for _ in 0..5 {
 let start_row = rng.gen_range(0..=expected_row_infos.len());
@ -2937,8 +2925,10 @@ fn format_diff(
 text: &str,
 row_infos: &Vec<RowInfo>,
 boundary_rows: &HashSet<MultiBufferRow>,
+has_diff: Option<bool>,
 ) -> String {
-let has_diff = row_infos.iter().any(|info| info.diff_status.is_some());
+let has_diff =
+has_diff.unwrap_or_else(|| row_infos.iter().any(|info| info.diff_status.is_some()));
 text.split('\n')
 .enumerate()
 .zip(row_infos)
@ -3002,7 +2992,7 @@ fn assert_new_snapshot(
 let line_infos = new_snapshot
 .row_infos(MultiBufferRow(0))
 .collect::<Vec<_>>();
-let actual_diff = format_diff(&actual_text, &line_infos, &Default::default());
+let actual_diff = format_diff(&actual_text, &line_infos, &Default::default(), None);
 pretty_assertions::assert_eq!(actual_diff, expected_diff);
 check_edits(
 snapshot,

(File diff suppressed because it is too large.)

@ -1970,6 +1970,20 @@ impl Project {
 })
 }

+pub fn open_uncommitted_changes(
+&mut self,
+buffer: Entity<Buffer>,
+cx: &mut Context<Self>,
+) -> Task<Result<Entity<BufferChangeSet>>> {
+if self.is_disconnected(cx) {
+return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
+}

+self.buffer_store.update(cx, |buffer_store, cx| {
+buffer_store.open_uncommitted_changes(buffer, cx)
+})
+}

 pub fn open_buffer_by_id(
 &mut self,
 id: BufferId,

@ -5624,7 +5624,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {

 fs.set_index_for_repo(
 Path::new("/dir/.git"),
-&[(Path::new("src/main.rs"), staged_contents)],
+&[("src/main.rs".into(), staged_contents)],
 );

 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
@ -5669,7 +5669,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {

 fs.set_index_for_repo(
 Path::new("/dir/.git"),
-&[(Path::new("src/main.rs"), staged_contents)],
+&[("src/main.rs".into(), staged_contents)],
 );

 cx.run_until_parked();
@ -5684,6 +5684,108 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
 });
 }

+#[gpui::test]
+async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) {
+init_test(cx);

+let committed_contents = r#"
+fn main() {
+println!("hello world");
+}
+"#
+.unindent();
+let staged_contents = r#"
+fn main() {
+println!("goodbye world");
+}
+"#
+.unindent();
+let file_contents = r#"
+// print goodbye
+fn main() {
+println!("goodbye world");
+}
+"#
+.unindent();

+let fs = FakeFs::new(cx.background_executor.clone());
+fs.insert_tree(
+"/dir",
+json!({
+".git": {},
+"src": {
+"main.rs": file_contents,
+}
+}),
+)
+.await;

+fs.set_index_for_repo(
+Path::new("/dir/.git"),
+&[("src/main.rs".into(), staged_contents)],
+);
+fs.set_head_for_repo(
+Path::new("/dir/.git"),
+&[("src/main.rs".into(), committed_contents)],
+);

+let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

+let buffer = project
+.update(cx, |project, cx| {
+project.open_local_buffer("/dir/src/main.rs", cx)
+})
+.await
+.unwrap();
+let uncommitted_changes = project
+.update(cx, |project, cx| {
+project.open_uncommitted_changes(buffer.clone(), cx)
+})
+.await
+.unwrap();

+cx.run_until_parked();
+uncommitted_changes.update(cx, |uncommitted_changes, cx| {
+let snapshot = buffer.read(cx).snapshot();
+assert_hunks(
+uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+&snapshot,
+&uncommitted_changes.base_text.as_ref().unwrap().text(),
+&[
+(0..1, "", "// print goodbye\n"),
+(
+2..3,
+" println!(\"hello world\");\n",
+" println!(\"goodbye world\");\n",
+),
+],
+);
+});

+let committed_contents = r#"
+// print goodbye
+fn main() {
+}
+"#
+.unindent();

+fs.set_head_for_repo(
+Path::new("/dir/.git"),
+&[("src/main.rs".into(), committed_contents)],
+);

+cx.run_until_parked();
+uncommitted_changes.update(cx, |uncommitted_changes, cx| {
+let snapshot = buffer.read(cx).snapshot();
+assert_hunks(
+uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+&snapshot,
+&uncommitted_changes.base_text.as_ref().unwrap().text(),
+&[(2..3, "", " println!(\"goodbye world\");\n")],
+);
+});
+}

 async fn search(
 project: &Entity<Project>,
 query: SearchQuery,

@ -129,7 +129,7 @@ message Envelope {
 GetPrivateUserInfo get_private_user_info = 102;
 GetPrivateUserInfoResponse get_private_user_info_response = 103;
 UpdateUserPlan update_user_plan = 234;
-UpdateDiffBase update_diff_base = 104;
+UpdateDiffBases update_diff_bases = 104;
 AcceptTermsOfService accept_terms_of_service = 239;
 AcceptTermsOfServiceResponse accept_terms_of_service_response = 240;

@ -304,15 +304,18 @@ message Envelope {
 SyncExtensionsResponse sync_extensions_response = 286;
 InstallExtension install_extension = 287;

-GetStagedText get_staged_text = 288;
+OpenUnstagedChanges open_unstaged_changes = 288;
-GetStagedTextResponse get_staged_text_response = 289;
+OpenUnstagedChangesResponse open_unstaged_changes_response = 289;

 RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;

 Stage stage = 293;
 Unstage unstage = 294;
 Commit commit = 295;
-OpenCommitMessageBuffer open_commit_message_buffer = 296; // current max
+OpenCommitMessageBuffer open_commit_message_buffer = 296;

+OpenUncommittedChanges open_uncommitted_changes = 297;
+OpenUncommittedChangesResponse open_uncommitted_changes_response = 298; // current max
 }

 reserved 87 to 88;
@ -2035,21 +2038,53 @@ message WorktreeMetadata {
 string abs_path = 4;
 }

-message UpdateDiffBase {
+message UpdateDiffBases {
 uint64 project_id = 1;
 uint64 buffer_id = 2;

+enum Mode {
+// No collaborator is using the unstaged diff.
+HEAD_ONLY = 0;
+// No collaborator is using the diff from HEAD.
+INDEX_ONLY = 1;
+// Both the unstaged and uncommitted diffs are demanded,
+// and the contents of the index and HEAD are the same for this path.
+INDEX_MATCHES_HEAD = 2;
+// Both the unstaged and uncommitted diffs are demanded,
+// and the contents of the index and HEAD differ for this path,
+// where None means the path doesn't exist in that state of the repo.
+INDEX_AND_HEAD = 3;
+}

 optional string staged_text = 3;
+optional string committed_text = 4;
+Mode mode = 5;
 }

-message GetStagedText {
+message OpenUnstagedChanges {
 uint64 project_id = 1;
 uint64 buffer_id = 2;
 }

-message GetStagedTextResponse {
+message OpenUnstagedChangesResponse {
 optional string staged_text = 1;
 }

+message OpenUncommittedChanges {
+uint64 project_id = 1;
+uint64 buffer_id = 2;
+}

+message OpenUncommittedChangesResponse {
+enum Mode {
+INDEX_MATCHES_HEAD = 0;
+INDEX_AND_HEAD = 1;
+}
+optional string staged_text = 1;
+optional string committed_text = 2;
+Mode mode = 3;
+}

 message GetNotifications {
 optional uint64 before_id = 1;
 }
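
The comments on `Mode` above describe which base texts travel with an `UpdateDiffBases` message, depending on which diffs some collaborator currently needs. A minimal sketch of that selection logic, using stand-in Rust types rather than the generated proto bindings (every name below is illustrative, not code from this PR):

    // Stand-in for the generated proto enum; assumes the semantics documented above.
    #[derive(Debug, PartialEq)]
    enum Mode {
        HeadOnly,         // only the diff from HEAD is in use
        IndexOnly,        // only the unstaged diff is in use
        IndexMatchesHead, // both in use, index text == HEAD text
        IndexAndHead,     // both in use, index text != HEAD text
    }

    // Hypothetical helper: decide which mode (and therefore which texts) to send.
    fn choose_mode(
        unstaged_diff_needed: bool,
        uncommitted_diff_needed: bool,
        index_matches_head: bool,
    ) -> Option<Mode> {
        match (unstaged_diff_needed, uncommitted_diff_needed) {
            (false, false) => None, // no collaborator is watching either diff
            (true, false) => Some(Mode::IndexOnly),
            (false, true) => Some(Mode::HeadOnly),
            (true, true) if index_matches_head => Some(Mode::IndexMatchesHead),
            (true, true) => Some(Mode::IndexAndHead),
        }
    }

    fn main() {
        // Only the uncommitted diff is in use, so committed_text alone would be sent.
        assert_eq!(choose_mode(false, true, false), Some(Mode::HeadOnly));
        // Both diffs in use and index == HEAD, so a single base text can serve both.
        assert_eq!(choose_mode(true, true, true), Some(Mode::IndexMatchesHead));
    }

The INDEX_MATCHES_HEAD case is the reason the enum distinguishes it from INDEX_AND_HEAD: when index and HEAD agree, only one base text needs to go over the wire.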

@ -219,8 +219,10 @@ messages!(
 (GetImplementationResponse, Background),
 (GetLlmToken, Background),
 (GetLlmTokenResponse, Background),
-(GetStagedText, Foreground),
+(OpenUnstagedChanges, Foreground),
-(GetStagedTextResponse, Foreground),
+(OpenUnstagedChangesResponse, Foreground),
+(OpenUncommittedChanges, Foreground),
+(OpenUncommittedChangesResponse, Foreground),
 (GetUsers, Foreground),
 (Hello, Foreground),
 (IncomingCall, Foreground),
@ -309,7 +311,7 @@ messages!(
 (UpdateUserChannels, Foreground),
 (UpdateContacts, Foreground),
 (UpdateDiagnosticSummary, Foreground),
-(UpdateDiffBase, Foreground),
+(UpdateDiffBases, Foreground),
 (UpdateFollowers, Foreground),
 (UpdateInviteInfo, Foreground),
 (UpdateLanguageServer, Foreground),
@ -422,7 +424,8 @@ request_messages!(
 (GetProjectSymbols, GetProjectSymbolsResponse),
 (GetReferences, GetReferencesResponse),
 (GetSignatureHelp, GetSignatureHelpResponse),
-(GetStagedText, GetStagedTextResponse),
+(OpenUnstagedChanges, OpenUnstagedChangesResponse),
+(OpenUncommittedChanges, OpenUncommittedChangesResponse),
 (GetSupermavenApiKey, GetSupermavenApiKeyResponse),
 (GetTypeDefinition, GetTypeDefinitionResponse),
 (LinkedEditingRange, LinkedEditingRangeResponse),
@ -543,7 +546,8 @@ entity_messages!(
 GetProjectSymbols,
 GetReferences,
 GetSignatureHelp,
-GetStagedText,
+OpenUnstagedChanges,
+OpenUncommittedChanges,
 GetTypeDefinition,
 InlayHints,
 JoinProject,
@ -575,7 +579,7 @@ entity_messages!(
 UpdateBuffer,
 UpdateBufferFile,
 UpdateDiagnosticSummary,
-UpdateDiffBase,
+UpdateDiffBases,
 UpdateLanguageServer,
 UpdateProject,
 UpdateProjectCollaborator,

@ -46,7 +46,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
 .await;
 fs.set_index_for_repo(
 Path::new("/code/project1/.git"),
-&[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
+&[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())],
 );

 let (project, _headless) = init_test(&fs, cx, server_cx).await;
@ -147,7 +147,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test

 fs.set_index_for_repo(
 Path::new("/code/project1/.git"),
-&[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
+&[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())],
 );
 cx.executor().run_until_parked();
 change_set.update(cx, |change_set, _| {

@ -450,6 +450,10 @@ impl Rope {
 self.clip_point(Point::new(row, u32::MAX), Bias::Left)
 .column
 }

+pub fn ptr_eq(&self, other: &Self) -> bool {
+self.chunks.ptr_eq(&other.chunks)
+}
 }

 impl<'a> From<&'a str> for Rope {

@ -516,6 +516,10 @@ impl<T: Item> SumTree<T> {
 }
 }

+pub fn ptr_eq(&self, other: &Self) -> bool {
+Arc::ptr_eq(&self.0, &other.0)
+}

 fn push_tree_recursive(
 &mut self,
 other: SumTree<T>,
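
Both `ptr_eq` additions compare the underlying `Arc`-backed storage rather than the text itself, so the check is O(1). A hedged sketch of the short-circuit this enables; the helper and its name are illustrative, not code from this PR:

    use rope::Rope; // zed's internal rope crate, assumed to be in scope

    /// Illustrative helper: if two base-text ropes share the same chunk tree,
    /// they are byte-for-byte identical, so a diff recomputation can be skipped.
    fn base_text_unchanged(old_base: &Rope, new_base: &Rope) -> bool {
        // Pointer equality on the shared chunks, not a content comparison.
        old_base.ptr_eq(new_base)
    }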

@ -895,6 +895,30 @@ impl Worktree {
 }
 }

+pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
+match self {
+Worktree::Local(this) => {
+let path = Arc::from(path);
+let snapshot = this.snapshot();
+cx.background_executor().spawn(async move {
+if let Some(repo) = snapshot.repository_for_path(&path) {
+if let Some(repo_path) = repo.relativize(&path).log_err() {
+if let Some(git_repo) =
+snapshot.git_repositories.get(&repo.work_directory_id)
+{
+return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
+}
+}
+}
+Ok(None)
+})
+}
+Worktree::Remote(_) => Task::ready(Err(anyhow!(
+"remote worktrees can't yet load committed files"
+))),
+}
+}

 pub fn load_binary_file(
 &self,
 path: &Path,