git: Compute and synchronize diffs from HEAD (#23626)

This PR builds on #21258 to make it possible to use HEAD as a diff base.
The buffer store is extended to support holding multiple change sets,
and collab gains support for synchronizing the committed text of files
when any collaborator requires it.

Not implemented in this PR:

- Exposing the diff from HEAD to the user
- Decorating the diff from HEAD with information about which hunks are
staged

`test_random_multibuffer` now fails first at `SEED=13277`, similar to
the previous high-water mark, but with various bugs in the multibuffer
logic now shaken out.

Release Notes:

- N/A

---------

Co-authored-by: Max <max@zed.dev>
Co-authored-by: Ben <ben@zed.dev>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Conrad <conrad@zed.dev>
This commit is contained in:
Cole Miller 2025-02-04 15:29:10 -05:00 committed by GitHub
parent 871f98bc4d
commit 5704b50fb1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
29 changed files with 2799 additions and 603 deletions

View file

@ -309,7 +309,8 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
.add_request_handler(forward_read_only_project_request::<proto::GetStagedText>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedChanges>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedChanges>)
.add_request_handler(
forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
)
@ -348,7 +349,7 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBase>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
.add_request_handler(get_users)
.add_request_handler(fuzzy_search_users)
.add_request_handler(request_contact)

View file

@ -1991,10 +1991,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(Path::new("file.txt"), blame)],
);
client_a
.fs()
.set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a

View file

@ -2558,13 +2558,27 @@ async fn test_git_diff_base_change(
let project_remote = client_b.join_remote_project(project_id, cx_b).await;
let diff_base = "
let staged_text = "
one
three
"
.unindent();
let new_diff_base = "
let committed_text = "
one
TWO
three
"
.unindent();
let new_committed_text = "
one
TWO_HUNDRED
three
"
.unindent();
let new_staged_text = "
one
two
"
@ -2572,7 +2586,11 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("a.txt"), diff_base.clone())],
&[("a.txt".into(), staged_text.clone())],
);
client_a.fs().set_head_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), committed_text.clone())],
);
// Create the buffer
@ -2580,7 +2598,7 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let change_set_local_a = project_local
let local_unstaged_changes_a = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_changes(buffer_local_a.clone(), cx)
})
@ -2589,16 +2607,16 @@ async fn test_git_diff_base_change(
// Wait for it to catch up to the new diff
executor.run_until_parked();
change_set_local_a.read_with(cx_a, |change_set, cx| {
local_unstaged_changes_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
Some(staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&change_set.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
@ -2608,7 +2626,7 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let change_set_remote_a = project_remote
let remote_unstaged_changes_a = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_changes(buffer_remote_a.clone(), cx)
})
@ -2617,64 +2635,104 @@ async fn test_git_diff_base_change(
// Wait for the remote buffer to catch up to the new diff
executor.run_until_parked();
change_set_remote_a.read_with(cx_b, |change_set, cx| {
remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
Some(staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&change_set.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
// Update the staged text of the open buffer
// Open uncommitted changes on the guest, without opening them on the host first
let remote_uncommitted_changes_a = project_remote
.update(cx_b, |p, cx| {
p.open_uncommitted_changes(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(committed_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&change_set.base_text_string().unwrap(),
&[(1..2, "TWO\n", "two\n")],
);
});
// Update the index text of the open buffer
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("a.txt"), new_diff_base.clone())],
&[("a.txt".into(), new_staged_text.clone())],
);
client_a.fs().set_head_for_repo(
Path::new("/dir/.git"),
&[("a.txt".into(), new_committed_text.clone())],
);
// Wait for buffer_local_a to receive it
executor.run_until_parked();
change_set_local_a.read_with(cx_a, |change_set, cx| {
local_unstaged_changes_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
Some(new_staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_diff_base,
&change_set.base_text_string().unwrap(),
&[(2..3, "", "three\n")],
);
});
change_set_remote_a.read_with(cx_b, |change_set, cx| {
remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
Some(new_staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_diff_base,
&change_set.base_text_string().unwrap(),
&[(2..3, "", "three\n")],
);
});
remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(new_committed_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&change_set.base_text_string().unwrap(),
&[(1..2, "TWO_HUNDRED\n", "two\n")],
);
});
// Nested git dir
let diff_base = "
let staged_text = "
one
three
"
.unindent();
let new_diff_base = "
let new_staged_text = "
one
two
"
@ -2682,7 +2740,7 @@ async fn test_git_diff_base_change(
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
&[(Path::new("b.txt"), diff_base.clone())],
&[("b.txt".into(), staged_text.clone())],
);
// Create the buffer
@ -2690,7 +2748,7 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let change_set_local_b = project_local
let local_unstaged_changes_b = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_changes(buffer_local_b.clone(), cx)
})
@ -2699,16 +2757,16 @@ async fn test_git_diff_base_change(
// Wait for it to catch up to the new diff
executor.run_until_parked();
change_set_local_b.read_with(cx_a, |change_set, cx| {
local_unstaged_changes_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
Some(staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&change_set.base_text_string().unwrap(),
&[(1..2, "", "two\n")],
);
});
@ -2718,7 +2776,7 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let change_set_remote_b = project_remote
let remote_unstaged_changes_b = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_changes(buffer_remote_b.clone(), cx)
})
@ -2726,52 +2784,52 @@ async fn test_git_diff_base_change(
.unwrap();
executor.run_until_parked();
change_set_remote_b.read_with(cx_b, |change_set, cx| {
remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(diff_base.as_str())
Some(staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&staged_text,
&[(1..2, "", "two\n")],
);
});
// Update the staged text
// Update the staged text
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
&[(Path::new("b.txt"), new_diff_base.clone())],
&[("b.txt".into(), new_staged_text.clone())],
);
// Wait for buffer_local_b to receive it
executor.run_until_parked();
change_set_local_b.read_with(cx_a, |change_set, cx| {
local_unstaged_changes_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
Some(new_staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_diff_base,
&new_staged_text,
&[(2..3, "", "three\n")],
);
});
change_set_remote_b.read_with(cx_b, |change_set, cx| {
remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str())
Some(new_staged_text.as_str())
);
git::diff::assert_hunks(
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&new_diff_base,
&new_staged_text,
&[(2..3, "", "three\n")],
);
});

View file

@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let dot_git_dir = repo_path.join(".git");
let contents = contents
.iter()
.map(|(path, contents)| (path.as_path(), contents.clone()))
.into_iter()
.map(|(path, contents)| (path.into(), contents))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
client.fs().create_dir(&dot_git_dir).await?;
@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
.get_unstaged_changes(host_buffer.read(cx).remote_id())
.get_unstaged_changes(host_buffer.read(cx).remote_id(), cx)
.unwrap()
.read(cx)
.base_text_string()
@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest {
project
.buffer_store()
.read(cx)
.get_unstaged_changes(guest_buffer.read(cx).remote_id())
.get_unstaged_changes(guest_buffer.read(cx).remote_id(), cx)
.unwrap()
.read(cx)
.base_text_string()

View file

@ -157,7 +157,7 @@ impl DiagnosticIndicator {
(buffer, cursor_position)
});
let new_diagnostic = buffer
.diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
.diagnostics_in_range::<usize>(cursor_position..cursor_position)
.filter(|entry| !entry.range.is_empty())
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
.map(|entry| entry.diagnostic);

View file

@ -979,6 +979,7 @@ impl<'a> Iterator for WrapRows<'a> {
Some(if soft_wrapped {
RowInfo {
buffer_id: None,
buffer_row: None,
multibuffer_row: None,
diff_status,

View file

@ -10137,12 +10137,12 @@ impl Editor {
let mut diagnostics;
if direction == Direction::Prev {
diagnostics = buffer
.diagnostics_in_range::<_, usize>(0..search_start)
.diagnostics_in_range::<usize>(0..search_start)
.collect::<Vec<_>>();
diagnostics.reverse();
} else {
diagnostics = buffer
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
.diagnostics_in_range::<usize>(search_start..buffer.len())
.collect::<Vec<_>>();
};
let group = diagnostics
@ -11333,8 +11333,9 @@ impl Editor {
if let Some(active_diagnostics) = self.active_diagnostics.as_mut() {
let buffer = self.buffer.read(cx).snapshot(cx);
let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
let primary_range_end = active_diagnostics.primary_range.end.to_offset(&buffer);
let is_valid = buffer
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
.diagnostics_in_range::<usize>(primary_range_start..primary_range_end)
.any(|entry| {
entry.diagnostic.is_primary
&& !entry.range.is_empty()

View file

@ -12431,8 +12431,8 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
(buffer_2.clone(), base_text_2),
(buffer_3.clone(), base_text_3),
] {
let change_set = cx
.new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx));
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
@ -13125,9 +13125,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
(buffer_2.clone(), file_2_old),
(buffer_3.clone(), file_3_old),
] {
let change_set = cx.new(|cx| {
BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
});
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
@ -13212,7 +13211,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
init_test(cx, |_| {});
let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n";
let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n";
let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n";
let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx));
let multi_buffer = cx.new(|cx| {
@ -13225,7 +13224,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
primary: None,
},
ExcerptRange {
context: Point::new(5, 0)..Point::new(7, 0),
context: Point::new(4, 0)..Point::new(7, 0),
primary: None,
},
ExcerptRange {
context: Point::new(9, 0)..Point::new(10, 0),
primary: None,
},
],
@ -13239,8 +13242,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
editor
.update(cx, |editor, _window, cx| {
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx));
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base, &buffer, cx));
editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx))
@ -13255,14 +13257,22 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
cx.executor().run_until_parked();
// When the start of a hunk coincides with the start of its excerpt,
// the hunk is expanded. When the start of a hunk is earlier than
// the start of its excerpt, the hunk is not expanded.
cx.assert_state_with_diff(
"
ˇaaa
- bbb
+ BBB
- ddd
- eee
+ DDD
+ EEE
fff
iii
"
.unindent(),
);
@ -13500,8 +13510,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
cx.set_state(indoc! { "
one
TWO
ˇthree
ˇTWO
three
four
five
"});
@ -13514,15 +13524,14 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
indoc! { "
one
- two
+ TWO
ˇthree
+ ˇTWO
three
four
five
"}
.to_string(),
);
cx.update_editor(|editor, window, cx| {
editor.move_up(&Default::default(), window, cx);
editor.move_up(&Default::default(), window, cx);
editor.toggle_selected_diff_hunks(&Default::default(), window, cx);
});
@ -14402,12 +14411,8 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = multibuffer.as_singleton().unwrap();
let change_set = cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
let _ =
change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx);
change_set
});
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
multibuffer.set_all_diff_hunks_expanded(cx);
multibuffer.add_change_set(change_set, cx);

View file

@ -5295,7 +5295,7 @@ impl EditorElement {
if scrollbar_settings.diagnostics != ScrollbarDiagnostics::None {
let diagnostics = snapshot
.buffer_snapshot
.diagnostics_in_range::<_, Point>(Point::zero()..max_point)
.diagnostics_in_range::<Point>(Point::zero()..max_point)
// Don't show diagnostics the user doesn't care about
.filter(|diagnostic| {
match (

View file

@ -697,7 +697,7 @@ mod tests {
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
"file.txt".into(),
Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
@ -809,7 +809,7 @@ mod tests {
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
"file.txt".into(),
Blame {
entries: vec![blame_entry("1b1b1b", 0..4)],
..Default::default()
@ -958,7 +958,7 @@ mod tests {
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
"file.txt".into(),
Blame {
entries: blame_entries,
..Default::default()
@ -1000,7 +1000,7 @@ mod tests {
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
"file.txt".into(),
Blame {
entries: blame_entries,
..Default::default()

File diff suppressed because it is too large Load diff

View file

@ -279,9 +279,10 @@ fn show_hover(
delay.await;
}
let offset = anchor.to_offset(&snapshot.buffer_snapshot);
let local_diagnostic = snapshot
.buffer_snapshot
.diagnostics_in_range::<_, usize>(anchor..anchor)
.diagnostics_in_range::<usize>(offset..offset)
// Find the entry with the most specific range
.min_by_key(|entry| entry.range.len());

View file

@ -111,11 +111,7 @@ impl ProposedChangesEditor {
.read(cx)
.change_set_for(buffer.remote_id())?;
Some(change_set.update(cx, |change_set, cx| {
change_set.set_base_text(
base_buffer.read(cx).text(),
buffer,
cx,
)
change_set.set_base_text(base_buffer.clone(), buffer, cx)
}))
})
.collect::<Vec<_>>()
@ -192,7 +188,7 @@ impl ProposedChangesEditor {
new_change_sets.push(cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&branch_buffer, cx);
let _ = change_set.set_base_text(
location.buffer.read(cx).text(),
location.buffer.clone(),
branch_buffer.read(cx).text_snapshot(),
cx,
);

View file

@ -292,7 +292,7 @@ impl EditorTestContext {
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
fs.set_index_for_repo(
&Self::root_path().join(".git"),
&[(path.as_ref(), diff_base.to_string())],
&[(path.into(), diff_base.to_string())],
);
self.cx.run_until_parked();
}

View file

@ -5,9 +5,9 @@ mod mac_watcher;
pub mod fs_watcher;
use anyhow::{anyhow, Context as _, Result};
#[cfg(any(test, feature = "test-support"))]
use git::status::FileStatus;
use git::GitHostingProviderRegistry;
#[cfg(any(test, feature = "test-support"))]
use git::{repository::RepoPath, status::FileStatus};
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash;
@ -1270,25 +1270,32 @@ impl FakeFs {
})
}
pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) {
self.with_git_state(dot_git, true, |state| {
state.index_contents.clear();
state.index_contents.extend(
head_state
index_state
.iter()
.map(|(path, content)| (path.to_path_buf(), content.clone())),
.map(|(path, content)| (path.clone(), content.clone())),
);
});
}
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) {
pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) {
self.with_git_state(dot_git, true, |state| {
state.head_contents.clear();
state.head_contents.extend(
head_state
.iter()
.map(|(path, content)| (path.clone(), content.clone())),
);
});
}
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();
state.blames.extend(
blames
.into_iter()
.map(|(path, blame)| (path.to_path_buf(), blame)),
);
state.blames.extend(blames);
});
}

View file

@ -74,31 +74,34 @@ impl BufferDiff {
}
}
pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self {
pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self {
let mut tree = SumTree::new(buffer);
let buffer_text = buffer.as_rope().to_string();
let patch = Self::diff(diff_base, &buffer_text);
if let Some(diff_base) = diff_base {
let buffer_text = buffer.as_rope().to_string();
let patch = Self::diff(diff_base, &buffer_text);
// A common case in Zed is that the empty buffer is represented as just a newline,
// but if we just compute a naive diff you get a "preserved" line in the middle,
// which is a bit odd.
if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 {
tree.push(
InternalDiffHunk {
buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
diff_base_byte_range: 0..diff_base.len() - 1,
},
buffer,
);
return Self { tree };
}
// A common case in Zed is that the empty buffer is represented as just a newline,
// but if we just compute a naive diff you get a "preserved" line in the middle,
// which is a bit odd.
if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 {
tree.push(
InternalDiffHunk {
buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
diff_base_byte_range: 0..diff_base.len() - 1,
},
buffer,
);
return Self { tree };
}
if let Some(patch) = patch {
let mut divergence = 0;
for hunk_index in 0..patch.num_hunks() {
let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
tree.push(hunk, buffer);
if let Some(patch) = patch {
let mut divergence = 0;
for hunk_index in 0..patch.num_hunks() {
let hunk =
Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
tree.push(hunk, buffer);
}
}
}
@ -125,11 +128,14 @@ impl BufferDiff {
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = DiffHunk> {
let range = range.to_offset(buffer);
let mut cursor = self
.tree
.filter::<_, DiffHunkSummary>(buffer, move |summary| {
let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
let summary_range = summary.buffer_range.to_offset(buffer);
let before_start = summary_range.end < range.start;
let after_end = summary_range.start > range.end;
!before_start && !after_end
});
@ -151,21 +157,25 @@ impl BufferDiff {
});
let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
iter::from_fn(move || {
iter::from_fn(move || loop {
let (start_point, (start_anchor, start_base)) = summaries.next()?;
let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?;
if !start_anchor.is_valid(buffer) {
continue;
}
if end_point.column > 0 {
end_point.row += 1;
end_point.column = 0;
end_anchor = buffer.anchor_before(end_point);
}
Some(DiffHunk {
return Some(DiffHunk {
row_range: start_point.row..end_point.row,
diff_base_byte_range: start_base..end_base,
buffer_range: start_anchor..end_anchor,
})
});
})
}
@ -270,7 +280,7 @@ impl BufferDiff {
}
pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) {
*self = Self::build(&diff_base.to_string(), buffer);
*self = Self::build(Some(&diff_base.to_string()), buffer);
}
#[cfg(test)]
@ -536,7 +546,7 @@ mod tests {
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1);
let empty_diff = BufferDiff::new(&buffer);
let diff_1 = BufferDiff::build(&base_text, &buffer);
let diff_1 = BufferDiff::build(Some(&base_text), &buffer);
let range = diff_1.compare(&empty_diff, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0));
@ -554,7 +564,7 @@ mod tests {
"
.unindent(),
);
let diff_2 = BufferDiff::build(&base_text, &buffer);
let diff_2 = BufferDiff::build(Some(&base_text), &buffer);
assert_eq!(None, diff_2.compare(&diff_1, &buffer));
// Edit turns a deletion hunk into a modification.
@ -571,7 +581,7 @@ mod tests {
"
.unindent(),
);
let diff_3 = BufferDiff::build(&base_text, &buffer);
let diff_3 = BufferDiff::build(Some(&base_text), &buffer);
let range = diff_3.compare(&diff_2, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0));
@ -588,7 +598,7 @@ mod tests {
"
.unindent(),
);
let diff_4 = BufferDiff::build(&base_text, &buffer);
let diff_4 = BufferDiff::build(Some(&base_text), &buffer);
let range = diff_4.compare(&diff_3, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0));
@ -606,7 +616,7 @@ mod tests {
"
.unindent(),
);
let diff_5 = BufferDiff::build(&base_text, &buffer);
let diff_5 = BufferDiff::build(Some(&base_text), &buffer);
let range = diff_5.compare(&diff_4, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0));
@ -624,7 +634,7 @@ mod tests {
"
.unindent(),
);
let diff_6 = BufferDiff::build(&base_text, &buffer);
let diff_6 = BufferDiff::build(Some(&base_text), &buffer);
let range = diff_6.compare(&diff_5, &buffer).unwrap();
assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0));
}

View file

@ -29,9 +29,15 @@ pub struct Branch {
pub trait GitRepository: Send + Sync {
fn reload_index(&self);
/// Loads a git repository entry's contents.
/// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path.
///
/// Note that for symlink entries, this will return the contents of the symlink, not the target.
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
fn load_index_text(&self, path: &RepoPath) -> Option<String>;
/// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path.
///
/// Note that for symlink entries, this will return the contents of the symlink, not the target.
fn load_committed_text(&self, path: &RepoPath) -> Option<String>;
/// Returns the URL of the remote with the given name.
fn remote_url(&self, name: &str) -> Option<String>;
@ -106,15 +112,15 @@ impl GitRepository for RealGitRepository {
repo.path().into()
}
fn load_index_text(&self, relative_file_path: &Path) -> Option<String> {
fn logic(repo: &git2::Repository, relative_file_path: &Path) -> Result<Option<String>> {
fn load_index_text(&self, path: &RepoPath) -> Option<String> {
fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
const STAGE_NORMAL: i32 = 0;
let index = repo.index()?;
// This check is required because index.get_path() unwraps internally :(
check_path_to_repo_path_errors(relative_file_path)?;
check_path_to_repo_path_errors(path)?;
let oid = match index.get_path(relative_file_path, STAGE_NORMAL) {
let oid = match index.get_path(path, STAGE_NORMAL) {
Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
_ => return Ok(None),
};
@ -123,13 +129,22 @@ impl GitRepository for RealGitRepository {
Ok(Some(String::from_utf8(content)?))
}
match logic(&self.repository.lock(), relative_file_path) {
match logic(&self.repository.lock(), path) {
Ok(value) => return value,
Err(err) => log::error!("Error loading head text: {:?}", err),
Err(err) => log::error!("Error loading index text: {:?}", err),
}
None
}
fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
let repo = self.repository.lock();
let head = repo.head().ok()?.peel_to_tree().log_err()?;
let oid = head.get_path(path).ok()?.id();
let content = repo.find_blob(oid).log_err()?.content().to_owned();
let content = String::from_utf8(content).log_err()?;
Some(content)
}
fn remote_url(&self, name: &str) -> Option<String> {
let repo = self.repository.lock();
let remote = repo.find_remote(name).ok()?;
@ -325,8 +340,9 @@ pub struct FakeGitRepository {
pub struct FakeGitRepositoryState {
pub dot_git_dir: PathBuf,
pub event_emitter: smol::channel::Sender<PathBuf>,
pub index_contents: HashMap<PathBuf, String>,
pub blames: HashMap<PathBuf, Blame>,
pub head_contents: HashMap<RepoPath, String>,
pub index_contents: HashMap<RepoPath, String>,
pub blames: HashMap<RepoPath, Blame>,
pub statuses: HashMap<RepoPath, FileStatus>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
@ -343,6 +359,7 @@ impl FakeGitRepositoryState {
FakeGitRepositoryState {
dot_git_dir,
event_emitter,
head_contents: Default::default(),
index_contents: Default::default(),
blames: Default::default(),
statuses: Default::default(),
@ -355,9 +372,14 @@ impl FakeGitRepositoryState {
impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
fn load_index_text(&self, path: &Path) -> Option<String> {
fn load_index_text(&self, path: &RepoPath) -> Option<String> {
let state = self.state.lock();
state.index_contents.get(path).cloned()
state.index_contents.get(path.as_ref()).cloned()
}
fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
let state = self.state.lock();
state.head_contents.get(path.as_ref()).cloned()
}
fn remote_url(&self, _name: &str) -> Option<String> {
@ -529,6 +551,12 @@ impl From<&Path> for RepoPath {
}
}
impl From<Arc<Path>> for RepoPath {
fn from(value: Arc<Path>) -> Self {
RepoPath(value)
}
}
impl From<PathBuf> for RepoPath {
fn from(value: PathBuf) -> Self {
RepoPath::new(value)

View file

@ -1001,6 +1001,34 @@ impl Buffer {
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn build_snapshot_sync(
text: Rope,
language: Option<Arc<Language>>,
language_registry: Option<Arc<LanguageRegistry>>,
cx: &mut App,
) -> BufferSnapshot {
let entity_id = cx.reserve_entity::<Self>().entity_id();
let buffer_id = entity_id.as_non_zero_u64().into();
let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
let mut syntax = SyntaxMap::new(&text).snapshot();
if let Some(language) = language.clone() {
let text = text.clone();
let language = language.clone();
let language_registry = language_registry.clone();
syntax.reparse(&text, language_registry, language);
}
BufferSnapshot {
text,
syntax,
file: None,
diagnostics: Default::default(),
remote_selections: Default::default(),
language,
non_text_state_update_count: 0,
}
}
/// Retrieve a snapshot of the buffer's current state. This is computationally
/// cheap, and allows reading from the buffer on a background thread.
pub fn snapshot(&self) -> BufferSnapshot {

View file

@ -28,7 +28,7 @@ use smol::future::yield_now;
use std::{
any::type_name,
borrow::Cow,
cell::{Ref, RefCell, RefMut},
cell::{Ref, RefCell},
cmp, fmt,
future::Future,
io,
@ -290,6 +290,7 @@ impl ExcerptBoundary {
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct RowInfo {
pub buffer_id: Option<BufferId>,
pub buffer_row: Option<u32>,
pub multibuffer_row: Option<MultiBufferRow>,
pub diff_status: Option<git::diff::DiffHunkStatus>,
@ -1742,7 +1743,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
snapshot,
&mut snapshot,
vec![Edit {
old: edit_start..edit_start,
new: edit_start..edit_end,
@ -1775,7 +1776,7 @@ impl MultiBuffer {
snapshot.has_conflict = false;
self.sync_diff_transforms(
snapshot,
&mut snapshot,
vec![Edit {
old: start..prev_len,
new: start..start,
@ -2053,7 +2054,7 @@ impl MultiBuffer {
snapshot.trailing_excerpt_update_count += 1;
}
self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@ -2218,7 +2219,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
snapshot,
&mut snapshot,
excerpt_edits,
DiffChangeKind::DiffUpdated {
base_changed: base_text_changed,
@ -2388,7 +2389,7 @@ impl MultiBuffer {
cx: &mut Context<Self>,
) {
self.sync(cx);
let snapshot = self.snapshot.borrow_mut();
let mut snapshot = self.snapshot.borrow_mut();
let mut excerpt_edits = Vec::new();
for range in ranges.iter() {
let end_excerpt_id = range.end.excerpt_id;
@ -2422,7 +2423,7 @@ impl MultiBuffer {
}
self.sync_diff_transforms(
snapshot,
&mut snapshot,
excerpt_edits,
DiffChangeKind::ExpandOrCollapseHunks { expand },
);
@ -2491,7 +2492,7 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@ -2592,7 +2593,7 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
cx.emit(Event::Edited {
singleton_buffer_edited: false,
edited_buffer: None,
@ -2705,12 +2706,12 @@ impl MultiBuffer {
drop(cursor);
snapshot.excerpts = new_excerpts;
self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited);
self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited);
}
fn sync_diff_transforms(
&self,
mut snapshot: RefMut<MultiBufferSnapshot>,
snapshot: &mut MultiBufferSnapshot,
excerpt_edits: Vec<text::Edit<ExcerptOffset>>,
change_kind: DiffChangeKind,
) {
@ -2791,11 +2792,23 @@ impl MultiBuffer {
if excerpt_edits.peek().map_or(true, |next_edit| {
next_edit.old.start >= old_diff_transforms.end(&()).0
}) {
let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end)
&& match old_diff_transforms.item() {
Some(DiffTransform::BufferContent {
inserted_hunk_anchor: Some(hunk_anchor),
..
}) => excerpts
.item()
.is_some_and(|excerpt| hunk_anchor.1.is_valid(&excerpt.buffer)),
_ => true,
};
let mut excerpt_offset = edit.new.end;
if old_diff_transforms.start().0 < edit.old.end {
if !keep_next_old_transform {
excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end;
old_diff_transforms.next(&());
}
old_expanded_hunks.clear();
self.push_buffer_content_transform(
&snapshot,
@ -2894,12 +2907,14 @@ impl MultiBuffer {
buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end);
for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) {
let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
let hunk_anchor = (excerpt.id, hunk.buffer_range.start);
if !hunk_anchor.1.is_valid(buffer) {
if hunk_buffer_range.start < excerpt_buffer_start {
log::trace!("skipping hunk that starts before excerpt");
continue;
}
let hunk_buffer_range = hunk.buffer_range.to_offset(buffer);
let hunk_excerpt_start = excerpt_start
+ ExcerptOffset::new(
hunk_buffer_range.start.saturating_sub(excerpt_buffer_start),
@ -2941,8 +2956,9 @@ impl MultiBuffer {
if should_expand_hunk {
did_expand_hunks = true;
log::trace!(
"expanding hunk {:?}",
"expanding hunk {:?}, excerpt:{:?}",
hunk_excerpt_start.value..hunk_excerpt_end.value,
excerpt.id
);
if !hunk.diff_base_byte_range.is_empty()
@ -3389,12 +3405,12 @@ impl MultiBufferSnapshot {
self.diff_hunks_in_range(Anchor::min()..Anchor::max())
}
pub fn diff_hunks_in_range<T: ToOffset>(
pub fn diff_hunks_in_range<T: ToPoint>(
&self,
range: Range<T>,
) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
let range = range.start.to_offset(self)..range.end.to_offset(self);
self.lift_buffer_metadata(range.clone(), move |buffer, buffer_range| {
let query_range = range.start.to_point(self)..range.end.to_point(self);
self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| {
let diff = self.diffs.get(&buffer.remote_id())?;
let buffer_start = buffer.anchor_before(buffer_range.start);
let buffer_end = buffer.anchor_after(buffer_range.end);
@ -3409,19 +3425,25 @@ impl MultiBufferSnapshot {
}),
)
})
.map(|(range, hunk, excerpt)| {
.filter_map(move |(range, hunk, excerpt)| {
if range.start != range.end
&& range.end == query_range.start
&& !hunk.row_range.is_empty()
{
return None;
}
let end_row = if range.end.column == 0 {
range.end.row
} else {
range.end.row + 1
};
MultiBufferDiffHunk {
Some(MultiBufferDiffHunk {
row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row),
buffer_id: excerpt.buffer_id,
excerpt_id: excerpt.id,
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
}
})
})
}
@ -3560,8 +3582,8 @@ impl MultiBufferSnapshot {
/// multi-buffer coordinates.
fn lift_buffer_metadata<'a, D, M, I>(
&'a self,
range: Range<usize>,
get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<usize>) -> Option<I>,
query_range: Range<D>,
get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range<D>) -> Option<I>,
) -> impl Iterator<Item = (Range<D>, M, &'a Excerpt)> + 'a
where
I: Iterator<Item = (Range<D>, M)> + 'a,
@ -3569,18 +3591,19 @@ impl MultiBufferSnapshot {
{
let max_position = D::from_text_summary(&self.text_summary());
let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None;
let mut cursor = self.cursor::<DimensionPair<usize, D>>();
let mut cursor = self.cursor::<D>();
// Find the excerpt and buffer offset where the given range ends.
cursor.seek(&DimensionPair {
key: range.end,
value: None,
});
cursor.seek(&query_range.end);
let mut range_end = None;
while let Some(region) = cursor.region() {
if region.is_main_buffer {
let mut buffer_end = region.buffer_range.start.key;
let overshoot = range.end.saturating_sub(region.range.start.key);
let mut buffer_end = region.buffer_range.start;
let overshoot = if query_range.end > region.range.start {
query_range.end - region.range.start
} else {
D::default()
};
buffer_end.add_assign(&overshoot);
range_end = Some((region.excerpt.id, buffer_end));
break;
@ -3588,13 +3611,10 @@ impl MultiBufferSnapshot {
cursor.next();
}
cursor.seek(&DimensionPair {
key: range.start,
value: None,
});
cursor.seek(&query_range.start);
if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) {
if region.range.start.key > 0 {
if region.range.start > D::zero(&()) {
cursor.prev()
}
}
@ -3613,14 +3633,18 @@ impl MultiBufferSnapshot {
// and retrieve the metadata for the resulting range.
else {
let region = cursor.region()?;
let buffer_start = if region.is_main_buffer {
let start_overshoot = range.start.saturating_sub(region.range.start.key);
(region.buffer_range.start.key + start_overshoot)
.min(region.buffer_range.end.key)
let mut buffer_start;
if region.is_main_buffer {
buffer_start = region.buffer_range.start;
if query_range.start > region.range.start {
let overshoot = query_range.start - region.range.start;
buffer_start.add_assign(&overshoot);
}
buffer_start = buffer_start.min(region.buffer_range.end);
} else {
cursor.main_buffer_position()?.key
buffer_start = cursor.main_buffer_position()?;
};
let mut buffer_end = excerpt.range.context.end.to_offset(&excerpt.buffer);
let mut buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
if let Some((end_excerpt_id, end_buffer_offset)) = range_end {
if excerpt.id == end_excerpt_id {
buffer_end = buffer_end.min(end_buffer_offset);
@ -3637,53 +3661,56 @@ impl MultiBufferSnapshot {
};
// Visit each metadata item.
if let Some((range, metadata)) = metadata_iter.and_then(Iterator::next) {
if let Some((metadata_buffer_range, metadata)) = metadata_iter.and_then(Iterator::next)
{
// Find the multibuffer regions that contain the start and end of
// the metadata item's range.
if range.start > D::default() {
if metadata_buffer_range.start > D::default() {
while let Some(region) = cursor.region() {
if !region.is_main_buffer
|| region.buffer.remote_id() == excerpt.buffer_id
&& region.buffer_range.end.value.unwrap() < range.start
if region.is_main_buffer
&& (region.buffer_range.end >= metadata_buffer_range.start
|| cursor.is_at_end_of_excerpt())
{
cursor.next();
} else {
break;
}
cursor.next();
}
}
let start_region = cursor.region()?;
while let Some(region) = cursor.region() {
if !region.is_main_buffer
|| region.buffer.remote_id() == excerpt.buffer_id
&& region.buffer_range.end.value.unwrap() <= range.end
if region.is_main_buffer
&& (region.buffer_range.end > metadata_buffer_range.end
|| cursor.is_at_end_of_excerpt())
{
cursor.next();
} else {
break;
}
cursor.next();
}
let end_region = cursor
.region()
.filter(|region| region.buffer.remote_id() == excerpt.buffer_id);
let end_region = cursor.region();
// Convert the metadata item's range into multibuffer coordinates.
let mut start = start_region.range.start.value.unwrap();
let region_buffer_start = start_region.buffer_range.start.value.unwrap();
if start_region.is_main_buffer && range.start > region_buffer_start {
start.add_assign(&(range.start - region_buffer_start));
}
let mut end = max_position;
if let Some(end_region) = end_region {
end = end_region.range.start.value.unwrap();
debug_assert!(end_region.is_main_buffer);
let region_buffer_start = end_region.buffer_range.start.value.unwrap();
if range.end > region_buffer_start {
end.add_assign(&(range.end - region_buffer_start));
}
let mut start_position = start_region.range.start;
let region_buffer_start = start_region.buffer_range.start;
if start_region.is_main_buffer && metadata_buffer_range.start > region_buffer_start
{
start_position.add_assign(&(metadata_buffer_range.start - region_buffer_start));
start_position = start_position.min(start_region.range.end);
}
return Some((start..end, metadata, excerpt));
let mut end_position = max_position;
if let Some(end_region) = &end_region {
end_position = end_region.range.start;
debug_assert!(end_region.is_main_buffer);
let region_buffer_start = end_region.buffer_range.start;
if metadata_buffer_range.end > region_buffer_start {
end_position.add_assign(&(metadata_buffer_range.end - region_buffer_start));
}
end_position = end_position.min(end_region.range.end);
}
if start_position <= query_range.end && end_position >= query_range.start {
return Some((start_position..end_position, metadata, excerpt));
}
}
// When there are no more metadata items for this excerpt, move to the next excerpt.
else {
@ -4509,7 +4536,16 @@ impl MultiBufferSnapshot {
}
let excerpt_start_position = D::from_text_summary(&cursor.start().text);
if let Some(excerpt) = cursor.item().filter(|excerpt| excerpt.id == excerpt_id) {
if let Some(excerpt) = cursor.item() {
if excerpt.id != excerpt_id {
let position = self.resolve_summary_for_anchor(
&Anchor::min(),
excerpt_start_position,
&mut diff_transforms_cursor,
);
summaries.extend(excerpt_anchors.map(|_| position));
continue;
}
let excerpt_buffer_start =
excerpt.range.context.start.summary::<D>(&excerpt.buffer);
let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
@ -5525,7 +5561,7 @@ impl MultiBufferSnapshot {
buffer_id: BufferId,
group_id: usize,
) -> impl Iterator<Item = DiagnosticEntry<Point>> + '_ {
self.lift_buffer_metadata(0..self.len(), move |buffer, _| {
self.lift_buffer_metadata(Point::zero()..self.max_point(), move |buffer, _| {
if buffer.remote_id() != buffer_id {
return None;
};
@ -5538,15 +5574,19 @@ impl MultiBufferSnapshot {
.map(|(range, diagnostic, _)| DiagnosticEntry { diagnostic, range })
}
pub fn diagnostics_in_range<'a, T, O>(
pub fn diagnostics_in_range<'a, T>(
&'a self,
range: Range<T>,
) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
) -> impl Iterator<Item = DiagnosticEntry<T>> + 'a
where
T: 'a + ToOffset,
O: 'a + text::FromAnchor + Copy + TextDimension + Ord + Sub<O, Output = O> + fmt::Debug,
T: 'a
+ text::ToOffset
+ text::FromAnchor
+ TextDimension
+ Ord
+ Sub<T, Output = T>
+ fmt::Debug,
{
let range = range.start.to_offset(self)..range.end.to_offset(self);
self.lift_buffer_metadata(range, move |buffer, buffer_range| {
Some(
buffer
@ -6036,6 +6076,24 @@ where
self.cached_region.clone()
}
fn is_at_end_of_excerpt(&mut self) -> bool {
if self.diff_transforms.end(&()).1 < self.excerpts.end(&()) {
return false;
} else if self.diff_transforms.end(&()).1 > self.excerpts.end(&())
|| self.diff_transforms.item().is_none()
{
return true;
}
self.diff_transforms.next(&());
let next_transform = self.diff_transforms.item();
self.diff_transforms.prev(&());
next_transform.map_or(true, |next_transform| {
matches!(next_transform, DiffTransform::BufferContent { .. })
})
}
fn main_buffer_position(&self) -> Option<D> {
let excerpt = self.excerpts.item()?;
let buffer = &excerpt.buffer;
@ -6879,6 +6937,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
if self.is_empty && self.point.row == 0 {
self.point += Point::new(1, 0);
return Some(RowInfo {
buffer_id: None,
buffer_row: Some(0),
multibuffer_row: Some(MultiBufferRow(0)),
diff_status: None,
@ -6906,6 +6965,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
.to_point(&last_excerpt.buffer)
.row;
return Some(RowInfo {
buffer_id: Some(last_excerpt.buffer_id),
buffer_row: Some(last_row),
multibuffer_row: Some(multibuffer_row),
diff_status: None,
@ -6919,6 +6979,7 @@ impl<'a> Iterator for MultiBufferRows<'a> {
let overshoot = self.point - region.range.start;
let buffer_point = region.buffer_range.start + overshoot;
let result = Some(RowInfo {
buffer_id: Some(region.buffer.remote_id()),
buffer_row: Some(buffer_point.row),
multibuffer_row: Some(MultiBufferRow(self.point.row)),
diff_status: if region.is_inserted_hunk && self.point < region.range.end {

View file

@ -19,12 +19,14 @@ fn init_logger() {
#[gpui::test]
fn test_empty_singleton(cx: &mut App) {
let buffer = cx.new(|cx| Buffer::local("", cx));
let buffer_id = buffer.read(cx).remote_id();
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let snapshot = multibuffer.read(cx).snapshot(cx);
assert_eq!(snapshot.text(), "");
assert_eq!(
snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
[RowInfo {
buffer_id: Some(buffer_id),
buffer_row: Some(0),
multibuffer_row: Some(MultiBufferRow(0)),
diff_status: None
@ -359,13 +361,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nthree\n";
let text = "one\nthree\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let change_set = cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
change_set
});
cx.run_until_parked();
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.add_change_set(change_set, cx)
@ -382,7 +378,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) {
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
let actual_text = snapshot.text();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default());
let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default(), None);
pretty_assertions::assert_eq!(
actual_diff,
indoc! {
@ -409,13 +405,7 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n";
let text = "one\nfour\nseven\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
let change_set = cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
let snapshot = buffer.read(cx).snapshot();
let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
change_set
});
cx.run_until_parked();
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
(multibuffer.snapshot(cx), multibuffer.subscribe())
@ -508,13 +498,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) {
let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n";
let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n";
let buffer = cx.new(|cx| Buffer::local(text, cx));
let change_set = cx.new(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
let snapshot = buffer.read(cx).text_snapshot();
let _ = change_set.set_base_text(base_text.into(), snapshot, cx);
change_set
});
cx.run_until_parked();
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| {
@ -995,12 +979,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
let buffer = cx.new(|cx| Buffer::local("", cx));
let base_text = "a\nb\nc";
let change_set = cx.new(|cx| {
let snapshot = buffer.read(cx).snapshot();
let mut change_set = BufferChangeSet::new(&buffer, cx);
let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx);
change_set
});
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_all_diff_hunks_expanded(cx);
multibuffer.add_change_set(change_set.clone(), cx);
@ -1040,7 +1019,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, "a\nb\nc")], None, cx);
change_set.update(cx, |change_set, cx| {
let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
change_set.recalculate_diff_sync(buffer.snapshot().text, cx);
});
assert_eq!(buffer.text(), "a\nb\nc")
});
@ -1052,7 +1031,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) {
buffer.update(cx, |buffer, cx| {
buffer.undo(cx);
change_set.update(cx, |change_set, cx| {
let _ = change_set.recalculate_diff(buffer.snapshot().text, cx);
change_set.recalculate_diff_sync(buffer.snapshot().text, cx);
});
assert_eq!(buffer.text(), "")
});
@ -1294,8 +1273,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
);
let buffer = cx.new(|cx| Buffer::local(text, cx));
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
@ -1485,8 +1463,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) {
assert_line_indents(&snapshot);
// Recalculate the diff, changing the first diff hunk.
let _ = change_set.update(cx, |change_set, cx| {
change_set.recalculate_diff(buffer.read(cx).text_snapshot(), cx)
change_set.update(cx, |change_set, cx| {
change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx);
});
cx.run_until_parked();
assert_new_snapshot(
@ -1538,8 +1516,7 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
);
let buffer = cx.new(|cx| Buffer::local(text, cx));
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx));
let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
@ -1840,10 +1817,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx));
let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx));
let change_set_1 =
cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1.to_string(), &buffer_1, cx));
let change_set_2 =
cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2.to_string(), &buffer_2, cx));
let change_set_1 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1, &buffer_1, cx));
let change_set_2 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2, &buffer_2, cx));
cx.run_until_parked();
let multibuffer = cx.new(|cx| {
@ -2028,6 +2003,7 @@ struct ReferenceMultibuffer {
change_sets: HashMap<BufferId, Entity<BufferChangeSet>>,
}
#[derive(Debug)]
struct ReferenceExcerpt {
id: ExcerptId,
buffer: Entity<Buffer>,
@ -2037,6 +2013,7 @@ struct ReferenceExcerpt {
#[derive(Debug)]
struct ReferenceRegion {
buffer_id: Option<BufferId>,
range: Range<usize>,
buffer_start: Option<Point>,
status: Option<DiffHunkStatus>,
@ -2117,37 +2094,26 @@ impl ReferenceMultibuffer {
};
let diff = change_set.read(cx).diff_to_buffer.clone();
let excerpt_range = excerpt.range.to_offset(&buffer);
if excerpt_range.is_empty() {
return;
}
for hunk in diff.hunks_intersecting_range(range, &buffer) {
let hunk_range = hunk.buffer_range.to_offset(&buffer);
let hunk_precedes_excerpt = hunk
.buffer_range
.end
.cmp(&excerpt.range.start, &buffer)
.is_lt();
let hunk_follows_excerpt = hunk
.buffer_range
.start
.cmp(&excerpt.range.end, &buffer)
.is_ge();
if hunk_precedes_excerpt || hunk_follows_excerpt {
if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end {
continue;
}
if let Err(ix) = excerpt
.expanded_diff_hunks
.binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer))
{
log::info!(
"expanding diff hunk {:?}. excerpt: {:?}",
"expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}",
hunk_range,
excerpt_id,
excerpt_range
);
excerpt
.expanded_diff_hunks
.insert(ix, hunk.buffer_range.start);
} else {
log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}");
}
}
}
@ -2170,17 +2136,12 @@ impl ReferenceMultibuffer {
.peekable();
while let Some(hunk) = hunks.next() {
if !hunk.buffer_range.start.is_valid(&buffer) {
continue;
}
// Ignore hunks that are outside the excerpt range.
let mut hunk_range = hunk.buffer_range.to_offset(buffer);
hunk_range.end = hunk_range.end.min(buffer_range.end);
if hunk_range.start > buffer_range.end
|| hunk_range.end < buffer_range.start
|| buffer_range.is_empty()
{
if hunk_range.start > buffer_range.end || hunk_range.start < buffer_range.start {
log::trace!("skipping hunk outside excerpt range");
continue;
}
@ -2188,6 +2149,12 @@ impl ReferenceMultibuffer {
expanded_anchor.to_offset(&buffer).max(buffer_range.start)
== hunk_range.start.max(buffer_range.start)
}) {
log::trace!("skipping a hunk that's not marked as expanded");
continue;
}
if !hunk.buffer_range.start.is_valid(&buffer) {
log::trace!("skipping hunk with deleted start: {:?}", hunk.row_range);
continue;
}
@ -2196,6 +2163,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.extend(buffer.text_for_range(offset..hunk_range.start));
regions.push(ReferenceRegion {
buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: None,
@ -2212,6 +2180,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.push_str(&base_text);
regions.push(ReferenceRegion {
buffer_id: Some(base_buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(
base_buffer.offset_to_point(hunk.diff_base_byte_range.start),
@ -2228,6 +2197,7 @@ impl ReferenceMultibuffer {
let len = text.len();
text.extend(buffer.text_for_range(offset..hunk_range.end));
regions.push(ReferenceRegion {
buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: Some(DiffHunkStatus::Added),
@ -2241,6 +2211,7 @@ impl ReferenceMultibuffer {
text.extend(buffer.text_for_range(offset..buffer_range.end));
text.push('\n');
regions.push(ReferenceRegion {
buffer_id: Some(buffer.remote_id()),
range: len..text.len(),
buffer_start: Some(buffer.offset_to_point(offset)),
status: None,
@ -2250,6 +2221,7 @@ impl ReferenceMultibuffer {
// Remove final trailing newline.
if self.excerpts.is_empty() {
regions.push(ReferenceRegion {
buffer_id: None,
range: 0..1,
buffer_start: Some(Point::new(0, 0)),
status: None,
@ -2273,6 +2245,7 @@ impl ReferenceMultibuffer {
+ text[region.range.start..ix].matches('\n').count() as u32
});
RowInfo {
buffer_id: region.buffer_id,
diff_status: region.status,
buffer_row,
multibuffer_row: Some(MultiBufferRow(
@ -2348,6 +2321,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
buffer.update(cx, |buf, cx| {
let edit_count = rng.gen_range(1..5);
buf.randomly_edit(&mut rng, edit_count, cx);
log::info!("buffer text:\n{}", buf.text());
needs_diff_calculation = true;
});
cx.update(|cx| reference.diffs_updated(cx));
@ -2440,7 +2414,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap()
..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap();
log::info!("expanding diff hunks for excerpt {:?}", excerpt_ix);
log::info!(
"expanding diff hunks in range {:?} (excerpt id {:?}) index {excerpt_ix:?})",
range.to_offset(&snapshot),
excerpt.id
);
reference.expand_diff_hunks(excerpt.id, start..end, cx);
multibuffer.expand_diff_hunks(vec![range], cx);
});
@ -2457,7 +2435,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
"recalculating diff for buffer {:?}",
snapshot.remote_id(),
);
change_set.recalculate_diff(snapshot.text, cx)
change_set.recalculate_diff_sync(snapshot.text, cx);
});
}
reference.diffs_updated(cx);
@ -2471,14 +2449,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
.collect::<String>();
let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx));
let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx));
change_set
.update(cx, |change_set, cx| {
let snapshot = buffer.read(cx).snapshot();
change_set.set_base_text(base_text, snapshot.text, cx)
})
.await
.unwrap();
let change_set =
cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx));
multibuffer.update(cx, |multibuffer, cx| {
reference.add_change_set(change_set.clone(), cx);
@ -2553,12 +2525,28 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
.filter_map(|b| if b.next.is_some() { Some(b.row) } else { None })
.collect::<HashSet<_>>();
let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
let actual_diff = format_diff(&actual_text, &actual_row_infos, &actual_boundary_rows);
let (expected_text, expected_row_infos, expected_boundary_rows) =
cx.update(|cx| reference.expected_content(cx));
let expected_diff =
format_diff(&expected_text, &expected_row_infos, &expected_boundary_rows);
let has_diff = actual_row_infos
.iter()
.any(|info| info.diff_status.is_some())
|| expected_row_infos
.iter()
.any(|info| info.diff_status.is_some());
let actual_diff = format_diff(
&actual_text,
&actual_row_infos,
&actual_boundary_rows,
Some(has_diff),
);
let expected_diff = format_diff(
&expected_text,
&expected_row_infos,
&expected_boundary_rows,
Some(has_diff),
);
log::info!("Multibuffer content:\n{}", actual_diff);
@ -2569,8 +2557,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) {
actual_text.split('\n').count()
);
pretty_assertions::assert_eq!(actual_diff, expected_diff);
pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
pretty_assertions::assert_eq!(actual_text, expected_text);
pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos);
for _ in 0..5 {
let start_row = rng.gen_range(0..=expected_row_infos.len());
@ -2937,8 +2925,10 @@ fn format_diff(
text: &str,
row_infos: &Vec<RowInfo>,
boundary_rows: &HashSet<MultiBufferRow>,
has_diff: Option<bool>,
) -> String {
let has_diff = row_infos.iter().any(|info| info.diff_status.is_some());
let has_diff =
has_diff.unwrap_or_else(|| row_infos.iter().any(|info| info.diff_status.is_some()));
text.split('\n')
.enumerate()
.zip(row_infos)
@ -3002,7 +2992,7 @@ fn assert_new_snapshot(
let line_infos = new_snapshot
.row_infos(MultiBufferRow(0))
.collect::<Vec<_>>();
let actual_diff = format_diff(&actual_text, &line_infos, &Default::default());
let actual_diff = format_diff(&actual_text, &line_infos, &Default::default(), None);
pretty_assertions::assert_eq!(actual_diff, expected_diff);
check_edits(
snapshot,

File diff suppressed because it is too large Load diff

View file

@ -1970,6 +1970,20 @@ impl Project {
})
}
pub fn open_uncommitted_changes(
&mut self,
buffer: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<Entity<BufferChangeSet>>> {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_uncommitted_changes(buffer, cx)
})
}
pub fn open_buffer_by_id(
&mut self,
id: BufferId,

View file

@ -5624,7 +5624,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
fs.set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("src/main.rs"), staged_contents)],
&[("src/main.rs".into(), staged_contents)],
);
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
@ -5669,7 +5669,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
fs.set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("src/main.rs"), staged_contents)],
&[("src/main.rs".into(), staged_contents)],
);
cx.run_until_parked();
@ -5684,6 +5684,108 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
});
}
#[gpui::test]
async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) {
init_test(cx);
let committed_contents = r#"
fn main() {
println!("hello world");
}
"#
.unindent();
let staged_contents = r#"
fn main() {
println!("goodbye world");
}
"#
.unindent();
let file_contents = r#"
// print goodbye
fn main() {
println!("goodbye world");
}
"#
.unindent();
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
"src": {
"main.rs": file_contents,
}
}),
)
.await;
fs.set_index_for_repo(
Path::new("/dir/.git"),
&[("src/main.rs".into(), staged_contents)],
);
fs.set_head_for_repo(
Path::new("/dir/.git"),
&[("src/main.rs".into(), committed_contents)],
);
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/dir/src/main.rs", cx)
})
.await
.unwrap();
let uncommitted_changes = project
.update(cx, |project, cx| {
project.open_uncommitted_changes(buffer.clone(), cx)
})
.await
.unwrap();
cx.run_until_parked();
uncommitted_changes.update(cx, |uncommitted_changes, cx| {
let snapshot = buffer.read(cx).snapshot();
assert_hunks(
uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot,
&uncommitted_changes.base_text.as_ref().unwrap().text(),
&[
(0..1, "", "// print goodbye\n"),
(
2..3,
" println!(\"hello world\");\n",
" println!(\"goodbye world\");\n",
),
],
);
});
let committed_contents = r#"
// print goodbye
fn main() {
}
"#
.unindent();
fs.set_head_for_repo(
Path::new("/dir/.git"),
&[("src/main.rs".into(), committed_contents)],
);
cx.run_until_parked();
uncommitted_changes.update(cx, |uncommitted_changes, cx| {
let snapshot = buffer.read(cx).snapshot();
assert_hunks(
uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot,
&uncommitted_changes.base_text.as_ref().unwrap().text(),
&[(2..3, "", " println!(\"goodbye world\");\n")],
);
});
}
async fn search(
project: &Entity<Project>,
query: SearchQuery,

View file

@ -129,7 +129,7 @@ message Envelope {
GetPrivateUserInfo get_private_user_info = 102;
GetPrivateUserInfoResponse get_private_user_info_response = 103;
UpdateUserPlan update_user_plan = 234;
UpdateDiffBase update_diff_base = 104;
UpdateDiffBases update_diff_bases = 104;
AcceptTermsOfService accept_terms_of_service = 239;
AcceptTermsOfServiceResponse accept_terms_of_service_response = 240;
@ -304,15 +304,18 @@ message Envelope {
SyncExtensionsResponse sync_extensions_response = 286;
InstallExtension install_extension = 287;
GetStagedText get_staged_text = 288;
GetStagedTextResponse get_staged_text_response = 289;
OpenUnstagedChanges open_unstaged_changes = 288;
OpenUnstagedChangesResponse open_unstaged_changes_response = 289;
RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;
Stage stage = 293;
Unstage unstage = 294;
Commit commit = 295;
OpenCommitMessageBuffer open_commit_message_buffer = 296; // current max
OpenCommitMessageBuffer open_commit_message_buffer = 296;
OpenUncommittedChanges open_uncommitted_changes = 297;
OpenUncommittedChangesResponse open_uncommitted_changes_response = 298; // current max
}
reserved 87 to 88;
@ -2035,21 +2038,53 @@ message WorktreeMetadata {
string abs_path = 4;
}
message UpdateDiffBase {
message UpdateDiffBases {
uint64 project_id = 1;
uint64 buffer_id = 2;
enum Mode {
// No collaborator is using the unstaged diff.
HEAD_ONLY = 0;
// No collaborator is using the diff from HEAD.
INDEX_ONLY = 1;
// Both the unstaged and uncommitted diffs are demanded,
// and the contents of the index and HEAD are the same for this path.
INDEX_MATCHES_HEAD = 2;
// Both the unstaged and uncommitted diffs are demanded,
// and the contents of the index and HEAD differ for this path,
// where None means the path doesn't exist in that state of the repo.
INDEX_AND_HEAD = 3;
}
optional string staged_text = 3;
optional string committed_text = 4;
Mode mode = 5;
}
message GetStagedText {
message OpenUnstagedChanges {
uint64 project_id = 1;
uint64 buffer_id = 2;
}
message GetStagedTextResponse {
message OpenUnstagedChangesResponse {
optional string staged_text = 1;
}
message OpenUncommittedChanges {
uint64 project_id = 1;
uint64 buffer_id = 2;
}
message OpenUncommittedChangesResponse {
enum Mode {
INDEX_MATCHES_HEAD = 0;
INDEX_AND_HEAD = 1;
}
optional string staged_text = 1;
optional string committed_text = 2;
Mode mode = 3;
}
message GetNotifications {
optional uint64 before_id = 1;
}

View file

@ -219,8 +219,10 @@ messages!(
(GetImplementationResponse, Background),
(GetLlmToken, Background),
(GetLlmTokenResponse, Background),
(GetStagedText, Foreground),
(GetStagedTextResponse, Foreground),
(OpenUnstagedChanges, Foreground),
(OpenUnstagedChangesResponse, Foreground),
(OpenUncommittedChanges, Foreground),
(OpenUncommittedChangesResponse, Foreground),
(GetUsers, Foreground),
(Hello, Foreground),
(IncomingCall, Foreground),
@ -309,7 +311,7 @@ messages!(
(UpdateUserChannels, Foreground),
(UpdateContacts, Foreground),
(UpdateDiagnosticSummary, Foreground),
(UpdateDiffBase, Foreground),
(UpdateDiffBases, Foreground),
(UpdateFollowers, Foreground),
(UpdateInviteInfo, Foreground),
(UpdateLanguageServer, Foreground),
@ -422,7 +424,8 @@ request_messages!(
(GetProjectSymbols, GetProjectSymbolsResponse),
(GetReferences, GetReferencesResponse),
(GetSignatureHelp, GetSignatureHelpResponse),
(GetStagedText, GetStagedTextResponse),
(OpenUnstagedChanges, OpenUnstagedChangesResponse),
(OpenUncommittedChanges, OpenUncommittedChangesResponse),
(GetSupermavenApiKey, GetSupermavenApiKeyResponse),
(GetTypeDefinition, GetTypeDefinitionResponse),
(LinkedEditingRange, LinkedEditingRangeResponse),
@ -543,7 +546,8 @@ entity_messages!(
GetProjectSymbols,
GetReferences,
GetSignatureHelp,
GetStagedText,
OpenUnstagedChanges,
OpenUncommittedChanges,
GetTypeDefinition,
InlayHints,
JoinProject,
@ -575,7 +579,7 @@ entity_messages!(
UpdateBuffer,
UpdateBufferFile,
UpdateDiagnosticSummary,
UpdateDiffBase,
UpdateDiffBases,
UpdateLanguageServer,
UpdateProject,
UpdateProjectCollaborator,

View file

@ -46,7 +46,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
.await;
fs.set_index_for_repo(
Path::new("/code/project1/.git"),
&[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())],
&[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())],
);
let (project, _headless) = init_test(&fs, cx, server_cx).await;
@ -147,7 +147,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
fs.set_index_for_repo(
Path::new("/code/project1/.git"),
&[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
&[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())],
);
cx.executor().run_until_parked();
change_set.update(cx, |change_set, _| {

View file

@ -450,6 +450,10 @@ impl Rope {
self.clip_point(Point::new(row, u32::MAX), Bias::Left)
.column
}
/// Returns `true` if `self` and `other` share the same underlying chunk
/// tree — i.e. one is a clone of the other. This is pointer identity, not
/// content equality: two ropes with equal text built independently return
/// `false`. O(1); useful as a fast-path check before a full comparison.
pub fn ptr_eq(&self, other: &Self) -> bool {
    self.chunks.ptr_eq(&other.chunks)
}
}
impl<'a> From<&'a str> for Rope {

View file

@ -516,6 +516,10 @@ impl<T: Item> SumTree<T> {
}
}
/// Returns `true` if `self` and `other` point to the same heap-allocated
/// root node (an O(1) `Arc::ptr_eq` pointer comparison). This is identity,
/// not structural equality: independently built trees with equal contents
/// return `false`.
pub fn ptr_eq(&self, other: &Self) -> bool {
    Arc::ptr_eq(&self.0, &other.0)
}
fn push_tree_recursive(
&mut self,
other: SumTree<T>,

View file

@ -895,6 +895,30 @@ impl Worktree {
}
}
/// Loads the committed (HEAD) contents of `path` from the git repository
/// containing it, on a background task.
///
/// Resolves to `Ok(None)` when the path is not inside a repository, cannot
/// be relativized against the repository's work directory, or the repository
/// entry is missing from the snapshot. Remote worktrees are not yet
/// supported and resolve to an error immediately.
pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
    // Only local worktrees can read the on-disk git object store.
    let Worktree::Local(local) = self else {
        return Task::ready(Err(anyhow!(
            "remote worktrees can't yet load committed files"
        )));
    };
    let path: Arc<Path> = Arc::from(path);
    let snapshot = local.snapshot();
    cx.background_executor().spawn(async move {
        // Guard-clause style: bail out with `None` at the first missing link
        // in repo -> repo-relative path -> repository handle.
        let Some(repo) = snapshot.repository_for_path(&path) else {
            return Ok(None);
        };
        let Some(repo_path) = repo.relativize(&path).log_err() else {
            return Ok(None);
        };
        let Some(git_repo) = snapshot.git_repositories.get(&repo.work_directory_id) else {
            return Ok(None);
        };
        Ok(git_repo.repo_ptr.load_committed_text(&repo_path))
    })
}
pub fn load_binary_file(
&self,
path: &Path,