Restructure git diff state management to allow viewing buffers with different diff bases (#21258)

This is a pure refactor of our Git diff state management. Buffers are no
longer associated with a single diff (the unstaged changes). Instead,
there is an explicit project API for retrieving a buffer's unstaged
changes, and the `Editor` view layer is responsible for choosing which
diff to associate with a buffer.
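
As a rough sketch of the new shape (the names come from the diff below; the GPUI
context plumbing and error handling are elided, and exact signatures may differ
slightly), a view now asks the project for a buffer's unstaged changes and
registers the resulting change set with its own diff state:

```rust
// Sketch only: not a self-contained program — these are internal Zed APIs.

// 1. Ask the project for the buffer's unstaged changes. This returns a
//    change-set model tracking the diff between the index and the buffer.
let change_set = project
    .update(cx, |project, cx| {
        project.open_unstaged_changes(buffer.clone(), cx)
    })
    .await?;

// 2. The view layer, not the buffer, decides which diff base to show.
//    The editor owns a `DiffMap` and registers change sets for the
//    buffers it displays.
editor.update(cx, |editor, cx| {
    editor.diff_map.add_change_set(change_set, cx);
});
```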

The reason for this change is that we'll soon want to add multiple "git
diff views" to Zed, one of which will show the *uncommitted* changes for
a buffer. That view will need to coexist with other views of the same
buffer, which may still want to show the unstaged changes.

### Todo

* [x] Get git gutter and git hunks working with the new structure
* [x] Update editor tests to use the new APIs (see the sketch after this list)
* [x] Update buffer tests
* [x] Restructure remoting/collab protocol
* [x] Update assertions about staged text in
`random_project_collaboration_tests`
* [x] Move buffer tests for git diff management to a new spot, using the
new APIs
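
For reference while reading the test changes below, editor tests now wire up
diffs roughly like this (a sketch with the test-harness setup elided; the names
are taken from the diff below and exact signatures may differ slightly):

```rust
// Sketch only: multibuffer tests construct a change set with an explicit
// base text instead of calling buffer.set_diff_base(...) on the buffer.
editor.update(cx, |editor, cx| {
    let change_set = cx.new_model(|cx| {
        BufferChangeSet::new_with_base_text(
            base_text.to_string(),
            buffer.read(cx).text_snapshot(),
            cx,
        )
    });
    editor.diff_map.add_change_set(change_set, cx);
});

// Single-buffer test contexts get a thinner helper:
cx.set_diff_base(&diff_base); // previously cx.set_diff_base(Some(&diff_base))
```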

Release Notes:

- N/A

---------

Co-authored-by: Richard <richard@zed.dev>
Co-authored-by: Cole <cole@zed.dev>
Co-authored-by: Conrad <conrad@zed.dev>
Authored by Max Brunsfeld on 2024-12-04 15:02:33 -08:00; committed by GitHub
parent 31796171de
commit a2115e7242
29 changed files with 1832 additions and 1651 deletions

Cargo.lock (generated)
View file

@ -4995,7 +4995,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"clock",
"collections",
"derive_more",
"git2",
@ -6534,7 +6533,6 @@ dependencies = [
"fs",
"futures 0.3.31",
"fuzzy",
"git",
"globset",
"gpui",
"http_client",

View file

@ -673,6 +673,7 @@ new_ret_no_self = { level = "allow" }
# We have a few `next` functions that differ in lifetimes
# compared to Iterator::next. Yet, clippy complains about those.
should_implement_trait = { level = "allow" }
let_underscore_future = "allow"
[workspace.metadata.cargo-machete]
ignored = ["bindgen", "cbindgen", "prost_build", "serde"]

View file

@ -309,6 +309,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitBranches>)
.add_request_handler(forward_read_only_project_request::<proto::GetStagedText>)
.add_request_handler(forward_mutating_project_request::<proto::UpdateGitBranch>)
.add_request_handler(forward_mutating_project_request::<proto::GetCompletions>)
.add_request_handler(

View file

@ -2561,19 +2561,23 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let change_set_local_a = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_changes(buffer_local_a.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
// Smoke test diffing
buffer_local_a.read_with(cx_a, |buffer, _| {
change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2585,25 +2589,30 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let change_set_remote_a = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_changes(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
// Wait for the remote buffer to catch up to the new diff
executor.run_until_parked();
// Smoke test diffing
buffer_remote_a.read_with(cx_b, |buffer, _| {
change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&[(1..2, "", "two\n")],
);
});
// Update the staged text of the open buffer
client_a.fs().set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("a.txt"), new_diff_base.clone())],
@ -2611,40 +2620,35 @@ async fn test_git_diff_base_change(
// Wait for buffer_local_a to receive it
executor.run_until_parked();
// Smoke test new diffing
buffer_local_a.read_with(cx_a, |buffer, _| {
change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(new_diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
// Smoke test B
buffer_remote_a.read_with(cx_b, |buffer, _| {
change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(new_diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
// Nested git dir
let diff_base = "
one
three
@ -2667,19 +2671,23 @@ async fn test_git_diff_base_change(
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let change_set_local_b = project_local
.update(cx_a, |p, cx| {
p.open_unstaged_changes(buffer_local_b.clone(), cx)
})
.await
.unwrap();
// Wait for it to catch up to the new diff
executor.run_until_parked();
// Smoke test diffing
buffer_local_b.read_with(cx_a, |buffer, _| {
change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2691,25 +2699,29 @@ async fn test_git_diff_base_change(
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let change_set_remote_b = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_changes(buffer_remote_b.clone(), cx)
})
.await
.unwrap();
// Wait for the remote buffer to catch up to the new diff
executor.run_until_parked();
// Smoke test diffing
buffer_remote_b.read_with(cx_b, |buffer, _| {
change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&[(1..2, "", "two\n")],
);
});
// Update the staged text
client_a.fs().set_index_for_repo(
Path::new("/dir/sub/.git"),
&[(Path::new("b.txt"), new_diff_base.clone())],
@ -2717,43 +2729,30 @@ async fn test_git_diff_base_change(
// Wait for buffer_local_b to receive it
executor.run_until_parked();
// Smoke test new diffing
buffer_local_b.read_with(cx_a, |buffer, _| {
change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(new_diff_base.as_str())
);
println!("{:?}", buffer.as_rope().to_string());
println!("{:?}", buffer.diff_base());
println!(
"{:?}",
buffer
.snapshot()
.git_diff_hunks_in_row_range(0..4)
.collect::<Vec<_>>()
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});
// Smoke test B
buffer_remote_b.read_with(cx_b, |buffer, _| {
change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx);
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
change_set.base_text_string(cx).as_deref(),
Some(new_diff_base.as_str())
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer),
buffer,
&diff_base,
&new_diff_base,
&[(2..3, "", "three\n")],
);
});

View file

@ -1336,10 +1336,24 @@ impl RandomizedTest for ProjectCollaborationTest {
(_, None) => panic!("guest's file is None, host's isn't"),
}
let host_diff_base = host_buffer
.read_with(host_cx, |b, _| b.diff_base().map(ToString::to_string));
let guest_diff_base = guest_buffer
.read_with(client_cx, |b, _| b.diff_base().map(ToString::to_string));
let host_diff_base = host_project.read_with(host_cx, |project, cx| {
project
.buffer_store()
.read(cx)
.get_unstaged_changes(host_buffer.read(cx).remote_id())
.unwrap()
.read(cx)
.base_text_string(cx)
});
let guest_diff_base = guest_project.read_with(client_cx, |project, cx| {
project
.buffer_store()
.read(cx)
.get_unstaged_changes(guest_buffer.read(cx).remote_id())
.unwrap()
.read(cx)
.base_text_string(cx)
});
assert_eq!(
guest_diff_base, host_diff_base,
"guest {} diff base does not match host's for path {path:?} in project {project_id}",

View file

@ -585,7 +585,7 @@ impl Deref for TestClient {
}
impl TestClient {
pub fn fs(&self) -> &FakeFs {
pub fn fs(&self) -> Arc<FakeFs> {
self.app_state.fs.as_fake()
}

View file

@ -83,7 +83,7 @@ use gpui::{
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
pub(crate) use hunk_diff::HoveredHunk;
use hunk_diff::{diff_hunk_to_display, ExpandedHunks};
use hunk_diff::{diff_hunk_to_display, DiffMap, DiffMapSnapshot};
use indent_guides::ActiveIndentGuidesState;
use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy};
pub use inline_completion::Direction;
@ -625,7 +625,7 @@ pub struct Editor {
enable_inline_completions: bool,
show_inline_completions_override: Option<bool>,
inlay_hint_cache: InlayHintCache,
expanded_hunks: ExpandedHunks,
diff_map: DiffMap,
next_inlay_id: usize,
_subscriptions: Vec<Subscription>,
pixel_position_of_newest_cursor: Option<gpui::Point<Pixels>>,
@ -692,6 +692,7 @@ pub struct EditorSnapshot {
git_blame_gutter_max_author_length: Option<usize>,
pub display_snapshot: DisplaySnapshot,
pub placeholder_text: Option<Arc<str>>,
diff_map: DiffMapSnapshot,
is_focused: bool,
scroll_anchor: ScrollAnchor,
ongoing_scroll: OngoingScroll,
@ -2002,11 +2003,10 @@ impl Editor {
}
}
let inlay_hint_settings = inlay_hint_settings(
selections.newest_anchor().head(),
&buffer.read(cx).snapshot(cx),
cx,
);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let inlay_hint_settings =
inlay_hint_settings(selections.newest_anchor().head(), &buffer_snapshot, cx);
let focus_handle = cx.focus_handle();
cx.on_focus(&focus_handle, Self::handle_focus).detach();
cx.on_focus_in(&focus_handle, Self::handle_focus_in)
@ -2023,6 +2023,28 @@ impl Editor {
let mut code_action_providers = Vec::new();
if let Some(project) = project.clone() {
let mut tasks = Vec::new();
buffer.update(cx, |multibuffer, cx| {
project.update(cx, |project, cx| {
multibuffer.for_each_buffer(|buffer| {
tasks.push(project.open_unstaged_changes(buffer.clone(), cx))
});
});
});
cx.spawn(|this, mut cx| async move {
let change_sets = futures::future::join_all(tasks).await;
this.update(&mut cx, |this, cx| {
for change_set in change_sets {
if let Some(change_set) = change_set.log_err() {
this.diff_map.add_change_set(change_set, cx);
}
}
})
.ok();
})
.detach();
code_action_providers.push(Arc::new(project) as Arc<_>);
}
@ -2105,7 +2127,7 @@ impl Editor {
inline_completion_provider: None,
active_inline_completion: None,
inlay_hint_cache: InlayHintCache::new(inlay_hint_settings),
expanded_hunks: ExpandedHunks::default(),
diff_map: DiffMap::default(),
gutter_hovered: false,
pixel_position_of_newest_cursor: None,
last_bounds: None,
@ -2365,6 +2387,7 @@ impl Editor {
scroll_anchor: self.scroll_manager.anchor(),
ongoing_scroll: self.scroll_manager.ongoing_scroll(),
placeholder_text: self.placeholder_text.clone(),
diff_map: self.diff_map.snapshot(),
is_focused: self.focus_handle.is_focused(cx),
current_line_highlight: self
.current_line_highlight
@ -6503,12 +6526,12 @@ impl Editor {
pub fn revert_file(&mut self, _: &RevertFile, cx: &mut ViewContext<Self>) {
let mut revert_changes = HashMap::default();
let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
for hunk in hunks_for_rows(
Some(MultiBufferRow(0)..multi_buffer_snapshot.max_row()).into_iter(),
&multi_buffer_snapshot,
let snapshot = self.snapshot(cx);
for hunk in hunks_for_ranges(
Some(Point::zero()..snapshot.buffer_snapshot.max_point()).into_iter(),
&snapshot,
) {
Self::prepare_revert_change(&mut revert_changes, self.buffer(), &hunk, cx);
self.prepare_revert_change(&mut revert_changes, &hunk, cx);
}
if !revert_changes.is_empty() {
self.transact(cx, |editor, cx| {
@ -6525,7 +6548,7 @@ impl Editor {
}
pub fn revert_selected_hunks(&mut self, _: &RevertSelectedHunks, cx: &mut ViewContext<Self>) {
let revert_changes = self.gather_revert_changes(&self.selections.disjoint_anchors(), cx);
let revert_changes = self.gather_revert_changes(&self.selections.all(cx), cx);
if !revert_changes.is_empty() {
self.transact(cx, |editor, cx| {
editor.revert(revert_changes, cx);
@ -6533,6 +6556,18 @@ impl Editor {
}
}
fn revert_hunk(&mut self, hunk: HoveredHunk, cx: &mut ViewContext<Editor>) {
let snapshot = self.buffer.read(cx).read(cx);
if let Some(hunk) = crate::hunk_diff::to_diff_hunk(&hunk, &snapshot) {
drop(snapshot);
let mut revert_changes = HashMap::default();
self.prepare_revert_change(&mut revert_changes, &hunk, cx);
if !revert_changes.is_empty() {
self.revert(revert_changes, cx)
}
}
}
pub fn open_active_item_in_terminal(&mut self, _: &OpenInTerminal, cx: &mut ViewContext<Self>) {
if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| {
let project_path = buffer.read(cx).project_path(cx)?;
@ -6552,26 +6587,33 @@ impl Editor {
fn gather_revert_changes(
&mut self,
selections: &[Selection<Anchor>],
selections: &[Selection<Point>],
cx: &mut ViewContext<'_, Editor>,
) -> HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>> {
let mut revert_changes = HashMap::default();
let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
for hunk in hunks_for_selections(&multi_buffer_snapshot, selections) {
Self::prepare_revert_change(&mut revert_changes, self.buffer(), &hunk, cx);
let snapshot = self.snapshot(cx);
for hunk in hunks_for_selections(&snapshot, selections) {
self.prepare_revert_change(&mut revert_changes, &hunk, cx);
}
revert_changes
}
pub fn prepare_revert_change(
&mut self,
revert_changes: &mut HashMap<BufferId, Vec<(Range<text::Anchor>, Rope)>>,
multi_buffer: &Model<MultiBuffer>,
hunk: &MultiBufferDiffHunk,
cx: &AppContext,
) -> Option<()> {
let buffer = multi_buffer.read(cx).buffer(hunk.buffer_id)?;
let buffer = self.buffer.read(cx).buffer(hunk.buffer_id)?;
let buffer = buffer.read(cx);
let original_text = buffer.diff_base()?.slice(hunk.diff_base_byte_range.clone());
let change_set = &self.diff_map.diff_bases.get(&hunk.buffer_id)?.change_set;
let original_text = change_set
.read(cx)
.base_text
.as_ref()?
.read(cx)
.as_rope()
.slice(hunk.diff_base_byte_range.clone());
let buffer_snapshot = buffer.snapshot();
let buffer_revert_changes = revert_changes.entry(buffer.remote_id()).or_default();
if let Err(i) = buffer_revert_changes.binary_search_by(|probe| {
@ -9752,80 +9794,63 @@ impl Editor {
}
fn go_to_next_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
let snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
let snapshot = self.snapshot(cx);
let selection = self.selections.newest::<Point>(cx);
self.go_to_hunk_after_position(&snapshot, selection.head(), cx);
}
fn go_to_hunk_after_position(
&mut self,
snapshot: &DisplaySnapshot,
snapshot: &EditorSnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
for (ix, position) in [position, Point::zero()].into_iter().enumerate() {
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
false,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow(position.row + 1)..MultiBufferRow::MAX),
ix > 0,
snapshot.diff_map.diff_hunks_in_range(
position + Point::new(1, 0)..snapshot.buffer_snapshot.max_point(),
&snapshot.buffer_snapshot,
),
cx,
) {
return Some(hunk);
}
let wrapped_point = Point::zero();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX,
),
cx,
)
}
None
}
fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext<Self>) {
let snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
let snapshot = self.snapshot(cx);
let selection = self.selections.newest::<Point>(cx);
self.go_to_hunk_before_position(&snapshot, selection.head(), cx);
}
fn go_to_hunk_before_position(
&mut self,
snapshot: &DisplaySnapshot,
snapshot: &EditorSnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
for (ix, position) in [position, snapshot.buffer_snapshot.max_point()]
.into_iter()
.enumerate()
{
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
false,
ix > 0,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(position.row)),
.diff_map
.diff_hunks_in_range_rev(Point::zero()..position, &snapshot.buffer_snapshot),
cx,
) {
return Some(hunk);
}
let wrapped_point = snapshot.buffer_snapshot.max_point();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(wrapped_point.row)),
cx,
)
}
None
}
fn go_to_next_hunk_in_direction(
@ -11270,13 +11295,13 @@ impl Editor {
return;
}
let mut buffers_affected = HashMap::default();
let mut buffers_affected = HashSet::default();
let multi_buffer = self.buffer().read(cx);
for crease in &creases {
if let Some((_, buffer, _)) =
multi_buffer.excerpt_containing(crease.range().start.clone(), cx)
{
buffers_affected.insert(buffer.read(cx).remote_id(), buffer);
buffers_affected.insert(buffer.read(cx).remote_id());
};
}
@ -11286,8 +11311,8 @@ impl Editor {
self.request_autoscroll(Autoscroll::fit(), cx);
}
for buffer in buffers_affected.into_values() {
self.sync_expanded_diff_hunks(buffer, cx);
for buffer_id in buffers_affected {
Self::sync_expanded_diff_hunks(&mut self.diff_map, buffer_id, cx);
}
cx.notify();
@ -11344,11 +11369,11 @@ impl Editor {
return;
}
let mut buffers_affected = HashMap::default();
let mut buffers_affected = HashSet::default();
let multi_buffer = self.buffer().read(cx);
for range in ranges {
if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) {
buffers_affected.insert(buffer.read(cx).remote_id(), buffer);
buffers_affected.insert(buffer.read(cx).remote_id());
};
}
@ -11358,8 +11383,8 @@ impl Editor {
self.request_autoscroll(Autoscroll::fit(), cx);
}
for buffer in buffers_affected.into_values() {
self.sync_expanded_diff_hunks(buffer, cx);
for buffer_id in buffers_affected {
Self::sync_expanded_diff_hunks(&mut self.diff_map, buffer_id, cx);
}
cx.notify();
@ -12653,15 +12678,11 @@ impl Editor {
multi_buffer::Event::FileHandleChanged | multi_buffer::Event::Reloaded => {
cx.emit(EditorEvent::TitleChanged)
}
multi_buffer::Event::DiffBaseChanged => {
self.scrollbar_marker_state.dirty = true;
cx.emit(EditorEvent::DiffBaseChanged);
cx.notify();
}
multi_buffer::Event::DiffUpdated { buffer } => {
self.sync_expanded_diff_hunks(buffer.clone(), cx);
cx.notify();
}
// multi_buffer::Event::DiffBaseChanged => {
// self.scrollbar_marker_state.dirty = true;
// cx.emit(EditorEvent::DiffBaseChanged);
// cx.notify();
// }
multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed),
multi_buffer::Event::DiagnosticsUpdated => {
self.refresh_active_diagnostics(cx);
@ -12829,7 +12850,7 @@ impl Editor {
// When editing branch buffers, jump to the corresponding location
// in their base buffer.
let buffer = buffer_handle.read(cx);
if let Some(base_buffer) = buffer.diff_base_buffer() {
if let Some(base_buffer) = buffer.base_buffer() {
range = buffer.range_to_version(range, &base_buffer.read(cx).version());
buffer_handle = base_buffer;
}
@ -13606,35 +13627,29 @@ fn test_wrap_with_prefix() {
}
fn hunks_for_selections(
multi_buffer_snapshot: &MultiBufferSnapshot,
selections: &[Selection<Anchor>],
snapshot: &EditorSnapshot,
selections: &[Selection<Point>],
) -> Vec<MultiBufferDiffHunk> {
let buffer_rows_for_selections = selections.iter().map(|selection| {
let head = selection.head();
let tail = selection.tail();
let start = MultiBufferRow(tail.to_point(multi_buffer_snapshot).row);
let end = MultiBufferRow(head.to_point(multi_buffer_snapshot).row);
if start > end {
end..start
} else {
start..end
}
});
hunks_for_rows(buffer_rows_for_selections, multi_buffer_snapshot)
hunks_for_ranges(
selections.iter().map(|selection| selection.range()),
snapshot,
)
}
pub fn hunks_for_rows(
rows: impl Iterator<Item = Range<MultiBufferRow>>,
multi_buffer_snapshot: &MultiBufferSnapshot,
pub fn hunks_for_ranges(
ranges: impl Iterator<Item = Range<Point>>,
snapshot: &EditorSnapshot,
) -> Vec<MultiBufferDiffHunk> {
let mut hunks = Vec::new();
let mut processed_buffer_rows: HashMap<BufferId, HashSet<Range<text::Anchor>>> =
HashMap::default();
for selected_multi_buffer_rows in rows {
for query_range in ranges {
let query_rows =
selected_multi_buffer_rows.start..selected_multi_buffer_rows.end.next_row();
for hunk in multi_buffer_snapshot.git_diff_hunks_in_range(query_rows.clone()) {
MultiBufferRow(query_range.start.row)..MultiBufferRow(query_range.end.row + 1);
for hunk in snapshot.diff_map.diff_hunks_in_range(
Point::new(query_rows.start.0, 0)..Point::new(query_rows.end.0, 0),
&snapshot.buffer_snapshot,
) {
// A deleted hunk has an empty row range: no caret can be placed there, so Zed
// allows reverting it when the caret is just above or just below the deleted hunk.
let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed;
@ -13643,10 +13658,7 @@ pub fn hunks_for_rows(
|| hunk.row_range.start == query_rows.end
|| hunk.row_range.end == query_rows.start
} else {
// `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected)
// `hunk.row_range` is exclusive (e.g. [2..3] means 2nd row is selected)
hunk.row_range.overlaps(&selected_multi_buffer_rows)
|| selected_multi_buffer_rows.end == hunk.row_range.start
hunk.row_range.overlaps(&query_rows)
};
if related_to_selection {
if !processed_buffer_rows

View file

@ -25,7 +25,7 @@ use language::{
use language_settings::{Formatter, FormatterList, IndentGuideSettings};
use multi_buffer::MultiBufferIndentGuide;
use parking_lot::Mutex;
use project::FakeFs;
use project::{buffer_store::BufferChangeSet, FakeFs};
use project::{
lsp_command::SIGNATURE_HELP_HIGHLIGHT_CURRENT,
project_settings::{LspSettings, ProjectSettings},
@ -3313,7 +3313,7 @@ async fn test_join_lines_with_git_diff_base(
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
// Join lines
@ -3353,16 +3353,15 @@ async fn test_custom_newlines_cause_no_false_positive_diffs(
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_state("Line 0\r\nLine 1\rˇ\nLine 2\r\nLine 3");
cx.set_diff_base(Some("Line 0\r\nLine 1\r\nLine 2\r\nLine 3"));
cx.set_diff_base("Line 0\r\nLine 1\r\nLine 2\r\nLine 3");
executor.run_until_parked();
cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
assert_eq!(
editor
.buffer()
.read(cx)
.snapshot(cx)
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
snapshot
.diff_map
.diff_hunks_in_range(0..snapshot.buffer_snapshot.len(), &snapshot.buffer_snapshot)
.collect::<Vec<_>>(),
Vec::new(),
"Should not have any diffs for files with custom newlines"
@ -10088,7 +10087,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext)
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
@ -11125,7 +11124,8 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
async fn test_addition_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
let base_text = indoc! {r#"struct Row;
let base_text = indoc! {r#"
struct Row;
struct Row1;
struct Row2;
@ -11266,7 +11266,8 @@ struct Row10;"#};
async fn test_modification_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
let base_text = indoc! {r#"struct Row;
let base_text = indoc! {r#"
struct Row;
struct Row1;
struct Row2;
@ -11494,54 +11495,18 @@ struct Row10;"#};
async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let cols = 4;
let rows = 10;
let sample_text_1 = sample_text(rows, cols, 'a');
assert_eq!(
sample_text_1,
"aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj"
);
let sample_text_2 = sample_text(rows, cols, 'l');
assert_eq!(
sample_text_2,
"llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu"
);
let sample_text_3 = sample_text(rows, cols, 'v');
assert_eq!(
sample_text_3,
"vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"
);
let base_text_1 = "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj";
let base_text_2 = "llll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu";
let base_text_3 =
"vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}";
fn diff_every_buffer_row(
buffer: &Model<Buffer>,
sample_text: String,
cols: usize,
cx: &mut gpui::TestAppContext,
) {
// revert first character in each row, creating one large diff hunk per buffer
let is_first_char = |offset: usize| offset % cols == 0;
buffer.update(cx, |buffer, cx| {
buffer.set_text(
sample_text
.chars()
.enumerate()
.map(|(offset, c)| if is_first_char(offset) { 'X' } else { c })
.collect::<String>(),
cx,
);
buffer.set_diff_base(Some(sample_text), cx);
});
cx.executor().run_until_parked();
}
let text_1 = edit_first_char_of_every_line(base_text_1);
let text_2 = edit_first_char_of_every_line(base_text_2);
let text_3 = edit_first_char_of_every_line(base_text_3);
let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text_1.clone(), cx));
diff_every_buffer_row(&buffer_1, sample_text_1.clone(), cols, cx);
let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text_2.clone(), cx));
diff_every_buffer_row(&buffer_2, sample_text_2.clone(), cols, cx);
let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx));
diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx);
let buffer_1 = cx.new_model(|cx| Buffer::local(text_1.clone(), cx));
let buffer_2 = cx.new_model(|cx| Buffer::local(text_2.clone(), cx));
let buffer_3 = cx.new_model(|cx| Buffer::local(text_3.clone(), cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(ReadWrite);
@ -11604,57 +11569,85 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
let (editor, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx));
editor.update(cx, |editor, cx| {
assert_eq!(editor.text(cx), "XaaaXbbbX\nccXc\ndXdd\n\nhXhh\nXiiiXjjjX\n\nXlllXmmmX\nnnXn\noXoo\n\nsXss\nXtttXuuuX\n\nXvvvXwwwX\nxxXx\nyXyy\n\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X\n");
for (buffer, diff_base) in [
(buffer_1.clone(), base_text_1),
(buffer_2.clone(), base_text_2),
(buffer_3.clone(), base_text_3),
] {
let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text(
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
});
editor.diff_map.add_change_set(change_set, cx)
}
});
cx.executor().run_until_parked();
editor.update(cx, |editor, cx| {
assert_eq!(editor.text(cx), "Xaaa\nXbbb\nXccc\n\nXfff\nXggg\n\nXjjj\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}");
editor.select_all(&SelectAll, cx);
editor.revert_selected_hunks(&RevertSelectedHunks, cx);
});
cx.executor().run_until_parked();
// When all ranges are selected, all buffer hunks are reverted.
editor.update(cx, |editor, cx| {
assert_eq!(editor.text(cx), "aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nllll\nmmmm\nnnnn\noooo\npppp\nqqqq\nrrrr\nssss\ntttt\nuuuu\n\n\nvvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}\n\n");
});
buffer_1.update(cx, |buffer, _| {
assert_eq!(buffer.text(), sample_text_1);
assert_eq!(buffer.text(), base_text_1);
});
buffer_2.update(cx, |buffer, _| {
assert_eq!(buffer.text(), sample_text_2);
assert_eq!(buffer.text(), base_text_2);
});
buffer_3.update(cx, |buffer, _| {
assert_eq!(buffer.text(), sample_text_3);
assert_eq!(buffer.text(), base_text_3);
});
editor.update(cx, |editor, cx| {
editor.undo(&Default::default(), cx);
});
diff_every_buffer_row(&buffer_1, sample_text_1.clone(), cols, cx);
diff_every_buffer_row(&buffer_2, sample_text_2.clone(), cols, cx);
diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx);
editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| {
s.select_ranges(Some(Point::new(0, 0)..Point::new(6, 0)));
});
editor.revert_selected_hunks(&RevertSelectedHunks, cx);
});
// Now, when all ranges selected belong to buffer_1, the revert should succeed,
// but not affect buffer_2 and its related excerpts.
editor.update(cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nXlllXmmmX\nnnXn\noXoo\nXpppXqqqX\nrrXr\nsXss\nXtttXuuuX\n\n\nXvvvXwwwX\nxxXx\nyXyy\nXzzzX{{{X\n||X|\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X\n\n"
"aaaa\nbbbb\ncccc\ndddd\neeee\nffff\ngggg\nhhhh\niiii\njjjj\n\n\nXlll\nXmmm\nXnnn\n\nXqqq\nXrrr\n\nXuuu\nXvvv\nXwww\nXxxx\n\nX{{{\nX|||\n\nX\u{7f}\u{7f}\u{7f}"
);
});
buffer_1.update(cx, |buffer, _| {
assert_eq!(buffer.text(), sample_text_1);
assert_eq!(buffer.text(), base_text_1);
});
buffer_2.update(cx, |buffer, _| {
assert_eq!(
buffer.text(),
"XlllXmmmX\nnnXn\noXoo\nXpppXqqqX\nrrXr\nsXss\nXtttXuuuX"
"Xlll\nXmmm\nXnnn\nXooo\nXppp\nXqqq\nXrrr\nXsss\nXttt\nXuuu"
);
});
buffer_3.update(cx, |buffer, _| {
assert_eq!(
buffer.text(),
"XvvvXwwwX\nxxXx\nyXyy\nXzzzX{{{X\n||X|\n}X}}\nX~~~X\u{7f}\u{7f}\u{7f}X"
"Xvvv\nXwww\nXxxx\nXyyy\nXzzz\nX{{{\nX|||\nX}}}\nX~~~\nX\u{7f}\u{7f}\u{7f}"
);
});
fn edit_first_char_of_every_line(text: &str) -> String {
text.split('\n')
.map(|line| format!("X{}", &line[1..]))
.collect::<Vec<_>>()
.join("\n")
}
}
#[gpui::test]
@ -12049,7 +12042,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
@ -12057,14 +12050,14 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
editor.toggle_hunk_diff(&ToggleHunkDiff, cx);
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::modified;
fn main() {
- println!("hello");
+ println!("hello there");
+ ˇ println!("hello there");
println!("around the");
println!("world");
@ -12080,28 +12073,13 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
}
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
use some::modified;
ˇ
fn main() {
println!("hello there");
println!("around the");
println!("world");
}
"#
.unindent(),
);
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
- use some::mod;
+ use some::modified;
- const A: u32 = 42;
ˇ
fn main() {
- println!("hello");
+ println!("hello there");
@ -12117,11 +12095,11 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
editor.cancel(&Cancel, cx);
});
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::modified;
ˇ
fn main() {
println!("hello there");
@ -12176,14 +12154,14 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx);
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
- use some::mod1;
use some::mod2;
@ -12192,7 +12170,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
- const B: u32 = 42;
const C: u32 = 42;
fn main() {
fn main(ˇ) {
- println!("hello");
+ //println!("hello");
@ -12204,16 +12182,16 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
.unindent(),
);
cx.set_diff_base(Some("new diff base!"));
cx.set_diff_base("new diff base!");
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod2;
const A: u32 = 42;
const C: u32 = 42;
fn main() {
fn main(ˇ) {
//println!("hello");
println!("world");
@ -12228,7 +12206,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx);
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
- new diff base!
+ use some::mod2;
@ -12236,7 +12214,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
+ const A: u32 = 42;
+ const C: u32 = 42;
+
+ fn main() {
+ fn main(ˇ) {
+ //println!("hello");
+
+ println!("world");
@ -12304,7 +12282,7 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
@ -12312,10 +12290,10 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
- use some::mod1;
use some::mod2;
«use some::mod2;
const A: u32 = 42;
- const B: u32 = 42;
@ -12327,7 +12305,7 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
println!("world");
+ //
+ //
+ //ˇ»
}
fn another() {
@ -12347,9 +12325,9 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
cx.executor().run_until_parked();
// Hunks are not shown if their position is within a fold
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod2;
«use some::mod2;
const A: u32 = 42;
const C: u32 = 42;
@ -12359,7 +12337,7 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
println!("world");
//
//
//ˇ»
}
fn another() {
@ -12381,10 +12359,10 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
cx.executor().run_until_parked();
// The deletions reappear when unfolding.
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
- use some::mod1;
use some::mod2;
«use some::mod2;
const A: u32 = 42;
- const B: u32 = 42;
@ -12407,7 +12385,7 @@ async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui:
- fn another2() {
println!("another2");
}
"#
ˇ»"#
.unindent(),
);
}
@ -12423,21 +12401,9 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
let file_3_old = "111\n222\n333\n444\n555\n777\n888\n999\n000\n!!!";
let file_3_new = "111\n222\n333\n444\n555\n666\n777\n888\n999\n000\n!!!";
let buffer_1 = cx.new_model(|cx| {
let mut buffer = Buffer::local(file_1_new.to_string(), cx);
buffer.set_diff_base(Some(file_1_old.into()), cx);
buffer
});
let buffer_2 = cx.new_model(|cx| {
let mut buffer = Buffer::local(file_2_new.to_string(), cx);
buffer.set_diff_base(Some(file_2_old.into()), cx);
buffer
});
let buffer_3 = cx.new_model(|cx| {
let mut buffer = Buffer::local(file_3_new.to_string(), cx);
buffer.set_diff_base(Some(file_3_old.into()), cx);
buffer
});
let buffer_1 = cx.new_model(|cx| Buffer::local(file_1_new.to_string(), cx));
let buffer_2 = cx.new_model(|cx| Buffer::local(file_2_new.to_string(), cx));
let buffer_3 = cx.new_model(|cx| Buffer::local(file_3_new.to_string(), cx));
let multi_buffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(ReadWrite);
@ -12499,6 +12465,25 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
});
let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx));
editor
.update(cx, |editor, cx| {
for (buffer, diff_base) in [
(buffer_1.clone(), file_1_old),
(buffer_2.clone(), file_2_old),
(buffer_3.clone(), file_3_old),
] {
let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text(
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
});
editor.diff_map.add_change_set(change_set, cx)
}
})
.unwrap();
let mut cx = EditorTestContext::for_editor(editor, cx).await;
cx.run_until_parked();
@ -12538,9 +12523,9 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
});
cx.executor().run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
"
aaa
«aaa
- bbb
ccc
ddd
@ -12566,7 +12551,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
777
000
!!!"
!!!ˇ»"
.unindent(),
);
}
@ -12578,12 +12563,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n";
let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n";
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::local(text.to_string(), cx);
buffer.set_diff_base(Some(base.into()), cx);
buffer
});
let buffer = cx.new_model(|cx| Buffer::local(text.to_string(), cx));
let multi_buffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(ReadWrite);
multibuffer.push_excerpts(
@ -12604,15 +12584,24 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
});
let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx));
editor
.update(cx, |editor, cx| {
let buffer = buffer.read(cx).text_snapshot();
let change_set = cx
.new_model(|cx| BufferChangeSet::new_with_base_text(base.to_string(), buffer, cx));
editor.diff_map.add_change_set(change_set, cx)
})
.unwrap();
let mut cx = EditorTestContext::for_editor(editor, cx).await;
cx.run_until_parked();
cx.update_editor(|editor, cx| editor.expand_all_hunk_diffs(&Default::default(), cx));
cx.executor().run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
"
aaa
ˇaaa
- bbb
+ BBB
@ -12667,7 +12656,7 @@ async fn test_edits_around_expanded_insertion_hunks(
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
@ -12675,7 +12664,7 @@ async fn test_edits_around_expanded_insertion_hunks(
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -12683,7 +12672,7 @@ async fn test_edits_around_expanded_insertion_hunks(
const A: u32 = 42;
+ const B: u32 = 42;
+ const C: u32 = 42;
+
+ ˇ
fn main() {
println!("hello");
@ -12697,7 +12686,7 @@ async fn test_edits_around_expanded_insertion_hunks(
cx.update_editor(|editor, cx| editor.handle_input("const D: u32 = 42;\n", cx));
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -12706,7 +12695,7 @@ async fn test_edits_around_expanded_insertion_hunks(
+ const B: u32 = 42;
+ const C: u32 = 42;
+ const D: u32 = 42;
+
+ ˇ
fn main() {
println!("hello");
@ -12720,7 +12709,7 @@ async fn test_edits_around_expanded_insertion_hunks(
cx.update_editor(|editor, cx| editor.handle_input("const E: u32 = 42;\n", cx));
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -12730,7 +12719,7 @@ async fn test_edits_around_expanded_insertion_hunks(
+ const C: u32 = 42;
+ const D: u32 = 42;
+ const E: u32 = 42;
+
+ ˇ
fn main() {
println!("hello");
@ -12746,7 +12735,7 @@ async fn test_edits_around_expanded_insertion_hunks(
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -12756,32 +12745,6 @@ async fn test_edits_around_expanded_insertion_hunks(
+ const C: u32 = 42;
+ const D: u32 = 42;
+ const E: u32 = 42;
fn main() {
println!("hello");
println!("world");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
use some::mod1;
use some::mod2;
const A: u32 = 42;
const B: u32 = 42;
ˇ
fn main() {
println!("hello");
@ -12792,14 +12755,23 @@ async fn test_edits_around_expanded_insertion_hunks(
.unindent(),
);
cx.assert_diff_hunks(
cx.update_editor(|editor, cx| {
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
editor.move_up(&MoveUp, cx);
editor.delete_line(&DeleteLine, cx);
});
executor.run_until_parked();
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
const A: u32 = 42;
+ const B: u32 = 42;
ˇ
fn main() {
println!("hello");
@ -12814,13 +12786,13 @@ async fn test_edits_around_expanded_insertion_hunks(
editor.delete_line(&DeleteLine, cx);
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
- use some::mod2;
-
- const A: u32 = 42;
ˇ
fn main() {
println!("hello");
@ -12875,7 +12847,7 @@ async fn test_edits_around_expanded_deletion_hunks(
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
@ -12883,13 +12855,13 @@ async fn test_edits_around_expanded_deletion_hunks(
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
- const A: u32 = 42;
const B: u32 = 42;
ˇconst B: u32 = 42;
const C: u32 = 42;
@ -12906,32 +12878,16 @@ async fn test_edits_around_expanded_deletion_hunks(
editor.delete_line(&DeleteLine, cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
- const A: u32 = 42;
- const B: u32 = 42;
ˇconst C: u32 = 42;
fn main() {
println!("hello");
println!("world");
}
"#
.unindent(),
);
cx.assert_diff_hunks(
r#"
use some::mod1;
use some::mod2;
- const A: u32 = 42;
- const B: u32 = 42;
const C: u32 = 42;
fn main() {
println!("hello");
@ -12945,22 +12901,7 @@ async fn test_edits_around_expanded_deletion_hunks(
editor.delete_line(&DeleteLine, cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
use some::mod1;
use some::mod2;
ˇ
fn main() {
println!("hello");
println!("world");
}
"#
.unindent(),
);
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -12968,7 +12909,7 @@ async fn test_edits_around_expanded_deletion_hunks(
- const A: u32 = 42;
- const B: u32 = 42;
- const C: u32 = 42;
ˇ
fn main() {
println!("hello");
@ -12983,22 +12924,7 @@ async fn test_edits_around_expanded_deletion_hunks(
editor.handle_input("replacement", cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
use some::mod1;
use some::mod2;
replacementˇ
fn main() {
println!("hello");
println!("world");
}
"#
.unindent(),
);
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -13007,7 +12933,7 @@ async fn test_edits_around_expanded_deletion_hunks(
- const B: u32 = 42;
- const C: u32 = 42;
-
+ replacement
+ replacementˇ
fn main() {
println!("hello");
@ -13064,14 +12990,14 @@ async fn test_edit_after_expanded_modification_hunk(
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx);
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -13079,7 +13005,7 @@ async fn test_edit_after_expanded_modification_hunk(
const A: u32 = 42;
const B: u32 = 42;
- const C: u32 = 42;
+ const C: u32 = 43
+ const C: u32 = 43ˇ
const D: u32 = 42;
@ -13096,7 +13022,7 @@ async fn test_edit_after_expanded_modification_hunk(
});
executor.run_until_parked();
cx.assert_diff_hunks(
cx.assert_state_with_diff(
r#"
use some::mod1;
use some::mod2;
@ -13106,7 +13032,7 @@ async fn test_edit_after_expanded_modification_hunk(
- const C: u32 = 42;
+ const C: u32 = 43
+ new_line
+
+ ˇ
const D: u32 = 42;
@ -14185,22 +14111,14 @@ fn assert_hunk_revert(
cx: &mut EditorLspTestContext,
) {
cx.set_state(not_reverted_text_with_selections);
cx.update_editor(|editor, cx| {
editor
.buffer()
.read(cx)
.as_singleton()
.unwrap()
.update(cx, |buffer, cx| {
buffer.set_diff_base(Some(base_text.into()), cx);
});
});
cx.set_diff_base(base_text);
cx.executor().run_until_parked();
let reverted_hunk_statuses = cx.update_editor(|editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let snapshot = editor.snapshot(cx);
let reverted_hunk_statuses = snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
.diff_map
.diff_hunks_in_range(0..snapshot.buffer_snapshot.len(), &snapshot.buffer_snapshot)
.map(|hunk| hunk_status(&hunk))
.collect::<Vec<_>>();

View file

@ -1169,7 +1169,7 @@ impl EditorElement {
let editor = self.editor.read(cx);
let is_singleton = editor.is_singleton(cx);
// Git
(is_singleton && scrollbar_settings.git_diff && snapshot.buffer_snapshot.has_git_diffs())
(is_singleton && scrollbar_settings.git_diff && !snapshot.diff_map.is_empty())
||
// Buffer Search Results
(is_singleton && scrollbar_settings.search_results && editor.has_background_highlights::<BufferSearchHighlights>())
@ -1320,17 +1320,8 @@ impl EditorElement {
cx: &mut WindowContext,
) -> Vec<(DisplayDiffHunk, Option<Hitbox>)> {
let buffer_snapshot = &snapshot.buffer_snapshot;
let buffer_start_row = MultiBufferRow(
DisplayPoint::new(display_rows.start, 0)
.to_point(snapshot)
.row,
);
let buffer_end_row = MultiBufferRow(
DisplayPoint::new(display_rows.end, 0)
.to_point(snapshot)
.row,
);
let buffer_start = DisplayPoint::new(display_rows.start, 0).to_point(snapshot);
let buffer_end = DisplayPoint::new(display_rows.end, 0).to_point(snapshot);
let git_gutter_setting = ProjectSettings::get_global(cx)
.git
@ -1338,7 +1329,7 @@ impl EditorElement {
.unwrap_or_default();
self.editor.update(cx, |editor, cx| {
let expanded_hunks = &editor.expanded_hunks.hunks;
let expanded_hunks = &editor.diff_map.hunks;
let expanded_hunks_start_ix = expanded_hunks
.binary_search_by(|hunk| {
hunk.hunk_range
@ -1349,8 +1340,10 @@ impl EditorElement {
.unwrap_err();
let mut expanded_hunks = expanded_hunks[expanded_hunks_start_ix..].iter().peekable();
let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
let mut display_hunks: Vec<(DisplayDiffHunk, Option<Hitbox>)> = editor
.diff_map
.snapshot
.diff_hunks_in_range(buffer_start..buffer_end, &buffer_snapshot)
.filter_map(|hunk| {
let display_hunk = diff_hunk_to_display(&hunk, snapshot);
@ -1393,25 +1386,23 @@ impl EditorElement {
Some(display_hunk)
})
.dedup()
.map(|hunk| match git_gutter_setting {
GitGutterSetting::TrackedFiles => {
let hitbox = match hunk {
DisplayDiffHunk::Unfolded { .. } => {
.map(|hunk| (hunk, None))
.collect();
if let GitGutterSetting::TrackedFiles = git_gutter_setting {
for (hunk, hitbox) in &mut display_hunks {
if let DisplayDiffHunk::Unfolded { .. } = hunk {
let hunk_bounds = Self::diff_hunk_bounds(
snapshot,
line_height,
gutter_hitbox.bounds,
&hunk,
);
Some(cx.insert_hitbox(hunk_bounds, true))
}
DisplayDiffHunk::Folded { .. } => None,
*hitbox = Some(cx.insert_hitbox(hunk_bounds, true));
};
(hunk, hitbox)
}
GitGutterSetting::Hide => (hunk, None),
})
.collect();
}
display_hunks
})
}
@ -3755,10 +3746,8 @@ impl EditorElement {
let mut marker_quads = Vec::new();
if scrollbar_settings.git_diff {
let marker_row_ranges = snapshot
.buffer_snapshot
.git_diff_hunks_in_range(
MultiBufferRow::MIN..MultiBufferRow::MAX,
)
.diff_map
.diff_hunks(&snapshot.buffer_snapshot)
.map(|hunk| {
let start_display_row =
MultiBufferPoint::new(hunk.row_range.start.0, 0)
@ -5440,7 +5429,7 @@ impl Element for EditorElement {
let expanded_add_hunks_by_rows = self.editor.update(cx, |editor, _| {
editor
.expanded_hunks
.diff_map
.hunks(false)
.filter(|hunk| hunk.status == DiffHunkStatus::Added)
.map(|expanded_hunk| {

View file

@ -9,13 +9,15 @@ use std::{
use anyhow::Context as _;
use collections::{BTreeMap, HashMap};
use feature_flags::FeatureFlagAppExt;
use futures::{stream::FuturesUnordered, StreamExt};
use git::{diff::DiffHunk, repository::GitFileStatus};
use git::{
diff::{BufferDiff, DiffHunk},
repository::GitFileStatus,
};
use gpui::{
actions, AnyElement, AnyView, AppContext, EventEmitter, FocusHandle, FocusableView,
InteractiveElement, Model, Render, Subscription, Task, View, WeakView,
};
use language::{Buffer, BufferRow, BufferSnapshot};
use language::{Buffer, BufferRow};
use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer};
use project::{Project, ProjectEntryId, ProjectPath, WorktreeId};
use text::{OffsetRangeExt, ToPoint};
@ -215,17 +217,23 @@ impl ProjectDiffEditor {
.ok()
.flatten()
.unwrap_or_default();
let buffers_with_git_diff = cx
.background_executor()
.spawn(async move {
let mut open_tasks = open_tasks
.into_iter()
.map(|(status, entry_id, entry_path, open_task)| async move {
let Some((buffers, mut new_entries, change_sets)) = cx
.spawn(|mut cx| async move {
let mut new_entries = Vec::new();
let mut buffers = HashMap::<
ProjectEntryId,
(
GitFileStatus,
text::BufferSnapshot,
Model<Buffer>,
BufferDiff,
),
>::default();
let mut change_sets = Vec::new();
for (status, entry_id, entry_path, open_task) in open_tasks {
let (_, opened_model) = open_task.await.with_context(|| {
format!(
"loading buffer {} for git diff",
entry_path.path.display()
)
format!("loading buffer {} for git diff", entry_path.path.display())
})?;
let buffer = match opened_model.downcast::<Buffer>() {
Ok(buffer) => buffer,
@ -234,35 +242,31 @@ impl ProjectDiffEditor {
entry_path.path.display()
),
};
anyhow::Ok((status, entry_id, entry_path, buffer))
})
.collect::<FuturesUnordered<_>>();
let change_set = project
.update(&mut cx, |project, cx| {
project.open_unstaged_changes(buffer.clone(), cx)
})?
.await?;
let mut buffers_with_git_diff = Vec::new();
while let Some(opened_buffer) = open_tasks.next().await {
if let Some(opened_buffer) = opened_buffer.log_err() {
buffers_with_git_diff.push(opened_buffer);
}
}
buffers_with_git_diff
})
.await;
let Some((buffers, mut new_entries)) = cx
.update(|cx| {
let mut buffers = HashMap::<
ProjectEntryId,
(GitFileStatus, Model<Buffer>, BufferSnapshot),
>::default();
let mut new_entries = Vec::new();
for (status, entry_id, entry_path, buffer) in buffers_with_git_diff {
let buffer_snapshot = buffer.read(cx).snapshot();
buffers.insert(entry_id, (status, buffer, buffer_snapshot));
cx.update(|cx| {
buffers.insert(
entry_id,
(
status,
buffer.read(cx).text_snapshot(),
buffer,
change_set.read(cx).diff_to_buffer.clone(),
),
);
})?;
change_sets.push(change_set);
new_entries.push((entry_path, entry_id));
}
(buffers, new_entries)
Ok((buffers, new_entries, change_sets))
})
.ok()
.await
.log_err()
else {
return;
};
@ -271,14 +275,14 @@ impl ProjectDiffEditor {
.background_executor()
.spawn(async move {
let mut new_changes = HashMap::<ProjectEntryId, Changes>::default();
for (entry_id, (status, buffer, buffer_snapshot)) in buffers {
for (entry_id, (status, buffer_snapshot, buffer, buffer_diff)) in buffers {
new_changes.insert(
entry_id,
Changes {
_status: status,
buffer,
hunks: buffer_snapshot
.git_diff_hunks_in_row_range(0..BufferRow::MAX)
hunks: buffer_diff
.hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot)
.collect::<Vec<_>>(),
},
);
@ -294,33 +298,16 @@ impl ProjectDiffEditor {
})
.await;
let mut diff_recalculations = FuturesUnordered::new();
project_diff_editor
.update(&mut cx, |project_diff_editor, cx| {
project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx);
for buffer in project_diff_editor
.editor
.read(cx)
.buffer()
.read(cx)
.all_buffers()
{
buffer.update(cx, |buffer, cx| {
if let Some(diff_recalculation) = buffer.recalculate_diff(cx) {
diff_recalculations.push(diff_recalculation);
}
for change_set in change_sets {
project_diff_editor.editor.update(cx, |editor, cx| {
editor.diff_map.add_change_set(change_set, cx)
});
}
})
.ok();
cx.background_executor()
.spawn(async move {
while let Some(()) = diff_recalculations.next().await {
// another diff is calculated
}
})
.await;
}),
);
}
@ -1100,13 +1087,13 @@ impl Render for ProjectDiffEditor {
#[cfg(test)]
mod tests {
use std::{ops::Deref as _, path::Path, sync::Arc};
// use std::{ops::Deref as _, path::Path, sync::Arc};
use fs::RealFs;
use gpui::{SemanticVersion, TestAppContext, VisualTestContext};
use settings::SettingsStore;
// use fs::RealFs;
// use gpui::{SemanticVersion, TestAppContext, VisualTestContext};
// use settings::SettingsStore;
use super::*;
// use super::*;
// TODO finish
// #[gpui::test]
@ -1122,114 +1109,114 @@ mod tests {
// // Apply randomized changes to the project: select a random file, random change and apply to buffers
// }
#[gpui::test]
async fn simple_edit_test(cx: &mut TestAppContext) {
cx.executor().allow_parking();
init_test(cx);
// #[gpui::test]
// async fn simple_edit_test(cx: &mut TestAppContext) {
// cx.executor().allow_parking();
// init_test(cx);
let dir = tempfile::tempdir().unwrap();
let dst = dir.path();
// let dir = tempfile::tempdir().unwrap();
// let dst = dir.path();
std::fs::write(dst.join("file_a"), "This is file_a").unwrap();
std::fs::write(dst.join("file_b"), "This is file_b").unwrap();
// std::fs::write(dst.join("file_a"), "This is file_a").unwrap();
// std::fs::write(dst.join("file_b"), "This is file_b").unwrap();
run_git(dst, &["init"]);
run_git(dst, &["add", "*"]);
run_git(dst, &["commit", "-m", "Initial commit"]);
// run_git(dst, &["init"]);
// run_git(dst, &["add", "*"]);
// run_git(dst, &["commit", "-m", "Initial commit"]);
let project = Project::test(Arc::new(RealFs::default()), [dst], cx).await;
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
// let project = Project::test(Arc::new(RealFs::default()), [dst], cx).await;
// let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
// let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx);
let file_a_editor = workspace
.update(cx, |workspace, cx| {
let file_a_editor = workspace.open_abs_path(dst.join("file_a"), true, cx);
ProjectDiffEditor::deploy(workspace, &Deploy, cx);
file_a_editor
})
.unwrap()
.await
.expect("did not open an item at all")
.downcast::<Editor>()
.expect("did not open an editor for file_a");
// let file_a_editor = workspace
// .update(cx, |workspace, cx| {
// let file_a_editor = workspace.open_abs_path(dst.join("file_a"), true, cx);
// ProjectDiffEditor::deploy(workspace, &Deploy, cx);
// file_a_editor
// })
// .unwrap()
// .await
// .expect("did not open an item at all")
// .downcast::<Editor>()
// .expect("did not open an editor for file_a");
let project_diff_editor = workspace
.update(cx, |workspace, cx| {
workspace
.active_pane()
.read(cx)
.items()
.find_map(|item| item.downcast::<ProjectDiffEditor>())
})
.unwrap()
.expect("did not find a ProjectDiffEditor");
project_diff_editor.update(cx, |project_diff_editor, cx| {
assert!(
project_diff_editor.editor.read(cx).text(cx).is_empty(),
"Should have no changes after opening the diff on no git changes"
);
});
// let project_diff_editor = workspace
// .update(cx, |workspace, cx| {
// workspace
// .active_pane()
// .read(cx)
// .items()
// .find_map(|item| item.downcast::<ProjectDiffEditor>())
// })
// .unwrap()
// .expect("did not find a ProjectDiffEditor");
// project_diff_editor.update(cx, |project_diff_editor, cx| {
// assert!(
// project_diff_editor.editor.read(cx).text(cx).is_empty(),
// "Should have no changes after opening the diff on no git changes"
// );
// });
let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx));
let change = "an edit after git add";
file_a_editor
.update(cx, |file_a_editor, cx| {
file_a_editor.insert(change, cx);
file_a_editor.save(false, project.clone(), cx)
})
.await
.expect("failed to save a file");
cx.executor().advance_clock(Duration::from_secs(1));
cx.run_until_parked();
// let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx));
// let change = "an edit after git add";
// file_a_editor
// .update(cx, |file_a_editor, cx| {
// file_a_editor.insert(change, cx);
// file_a_editor.save(false, project.clone(), cx)
// })
// .await
// .expect("failed to save a file");
// cx.executor().advance_clock(Duration::from_secs(1));
// cx.run_until_parked();
// TODO: this does not work on Linux for some reason (it returns a blank line),
// so disable the last check for now and do some fiddling to avoid the warnings.
#[cfg(target_os = "linux")]
{
if true {
return;
}
}
project_diff_editor.update(cx, |project_diff_editor, cx| {
// TODO: assert this more precisely: extract the added text (based on the background changes) and the deleted text (based on the deleted blocks added)
assert_eq!(
project_diff_editor.editor.read(cx).text(cx),
format!("{change}{old_text}"),
"Should have a new change shown in the beginning, and the old text shown as deleted text afterwards"
);
});
}
fn run_git(path: &Path, args: &[&str]) -> String {
let output = std::process::Command::new("git")
.args(args)
.current_dir(path)
.output()
.expect("git commit failed");
format!(
"Stdout: {}; stderr: {}",
String::from_utf8(output.stdout).unwrap(),
String::from_utf8(output.stderr).unwrap()
)
}
fn init_test(cx: &mut gpui::TestAppContext) {
if std::env::var("RUST_LOG").is_ok() {
env_logger::try_init().ok();
}
cx.update(|cx| {
assets::Assets.load_test_fonts(cx);
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
client::init_settings(cx);
language::init(cx);
Project::init_settings(cx);
workspace::init_settings(cx);
crate::init(cx);
});
}
// // TODO: this does not work on Linux for some reason (it returns a blank line),
// // so disable the last check for now and do some fiddling to avoid the warnings.
// #[cfg(target_os = "linux")]
// {
// if true {
// return;
// }
// }
// project_diff_editor.update(cx, |project_diff_editor, cx| {
// // TODO: assert this more precisely: extract the added text (based on the background changes) and the deleted text (based on the deleted blocks added)
// assert_eq!(
// project_diff_editor.editor.read(cx).text(cx),
// format!("{change}{old_text}"),
// "Should have a new change shown in the beginning, and the old text shown as deleted text afterwards"
// );
// });
// }
// fn run_git(path: &Path, args: &[&str]) -> String {
// let output = std::process::Command::new("git")
// .args(args)
// .current_dir(path)
// .output()
// .expect("git commit failed");
// format!(
// "Stdout: {}; stderr: {}",
// String::from_utf8(output.stdout).unwrap(),
// String::from_utf8(output.stderr).unwrap()
// )
// }
// fn init_test(cx: &mut gpui::TestAppContext) {
// if std::env::var("RUST_LOG").is_ok() {
// env_logger::try_init().ok();
// }
// cx.update(|cx| {
// assets::Assets.load_test_fonts(cx);
// let settings_store = SettingsStore::test(cx);
// cx.set_global(settings_store);
// theme::init(theme::LoadThemes::JustBase, cx);
// release_channel::init(SemanticVersion::default(), cx);
// client::init_settings(cx);
// language::init(cx);
// Project::init_settings(cx);
// workspace::init_settings(cx);
// crate::init(cx);
// });
// }
}

View file

@ -1,12 +1,17 @@
use collections::{hash_map, HashMap, HashSet};
use collections::{HashMap, HashSet};
use git::diff::DiffHunkStatus;
use gpui::{Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Task, View};
use gpui::{
Action, AnchorCorner, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task,
View,
};
use language::{Buffer, BufferId, Point};
use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow,
MultiBufferSnapshot, ToPoint,
MultiBufferSnapshot, ToOffset, ToPoint,
};
use project::buffer_store::BufferChangeSet;
use std::{ops::Range, sync::Arc};
use sum_tree::TreeMap;
use text::OffsetRangeExt;
use ui::{
prelude::*, ActiveTheme, ContextMenu, IconButtonShape, InteractiveElement, IntoElement,
@ -29,10 +34,11 @@ pub(super) struct HoveredHunk {
pub diff_base_byte_range: Range<usize>,
}
#[derive(Debug, Default)]
pub(super) struct ExpandedHunks {
#[derive(Default)]
pub(super) struct DiffMap {
pub(crate) hunks: Vec<ExpandedHunk>,
diff_base: HashMap<BufferId, DiffBaseBuffer>,
pub(crate) diff_bases: HashMap<BufferId, DiffBaseState>,
pub(crate) snapshot: DiffMapSnapshot,
hunk_update_tasks: HashMap<Option<BufferId>, Task<()>>,
expand_all: bool,
}
@ -46,10 +52,13 @@ pub(super) struct ExpandedHunk {
pub folded: bool,
}
#[derive(Debug)]
struct DiffBaseBuffer {
buffer: Model<Buffer>,
diff_base_version: usize,
#[derive(Clone, Debug, Default)]
pub(crate) struct DiffMapSnapshot(TreeMap<BufferId, git::diff::BufferDiff>);
pub(crate) struct DiffBaseState {
pub(crate) change_set: Model<BufferChangeSet>,
pub(crate) last_version: Option<usize>,
_subscription: Subscription,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -66,7 +75,38 @@ pub enum DisplayDiffHunk {
},
}
impl ExpandedHunks {
impl DiffMap {
pub fn snapshot(&self) -> DiffMapSnapshot {
self.snapshot.clone()
}
pub fn add_change_set(
&mut self,
change_set: Model<BufferChangeSet>,
cx: &mut ViewContext<Editor>,
) {
let buffer_id = change_set.read(cx).buffer_id;
self.snapshot
.0
.insert(buffer_id, change_set.read(cx).diff_to_buffer.clone());
Editor::sync_expanded_diff_hunks(self, buffer_id, cx);
self.diff_bases.insert(
buffer_id,
DiffBaseState {
last_version: None,
_subscription: cx.observe(&change_set, move |editor, change_set, cx| {
editor
.diff_map
.snapshot
.0
.insert(buffer_id, change_set.read(cx).diff_to_buffer.clone());
Editor::sync_expanded_diff_hunks(&mut editor.diff_map, buffer_id, cx);
}),
change_set,
},
);
}
pub fn hunks(&self, include_folded: bool) -> impl Iterator<Item = &ExpandedHunk> {
self.hunks
.iter()
@ -74,9 +114,92 @@ impl ExpandedHunks {
}
}
impl DiffMapSnapshot {
pub fn is_empty(&self) -> bool {
self.0.values().all(|diff| diff.is_empty())
}
pub fn diff_hunks<'a>(
&'a self,
buffer_snapshot: &'a MultiBufferSnapshot,
) -> impl Iterator<Item = MultiBufferDiffHunk> + 'a {
self.diff_hunks_in_range(0..buffer_snapshot.len(), buffer_snapshot)
}
pub fn diff_hunks_in_range<'a, T: ToOffset>(
&'a self,
range: Range<T>,
buffer_snapshot: &'a MultiBufferSnapshot,
) -> impl Iterator<Item = MultiBufferDiffHunk> + 'a {
let range = range.start.to_offset(buffer_snapshot)..range.end.to_offset(buffer_snapshot);
buffer_snapshot
.excerpts_for_range(range.clone())
.filter_map(move |excerpt| {
let buffer = excerpt.buffer();
let buffer_id = buffer.remote_id();
let diff = self.0.get(&buffer_id)?;
let buffer_range = excerpt.map_range_to_buffer(range.clone());
let buffer_range =
buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end);
Some(
diff.hunks_intersecting_range(buffer_range, excerpt.buffer())
.map(move |hunk| {
let start =
excerpt.map_point_from_buffer(Point::new(hunk.row_range.start, 0));
let end =
excerpt.map_point_from_buffer(Point::new(hunk.row_range.end, 0));
MultiBufferDiffHunk {
row_range: MultiBufferRow(start.row)..MultiBufferRow(end.row),
buffer_id,
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
}
}),
)
})
.flatten()
}
pub fn diff_hunks_in_range_rev<'a, T: ToOffset>(
&'a self,
range: Range<T>,
buffer_snapshot: &'a MultiBufferSnapshot,
) -> impl Iterator<Item = MultiBufferDiffHunk> + 'a {
let range = range.start.to_offset(buffer_snapshot)..range.end.to_offset(buffer_snapshot);
buffer_snapshot
.excerpts_for_range_rev(range.clone())
.filter_map(move |excerpt| {
let buffer = excerpt.buffer();
let buffer_id = buffer.remote_id();
let diff = self.0.get(&buffer_id)?;
let buffer_range = excerpt.map_range_to_buffer(range.clone());
let buffer_range =
buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end);
Some(
diff.hunks_intersecting_range_rev(buffer_range, excerpt.buffer())
.map(move |hunk| {
let start_row = excerpt
.map_point_from_buffer(Point::new(hunk.row_range.start, 0))
.row;
let end_row = excerpt
.map_point_from_buffer(Point::new(hunk.row_range.end, 0))
.row;
MultiBufferDiffHunk {
row_range: MultiBufferRow(start_row)..MultiBufferRow(end_row),
buffer_id,
buffer_range: hunk.buffer_range.clone(),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
}
}),
)
})
.flatten()
}
}
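
The upshot of the `DiffMapSnapshot` methods above is that hunk queries now start from the editor's `DiffMap` rather than from the buffer snapshot itself. A minimal sketch of the new call shape, assuming it lives inside the editor crate where `diff_map` is visible (the helper name is hypothetical):

```rust
impl Editor {
    // Hypothetical helper: count the diff hunks currently known to this
    // editor, going through DiffMapSnapshot::diff_hunks instead of the old
    // MultiBufferSnapshot::git_diff_hunks_in_range.
    fn count_diff_hunks(&mut self, cx: &mut ViewContext<Self>) -> usize {
        let snapshot = self.snapshot(cx);
        self.diff_map
            .snapshot
            .diff_hunks(&snapshot.buffer_snapshot)
            .count()
    }
}
```
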
impl Editor {
pub fn set_expand_all_diff_hunks(&mut self) {
self.expanded_hunks.expand_all = true;
self.diff_map.expand_all = true;
}
pub(super) fn toggle_hovered_hunk(
@ -92,18 +215,15 @@ impl Editor {
}
pub fn toggle_hunk_diff(&mut self, _: &ToggleHunkDiff, cx: &mut ViewContext<Self>) {
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
let selections = self.selections.disjoint_anchors();
self.toggle_hunks_expanded(
hunks_for_selections(&multi_buffer_snapshot, &selections),
cx,
);
let snapshot = self.snapshot(cx);
let selections = self.selections.all(cx);
self.toggle_hunks_expanded(hunks_for_selections(&snapshot, &selections), cx);
}
pub fn expand_all_hunk_diffs(&mut self, _: &ExpandAllHunkDiffs, cx: &mut ViewContext<Self>) {
let snapshot = self.snapshot(cx);
let display_rows_with_expanded_hunks = self
.expanded_hunks
.diff_map
.hunks(false)
.map(|hunk| &hunk.hunk_range)
.map(|anchor_range| {
@ -119,10 +239,10 @@ impl Editor {
)
})
.collect::<HashMap<_, _>>();
let hunks = snapshot
.display_snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
let hunks = self
.diff_map
.snapshot
.diff_hunks(&snapshot.display_snapshot.buffer_snapshot)
.filter(|hunk| {
let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0)
.to_display_point(&snapshot.display_snapshot)
@ -140,11 +260,11 @@ impl Editor {
hunks_to_toggle: Vec<MultiBufferDiffHunk>,
cx: &mut ViewContext<Self>,
) {
if self.expanded_hunks.expand_all {
if self.diff_map.expand_all {
return;
}
let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None);
let previous_toggle_task = self.diff_map.hunk_update_tasks.remove(&None);
let new_toggle_task = cx.spawn(move |editor, mut cx| async move {
if let Some(task) = previous_toggle_task {
task.await;
@ -154,11 +274,10 @@ impl Editor {
.update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let mut hunks_to_toggle = hunks_to_toggle.into_iter().fuse().peekable();
let mut highlights_to_remove =
Vec::with_capacity(editor.expanded_hunks.hunks.len());
let mut highlights_to_remove = Vec::with_capacity(editor.diff_map.hunks.len());
let mut blocks_to_remove = HashSet::default();
let mut hunks_to_expand = Vec::new();
editor.expanded_hunks.hunks.retain(|expanded_hunk| {
editor.diff_map.hunks.retain(|expanded_hunk| {
if expanded_hunk.folded {
return true;
}
@ -238,7 +357,7 @@ impl Editor {
.ok();
});
self.expanded_hunks
self.diff_map
.hunk_update_tasks
.insert(None, cx.background_executor().spawn(new_toggle_task));
}
@ -252,30 +371,34 @@ impl Editor {
let buffer = self.buffer.clone();
let multi_buffer_snapshot = buffer.read(cx).snapshot(cx);
let hunk_range = hunk.multi_buffer_range.clone();
let (diff_base_buffer, deleted_text_lines) = buffer.update(cx, |buffer, cx| {
let buffer = buffer.buffer(hunk_range.start.buffer_id?)?;
let diff_base_buffer = diff_base_buffer
.or_else(|| self.current_diff_base_buffer(&buffer, cx))
.or_else(|| create_diff_base_buffer(&buffer, cx))?;
let deleted_text_lines = buffer.read(cx).diff_base().map(|diff_base| {
let buffer_id = hunk_range.start.buffer_id?;
let diff_base_buffer = diff_base_buffer.or_else(|| {
self.diff_map
.diff_bases
.get(&buffer_id)?
.change_set
.read(cx)
.base_text
.clone()
})?;
let diff_base = diff_base_buffer.read(cx);
let diff_start_row = diff_base
.offset_to_point(hunk.diff_base_byte_range.start)
.row;
let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row;
diff_end_row - diff_start_row
})?;
Some((diff_base_buffer, deleted_text_lines))
})?;
let deleted_text_lines = diff_end_row - diff_start_row;
let block_insert_index = match self.expanded_hunks.hunks.binary_search_by(|probe| {
let block_insert_index = self
.diff_map
.hunks
.binary_search_by(|probe| {
probe
.hunk_range
.start
.cmp(&hunk_range.start, &multi_buffer_snapshot)
}) {
Ok(_already_present) => return None,
Err(ix) => ix,
};
})
.err()?;
let blocks;
match hunk.status {
@ -315,7 +438,7 @@ impl Editor {
);
}
};
self.expanded_hunks.hunks.insert(
self.diff_map.hunks.insert(
block_insert_index,
ExpandedHunk {
blocks,
@ -374,8 +497,8 @@ impl Editor {
_: &ApplyDiffHunk,
cx: &mut ViewContext<Self>,
) {
let snapshot = self.buffer.read(cx).snapshot(cx);
let hunks = hunks_for_selections(&snapshot, &self.selections.disjoint_anchors());
let snapshot = self.snapshot(cx);
let hunks = hunks_for_selections(&snapshot, &self.selections.all(cx));
let mut ranges_by_buffer = HashMap::default();
self.transact(cx, |editor, cx| {
for hunk in hunks {
@ -401,7 +524,7 @@ impl Editor {
fn has_multiple_hunks(&self, cx: &AppContext) -> bool {
let snapshot = self.buffer.read(cx).snapshot(cx);
let mut hunks = snapshot.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX);
let mut hunks = self.diff_map.snapshot.diff_hunks(&snapshot);
hunks.nth(1).is_some()
}
@ -415,7 +538,7 @@ impl Editor {
.read(cx)
.point_to_buffer_offset(hunk.multi_buffer_range.start, cx)
.map_or(false, |(buffer, _, _)| {
buffer.read(cx).diff_base_buffer().is_some()
buffer.read(cx).base_buffer().is_some()
});
let border_color = cx.theme().colors().border_variant;
@ -552,30 +675,10 @@ impl Editor {
let editor = editor.clone();
let hunk = hunk.clone();
move |_event, cx| {
let multi_buffer =
editor.read(cx).buffer().clone();
let multi_buffer_snapshot =
multi_buffer.read(cx).snapshot(cx);
let mut revert_changes = HashMap::default();
if let Some(hunk) =
crate::hunk_diff::to_diff_hunk(
&hunk,
&multi_buffer_snapshot,
)
{
Editor::prepare_revert_change(
&mut revert_changes,
&multi_buffer,
&hunk,
cx,
);
}
if !revert_changes.is_empty() {
editor.update(cx, |editor, cx| {
editor.revert(revert_changes, cx)
editor.revert_hunk(hunk.clone(), cx);
});
}
}
}),
)
.map(|this| {
@ -763,13 +866,13 @@ impl Editor {
}
pub(super) fn clear_expanded_diff_hunks(&mut self, cx: &mut ViewContext<'_, Editor>) -> bool {
if self.expanded_hunks.expand_all {
if self.diff_map.expand_all {
return false;
}
self.expanded_hunks.hunk_update_tasks.clear();
self.diff_map.hunk_update_tasks.clear();
self.clear_row_highlights::<DiffRowHighlight>();
let to_remove = self
.expanded_hunks
.diff_map
.hunks
.drain(..)
.flat_map(|expanded_hunk| expanded_hunk.blocks.into_iter())
@ -783,48 +886,39 @@ impl Editor {
}
pub(super) fn sync_expanded_diff_hunks(
&mut self,
buffer: Model<Buffer>,
diff_map: &mut DiffMap,
buffer_id: BufferId,
cx: &mut ViewContext<'_, Self>,
) {
let buffer_id = buffer.read(cx).remote_id();
let buffer_diff_base_version = buffer.read(cx).diff_base_version();
self.expanded_hunks
.hunk_update_tasks
.remove(&Some(buffer_id));
let diff_base_buffer = self.current_diff_base_buffer(&buffer, cx);
let new_sync_task = cx.spawn(move |editor, mut cx| async move {
let diff_base_buffer_unchanged = diff_base_buffer.is_some();
let Ok(diff_base_buffer) =
cx.update(|cx| diff_base_buffer.or_else(|| create_diff_base_buffer(&buffer, cx)))
else {
return;
};
editor
.update(&mut cx, |editor, cx| {
if let Some(diff_base_buffer) = &diff_base_buffer {
editor.expanded_hunks.diff_base.insert(
buffer_id,
DiffBaseBuffer {
buffer: diff_base_buffer.clone(),
diff_base_version: buffer_diff_base_version,
},
);
let diff_base_state = diff_map.diff_bases.get_mut(&buffer_id);
let mut diff_base_buffer = None;
let mut diff_base_buffer_unchanged = true;
if let Some(diff_base_state) = diff_base_state {
diff_base_state.change_set.update(cx, |change_set, _| {
if diff_base_state.last_version != Some(change_set.base_text_version) {
diff_base_state.last_version = Some(change_set.base_text_version);
diff_base_buffer_unchanged = false;
}
diff_base_buffer = change_set.base_text.clone();
})
}
diff_map.hunk_update_tasks.remove(&Some(buffer_id));
let new_sync_task = cx.spawn(move |editor, mut cx| async move {
editor
.update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let mut recalculated_hunks = snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
.diff_map
.diff_hunks(&snapshot.buffer_snapshot)
.filter(|hunk| hunk.buffer_id == buffer_id)
.fuse()
.peekable();
let mut highlights_to_remove =
Vec::with_capacity(editor.expanded_hunks.hunks.len());
let mut highlights_to_remove = Vec::with_capacity(editor.diff_map.hunks.len());
let mut blocks_to_remove = HashSet::default();
let mut hunks_to_reexpand =
Vec::with_capacity(editor.expanded_hunks.hunks.len());
editor.expanded_hunks.hunks.retain_mut(|expanded_hunk| {
let mut hunks_to_reexpand = Vec::with_capacity(editor.diff_map.hunks.len());
editor.diff_map.hunks.retain_mut(|expanded_hunk| {
if expanded_hunk.hunk_range.start.buffer_id != Some(buffer_id) {
return true;
};
@ -874,7 +968,7 @@ impl Editor {
> hunk_display_range.end
{
recalculated_hunks.next();
if editor.expanded_hunks.expand_all {
if editor.diff_map.expand_all {
hunks_to_reexpand.push(HoveredHunk {
status,
multi_buffer_range,
@ -917,7 +1011,7 @@ impl Editor {
retain
});
if editor.expanded_hunks.expand_all {
if editor.diff_map.expand_all {
for hunk in recalculated_hunks {
match diff_hunk_to_display(&hunk, &snapshot) {
DisplayDiffHunk::Folded { .. } => {}
@ -935,6 +1029,8 @@ impl Editor {
}
}
}
} else {
drop(recalculated_hunks);
}
editor.remove_highlighted_rows::<DiffRowHighlight>(highlights_to_remove, cx);
@ -949,32 +1045,12 @@ impl Editor {
.ok();
});
self.expanded_hunks.hunk_update_tasks.insert(
diff_map.hunk_update_tasks.insert(
Some(buffer_id),
cx.background_executor().spawn(new_sync_task),
);
}
fn current_diff_base_buffer(
&mut self,
buffer: &Model<Buffer>,
cx: &mut AppContext,
) -> Option<Model<Buffer>> {
buffer.update(cx, |buffer, _| {
match self.expanded_hunks.diff_base.entry(buffer.remote_id()) {
hash_map::Entry::Occupied(o) => {
if o.get().diff_base_version != buffer.diff_base_version() {
o.remove();
None
} else {
Some(o.get().buffer.clone())
}
}
hash_map::Entry::Vacant(_) => None,
}
})
}
fn go_to_subsequent_hunk(&mut self, position: Anchor, cx: &mut ViewContext<Self>) {
let snapshot = self.snapshot(cx);
let position = position.to_point(&snapshot.buffer_snapshot);
@ -1021,7 +1097,7 @@ impl Editor {
}
}
fn to_diff_hunk(
pub(crate) fn to_diff_hunk(
hovered_hunk: &HoveredHunk,
multi_buffer_snapshot: &MultiBufferSnapshot,
) -> Option<MultiBufferDiffHunk> {
@ -1043,24 +1119,6 @@ fn to_diff_hunk(
})
}
fn create_diff_base_buffer(buffer: &Model<Buffer>, cx: &mut AppContext) -> Option<Model<Buffer>> {
buffer
.update(cx, |buffer, _| {
let language = buffer.language().cloned();
let diff_base = buffer.diff_base()?.clone();
Some((buffer.line_ending(), diff_base, language))
})
.map(|(line_ending, diff_base, language)| {
cx.new_model(|cx| {
let buffer = Buffer::local_normalized(diff_base, line_ending, cx);
match language {
Some(language) => buffer.with_language(language, cx),
None => buffer,
}
})
})
}
fn added_hunk_color(cx: &AppContext) -> Hsla {
let mut created_color = cx.theme().status().git().created;
created_color.fade_out(0.7);
@ -1118,51 +1176,27 @@ fn editor_with_deleted_text(
});
})]);
let original_multi_buffer_range = hunk.multi_buffer_range.clone();
let diff_base_range = hunk.diff_base_byte_range.clone();
editor
.register_action::<RevertSelectedHunks>({
let hunk = hunk.clone();
let parent_editor = parent_editor.clone();
move |_, cx| {
parent_editor
.update(cx, |editor, cx| {
let Some((buffer, original_text)) =
editor.buffer().update(cx, |buffer, cx| {
let (_, buffer, _) = buffer.excerpt_containing(
original_multi_buffer_range.start,
cx,
)?;
let original_text =
buffer.read(cx).diff_base()?.slice(diff_base_range.clone());
Some((buffer, Arc::from(original_text.to_string())))
})
else {
return;
};
buffer.update(cx, |buffer, cx| {
buffer.edit(
Some((
original_multi_buffer_range.start.text_anchor
..original_multi_buffer_range.end.text_anchor,
original_text,
)),
None,
cx,
)
});
})
.update(cx, |editor, cx| editor.revert_hunk(hunk.clone(), cx))
.ok();
}
})
.detach();
let hunk = hunk.clone();
editor
.register_action::<ToggleHunkDiff>(move |_, cx| {
.register_action::<ToggleHunkDiff>({
let hunk = hunk.clone();
move |_, cx| {
parent_editor
.update(cx, |editor, cx| {
editor.toggle_hovered_hunk(&hunk, cx);
})
.ok();
}
})
.detach();
editor
@ -1272,27 +1306,7 @@ mod tests {
let project = Project::test(fs, [], cx).await;
// buffer has two modified hunks with two rows each
let buffer_1 = project.update(cx, |project, cx| {
project.create_local_buffer(
"
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent()
.as_str(),
None,
cx,
)
});
buffer_1.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
let diff_base_1 = "
1.zero
1.one
1.two
@ -1301,34 +1315,21 @@ mod tests {
1.five
1.six
"
.unindent(),
),
cx,
);
});
.unindent();
let text_1 = "
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent();
// buffer has a deletion hunk and an insertion hunk
let buffer_2 = project.update(cx, |project, cx| {
project.create_local_buffer(
"
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent()
.as_str(),
None,
cx,
)
});
buffer_2.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
let diff_base_2 = "
2.zero
2.one
2.one-and-a-half
@ -1337,13 +1338,25 @@ mod tests {
2.four
2.six
"
.unindent(),
),
cx,
);
});
.unindent();
cx.background_executor.run_until_parked();
let text_2 = "
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent();
let buffer_1 = project.update(cx, |project, cx| {
project.create_local_buffer(text_1.as_str(), None, cx)
});
let buffer_2 = project.update(cx, |project, cx| {
project.create_local_buffer(text_2.as_str(), None, cx)
});
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(ReadWrite);
@ -1392,10 +1405,30 @@ mod tests {
multibuffer
});
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
let editor = cx.add_window(|cx| Editor::for_multibuffer(multibuffer, None, false, cx));
editor
.update(cx, |editor, cx| {
for (buffer, diff_base) in [
(buffer_1.clone(), diff_base_1),
(buffer_2.clone(), diff_base_2),
] {
let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text(
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
});
editor.diff_map.add_change_set(change_set, cx)
}
})
.unwrap();
cx.background_executor.run_until_parked();
let snapshot = editor.update(cx, |editor, cx| editor.snapshot(cx)).unwrap();
assert_eq!(
snapshot.text(),
snapshot.buffer_snapshot.text(),
"
1.zero
1.ONE
@ -1438,7 +1471,8 @@ mod tests {
assert_eq!(
snapshot
.git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12))
.diff_map
.diff_hunks_in_range(Point::zero()..Point::new(12, 0), &snapshot.buffer_snapshot)
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
.collect::<Vec<_>>(),
&expected,
@ -1446,7 +1480,11 @@ mod tests {
assert_eq!(
snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12))
.diff_map
.diff_hunks_in_range_rev(
Point::zero()..Point::new(12, 0),
&snapshot.buffer_snapshot
)
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
.collect::<Vec<_>>(),
expected

View file

@ -737,7 +737,7 @@ impl Item for Editor {
let buffers = self.buffer().clone().read(cx).all_buffers();
let buffers = buffers
.into_iter()
.map(|handle| handle.read(cx).diff_base_buffer().unwrap_or(handle.clone()))
.map(|handle| handle.read(cx).base_buffer().unwrap_or(handle.clone()))
.collect::<HashSet<_>>();
cx.spawn(|this, mut cx| async move {
if format {

View file

@ -4,7 +4,7 @@ use futures::{channel::mpsc, future::join_all};
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
use language::{Buffer, BufferEvent, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::Project;
use project::{buffer_store::BufferChangeSet, Project};
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
use text::ToOffset;
@ -75,7 +75,7 @@ impl ProposedChangesEditor {
title: title.into(),
buffer_entries: Vec::new(),
recalculate_diffs_tx,
_recalculate_diffs_task: cx.spawn(|_, mut cx| async move {
_recalculate_diffs_task: cx.spawn(|this, mut cx| async move {
let mut buffers_to_diff = HashSet::default();
while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await {
buffers_to_diff.insert(recalculate_diff.buffer);
@ -96,12 +96,37 @@ impl ProposedChangesEditor {
}
}
join_all(buffers_to_diff.drain().filter_map(|buffer| {
buffer
.update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx))
.ok()?
let recalculate_diff_futures = this
.update(&mut cx, |this, cx| {
buffers_to_diff
.drain()
.filter_map(|buffer| {
let buffer = buffer.read(cx);
let base_buffer = buffer.base_buffer()?;
let buffer = buffer.text_snapshot();
let change_set = this.editor.update(cx, |editor, _| {
Some(
editor
.diff_map
.diff_bases
.get(&buffer.remote_id())?
.change_set
.clone(),
)
})?;
Some(change_set.update(cx, |change_set, cx| {
change_set.set_base_text(
base_buffer.read(cx).text(),
buffer,
cx,
)
}))
.await;
})
.collect::<Vec<_>>()
})
.ok()?;
join_all(recalculate_diff_futures).await;
}
None
}),
@ -154,6 +179,7 @@ impl ProposedChangesEditor {
});
let mut buffer_entries = Vec::new();
let mut new_change_sets = Vec::new();
for location in locations {
let branch_buffer;
if let Some(ix) = self
@ -166,6 +192,15 @@ impl ProposedChangesEditor {
buffer_entries.push(entry);
} else {
branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
new_change_sets.push(cx.new_model(|cx| {
let mut change_set = BufferChangeSet::new(branch_buffer.read(cx));
let _ = change_set.set_base_text(
location.buffer.read(cx).text(),
branch_buffer.read(cx).text_snapshot(),
cx,
);
change_set
}));
buffer_entries.push(BufferEntry {
branch: branch_buffer.clone(),
base: location.buffer.clone(),
@ -187,7 +222,10 @@ impl ProposedChangesEditor {
self.buffer_entries = buffer_entries;
self.editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |selections| selections.refresh())
editor.change_selections(None, cx, |selections| selections.refresh());
for change_set in new_change_sets {
editor.diff_map.add_change_set(change_set, cx)
}
});
}
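
Taken together with the base-text refresh above, the change-set lifecycle for a proposed-changes editor has two steps: create a `BufferChangeSet` alongside the branch buffer and register it with the editor's `DiffMap`, then reset its base text whenever the base buffer changes. A condensed sketch, reusing names from the surrounding code (`base_buffer`, `branch_buffer`, and the editor handle are stand-ins for the values threaded through above):

```rust
// Create the change set for a freshly created branch buffer and hand it to
// the editor; DiffMap::add_change_set subscribes so hunk state stays in sync.
let change_set = cx.new_model(|cx| {
    let mut change_set = BufferChangeSet::new(branch_buffer.read(cx));
    let _ = change_set.set_base_text(
        base_buffer.read(cx).text(),
        branch_buffer.read(cx).text_snapshot(),
        cx,
    );
    change_set
});
editor.update(cx, |editor, cx| {
    editor.diff_map.add_change_set(change_set.clone(), cx);
});

// Later, when the base buffer changes, reset the base text; set_base_text
// returns a future that resolves once the diff has been recalculated.
let recalculate = change_set.update(cx, |change_set, cx| {
    change_set.set_base_text(
        base_buffer.read(cx).text(),
        branch_buffer.read(cx).text_snapshot(),
        cx,
    )
});
```
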
@ -217,14 +255,14 @@ impl ProposedChangesEditor {
})
.ok();
}
BufferEvent::DiffBaseChanged => {
self.recalculate_diffs_tx
.unbounded_send(RecalculateDiff {
buffer,
debounce: false,
})
.ok();
}
// BufferEvent::DiffBaseChanged => {
// self.recalculate_diffs_tx
// .unbounded_send(RecalculateDiff {
// buffer,
// debounce: false,
// })
// .ok();
// }
_ => (),
}
}
@ -373,7 +411,7 @@ impl BranchBufferSemanticsProvider {
positions: &[text::Anchor],
cx: &AppContext,
) -> Option<Model<Buffer>> {
let base_buffer = buffer.read(cx).diff_base_buffer()?;
let base_buffer = buffer.read(cx).base_buffer()?;
let version = base_buffer.read(cx).version();
if positions
.iter()

View file

@ -113,7 +113,15 @@ impl EditorLspTestContext {
app_state
.fs
.as_fake()
.insert_tree(root, json!({ "dir": { file_name.clone(): "" }}))
.insert_tree(
root,
json!({
".git": {},
"dir": {
file_name.clone(): ""
}
}),
)
.await;
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));

View file

@ -42,16 +42,16 @@ pub struct EditorTestContext {
impl EditorTestContext {
pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext {
let fs = FakeFs::new(cx.executor());
// fs.insert_file("/file", "".to_owned()).await;
let root = Self::root_path();
fs.insert_tree(
root,
serde_json::json!({
".git": {},
"file": "",
}),
)
.await;
let project = Project::test(fs, [root], cx).await;
let project = Project::test(fs.clone(), [root], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer(root.join("file"), cx)
@ -65,6 +65,8 @@ impl EditorTestContext {
editor
});
let editor_view = editor.root_view(cx).unwrap();
cx.run_until_parked();
Self {
cx: VisualTestContext::from_window(*editor.deref(), cx),
window: editor.into(),
@ -276,8 +278,16 @@ impl EditorTestContext {
snapshot.anchor_before(ranges[0].start)..snapshot.anchor_after(ranges[0].end)
}
pub fn set_diff_base(&mut self, diff_base: Option<&str>) {
self.update_buffer(|buffer, cx| buffer.set_diff_base(diff_base.map(ToOwned::to_owned), cx));
pub fn set_diff_base(&mut self, diff_base: &str) {
self.cx.run_until_parked();
let fs = self
.update_editor(|editor, cx| editor.project.as_ref().unwrap().read(cx).fs().as_fake());
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
fs.set_index_for_repo(
&Self::root_path().join(".git"),
&[(path.as_ref(), diff_base.to_string())],
);
self.cx.run_until_parked();
}
/// Change the editor's text and selections using a string containing
@ -319,10 +329,12 @@ impl EditorTestContext {
state_context
}
/// Assert about the text of the editor, the selections, and the expanded
/// diff hunks.
///
/// Diff hunks are indicated by lines starting with `+` and `-`.
#[track_caller]
pub fn assert_diff_hunks(&mut self, expected_diff: String) {
// Normalize the expected diff. If it has no diff markers, then insert blank markers
// before each line. Strip any whitespace-only lines.
pub fn assert_state_with_diff(&mut self, expected_diff: String) {
let has_diff_markers = expected_diff
.lines()
.any(|line| line.starts_with("+") || line.starts_with("-"));
@ -340,11 +352,14 @@ impl EditorTestContext {
})
.join("\n");
let actual_selections = self.editor_selections();
let actual_marked_text =
generate_marked_text(&self.buffer_text(), &actual_selections, true);
// Read the actual diff from the editor's row highlights and block
// decorations.
let actual_diff = self.editor.update(&mut self.cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let text = editor.text(cx);
let insertions = editor
.highlighted_rows::<DiffRowHighlight>()
.map(|(range, _)| {
@ -354,7 +369,7 @@ impl EditorTestContext {
})
.collect::<Vec<_>>();
let deletions = editor
.expanded_hunks
.diff_map
.hunks
.iter()
.filter_map(|hunk| {
@ -371,10 +386,20 @@ impl EditorTestContext {
.read(cx)
.excerpt_containing(hunk.hunk_range.start, cx)
.expect("no excerpt for expanded buffer's hunk start");
let deleted_text = buffer
.read(cx)
.diff_base()
let buffer_id = buffer.read(cx).remote_id();
let change_set = &editor
.diff_map
.diff_bases
.get(&buffer_id)
.expect("should have a diff base for expanded hunk")
.change_set;
let deleted_text = change_set
.read(cx)
.base_text
.as_ref()
.expect("no base text for expanded hunk")
.read(cx)
.as_rope()
.slice(hunk.diff_base_byte_range.clone())
.to_string();
if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status {
@ -384,7 +409,7 @@ impl EditorTestContext {
}
})
.collect::<Vec<_>>();
format_diff(text, deletions, insertions)
format_diff(actual_marked_text, deletions, insertions)
});
pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state");
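
These two helpers change how editor tests drive git state: the diff base is now written into the fake repository's index (rather than set directly on the buffer) and picked up asynchronously, and `assert_state_with_diff` checks the buffer text, selections, and expanded hunks against a marked string in which `-` lines are deleted rows and `+` lines are inserted rows. A minimal setup sketch (the assertion contents are omitted because they depend on the buffer text and which hunks are expanded):

```rust
let mut cx = EditorTestContext::new(cx).await;

// Writes the staged contents into the fake repo's index for the open file;
// set_diff_base runs the executor until the async diff recalculation settles.
cx.set_diff_base("one\ntwo\nthree\n");

// With hunks expanded, the state can then be asserted as a marked string
// where `-` prefixes deleted rows and `+` prefixes inserted rows, e.g.:
// cx.assert_state_with_diff(expected.unindent());
```
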

View file

@ -132,7 +132,7 @@ pub trait Fs: Send + Sync {
async fn is_case_sensitive(&self) -> Result<bool>;
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> &FakeFs {
fn as_fake(&self) -> Arc<FakeFs> {
panic!("called as_fake on a real fs");
}
}
@ -840,6 +840,7 @@ impl Watcher for RealWatcher {
#[cfg(any(test, feature = "test-support"))]
pub struct FakeFs {
this: std::sync::Weak<Self>,
// Use an unfair lock to ensure tests are deterministic.
state: Mutex<FakeFsState>,
executor: gpui::BackgroundExecutor,
@ -1022,7 +1023,8 @@ impl FakeFs {
pub fn new(executor: gpui::BackgroundExecutor) -> Arc<Self> {
let (tx, mut rx) = smol::channel::bounded::<PathBuf>(10);
let this = Arc::new(Self {
let this = Arc::new_cyclic(|this| Self {
this: this.clone(),
executor: executor.clone(),
state: Mutex::new(FakeFsState {
root: Arc::new(Mutex::new(FakeFsEntry::Dir {
@ -1474,7 +1476,8 @@ struct FakeHandle {
#[cfg(any(test, feature = "test-support"))]
impl FileHandle for FakeHandle {
fn current_path(&self, fs: &Arc<dyn Fs>) -> Result<PathBuf> {
let state = fs.as_fake().state.lock();
let fs = fs.as_fake();
let state = fs.state.lock();
let Some(target) = state.moves.get(&self.inode) else {
anyhow::bail!("fake fd not moved")
};
@ -1970,8 +1973,8 @@ impl Fs for FakeFs {
}
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> &FakeFs {
self
fn as_fake(&self) -> Arc<FakeFs> {
self.this.upgrade().unwrap()
}
}
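
The `FakeFs` change above follows a standard pattern: the struct stores a `Weak` pointer to itself, created with `Arc::new_cyclic`, so that `&self` methods like `as_fake` can hand out owned `Arc` handles. A self-contained sketch of the pattern (the names are illustrative, not Zed APIs):

```rust
use std::sync::{Arc, Weak};

struct Thing {
    this: Weak<Thing>,
}

impl Thing {
    fn new() -> Arc<Self> {
        // new_cyclic provides the Weak pointer while the Arc is being
        // constructed, so the struct can keep a handle to itself.
        Arc::new_cyclic(|this| Thing { this: this.clone() })
    }

    fn handle(&self) -> Arc<Self> {
        // Upgrading succeeds as long as the Arc returned by `new` is alive.
        self.this.upgrade().expect("constructed via Thing::new")
    }
}
```
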

View file

@ -14,7 +14,6 @@ path = "src/git.rs"
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
clock.workspace = true
collections.workspace = true
derive_more.workspace = true
git2.workspace = true

View file

@ -64,18 +64,33 @@ impl sum_tree::Summary for DiffHunkSummary {
#[derive(Debug, Clone)]
pub struct BufferDiff {
last_buffer_version: Option<clock::Global>,
tree: SumTree<InternalDiffHunk>,
}
impl BufferDiff {
pub fn new(buffer: &BufferSnapshot) -> BufferDiff {
BufferDiff {
last_buffer_version: None,
tree: SumTree::new(buffer),
}
}
pub async fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self {
let mut tree = SumTree::new(buffer);
let buffer_text = buffer.as_rope().to_string();
let patch = Self::diff(diff_base, &buffer_text);
if let Some(patch) = patch {
let mut divergence = 0;
for hunk_index in 0..patch.num_hunks() {
let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
tree.push(hunk, buffer);
}
}
Self { tree }
}
pub fn is_empty(&self) -> bool {
self.tree.is_empty()
}
@ -168,27 +183,11 @@ impl BufferDiff {
#[cfg(test)]
fn clear(&mut self, buffer: &text::BufferSnapshot) {
self.last_buffer_version = Some(buffer.version().clone());
self.tree = SumTree::new(buffer);
}
pub async fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) {
let mut tree = SumTree::new(buffer);
let diff_base_text = diff_base.to_string();
let buffer_text = buffer.as_rope().to_string();
let patch = Self::diff(&diff_base_text, &buffer_text);
if let Some(patch) = patch {
let mut divergence = 0;
for hunk_index in 0..patch.num_hunks() {
let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
tree.push(hunk, buffer);
}
}
self.tree = tree;
self.last_buffer_version = Some(buffer.version().clone());
*self = Self::build(&diff_base.to_string(), buffer).await;
}
#[cfg(test)]
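
`BufferDiff::build` is now the single async entry point for computing a diff, and `update` simply delegates to it. A rough sketch of how a caller might compute a diff off the main thread, assuming the surrounding gpui context (the variable names are stand-ins):

```rust
// Build the diff against a base text on the background executor; the
// resulting BufferDiff can then be stored (e.g. in a BufferChangeSet).
let buffer_snapshot = buffer.read(cx).text_snapshot();
let base_text = base_text.to_string();
let diff_task = cx.background_executor().spawn(async move {
    git::diff::BufferDiff::build(&base_text, &buffer_snapshot).await
});
```
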

View file

@ -34,7 +34,6 @@ ec4rs.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true
globset.workspace = true
gpui.workspace = true
http_client.workspace = true

View file

@ -90,22 +90,11 @@ pub enum Capability {
pub type BufferRow = u32;
#[derive(Clone)]
enum BufferDiffBase {
Git(Rope),
PastBufferVersion {
buffer: Model<Buffer>,
rope: Rope,
merged_operations: Vec<Lamport>,
},
}
/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
text: TextBuffer,
diff_base: Option<BufferDiffBase>,
git_diff: git::diff::BufferDiff,
branch_state: Option<BufferBranchState>,
/// Filesystem state, `None` when there is no path.
file: Option<Arc<dyn File>>,
/// The mtime of the file when this buffer was last loaded from
@ -135,7 +124,6 @@ pub struct Buffer {
deferred_ops: OperationQueue<Operation>,
capability: Capability,
has_conflict: bool,
diff_base_version: usize,
/// Memoize calls to has_changes_since(saved_version).
/// The contents of a cell are (self.version, has_changes) at the time of a last call.
has_unsaved_edits: Cell<(clock::Global, bool)>,
@ -148,11 +136,15 @@ pub enum ParseStatus {
Parsing,
}
struct BufferBranchState {
base_buffer: Model<Buffer>,
merged_operations: Vec<Lamport>,
}
/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
text: text::BufferSnapshot,
git_diff: git::diff::BufferDiff,
pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>,
diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
@ -345,10 +337,6 @@ pub enum BufferEvent {
Reloaded,
/// The buffer is in need of a reload
ReloadNeeded,
/// The buffer's diff_base changed.
DiffBaseChanged,
/// Buffer's excerpts for a certain diff base were recalculated.
DiffUpdated,
/// The buffer's language was changed.
LanguageChanged,
/// The buffer's syntax trees were updated.
@ -626,7 +614,6 @@ impl Buffer {
Self::build(
TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
None,
None,
Capability::ReadWrite,
)
}
@ -645,7 +632,6 @@ impl Buffer {
base_text_normalized,
),
None,
None,
Capability::ReadWrite,
)
}
@ -660,7 +646,6 @@ impl Buffer {
Self::build(
TextBuffer::new(replica_id, remote_id, base_text.into()),
None,
None,
capability,
)
}
@ -676,7 +661,7 @@ impl Buffer {
let buffer_id = BufferId::new(message.id)
.with_context(|| anyhow!("Could not deserialize buffer_id"))?;
let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
let mut this = Self::build(buffer, message.diff_base, file, capability);
let mut this = Self::build(buffer, file, capability);
this.text.set_line_ending(proto::deserialize_line_ending(
rpc::proto::LineEnding::from_i32(message.line_ending)
.ok_or_else(|| anyhow!("missing line_ending"))?,
@ -692,7 +677,6 @@ impl Buffer {
id: self.remote_id().into(),
file: self.file.as_ref().map(|f| f.to_proto(cx)),
base_text: self.base_text().to_string(),
diff_base: self.diff_base().as_ref().map(|h| h.to_string()),
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
saved_version: proto::serialize_version(&self.saved_version),
saved_mtime: self.saved_mtime.map(|time| time.into()),
@ -766,15 +750,9 @@ impl Buffer {
}
/// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`] and [`Capability`].
pub fn build(
buffer: TextBuffer,
diff_base: Option<String>,
file: Option<Arc<dyn File>>,
capability: Capability,
) -> Self {
pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
let snapshot = buffer.snapshot();
let git_diff = git::diff::BufferDiff::new(&snapshot);
let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
Self {
saved_mtime,
@ -785,12 +763,7 @@ impl Buffer {
was_dirty_before_starting_transaction: None,
has_unsaved_edits: Cell::new((buffer.version(), false)),
text: buffer,
diff_base: diff_base.map(|mut raw_diff_base| {
LineEnding::normalize(&mut raw_diff_base);
BufferDiffBase::Git(Rope::from(raw_diff_base))
}),
diff_base_version: 0,
git_diff,
branch_state: None,
file,
capability,
syntax_map,
@ -824,7 +797,6 @@ impl Buffer {
BufferSnapshot {
text,
syntax,
git_diff: self.git_diff.clone(),
file: self.file.clone(),
remote_selections: self.remote_selections.clone(),
diagnostics: self.diagnostics.clone(),
@ -837,21 +809,15 @@ impl Buffer {
let this = cx.handle();
cx.new_model(|cx| {
let mut branch = Self {
diff_base: Some(BufferDiffBase::PastBufferVersion {
buffer: this.clone(),
rope: self.as_rope().clone(),
branch_state: Some(BufferBranchState {
base_buffer: this.clone(),
merged_operations: Default::default(),
}),
language: self.language.clone(),
has_conflict: self.has_conflict,
has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
_subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
..Self::build(
self.text.branch(),
None,
self.file.clone(),
self.capability(),
)
..Self::build(self.text.branch(), self.file.clone(), self.capability())
};
if let Some(language_registry) = self.language_registry() {
branch.set_language_registry(language_registry);
@ -870,7 +836,7 @@ impl Buffer {
/// If `ranges` is empty, then all changes will be applied. This buffer must
/// be a branch buffer to call this method.
pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut ModelContext<Self>) {
let Some(base_buffer) = self.diff_base_buffer() else {
let Some(base_buffer) = self.base_buffer() else {
debug_panic!("not a branch buffer");
return;
};
@ -906,14 +872,14 @@ impl Buffer {
}
let operation = base_buffer.update(cx, |base_buffer, cx| {
cx.emit(BufferEvent::DiffBaseChanged);
// cx.emit(BufferEvent::DiffBaseChanged);
base_buffer.edit(edits, None, cx)
});
if let Some(operation) = operation {
if let Some(BufferDiffBase::PastBufferVersion {
if let Some(BufferBranchState {
merged_operations, ..
}) = &mut self.diff_base
}) = &mut self.branch_state
{
merged_operations.push(operation);
}
@ -929,9 +895,9 @@ impl Buffer {
let BufferEvent::Operation { operation, .. } = event else {
return;
};
let Some(BufferDiffBase::PastBufferVersion {
let Some(BufferBranchState {
merged_operations, ..
}) = &mut self.diff_base
}) = &mut self.branch_state
else {
return;
};
@ -950,8 +916,6 @@ impl Buffer {
let counts = [(timestamp, u32::MAX)].into_iter().collect();
self.undo_operations(counts, cx);
}
self.diff_base_version += 1;
}
#[cfg(test)]
@ -1123,74 +1087,8 @@ impl Buffer {
}
}
/// Returns the current diff base, see [`Buffer::set_diff_base`].
pub fn diff_base(&self) -> Option<&Rope> {
match self.diff_base.as_ref()? {
BufferDiffBase::Git(rope) | BufferDiffBase::PastBufferVersion { rope, .. } => {
Some(rope)
}
}
}
/// Sets the text that will be used to compute a Git diff
/// against the buffer text.
pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &ModelContext<Self>) {
self.diff_base = diff_base.map(|mut raw_diff_base| {
LineEnding::normalize(&mut raw_diff_base);
BufferDiffBase::Git(Rope::from(raw_diff_base))
});
self.diff_base_version += 1;
if let Some(recalc_task) = self.recalculate_diff(cx) {
cx.spawn(|buffer, mut cx| async move {
recalc_task.await;
buffer
.update(&mut cx, |_, cx| {
cx.emit(BufferEvent::DiffBaseChanged);
})
.ok();
})
.detach();
}
}
/// Returns a number, unique per diff base set to the buffer.
pub fn diff_base_version(&self) -> usize {
self.diff_base_version
}
pub fn diff_base_buffer(&self) -> Option<Model<Self>> {
match self.diff_base.as_ref()? {
BufferDiffBase::Git(_) => None,
BufferDiffBase::PastBufferVersion { buffer, .. } => Some(buffer.clone()),
}
}
/// Recomputes the diff.
pub fn recalculate_diff(&self, cx: &ModelContext<Self>) -> Option<Task<()>> {
let diff_base_rope = match self.diff_base.as_ref()? {
BufferDiffBase::Git(rope) => rope.clone(),
BufferDiffBase::PastBufferVersion { buffer, .. } => buffer.read(cx).as_rope().clone(),
};
let snapshot = self.snapshot();
let mut diff = self.git_diff.clone();
let diff = cx.background_executor().spawn(async move {
diff.update(&diff_base_rope, &snapshot).await;
(diff, diff_base_rope)
});
Some(cx.spawn(|this, mut cx| async move {
let (buffer_diff, diff_base_rope) = diff.await;
this.update(&mut cx, |this, cx| {
this.git_diff = buffer_diff;
this.non_text_state_update_count += 1;
if let Some(BufferDiffBase::PastBufferVersion { rope, .. }) = &mut this.diff_base {
*rope = diff_base_rope;
}
cx.emit(BufferEvent::DiffUpdated);
})
.ok();
}))
pub fn base_buffer(&self) -> Option<Model<Self>> {
Some(self.branch_state.as_ref()?.base_buffer.clone())
}
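
With the diff machinery gone from `Buffer`, branching is the only relationship the buffer still tracks, via `BufferBranchState` and the `base_buffer` accessor above. A tiny sketch of what that looks like from a gpui test context (variable names are illustrative):

```rust
// A branch buffer remembers its base; an ordinary buffer does not.
let branch = base.update(cx, |buffer, cx| buffer.branch(cx));
assert!(branch.read_with(cx, |buffer, _| buffer.base_buffer().is_some()));
assert!(base.read_with(cx, |buffer, _| buffer.base_buffer().is_none()));
```
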
/// Returns the primary [`Language`] assigned to this [`Buffer`].
@ -3992,37 +3890,6 @@ impl BufferSnapshot {
})
}
/// Whether the buffer contains any Git changes.
pub fn has_git_diff(&self) -> bool {
!self.git_diff.is_empty()
}
/// Returns all the Git diff hunks intersecting the given row range.
pub fn git_diff_hunks_in_row_range(
&self,
range: Range<BufferRow>,
) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
self.git_diff.hunks_in_row_range(range, self)
}
/// Returns all the Git diff hunks intersecting the given
/// range.
pub fn git_diff_hunks_intersecting_range(
&self,
range: Range<Anchor>,
) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
self.git_diff.hunks_intersecting_range(range, self)
}
/// Returns all the Git diff hunks intersecting the given
/// range, in reverse order.
pub fn git_diff_hunks_intersecting_range_rev(
&self,
range: Range<Anchor>,
) -> impl '_ + Iterator<Item = git::diff::DiffHunk> {
self.git_diff.hunks_intersecting_range_rev(range, self)
}
/// Returns if the buffer contains any diagnostics.
pub fn has_diagnostics(&self) -> bool {
!self.diagnostics.is_empty()
@ -4167,7 +4034,6 @@ impl Clone for BufferSnapshot {
fn clone(&self) -> Self {
Self {
text: self.text.clone(),
git_diff: self.git_diff.clone(),
syntax: self.syntax.clone(),
file: self.file.clone(),
remote_selections: self.remote_selections.clone(),

View file

@ -6,7 +6,6 @@ use crate::Buffer;
use clock::ReplicaId;
use collections::BTreeMap;
use futures::FutureExt as _;
use git::diff::assert_hunks;
use gpui::{AppContext, BorrowAppContext, Model};
use gpui::{Context, TestAppContext};
use indoc::indoc;
@ -2608,15 +2607,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) {
);
});
// The branch buffer maintains a diff with respect to its base buffer.
start_recalculating_diff(&branch, cx);
cx.run_until_parked();
assert_diff_hunks(
&branch,
cx,
&[(1..2, "", "1.5\n"), (3..4, "three\n", "THREE\n")],
);
// Edits to the base are applied to the branch.
base.update(cx, |buffer, cx| {
buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx)
@ -2626,21 +2616,6 @@ fn test_branch_and_merge(cx: &mut TestAppContext) {
assert_eq!(buffer.text(), "ZERO\none\n1.5\ntwo\nTHREE\n");
});
// Until the git diff recalculation is complete, the git diff references
// the previous content of the base buffer, so that it stays in sync.
start_recalculating_diff(&branch, cx);
assert_diff_hunks(
&branch,
cx,
&[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")],
);
cx.run_until_parked();
assert_diff_hunks(
&branch,
cx,
&[(2..3, "", "1.5\n"), (4..5, "three\n", "THREE\n")],
);
// Edits to any replica of the base are applied to the branch.
base_replica.update(cx, |buffer, cx| {
buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "2.5\n")], None, cx)
@ -2731,29 +2706,6 @@ fn test_undo_after_merge_into_base(cx: &mut TestAppContext) {
branch.read_with(cx, |branch, _| assert_eq!(branch.text(), "ABCdefgHIjk"));
}
fn start_recalculating_diff(buffer: &Model<Buffer>, cx: &mut TestAppContext) {
buffer
.update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap())
.detach();
}
#[track_caller]
fn assert_diff_hunks(
buffer: &Model<Buffer>,
cx: &mut TestAppContext,
expected_hunks: &[(Range<u32>, &str, &str)],
) {
let (snapshot, diff_base) = buffer.read_with(cx, |buffer, _| {
(buffer.snapshot(), buffer.diff_base().unwrap().to_string())
});
assert_hunks(
snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX),
&snapshot,
&diff_base,
expected_hunks,
);
}
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
let min_peers = env::var("MIN_PEERS")

View file

@ -95,10 +95,7 @@ pub enum Event {
},
Reloaded,
ReloadNeeded,
DiffBaseChanged,
DiffUpdated {
buffer: Model<Buffer>,
},
LanguageChanged(BufferId),
CapabilityChanged,
Reparsed(BufferId),
@ -257,6 +254,7 @@ struct Excerpt {
pub struct MultiBufferExcerpt<'a> {
excerpt: &'a Excerpt,
excerpt_offset: usize,
excerpt_position: Point,
}
#[derive(Clone, Debug)]
@ -1824,8 +1822,6 @@ impl MultiBuffer {
language::BufferEvent::FileHandleChanged => Event::FileHandleChanged,
language::BufferEvent::Reloaded => Event::Reloaded,
language::BufferEvent::ReloadNeeded => Event::ReloadNeeded,
language::BufferEvent::DiffBaseChanged => Event::DiffBaseChanged,
language::BufferEvent::DiffUpdated => Event::DiffUpdated { buffer },
language::BufferEvent::LanguageChanged => {
Event::LanguageChanged(buffer.read(cx).remote_id())
}
@ -3424,47 +3420,86 @@ impl MultiBufferSnapshot {
.map(|excerpt| (excerpt.id, &excerpt.buffer, excerpt.range.clone()))
}
fn excerpts_for_range<T: ToOffset>(
pub fn all_excerpts(&self) -> impl Iterator<Item = MultiBufferExcerpt> {
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.next(&());
std::iter::from_fn(move || {
let excerpt = cursor.item()?;
let excerpt = MultiBufferExcerpt::new(excerpt, *cursor.start());
cursor.next(&());
Some(excerpt)
})
}
pub fn excerpts_for_range<T: ToOffset>(
&self,
range: Range<T>,
) -> impl Iterator<Item = (&Excerpt, usize)> + '_ {
) -> impl Iterator<Item = MultiBufferExcerpt> + '_ {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.excerpts.cursor::<usize>(&());
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.seek(&range.start, Bias::Right, &());
cursor.prev(&());
iter::from_fn(move || {
cursor.next(&());
if cursor.start() < &range.end {
cursor.item().map(|item| (item, *cursor.start()))
if cursor.start().0 < range.end {
cursor
.item()
.map(|item| MultiBufferExcerpt::new(item, *cursor.start()))
} else {
None
}
})
}
pub fn excerpts_for_range_rev<T: ToOffset>(
&self,
range: Range<T>,
) -> impl Iterator<Item = MultiBufferExcerpt> + '_ {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.seek(&range.end, Bias::Left, &());
if cursor.item().is_none() {
cursor.prev(&());
}
std::iter::from_fn(move || {
let excerpt = cursor.item()?;
let excerpt = MultiBufferExcerpt::new(excerpt, *cursor.start());
cursor.prev(&());
Some(excerpt)
})
}
pub fn excerpt_before(&self, id: ExcerptId) -> Option<MultiBufferExcerpt<'_>> {
let start_locator = self.excerpt_locator_for_id(id);
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.seek(&Some(start_locator), Bias::Left, &());
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
cursor.seek(start_locator, Bias::Left, &());
cursor.prev(&());
let excerpt = cursor.item()?;
let excerpt_offset = cursor.start().text.len;
let excerpt_position = cursor.start().text.lines;
Some(MultiBufferExcerpt {
excerpt,
excerpt_offset: 0,
excerpt_offset,
excerpt_position,
})
}
pub fn excerpt_after(&self, id: ExcerptId) -> Option<MultiBufferExcerpt<'_>> {
let start_locator = self.excerpt_locator_for_id(id);
let mut cursor = self.excerpts.cursor::<Option<&Locator>>(&());
cursor.seek(&Some(start_locator), Bias::Left, &());
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
cursor.seek(start_locator, Bias::Left, &());
cursor.next(&());
let excerpt = cursor.item()?;
let excerpt_offset = cursor.start().text.len;
let excerpt_position = cursor.start().text.lines;
Some(MultiBufferExcerpt {
excerpt,
excerpt_offset: 0,
excerpt_offset,
excerpt_position,
})
}
@ -3647,22 +3682,12 @@ impl MultiBufferSnapshot {
) -> impl Iterator<Item = Range<usize>> + 'a {
let range = range.start.to_offset(self)..range.end.to_offset(self);
self.excerpts_for_range(range.clone())
.filter(move |&(excerpt, _)| redaction_enabled(excerpt.buffer.file()))
.flat_map(move |(excerpt, excerpt_offset)| {
let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
.filter(move |excerpt| redaction_enabled(excerpt.buffer().file()))
.flat_map(move |excerpt| {
excerpt
.buffer
.redacted_ranges(excerpt.range.context.clone())
.map(move |mut redacted_range| {
// Re-base onto the excerpts coordinates in the multibuffer
redacted_range.start = excerpt_offset
+ redacted_range.start.saturating_sub(excerpt_buffer_start);
redacted_range.end = excerpt_offset
+ redacted_range.end.saturating_sub(excerpt_buffer_start);
redacted_range
})
.buffer()
.redacted_ranges(excerpt.buffer_range().clone())
.map(move |redacted_range| excerpt.map_range_from_buffer(redacted_range))
.skip_while(move |redacted_range| redacted_range.end < range.start)
.take_while(move |redacted_range| redacted_range.start < range.end)
})
@ -3674,12 +3699,13 @@ impl MultiBufferSnapshot {
) -> impl Iterator<Item = language::RunnableRange> + '_ {
let range = range.start.to_offset(self)..range.end.to_offset(self);
self.excerpts_for_range(range.clone())
.flat_map(move |(excerpt, excerpt_offset)| {
let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
.flat_map(move |excerpt| {
let excerpt_buffer_start =
excerpt.buffer_range().start.to_offset(&excerpt.buffer());
excerpt
.buffer
.runnable_ranges(excerpt.range.context.clone())
.buffer()
.runnable_ranges(excerpt.buffer_range())
.filter_map(move |mut runnable| {
// Re-base onto the excerpts coordinates in the multibuffer
//
@ -3688,15 +3714,14 @@ impl MultiBufferSnapshot {
if runnable.run_range.start < excerpt_buffer_start {
return None;
}
if language::ToPoint::to_point(&runnable.run_range.end, &excerpt.buffer).row
> excerpt.max_buffer_row
if language::ToPoint::to_point(&runnable.run_range.end, &excerpt.buffer())
.row
> excerpt.max_buffer_row()
{
return None;
}
runnable.run_range.start =
excerpt_offset + runnable.run_range.start - excerpt_buffer_start;
runnable.run_range.end =
excerpt_offset + runnable.run_range.end - excerpt_buffer_start;
runnable.run_range = excerpt.map_range_from_buffer(runnable.run_range);
Some(runnable)
})
.skip_while(move |runnable| runnable.run_range.end < range.start)
@ -3730,15 +3755,15 @@ impl MultiBufferSnapshot {
let range = range.start.to_offset(self)..range.end.to_offset(self);
self.excerpts_for_range(range.clone())
.flat_map(move |(excerpt, excerpt_offset)| {
.flat_map(move |excerpt| {
let excerpt_buffer_start_row =
excerpt.range.context.start.to_point(&excerpt.buffer).row;
let excerpt_offset_row = crate::ToPoint::to_point(&excerpt_offset, self).row;
excerpt.buffer_range().start.to_point(&excerpt.buffer()).row;
let excerpt_offset_row = excerpt.start_point().row;
excerpt
.buffer
.buffer()
.indent_guides_in_range(
excerpt.range.context.clone(),
excerpt.buffer_range(),
ignore_disabled_for_language,
cx,
)
@ -3856,151 +3881,6 @@ impl MultiBufferSnapshot {
})
}
pub fn has_git_diffs(&self) -> bool {
for excerpt in self.excerpts.iter() {
if excerpt.buffer.has_git_diff() {
return true;
}
}
false
}
pub fn git_diff_hunks_in_range_rev(
&self,
row_range: Range<MultiBufferRow>,
) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
let mut cursor = self.excerpts.cursor::<Point>(&());
cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &());
if cursor.item().is_none() {
cursor.prev(&());
}
std::iter::from_fn(move || {
let excerpt = cursor.item()?;
let multibuffer_start = *cursor.start();
let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
if multibuffer_start.row >= row_range.end.0 {
return None;
}
let mut buffer_start = excerpt.range.context.start;
let mut buffer_end = excerpt.range.context.end;
let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
if row_range.start.0 > multibuffer_start.row {
let buffer_start_point =
excerpt_start_point + Point::new(row_range.start.0 - multibuffer_start.row, 0);
buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
}
if row_range.end.0 < multibuffer_end.row {
let buffer_end_point =
excerpt_start_point + Point::new(row_range.end.0 - multibuffer_start.row, 0);
buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
}
let buffer_hunks = excerpt
.buffer
.git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end)
.map(move |hunk| {
let start = multibuffer_start.row
+ hunk.row_range.start.saturating_sub(excerpt_start_point.row);
let end = multibuffer_start.row
+ hunk
.row_range
.end
.min(excerpt_end_point.row + 1)
.saturating_sub(excerpt_start_point.row);
MultiBufferDiffHunk {
row_range: MultiBufferRow(start)..MultiBufferRow(end),
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
buffer_range: hunk.buffer_range.clone(),
buffer_id: excerpt.buffer_id,
}
});
cursor.prev(&());
Some(buffer_hunks)
})
.flatten()
}
pub fn git_diff_hunks_in_range(
&self,
row_range: Range<MultiBufferRow>,
) -> impl Iterator<Item = MultiBufferDiffHunk> + '_ {
let mut cursor = self.excerpts.cursor::<Point>(&());
cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &());
std::iter::from_fn(move || {
let excerpt = cursor.item()?;
let multibuffer_start = *cursor.start();
let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
let mut buffer_start = excerpt.range.context.start;
let mut buffer_end = excerpt.range.context.end;
let excerpt_rows = match multibuffer_start.row.cmp(&row_range.end.0) {
cmp::Ordering::Less => {
let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
if row_range.start.0 > multibuffer_start.row {
let buffer_start_point = excerpt_start_point
+ Point::new(row_range.start.0 - multibuffer_start.row, 0);
buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
}
if row_range.end.0 < multibuffer_end.row {
let buffer_end_point = excerpt_start_point
+ Point::new(row_range.end.0 - multibuffer_start.row, 0);
buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
}
excerpt_start_point.row..excerpt_end_point.row
}
cmp::Ordering::Equal if row_range.end.0 == 0 => {
buffer_end = buffer_start;
0..0
}
cmp::Ordering::Greater | cmp::Ordering::Equal => return None,
};
let buffer_hunks = excerpt
.buffer
.git_diff_hunks_intersecting_range(buffer_start..buffer_end)
.map(move |hunk| {
let buffer_range = if excerpt_rows.start == 0 && excerpt_rows.end == 0 {
MultiBufferRow(0)..MultiBufferRow(1)
} else {
let start = multibuffer_start.row
+ hunk.row_range.start.saturating_sub(excerpt_rows.start);
let end = multibuffer_start.row
+ hunk
.row_range
.end
.min(excerpt_rows.end + 1)
.saturating_sub(excerpt_rows.start);
MultiBufferRow(start)..MultiBufferRow(end)
};
MultiBufferDiffHunk {
row_range: buffer_range,
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
buffer_range: hunk.buffer_range.clone(),
buffer_id: excerpt.buffer_id,
}
});
cursor.next(&());
Some(buffer_hunks)
})
.flatten()
}
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let excerpt = self.excerpt_containing(range.clone())?;
@ -4179,7 +4059,7 @@ impl MultiBufferSnapshot {
pub fn excerpt_containing<T: ToOffset>(&self, range: Range<T>) -> Option<MultiBufferExcerpt> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut cursor = self.excerpts.cursor::<usize>(&());
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.seek(&range.start, Bias::Right, &());
let start_excerpt = cursor.item()?;
@ -4204,12 +4084,12 @@ impl MultiBufferSnapshot {
I: IntoIterator<Item = Range<Anchor>> + 'a,
{
let mut ranges = ranges.into_iter().map(|range| range.to_offset(self));
let mut cursor = self.excerpts.cursor::<usize>(&());
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.next(&());
let mut current_range = ranges.next();
iter::from_fn(move || {
let range = current_range.clone()?;
if range.start >= cursor.end(&()) {
if range.start >= cursor.end(&()).0 {
cursor.seek_forward(&range.start, Bias::Right, &());
if range.start == self.len() {
cursor.prev(&());
@ -4217,11 +4097,11 @@ impl MultiBufferSnapshot {
}
let excerpt = cursor.item()?;
let range_start_in_excerpt = cmp::max(range.start, *cursor.start());
let range_start_in_excerpt = cmp::max(range.start, cursor.start().0);
let range_end_in_excerpt = if excerpt.has_trailing_newline {
cmp::min(range.end, cursor.end(&()) - 1)
cmp::min(range.end, cursor.end(&()).0 - 1)
} else {
cmp::min(range.end, cursor.end(&()))
cmp::min(range.end, cursor.end(&()).0)
};
let buffer_range = MultiBufferExcerpt::new(excerpt, *cursor.start())
.map_range_to_buffer(range_start_in_excerpt..range_end_in_excerpt);
@ -4237,7 +4117,7 @@ impl MultiBufferSnapshot {
text_anchor: excerpt.buffer.anchor_after(buffer_range.end),
};
if range.end > cursor.end(&()) {
if range.end > cursor.end(&()).0 {
cursor.next(&());
} else {
current_range = ranges.next();
@ -4256,12 +4136,12 @@ impl MultiBufferSnapshot {
ranges: impl IntoIterator<Item = Range<Anchor>>,
) -> impl Iterator<Item = (ExcerptId, &BufferSnapshot, Range<usize>)> {
let mut ranges = ranges.into_iter().map(|range| range.to_offset(self));
let mut cursor = self.excerpts.cursor::<usize>(&());
let mut cursor = self.excerpts.cursor::<(usize, Point)>(&());
cursor.next(&());
let mut current_range = ranges.next();
iter::from_fn(move || {
let range = current_range.clone()?;
if range.start >= cursor.end(&()) {
if range.start >= cursor.end(&()).0 {
cursor.seek_forward(&range.start, Bias::Right, &());
if range.start == self.len() {
cursor.prev(&());
@ -4269,16 +4149,16 @@ impl MultiBufferSnapshot {
}
let excerpt = cursor.item()?;
let range_start_in_excerpt = cmp::max(range.start, *cursor.start());
let range_start_in_excerpt = cmp::max(range.start, cursor.start().0);
let range_end_in_excerpt = if excerpt.has_trailing_newline {
cmp::min(range.end, cursor.end(&()) - 1)
cmp::min(range.end, cursor.end(&()).0 - 1)
} else {
cmp::min(range.end, cursor.end(&()))
cmp::min(range.end, cursor.end(&()).0)
};
let buffer_range = MultiBufferExcerpt::new(excerpt, *cursor.start())
.map_range_to_buffer(range_start_in_excerpt..range_end_in_excerpt);
if range.end > cursor.end(&()) {
if range.end > cursor.end(&()).0 {
cursor.next(&());
} else {
current_range = ranges.next();
@ -4702,6 +4582,11 @@ impl Excerpt {
self.range.context.start.to_offset(&self.buffer)
}
/// The [`Excerpt`]'s start point in its [`Buffer`]
fn buffer_start_point(&self) -> Point {
self.range.context.start.to_point(&self.buffer)
}
/// The [`Excerpt`]'s end offset in its [`Buffer`]
fn buffer_end_offset(&self) -> usize {
self.buffer_start_offset() + self.text_summary.len
@ -4709,10 +4594,11 @@ impl Excerpt {
}
impl<'a> MultiBufferExcerpt<'a> {
fn new(excerpt: &'a Excerpt, excerpt_offset: usize) -> Self {
fn new(excerpt: &'a Excerpt, (excerpt_offset, excerpt_position): (usize, Point)) -> Self {
MultiBufferExcerpt {
excerpt,
excerpt_offset,
excerpt_position,
}
}
@ -4740,9 +4626,32 @@ impl<'a> MultiBufferExcerpt<'a> {
&self.excerpt.buffer
}
pub fn buffer_range(&self) -> Range<text::Anchor> {
self.excerpt.range.context.clone()
}
pub fn start_offset(&self) -> usize {
self.excerpt_offset
}
pub fn start_point(&self) -> Point {
self.excerpt_position
}
/// Maps an offset within the [`MultiBuffer`] to an offset within the [`Buffer`]
pub fn map_offset_to_buffer(&self, offset: usize) -> usize {
self.excerpt.buffer_start_offset() + offset.saturating_sub(self.excerpt_offset)
self.excerpt.buffer_start_offset()
+ offset
.saturating_sub(self.excerpt_offset)
.min(self.excerpt.text_summary.len)
}
/// Maps a point within the [`MultiBuffer`] to a point within the [`Buffer`]
pub fn map_point_to_buffer(&self, point: Point) -> Point {
self.excerpt.buffer_start_point()
+ point
.saturating_sub(self.excerpt_position)
.min(self.excerpt.text_summary.lines)
}
/// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`]
@ -4752,14 +4661,20 @@ impl<'a> MultiBufferExcerpt<'a> {
/// Map an offset within the [`Buffer`] to an offset within the [`MultiBuffer`]
pub fn map_offset_from_buffer(&self, buffer_offset: usize) -> usize {
let mut buffer_offset_in_excerpt =
buffer_offset.saturating_sub(self.excerpt.buffer_start_offset());
buffer_offset_in_excerpt =
cmp::min(buffer_offset_in_excerpt, self.excerpt.text_summary.len);
let buffer_offset_in_excerpt = buffer_offset
.saturating_sub(self.excerpt.buffer_start_offset())
.min(self.excerpt.text_summary.len);
self.excerpt_offset + buffer_offset_in_excerpt
}
/// Map a point within the [`Buffer`] to a point within the [`MultiBuffer`]
pub fn map_point_from_buffer(&self, buffer_position: Point) -> Point {
let position_in_excerpt = buffer_position.saturating_sub(self.excerpt.buffer_start_point());
let position_in_excerpt =
position_in_excerpt.min(self.excerpt.text_summary.lines + Point::new(1, 0));
self.excerpt_position + position_in_excerpt
}
/// Map a range within the [`Buffer`] to a range within the [`MultiBuffer`]
pub fn map_range_from_buffer(&self, buffer_range: Range<usize>) -> Range<usize> {
self.map_offset_from_buffer(buffer_range.start)
@ -4771,6 +4686,10 @@ impl<'a> MultiBufferExcerpt<'a> {
range.start >= self.excerpt.buffer_start_offset()
&& range.end <= self.excerpt.buffer_end_offset()
}
pub fn max_buffer_row(&self) -> u32 {
self.excerpt.max_buffer_row
}
}
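
Note: the new map_*_to_buffer / map_*_from_buffer helpers above clamp out-of-range coordinates to the excerpt's extent (saturating_sub toward the start, min against the excerpt's text_summary toward the end). A minimal, self-contained sketch of that clamping arithmetic using plain usize offsets rather than Zed's Point/Anchor types (all names below are illustrative, not Zed's API):

// Illustrative stand-in for MultiBufferExcerpt's offset mapping: clamp into the
// excerpt's extent in both directions.
struct ExcerptCoords {
    excerpt_offset: usize,      // excerpt's start offset within the multibuffer
    buffer_start_offset: usize, // excerpt's start offset within its buffer
    len: usize,                 // length of the excerpted text
}

impl ExcerptCoords {
    fn map_offset_to_buffer(&self, offset: usize) -> usize {
        self.buffer_start_offset + offset.saturating_sub(self.excerpt_offset).min(self.len)
    }
    fn map_offset_from_buffer(&self, buffer_offset: usize) -> usize {
        self.excerpt_offset + buffer_offset.saturating_sub(self.buffer_start_offset).min(self.len)
    }
}

fn main() {
    let excerpt = ExcerptCoords { excerpt_offset: 100, buffer_start_offset: 40, len: 20 };
    assert_eq!(excerpt.map_offset_to_buffer(110), 50);
    assert_eq!(excerpt.map_offset_to_buffer(500), 60); // clamped to the excerpt's end
    assert_eq!(excerpt.map_offset_from_buffer(45), 105);
}
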
impl ExcerptId {

View file

@ -8,8 +8,8 @@ use anyhow::{anyhow, Context as _, Result};
use client::Client;
use collections::{hash_map, HashMap, HashSet};
use fs::Fs;
use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt};
use git::blame::Blame;
use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
use git::{blame::Blame, diff::BufferDiff};
use gpui::{
AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Subscription,
Task, WeakModel,
@ -25,7 +25,7 @@ use language::{
use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope};
use smol::channel::Receiver;
use std::{io, ops::Range, path::Path, str::FromStr as _, sync::Arc, time::Instant};
use text::BufferId;
use text::{BufferId, LineEnding, Rope};
use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
@ -33,14 +33,29 @@ use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Work
pub struct BufferStore {
state: BufferStoreState,
#[allow(clippy::type_complexity)]
loading_buffers_by_path: HashMap<
ProjectPath,
postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
>,
loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Model<Buffer>, Arc<anyhow::Error>>>>>,
#[allow(clippy::type_complexity)]
loading_change_sets:
HashMap<BufferId, Shared<Task<Result<Model<BufferChangeSet>, Arc<anyhow::Error>>>>>,
worktree_store: Model<WorktreeStore>,
opened_buffers: HashMap<BufferId, OpenBuffer>,
downstream_client: Option<(AnyProtoClient, u64)>,
shared_buffers: HashMap<proto::PeerId, HashSet<Model<Buffer>>>,
shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
buffer: Model<Buffer>,
unstaged_changes: Option<Model<BufferChangeSet>>,
}
pub struct BufferChangeSet {
pub buffer_id: BufferId,
pub base_text: Option<Model<Buffer>>,
pub diff_to_buffer: git::diff::BufferDiff,
pub recalculate_diff_task: Option<Task<Result<()>>>,
pub diff_updated_futures: Vec<oneshot::Sender<()>>,
pub base_text_version: usize,
}
enum BufferStoreState {
@ -66,7 +81,10 @@ struct LocalBufferStore {
}
enum OpenBuffer {
Buffer(WeakModel<Buffer>),
Complete {
buffer: WeakModel<Buffer>,
unstaged_changes: Option<WeakModel<BufferChangeSet>>,
},
Operations(Vec<Operation>),
}
@ -85,6 +103,23 @@ pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>)
impl EventEmitter<BufferStoreEvent> for BufferStore {}
impl RemoteBufferStore {
fn load_staged_text(
&self,
buffer_id: BufferId,
cx: &AppContext,
) -> Task<Result<Option<String>>> {
let project_id = self.project_id;
let client = self.upstream_client.clone();
cx.background_executor().spawn(async move {
Ok(client
.request(proto::GetStagedText {
project_id,
buffer_id: buffer_id.to_proto(),
})
.await?
.staged_text)
})
}
pub fn wait_for_remote_buffer(
&mut self,
id: BufferId,
@ -352,6 +387,27 @@ impl RemoteBufferStore {
}
impl LocalBufferStore {
fn load_staged_text(
&self,
buffer: &Model<Buffer>,
cx: &AppContext,
) -> Task<Result<Option<String>>> {
let Some(file) = buffer.read(cx).file() else {
return Task::ready(Err(anyhow!("buffer has no file")));
};
let worktree_id = file.worktree_id(cx);
let path = file.path().clone();
let Some(worktree) = self
.worktree_store
.read(cx)
.worktree_for_id(worktree_id, cx)
else {
return Task::ready(Err(anyhow!("no such worktree")));
};
worktree.read(cx).load_staged_file(path.as_ref(), cx)
}
fn save_local_buffer(
&self,
buffer_handle: Model<Buffer>,
@ -463,94 +519,71 @@ impl LocalBufferStore {
) {
debug_assert!(worktree_handle.read(cx).is_local());
        // Identify the loading buffers whose containing repository has changed.
let future_buffers = this
.loading_buffers()
.filter_map(|(project_path, receiver)| {
if project_path.worktree_id != worktree_handle.read(cx).id() {
return None;
}
let path = &project_path.path;
changed_repos
.iter()
.find(|(work_dir, _)| path.starts_with(work_dir))?;
let path = path.clone();
Some(async move {
BufferStore::wait_for_loading_buffer(receiver)
.await
.ok()
.map(|buffer| (buffer, path))
})
})
.collect::<FuturesUnordered<_>>();
// Identify the current buffers whose containing repository has changed.
let current_buffers = this
.buffers()
let buffer_change_sets = this
.opened_buffers
.values()
.filter_map(|buffer| {
let file = File::from_dyn(buffer.read(cx).file())?;
if let OpenBuffer::Complete {
buffer,
unstaged_changes,
} = buffer
{
let buffer = buffer.upgrade()?.read(cx);
let file = File::from_dyn(buffer.file())?;
if file.worktree != worktree_handle {
return None;
}
changed_repos
.iter()
.find(|(work_dir, _)| file.path.starts_with(work_dir))?;
Some((buffer, file.path.clone()))
let unstaged_changes = unstaged_changes.as_ref()?.upgrade()?;
let snapshot = buffer.text_snapshot();
Some((unstaged_changes, snapshot, file.path.clone()))
} else {
None
}
})
.collect::<Vec<_>>();
if future_buffers.len() + current_buffers.len() == 0 {
if buffer_change_sets.is_empty() {
return;
}
cx.spawn(move |this, mut cx| async move {
// Wait for all of the buffers to load.
let future_buffers = future_buffers.collect::<Vec<_>>().await;
// Reload the diff base for every buffer whose containing git repository has changed.
let snapshot =
worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
let diff_bases_by_buffer = cx
.background_executor()
.spawn(async move {
let mut diff_base_tasks = future_buffers
buffer_change_sets
.into_iter()
.flatten()
.chain(current_buffers)
.filter_map(|(buffer, path)| {
.filter_map(|(change_set, buffer_snapshot, path)| {
let (repo_entry, local_repo_entry) = snapshot.repo_for_path(&path)?;
let relative_path = repo_entry.relativize(&snapshot, &path).ok()?;
Some(async move {
let base_text =
local_repo_entry.repo().load_index_text(&relative_path);
Some((buffer, base_text))
let base_text = local_repo_entry.repo().load_index_text(&relative_path);
Some((change_set, buffer_snapshot, base_text))
})
})
.collect::<FuturesUnordered<_>>();
let mut diff_bases = Vec::with_capacity(diff_base_tasks.len());
while let Some(diff_base) = diff_base_tasks.next().await {
if let Some(diff_base) = diff_base {
diff_bases.push(diff_base);
}
}
diff_bases
.collect::<Vec<_>>()
})
.await;
this.update(&mut cx, |this, cx| {
                // Assign the new diff bases to all of the buffers.
for (buffer, diff_base) in diff_bases_by_buffer {
let buffer_id = buffer.update(cx, |buffer, cx| {
buffer.set_diff_base(diff_base.clone(), cx);
buffer.remote_id().to_proto()
for (change_set, buffer_snapshot, staged_text) in diff_bases_by_buffer {
change_set.update(cx, |change_set, cx| {
if let Some(staged_text) = staged_text.clone() {
let _ =
change_set.set_base_text(staged_text, buffer_snapshot.clone(), cx);
} else {
change_set.unset_base_text(buffer_snapshot.clone(), cx);
}
});
if let Some((client, project_id)) = &this.downstream_client.clone() {
client
.send(proto::UpdateDiffBase {
project_id: *project_id,
buffer_id,
diff_base,
buffer_id: buffer_snapshot.remote_id().to_proto(),
staged_text,
})
.log_err();
}
@ -759,12 +792,7 @@ impl LocalBufferStore {
.spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
.await;
cx.insert_model(reservation, |_| {
Buffer::build(
text_buffer,
loaded.diff_base,
Some(loaded.file),
Capability::ReadWrite,
)
Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
})
})
});
@ -777,7 +805,6 @@ impl LocalBufferStore {
let text_buffer = text::Buffer::new(0, buffer_id, "".into());
Buffer::build(
text_buffer,
None,
Some(Arc::new(File {
worktree,
path,
@ -861,11 +888,12 @@ impl BufferStore {
client.add_model_message_handler(Self::handle_buffer_reloaded);
client.add_model_message_handler(Self::handle_buffer_saved);
client.add_model_message_handler(Self::handle_update_buffer_file);
client.add_model_message_handler(Self::handle_update_diff_base);
client.add_model_request_handler(Self::handle_save_buffer);
client.add_model_request_handler(Self::handle_blame_buffer);
client.add_model_request_handler(Self::handle_reload_buffers);
client.add_model_request_handler(Self::handle_get_permalink_to_line);
client.add_model_request_handler(Self::handle_get_staged_text);
client.add_model_message_handler(Self::handle_update_diff_base);
}
/// Creates a buffer store, optionally retaining its buffers.
@ -885,7 +913,8 @@ impl BufferStore {
downstream_client: None,
opened_buffers: Default::default(),
shared_buffers: Default::default(),
loading_buffers_by_path: Default::default(),
loading_buffers: Default::default(),
loading_change_sets: Default::default(),
worktree_store,
}
}
@ -907,7 +936,8 @@ impl BufferStore {
}),
downstream_client: None,
opened_buffers: Default::default(),
loading_buffers_by_path: Default::default(),
loading_buffers: Default::default(),
loading_change_sets: Default::default(),
shared_buffers: Default::default(),
worktree_store,
}
@ -939,11 +969,14 @@ impl BufferStore {
project_path: ProjectPath,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
let existing_buffer = self.get_by_path(&project_path, cx);
if let Some(existing_buffer) = existing_buffer {
return Task::ready(Ok(existing_buffer));
if let Some(buffer) = self.get_by_path(&project_path, cx) {
return Task::ready(Ok(buffer));
}
let task = match self.loading_buffers.entry(project_path.clone()) {
hash_map::Entry::Occupied(e) => e.get().clone(),
hash_map::Entry::Vacant(entry) => {
let path = project_path.path.clone();
let Some(worktree) = self
.worktree_store
.read(cx)
@ -951,43 +984,110 @@ impl BufferStore {
else {
return Task::ready(Err(anyhow!("no such worktree")));
};
let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
// If the given path is already being loaded, then wait for that existing
// task to complete and return the same buffer.
hash_map::Entry::Occupied(e) => e.get().clone(),
// Otherwise, record the fact that this path is now being loaded.
hash_map::Entry::Vacant(entry) => {
let (mut tx, rx) = postage::watch::channel();
entry.insert(rx.clone());
let path = project_path.path.clone();
let load_buffer = match &self.state {
BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
};
entry
.insert(
cx.spawn(move |this, mut cx| async move {
let load_result = load_buffer.await;
*tx.borrow_mut() = Some(this.update(&mut cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
// Record the fact that the buffer is no longer loading.
this.loading_buffers_by_path.remove(&project_path);
let buffer = load_result.map_err(Arc::new)?;
Ok(buffer)
})?);
anyhow::Ok(())
this.loading_buffers.remove(&project_path);
})
.detach();
rx
.ok();
load_result.map_err(Arc::new)
})
.shared(),
)
.clone()
}
};
cx.background_executor().spawn(async move {
Self::wait_for_loading_buffer(loading_watch)
cx.background_executor()
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
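
Note: the Shared task stored in loading_buffers (and in loading_change_sets below) is what deduplicates concurrent opens: the first caller spawns the load, and later callers clone and await the same future. A minimal sketch of that memoization pattern using the futures crate outside of gpui (Loader and LoadFuture are illustrative names, not Zed's types):

use futures::{future::Shared, FutureExt};
use std::{collections::HashMap, future::Future, pin::Pin, sync::Arc};

// Cloneable handle to an in-flight load; errors are wrapped in Arc so the
// shared output stays cheap to clone, mirroring Arc<anyhow::Error> above.
type LoadFuture = Shared<Pin<Box<dyn Future<Output = Result<String, Arc<String>>>>>>;

struct Loader {
    in_flight: HashMap<&'static str, LoadFuture>,
}

impl Loader {
    fn load(&mut self, key: &'static str) -> LoadFuture {
        self.in_flight
            .entry(key)
            .or_insert_with(|| {
                // First caller creates the future; it is stored in shared form.
                let fut = async move { Ok::<String, Arc<String>>(format!("contents of {key}")) };
                fut.boxed_local().shared()
            })
            .clone()
    }
}

fn main() {
    let mut loader = Loader { in_flight: HashMap::new() };
    let first = loader.load("a.txt");
    let second = loader.load("a.txt"); // same Shared future, no duplicate work
    futures::executor::block_on(async {
        assert_eq!(first.await.unwrap(), second.await.unwrap());
    });
}
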
pub fn open_unstaged_changes(
&mut self,
buffer: Model<Buffer>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<BufferChangeSet>>> {
let buffer_id = buffer.read(cx).remote_id();
if let Some(change_set) = self.get_unstaged_changes(buffer_id) {
return Task::ready(Ok(change_set));
}
let task = match self.loading_change_sets.entry(buffer_id) {
hash_map::Entry::Occupied(e) => e.get().clone(),
hash_map::Entry::Vacant(entry) => {
let load = match &self.state {
BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
BufferStoreState::Remote(this) => this.load_staged_text(buffer_id, cx),
};
entry
.insert(
cx.spawn(move |this, cx| async move {
Self::open_unstaged_changes_internal(this, load.await, buffer, cx)
.await
.map_err(|e| e.cloned())
.map_err(Arc::new)
})
.shared(),
)
.clone()
}
};
cx.background_executor()
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
pub async fn open_unstaged_changes_internal(
this: WeakModel<Self>,
text: Result<Option<String>>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Model<BufferChangeSet>> {
let text = match text {
Err(e) => {
this.update(&mut cx, |this, cx| {
let buffer_id = buffer.read(cx).remote_id();
this.loading_change_sets.remove(&buffer_id);
})?;
return Err(e);
}
Ok(text) => text,
};
let change_set = buffer.update(&mut cx, |buffer, cx| {
cx.new_model(|_| BufferChangeSet::new(buffer))
})?;
if let Some(text) = text {
change_set
.update(&mut cx, |change_set, cx| {
let snapshot = buffer.read(cx).text_snapshot();
change_set.set_base_text(text, snapshot, cx)
})?
.await
.ok();
}
this.update(&mut cx, |this, cx| {
let buffer_id = buffer.read(cx).remote_id();
this.loading_change_sets.remove(&buffer_id);
if let Some(OpenBuffer::Complete {
unstaged_changes, ..
}) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
{
*unstaged_changes = Some(change_set.downgrade());
}
})?;
Ok(change_set)
}
pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Model<Buffer>>> {
@ -1166,7 +1266,10 @@ impl BufferStore {
fn add_buffer(&mut self, buffer: Model<Buffer>, cx: &mut ModelContext<Self>) -> Result<()> {
let remote_id = buffer.read(cx).remote_id();
let is_remote = buffer.read(cx).replica_id() != 0;
let open_buffer = OpenBuffer::Buffer(buffer.downgrade());
let open_buffer = OpenBuffer::Complete {
buffer: buffer.downgrade(),
unstaged_changes: None,
};
let handle = cx.handle().downgrade();
buffer.update(cx, move |_, cx| {
@ -1212,15 +1315,11 @@ impl BufferStore {
pub fn loading_buffers(
&self,
) -> impl Iterator<
Item = (
&ProjectPath,
postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
),
> {
self.loading_buffers_by_path
.iter()
.map(|(path, rx)| (path, rx.clone()))
) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Model<Buffer>>>)> {
self.loading_buffers.iter().map(|(path, task)| {
let task = task.clone();
(path, async move { task.await.map_err(|e| anyhow!("{e}")) })
})
}
pub fn get_by_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Model<Buffer>> {
@ -1235,9 +1334,7 @@ impl BufferStore {
}
pub fn get(&self, buffer_id: BufferId) -> Option<Model<Buffer>> {
self.opened_buffers
.get(&buffer_id)
.and_then(|buffer| buffer.upgrade())
self.opened_buffers.get(&buffer_id)?.upgrade()
}
pub fn get_existing(&self, buffer_id: BufferId) -> Result<Model<Buffer>> {
@ -1252,6 +1349,17 @@ impl BufferStore {
})
}
pub fn get_unstaged_changes(&self, buffer_id: BufferId) -> Option<Model<BufferChangeSet>> {
if let OpenBuffer::Complete {
unstaged_changes, ..
} = self.opened_buffers.get(&buffer_id)?
{
unstaged_changes.as_ref()?.upgrade()
} else {
None
}
}
pub fn buffer_version_info(
&self,
cx: &AppContext,
@ -1366,6 +1474,35 @@ impl BufferStore {
rx
}
pub fn recalculate_buffer_diffs(
&mut self,
buffers: Vec<Model<Buffer>>,
cx: &mut ModelContext<Self>,
) -> impl Future<Output = ()> {
let mut futures = Vec::new();
for buffer in buffers {
let buffer = buffer.read(cx).text_snapshot();
if let Some(OpenBuffer::Complete {
unstaged_changes, ..
}) = self.opened_buffers.get_mut(&buffer.remote_id())
{
if let Some(unstaged_changes) = unstaged_changes
.as_ref()
.and_then(|changes| changes.upgrade())
{
unstaged_changes.update(cx, |unstaged_changes, cx| {
futures.push(unstaged_changes.recalculate_diff(buffer.clone(), cx));
});
} else {
unstaged_changes.take();
}
}
}
async move {
futures::future::join_all(futures).await;
}
}
fn on_buffer_event(
&mut self,
buffer: Model<Buffer>,
@ -1413,7 +1550,7 @@ impl BufferStore {
match this.opened_buffers.entry(buffer_id) {
hash_map::Entry::Occupied(mut e) => match e.get_mut() {
OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
OpenBuffer::Buffer(buffer) => {
OpenBuffer::Complete { buffer, .. } => {
if let Some(buffer) = buffer.upgrade() {
buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
}
@ -1449,7 +1586,11 @@ impl BufferStore {
self.shared_buffers
.entry(guest_id)
.or_default()
.insert(buffer.clone());
.entry(buffer_id)
.or_insert_with(|| SharedBuffer {
buffer: buffer.clone(),
unstaged_changes: None,
});
let buffer = buffer.read(cx);
response.buffers.push(proto::BufferVersion {
@ -1469,13 +1610,14 @@ impl BufferStore {
.log_err();
}
client
.send(proto::UpdateDiffBase {
project_id,
buffer_id: buffer_id.into(),
diff_base: buffer.diff_base().map(ToString::to_string),
})
.log_err();
// todo!(max): do something
// client
// .send(proto::UpdateStagedText {
// project_id,
// buffer_id: buffer_id.into(),
// diff_base: buffer.diff_base().map(ToString::to_string),
// })
// .log_err();
client
.send(proto::BufferReloaded {
@ -1579,32 +1721,6 @@ impl BufferStore {
})?
}
pub async fn handle_update_diff_base(
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateDiffBase>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
let buffer_id = envelope.payload.buffer_id;
let buffer_id = BufferId::new(buffer_id)?;
if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
buffer.update(cx, |buffer, cx| {
buffer.set_diff_base(envelope.payload.diff_base.clone(), cx)
});
}
if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
downstream_client
.send(proto::UpdateDiffBase {
project_id: *project_id,
buffer_id: buffer_id.into(),
diff_base: envelope.payload.diff_base,
})
.log_err();
}
Ok(())
})?
}
pub async fn handle_save_buffer(
this: Model<Self>,
envelope: TypedEnvelope<proto::SaveBuffer>,
@ -1654,16 +1770,14 @@ impl BufferStore {
let peer_id = envelope.sender_id;
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
this.update(&mut cx, |this, _| {
if let Some(buffer) = this.get(buffer_id) {
if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
if shared.remove(&buffer) {
if shared.remove(&buffer_id).is_some() {
if shared.is_empty() {
this.shared_buffers.remove(&peer_id);
}
return;
}
}
};
debug_panic!(
"peer_id {} closed buffer_id {} which was either not open or already closed",
peer_id,
@ -1779,18 +1893,66 @@ impl BufferStore {
})
}
pub async fn wait_for_loading_buffer(
mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
loop {
if let Some(result) = receiver.borrow().as_ref() {
match result {
Ok(buffer) => return Ok(buffer.to_owned()),
Err(e) => return Err(e.to_owned()),
pub async fn handle_get_staged_text(
this: Model<Self>,
request: TypedEnvelope<proto::GetStagedText>,
mut cx: AsyncAppContext,
) -> Result<proto::GetStagedTextResponse> {
let buffer_id = BufferId::new(request.payload.buffer_id)?;
let change_set = this
.update(&mut cx, |this, cx| {
let buffer = this.get(buffer_id)?;
Some(this.open_unstaged_changes(buffer, cx))
})?
.ok_or_else(|| anyhow!("no such buffer"))?
.await?;
this.update(&mut cx, |this, _| {
let shared_buffers = this
.shared_buffers
.entry(request.original_sender_id.unwrap_or(request.sender_id))
.or_default();
debug_assert!(shared_buffers.contains_key(&buffer_id));
if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
shared.unstaged_changes = Some(change_set.clone());
}
})?;
let staged_text = change_set.read_with(&cx, |change_set, cx| {
change_set
.base_text
.as_ref()
.map(|buffer| buffer.read(cx).text())
})?;
Ok(proto::GetStagedTextResponse { staged_text })
}
receiver.next().await;
pub async fn handle_update_diff_base(
this: Model<Self>,
request: TypedEnvelope<proto::UpdateDiffBase>,
mut cx: AsyncAppContext,
) -> Result<()> {
let buffer_id = BufferId::new(request.payload.buffer_id)?;
let Some((buffer, change_set)) = this.update(&mut cx, |this, _| {
if let OpenBuffer::Complete {
unstaged_changes,
buffer,
} = this.opened_buffers.get(&buffer_id)?
{
Some((buffer.upgrade()?, unstaged_changes.as_ref()?.upgrade()?))
} else {
None
}
})?
else {
return Ok(());
};
change_set.update(&mut cx, |change_set, cx| {
if let Some(staged_text) = request.payload.staged_text {
let _ = change_set.set_base_text(staged_text, buffer.read(cx).text_snapshot(), cx);
} else {
change_set.unset_base_text(buffer.read(cx).text_snapshot(), cx)
}
})?;
Ok(())
}
pub fn reload_buffers(
@ -1839,14 +2001,17 @@ impl BufferStore {
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let buffer_id = buffer.read(cx).remote_id();
if !self
.shared_buffers
.entry(peer_id)
.or_default()
.insert(buffer.clone())
{
let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
if shared_buffers.contains_key(&buffer_id) {
return Task::ready(Ok(()));
}
shared_buffers.insert(
buffer_id,
SharedBuffer {
buffer: buffer.clone(),
unstaged_changes: None,
},
);
let Some((client, project_id)) = self.downstream_client.clone() else {
return Task::ready(Ok(()));
@ -1909,8 +2074,8 @@ impl BufferStore {
}
}
pub fn shared_buffers(&self) -> &HashMap<proto::PeerId, HashSet<Model<Buffer>>> {
&self.shared_buffers
pub fn has_shared_buffers(&self) -> bool {
!self.shared_buffers.is_empty()
}
pub fn create_local_buffer(
@ -1998,10 +2163,129 @@ impl BufferStore {
}
}
impl BufferChangeSet {
pub fn new(buffer: &text::BufferSnapshot) -> Self {
Self {
buffer_id: buffer.remote_id(),
base_text: None,
diff_to_buffer: git::diff::BufferDiff::new(buffer),
recalculate_diff_task: None,
diff_updated_futures: Vec::new(),
base_text_version: 0,
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn new_with_base_text(
base_text: String,
buffer: text::BufferSnapshot,
cx: &mut ModelContext<Self>,
) -> Self {
let mut this = Self::new(&buffer);
let _ = this.set_base_text(base_text, buffer, cx);
this
}
pub fn diff_hunks_intersecting_range<'a>(
&'a self,
range: Range<text::Anchor>,
buffer_snapshot: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk> {
self.diff_to_buffer
.hunks_intersecting_range(range, buffer_snapshot)
}
pub fn diff_hunks_intersecting_range_rev<'a>(
&'a self,
range: Range<text::Anchor>,
buffer_snapshot: &'a text::BufferSnapshot,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk> {
self.diff_to_buffer
.hunks_intersecting_range_rev(range, buffer_snapshot)
}
#[cfg(any(test, feature = "test-support"))]
pub fn base_text_string(&self, cx: &AppContext) -> Option<String> {
self.base_text.as_ref().map(|buffer| buffer.read(cx).text())
}
pub fn set_base_text(
&mut self,
mut base_text: String,
buffer_snapshot: text::BufferSnapshot,
cx: &mut ModelContext<Self>,
) -> oneshot::Receiver<()> {
LineEnding::normalize(&mut base_text);
self.recalculate_diff_internal(base_text, buffer_snapshot, true, cx)
}
pub fn unset_base_text(
&mut self,
buffer_snapshot: text::BufferSnapshot,
cx: &mut ModelContext<Self>,
) {
if self.base_text.is_some() {
self.base_text = None;
self.diff_to_buffer = BufferDiff::new(&buffer_snapshot);
self.recalculate_diff_task.take();
self.base_text_version += 1;
cx.notify();
}
}
pub fn recalculate_diff(
&mut self,
buffer_snapshot: text::BufferSnapshot,
cx: &mut ModelContext<Self>,
) -> oneshot::Receiver<()> {
if let Some(base_text) = self.base_text.clone() {
self.recalculate_diff_internal(base_text.read(cx).text(), buffer_snapshot, false, cx)
} else {
oneshot::channel().1
}
}
fn recalculate_diff_internal(
&mut self,
base_text: String,
buffer_snapshot: text::BufferSnapshot,
base_text_changed: bool,
cx: &mut ModelContext<Self>,
) -> oneshot::Receiver<()> {
let (tx, rx) = oneshot::channel();
self.diff_updated_futures.push(tx);
self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
let (base_text, diff) = cx
.background_executor()
.spawn(async move {
let diff = BufferDiff::build(&base_text, &buffer_snapshot).await;
(base_text, diff)
})
.await;
this.update(&mut cx, |this, cx| {
if base_text_changed {
this.base_text_version += 1;
this.base_text = Some(cx.new_model(|cx| {
Buffer::local_normalized(Rope::from(base_text), LineEnding::default(), cx)
}));
}
this.diff_to_buffer = diff;
this.recalculate_diff_task.take();
for tx in this.diff_updated_futures.drain(..) {
tx.send(()).ok();
}
cx.notify();
})?;
Ok(())
}));
rx
}
}
impl OpenBuffer {
fn upgrade(&self) -> Option<Model<Buffer>> {
match self {
OpenBuffer::Buffer(handle) => handle.upgrade(),
OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
OpenBuffer::Operations(_) => None,
}
}
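
Note: set_base_text and recalculate_diff both return a oneshot receiver that resolves once the background diff task lands, so callers can await the recalculation instead of polling. A small, self-contained sketch of that notification pattern, assuming only the futures crate (DiffState and its methods are illustrative names, not Zed's types):

use futures::channel::oneshot;

// Illustrative stand-in for BufferChangeSet's diff_updated_futures bookkeeping.
struct DiffState {
    diff_updated_futures: Vec<oneshot::Sender<()>>,
}

impl DiffState {
    // Each request gets its own receiver; all of them resolve together.
    fn request_recalculation(&mut self) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);
        rx
    }

    // Called when the background diff computation finishes.
    fn finish_recalculation(&mut self) {
        for tx in self.diff_updated_futures.drain(..) {
            tx.send(()).ok();
        }
    }
}

fn main() {
    let mut state = DiffState { diff_updated_futures: Vec::new() };
    let rx1 = state.request_recalculation();
    let rx2 = state.request_recalculation();
    state.finish_recalculation();
    futures::executor::block_on(async {
        assert!(rx1.await.is_ok());
        assert!(rx2.await.is_ok());
    });
}
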

View file

@ -25,7 +25,7 @@ pub mod search_history;
mod yarn;
use anyhow::{anyhow, Context as _, Result};
use buffer_store::{BufferStore, BufferStoreEvent};
use buffer_store::{BufferChangeSet, BufferStore, BufferStoreEvent};
use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{BTreeSet, HashMap, HashSet};
@ -1821,6 +1821,20 @@ impl Project {
})
}
pub fn open_unstaged_changes(
&mut self,
buffer: Model<Buffer>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<BufferChangeSet>>> {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_unstaged_changes(buffer, cx)
})
}
pub fn open_buffer_by_id(
&mut self,
id: BufferId,
@ -2269,10 +2283,7 @@ impl Project {
event: &BufferEvent,
cx: &mut ModelContext<Self>,
) -> Option<()> {
if matches!(
event,
BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
) {
if matches!(event, BufferEvent::Edited { .. } | BufferEvent::Reloaded) {
self.request_buffer_diff_recalculation(&buffer, cx);
}
@ -2369,34 +2380,32 @@ impl Project {
}
fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
cx.spawn(move |this, mut cx| async move {
let tasks: Vec<_> = buffers
.iter()
.filter_map(|buffer| {
let buffer = buffer.upgrade()?;
buffer
.update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx))
.ok()
.flatten()
})
.collect();
futures::future::join_all(tasks).await;
this.update(&mut cx, |this, cx| {
if this.buffers_needing_diff.is_empty() {
// TODO: Would a `ModelContext<Project>.notify()` suffice here?
for buffer in buffers {
if let Some(buffer) = buffer.upgrade() {
buffer.update(cx, |_, cx| cx.notify());
}
}
loop {
let task = this
.update(&mut cx, |this, cx| {
let buffers = this
.buffers_needing_diff
.drain()
.filter_map(|buffer| buffer.upgrade())
.collect::<Vec<_>>();
if buffers.is_empty() {
None
} else {
this.recalculate_buffer_diffs(cx).detach();
Some(this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.recalculate_buffer_diffs(buffers, cx)
}))
}
})
.ok();
.ok()
.flatten();
if let Some(task) = task {
task.await;
} else {
break;
}
}
})
}
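
Note: recalculate_buffer_diffs now drains buffers_needing_diff in batches, looping until no buffer was re-dirtied while the previous batch was in flight. A synchronous stand-in for the shape of that loop (illustrative only, not Zed's code):

use std::collections::HashSet;

fn main() {
    let mut dirty: HashSet<u64> = [1, 2, 3].into_iter().collect();
    let mut passes = 0;
    loop {
        // Take the current batch of dirty buffers; stop once nothing is left.
        let batch: Vec<u64> = dirty.drain().collect();
        if batch.is_empty() {
            break;
        }
        passes += 1;
        // Recalculate diffs for `batch` here. Anything edited in the meantime
        // re-populates `dirty` and is picked up on the next pass.
        if passes == 1 {
            dirty.insert(4); // simulate a buffer edited while the first batch ran
        }
    }
    assert_eq!(passes, 2);
}
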
@ -4149,6 +4158,10 @@ impl Project {
.read(cx)
.language_servers_for_buffer(buffer, cx)
}
pub fn buffer_store(&self) -> &Model<BufferStore> {
&self.buffer_store
}
}
fn deserialize_code_actions(code_actions: &HashMap<String, bool>) -> Vec<lsp::CodeActionKind> {

View file

@ -1,6 +1,7 @@
use crate::{Event, *};
use fs::FakeFs;
use futures::{future, StreamExt};
use git::diff::assert_hunks;
use gpui::{AppContext, SemanticVersion, UpdateGlobal};
use http_client::Url;
use language::{
@ -5396,6 +5397,98 @@ async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
});
}
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
init_test(cx);
let staged_contents = r#"
fn main() {
println!("hello world");
}
"#
.unindent();
let file_contents = r#"
// print goodbye
fn main() {
println!("goodbye world");
}
"#
.unindent();
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
"src": {
"main.rs": file_contents,
}
}),
)
.await;
fs.set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("src/main.rs"), staged_contents)],
);
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/dir/src/main.rs", cx)
})
.await
.unwrap();
let unstaged_changes = project
.update(cx, |project, cx| {
project.open_unstaged_changes(buffer.clone(), cx)
})
.await
.unwrap();
cx.run_until_parked();
unstaged_changes.update(cx, |unstaged_changes, cx| {
let snapshot = buffer.read(cx).snapshot();
assert_hunks(
unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot,
&unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
&[
(0..1, "", "// print goodbye\n"),
(
2..3,
" println!(\"hello world\");\n",
" println!(\"goodbye world\");\n",
),
],
);
});
let staged_contents = r#"
// print goodbye
fn main() {
}
"#
.unindent();
fs.set_index_for_repo(
Path::new("/dir/.git"),
&[(Path::new("src/main.rs"), staged_contents)],
);
cx.run_until_parked();
unstaged_changes.update(cx, |unstaged_changes, cx| {
let snapshot = buffer.read(cx).snapshot();
assert_hunks(
unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot,
&unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
&[(2..3, "", " println!(\"goodbye world\");\n")],
);
});
}
async fn search(
project: &Model<Project>,
query: SearchQuery,

View file

@ -301,7 +301,10 @@ message Envelope {
SyncExtensions sync_extensions = 285;
SyncExtensionsResponse sync_extensions_response = 286;
InstallExtension install_extension = 287; // current max
InstallExtension install_extension = 287;
GetStagedText get_staged_text = 288;
GetStagedTextResponse get_staged_text_response = 289; // current max
}
reserved 87 to 88;
@ -1788,11 +1791,12 @@ message BufferState {
uint64 id = 1;
optional File file = 2;
string base_text = 3;
optional string diff_base = 4;
LineEnding line_ending = 5;
repeated VectorClockEntry saved_version = 6;
reserved 7;
Timestamp saved_mtime = 8;
reserved 7;
reserved 4;
}
message BufferChunk {
@ -1983,7 +1987,16 @@ message WorktreeMetadata {
message UpdateDiffBase {
uint64 project_id = 1;
uint64 buffer_id = 2;
optional string diff_base = 3;
optional string staged_text = 3;
}
message GetStagedText {
uint64 project_id = 1;
uint64 buffer_id = 2;
}
message GetStagedTextResponse {
optional string staged_text = 1;
}
message GetNotifications {

View file

@ -216,6 +216,8 @@ messages!(
(GetImplementationResponse, Background),
(GetLlmToken, Background),
(GetLlmTokenResponse, Background),
(GetStagedText, Foreground),
(GetStagedTextResponse, Foreground),
(GetUsers, Foreground),
(Hello, Foreground),
(IncomingCall, Foreground),
@ -411,6 +413,7 @@ request_messages!(
(GetProjectSymbols, GetProjectSymbolsResponse),
(GetReferences, GetReferencesResponse),
(GetSignatureHelp, GetSignatureHelpResponse),
(GetStagedText, GetStagedTextResponse),
(GetSupermavenApiKey, GetSupermavenApiKeyResponse),
(GetTypeDefinition, GetTypeDefinitionResponse),
(LinkedEditingRange, LinkedEditingRangeResponse),
@ -525,6 +528,7 @@ entity_messages!(
GetProjectSymbols,
GetReferences,
GetSignatureHelp,
GetStagedText,
GetTypeDefinition,
InlayHints,
JoinProject,

View file

@ -78,13 +78,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
})
.await
.unwrap();
let change_set = project
.update(cx, |project, cx| {
project.open_unstaged_changes(buffer.clone(), cx)
})
.await
.unwrap();
change_set.update(cx, |change_set, cx| {
assert_eq!(
change_set.base_text_string(cx).unwrap(),
"fn one() -> usize { 0 }"
);
});
buffer.update(cx, |buffer, cx| {
assert_eq!(buffer.text(), "fn one() -> usize { 1 }");
assert_eq!(
buffer.diff_base().unwrap().to_string(),
"fn one() -> usize { 0 }"
);
let ix = buffer.text().find('1').unwrap();
buffer.edit([(ix..ix + 1, "100")], None, cx);
});
@ -140,9 +149,9 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
&[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
);
cx.executor().run_until_parked();
buffer.update(cx, |buffer, _| {
change_set.update(cx, |change_set, cx| {
assert_eq!(
buffer.diff_base().unwrap().to_string(),
change_set.base_text_string(cx).unwrap(),
"fn one() -> usize { 100 }"
);
});
@ -213,7 +222,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes
    // test that the headless server is correctly tracking which buffers we have open.
cx.run_until_parked();
headless.update(server_cx, |headless, cx| {
assert!(!headless.buffer_store.read(cx).shared_buffers().is_empty())
assert!(headless.buffer_store.read(cx).has_shared_buffers())
});
do_search(&project, cx.clone()).await;
@ -222,7 +231,7 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes
});
cx.run_until_parked();
headless.update(server_cx, |headless, cx| {
assert!(headless.buffer_store.read(cx).shared_buffers().is_empty())
assert!(!headless.buffer_store.read(cx).has_shared_buffers())
});
do_search(&project, cx.clone()).await;

View file

@ -104,7 +104,6 @@ pub enum CreatedEntry {
pub struct LoadedFile {
pub file: Arc<File>,
pub text: String,
pub diff_base: Option<String>,
}
pub struct LoadedBinaryFile {
@ -707,6 +706,30 @@ impl Worktree {
}
}
pub fn load_staged_file(&self, path: &Path, cx: &AppContext) -> Task<Result<Option<String>>> {
match self {
Worktree::Local(this) => {
let path = Arc::from(path);
let snapshot = this.snapshot();
cx.background_executor().spawn(async move {
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&*repo.work_directory)
{
return Ok(git_repo.repo_ptr.load_index_text(&repo_path));
}
}
}
Ok(None)
})
}
Worktree::Remote(_) => {
Task::ready(Err(anyhow!("remote worktrees can't yet load staged files")))
}
}
}
pub fn load_binary_file(
&self,
path: &Path,
@ -1362,28 +1385,9 @@ impl LocalWorktree {
let entry = self.refresh_entry(path.clone(), None, cx);
let is_private = self.is_path_private(path.as_ref());
cx.spawn(|this, mut cx| async move {
cx.spawn(|this, _cx| async move {
let abs_path = abs_path?;
let text = fs.load(&abs_path).await?;
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let git_repo = git_repo.repo_ptr.clone();
index_task = Some(
cx.background_executor()
.spawn(async move { git_repo.load_index_text(&repo_path) }),
);
}
}
}
let diff_base = if let Some(index_task) = index_task {
index_task.await
} else {
None
};
let worktree = this
.upgrade()
@ -1413,11 +1417,7 @@ impl LocalWorktree {
}
};
Ok(LoadedFile {
file,
text,
diff_base,
})
Ok(LoadedFile { file, text })
})
}
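
Note: load_staged_file delegates to the repository's load_index_text, i.e. it reads the staged copy of the file out of the git index. For orientation, a minimal sketch of what that lookup amounts to using the git2 crate directly (the function name, anyhow error handling, and the paths in main are assumptions for illustration, not Zed's git wrapper):

use std::path::Path;

fn load_index_text(repo_dir: &Path, file: &Path) -> anyhow::Result<Option<String>> {
    let repo = git2::Repository::open(repo_dir)?;
    let index = repo.index()?;
    // Stage 0 is the normal (non-conflicted) index entry for the path.
    let Some(entry) = index.get_path(file, 0) else {
        return Ok(None); // untracked or not present in the index
    };
    let blob = repo.find_blob(entry.id)?;
    Ok(Some(String::from_utf8_lossy(blob.content()).into_owned()))
}

fn main() -> anyhow::Result<()> {
    if let Some(text) = load_index_text(Path::new("."), Path::new("src/main.rs"))? {
        println!("{} bytes staged", text.len());
    }
    Ok(())
}
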