From 5704b50fb1a147b6f0564b3250f8cc50a1de6507 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Tue, 4 Feb 2025 15:29:10 -0500 Subject: [PATCH] git: Compute and synchronize diffs from HEAD (#23626) This PR builds on #21258 to make it possible to use HEAD as a diff base. The buffer store is extended to support holding multiple change sets, and collab gains support for synchronizing the committed text of files when any collaborator requires it. Not implemented in this PR: - Exposing the diff from HEAD to the user - Decorating the diff from HEAD with information about which hunks are staged `test_random_multibuffer` now fails first at `SEED=13277`, similar to the previous high-water mark, but with various bugs in the multibuffer logic now shaken out. Release Notes: - N/A --------- Co-authored-by: Max Co-authored-by: Ben Co-authored-by: Max Brunsfeld Co-authored-by: Conrad Irwin Co-authored-by: Conrad --- crates/collab/src/rpc.rs | 5 +- crates/collab/src/tests/editor_tests.rs | 7 +- crates/collab/src/tests/integration_tests.rs | 134 +- .../random_project_collaboration_tests.rs | 8 +- crates/diagnostics/src/items.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 1 + crates/editor/src/editor.rs | 7 +- crates/editor/src/editor_tests.rs | 45 +- crates/editor/src/element.rs | 2 +- crates/editor/src/git/blame.rs | 8 +- crates/editor/src/git/project_diff.rs | 1296 +++++++++++++++++ crates/editor/src/hover_popover.rs | 3 +- crates/editor/src/proposed_changes_editor.rs | 8 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/fs/src/fs.rs | 29 +- crates/git/src/diff.rs | 76 +- crates/git/src/repository.rs | 52 +- crates/language/src/buffer.rs | 28 + crates/multi_buffer/src/multi_buffer.rs | 215 ++- crates/multi_buffer/src/multi_buffer_tests.rs | 150 +- crates/project/src/buffer_store.rs | 1103 ++++++++++---- crates/project/src/project.rs | 14 + crates/project/src/project_tests.rs | 106 +- crates/proto/proto/zed.proto | 49 +- crates/proto/src/proto.rs | 16 +- .../remote_server/src/remote_editing_tests.rs | 4 +- crates/rope/src/rope.rs | 4 + crates/sum_tree/src/sum_tree.rs | 4 + crates/worktree/src/worktree.rs | 24 + 29 files changed, 2799 insertions(+), 603 deletions(-) create mode 100644 crates/editor/src/git/project_diff.rs diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 6b6d1d9749..eea17d45fb 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -309,7 +309,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) - .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler( forward_mutating_project_request::, ) @@ -348,7 +349,7 @@ impl Server { .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(broadcast_project_message_from_host::) - .add_message_handler(broadcast_project_message_from_host::) + .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(get_users) .add_request_handler(fuzzy_search_users) .add_request_handler(request_contact) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index e35113c0c0..a094d9cd8c 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ 
-1991,10 +1991,9 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA .collect(), remote_url: Some("git@github.com:zed-industries/zed.git".to_string()), }; - client_a.fs().set_blame_for_repo( - Path::new("/my-repo/.git"), - vec![(Path::new("file.txt"), blame)], - ); + client_a + .fs() + .set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]); let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await; let project_id = active_call_a diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 3b691a2173..a512a9f10c 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -2558,13 +2558,27 @@ async fn test_git_diff_base_change( let project_remote = client_b.join_remote_project(project_id, cx_b).await; - let diff_base = " + let staged_text = " one three " .unindent(); - let new_diff_base = " + let committed_text = " + one + TWO + three + " + .unindent(); + + let new_committed_text = " + one + TWO_HUNDRED + three + " + .unindent(); + + let new_staged_text = " one two " @@ -2572,7 +2586,11 @@ async fn test_git_diff_base_change( client_a.fs().set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("a.txt"), diff_base.clone())], + &[("a.txt".into(), staged_text.clone())], + ); + client_a.fs().set_head_for_repo( + Path::new("/dir/.git"), + &[("a.txt".into(), committed_text.clone())], ); // Create the buffer @@ -2580,7 +2598,7 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let change_set_local_a = project_local + let local_unstaged_changes_a = project_local .update(cx_a, |p, cx| { p.open_unstaged_changes(buffer_local_a.clone(), cx) }) @@ -2589,16 +2607,16 @@ async fn test_git_diff_base_change( // Wait for it to catch up to the new diff executor.run_until_parked(); - change_set_local_a.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2608,7 +2626,7 @@ async fn test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) .await .unwrap(); - let change_set_remote_a = project_remote + let remote_unstaged_changes_a = project_remote .update(cx_b, |p, cx| { p.open_unstaged_changes(buffer_remote_a.clone(), cx) }) @@ -2617,64 +2635,104 @@ async fn test_git_diff_base_change( // Wait remote buffer to catch up to the new diff executor.run_until_parked(); - change_set_remote_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); - // Update the staged text of the open buffer + // Open uncommitted changes on the guest, without opening them on the host first + let remote_uncommitted_changes_a = project_remote + .update(cx_b, |p, cx| { + 
p.open_uncommitted_changes(buffer_remote_a.clone(), cx) + }) + .await + .unwrap(); + executor.run_until_parked(); + remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + let buffer = buffer_remote_a.read(cx); + assert_eq!( + change_set.base_text_string().as_deref(), + Some(committed_text.as_str()) + ); + git::diff::assert_hunks( + change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + buffer, + &change_set.base_text_string().unwrap(), + &[(1..2, "TWO\n", "two\n")], + ); + }); + + // Update the index text of the open buffer client_a.fs().set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("a.txt"), new_diff_base.clone())], + &[("a.txt".into(), new_staged_text.clone())], + ); + client_a.fs().set_head_for_repo( + Path::new("/dir/.git"), + &[("a.txt".into(), new_committed_text.clone())], ); // Wait for buffer_local_a to receive it executor.run_until_parked(); - change_set_local_a.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_a.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &change_set.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); - change_set_remote_a.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_a.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_a.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &change_set.base_text_string().unwrap(), &[(2..3, "", "three\n")], ); }); + remote_uncommitted_changes_a.read_with(cx_b, |change_set, cx| { + let buffer = buffer_remote_a.read(cx); + assert_eq!( + change_set.base_text_string().as_deref(), + Some(new_committed_text.as_str()) + ); + git::diff::assert_hunks( + change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), + buffer, + &change_set.base_text_string().unwrap(), + &[(1..2, "TWO_HUNDRED\n", "two\n")], + ); + }); + // Nested git dir - let diff_base = " + let staged_text = " one three " .unindent(); - let new_diff_base = " + let new_staged_text = " one two " @@ -2682,7 +2740,7 @@ async fn test_git_diff_base_change( client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[(Path::new("b.txt"), diff_base.clone())], + &[("b.txt".into(), staged_text.clone())], ); // Create the buffer @@ -2690,7 +2748,7 @@ async fn test_git_diff_base_change( .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let change_set_local_b = project_local + let local_unstaged_changes_b = project_local .update(cx_a, |p, cx| { p.open_unstaged_changes(buffer_local_b.clone(), cx) }) @@ -2699,16 +2757,16 @@ async fn test_git_diff_base_change( // Wait for it to catch up to the new diff executor.run_until_parked(); - change_set_local_b.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &change_set.base_text_string().unwrap(), &[(1..2, "", "two\n")], ); }); @@ -2718,7 +2776,7 @@ async fn 
test_git_diff_base_change( .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) .await .unwrap(); - let change_set_remote_b = project_remote + let remote_unstaged_changes_b = project_remote .update(cx_b, |p, cx| { p.open_unstaged_changes(buffer_remote_b.clone(), cx) }) @@ -2726,52 +2784,52 @@ async fn test_git_diff_base_change( .unwrap(); executor.run_until_parked(); - change_set_remote_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(diff_base.as_str()) + Some(staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &diff_base, + &staged_text, &[(1..2, "", "two\n")], ); }); - // Update the staged text + // Updatet the staged text client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[(Path::new("b.txt"), new_diff_base.clone())], + &[("b.txt".into(), new_staged_text.clone())], ); // Wait for buffer_local_b to receive it executor.run_until_parked(); - change_set_local_b.read_with(cx_a, |change_set, cx| { + local_unstaged_changes_b.read_with(cx_a, |change_set, cx| { let buffer = buffer_local_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &new_staged_text, &[(2..3, "", "three\n")], ); }); - change_set_remote_b.read_with(cx_b, |change_set, cx| { + remote_unstaged_changes_b.read_with(cx_b, |change_set, cx| { let buffer = buffer_remote_b.read(cx); assert_eq!( change_set.base_text_string().as_deref(), - Some(new_diff_base.as_str()) + Some(new_staged_text.as_str()) ); git::diff::assert_hunks( change_set.diff_to_buffer.hunks_in_row_range(0..4, buffer), buffer, - &new_diff_base, + &new_staged_text, &[(2..3, "", "three\n")], ); }); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index b250473b61..e4d1ae79a5 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -953,8 +953,8 @@ impl RandomizedTest for ProjectCollaborationTest { let dot_git_dir = repo_path.join(".git"); let contents = contents - .iter() - .map(|(path, contents)| (path.as_path(), contents.clone())) + .into_iter() + .map(|(path, contents)| (path.into(), contents)) .collect::>(); if client.fs().metadata(&dot_git_dir).await?.is_none() { client.fs().create_dir(&dot_git_dir).await?; @@ -1339,7 +1339,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(host_buffer.read(cx).remote_id()) + .get_unstaged_changes(host_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() @@ -1348,7 +1348,7 @@ impl RandomizedTest for ProjectCollaborationTest { project .buffer_store() .read(cx) - .get_unstaged_changes(guest_buffer.read(cx).remote_id()) + .get_unstaged_changes(guest_buffer.read(cx).remote_id(), cx) .unwrap() .read(cx) .base_text_string() diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 2fa593e6ea..017adc5017 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -157,7 +157,7 @@ impl DiagnosticIndicator { (buffer, cursor_position) }); let new_diagnostic = buffer - .diagnostics_in_range::<_, 
usize>(cursor_position..cursor_position) + .diagnostics_in_range::(cursor_position..cursor_position) .filter(|entry| !entry.range.is_empty()) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) .map(|entry| entry.diagnostic); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index c1510cd231..6b00ab7db0 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -979,6 +979,7 @@ impl<'a> Iterator for WrapRows<'a> { Some(if soft_wrapped { RowInfo { + buffer_id: None, buffer_row: None, multibuffer_row: None, diff_status, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0468770e0e..63f0068eda 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10137,12 +10137,12 @@ impl Editor { let mut diagnostics; if direction == Direction::Prev { diagnostics = buffer - .diagnostics_in_range::<_, usize>(0..search_start) + .diagnostics_in_range::(0..search_start) .collect::>(); diagnostics.reverse(); } else { diagnostics = buffer - .diagnostics_in_range::<_, usize>(search_start..buffer.len()) + .diagnostics_in_range::(search_start..buffer.len()) .collect::>(); }; let group = diagnostics @@ -11333,8 +11333,9 @@ impl Editor { if let Some(active_diagnostics) = self.active_diagnostics.as_mut() { let buffer = self.buffer.read(cx).snapshot(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); + let primary_range_end = active_diagnostics.primary_range.end.to_offset(&buffer); let is_valid = buffer - .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) + .diagnostics_in_range::(primary_range_start..primary_range_end) .any(|entry| { entry.diagnostic.is_primary && !entry.range.is_empty() diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 2ce221fbda..35942cce25 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -12431,8 +12431,8 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { (buffer_2.clone(), base_text_2), (buffer_3.clone(), base_text_3), ] { - let change_set = cx - .new(|cx| BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); @@ -13125,9 +13125,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) (buffer_2.clone(), file_2_old), (buffer_3.clone(), file_3_old), ] { - let change_set = cx.new(|cx| { - BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx) - }); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&diff_base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)); @@ -13212,7 +13211,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext init_test(cx, |_| {}); let base = "aaa\nbbb\nccc\nddd\neee\nfff\nggg\n"; - let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\n"; + let text = "aaa\nBBB\nBB2\nccc\nDDD\nEEE\nfff\nggg\nhhh\niii\n"; let buffer = cx.new(|cx| Buffer::local(text.to_string(), cx)); let multi_buffer = cx.new(|cx| { @@ -13225,7 +13224,11 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext primary: None, }, ExcerptRange { - context: Point::new(5, 0)..Point::new(7, 0), + context: Point::new(4, 0)..Point::new(7, 0), + 
primary: None, + }, + ExcerptRange { + context: Point::new(9, 0)..Point::new(10, 0), primary: None, }, ], @@ -13239,8 +13242,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext }); editor .update(cx, |editor, _window, cx| { - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base, &buffer, cx)); editor .buffer .update(cx, |buffer, cx| buffer.add_change_set(change_set, cx)) @@ -13255,14 +13257,22 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext }); cx.executor().run_until_parked(); + // When the start of a hunk coincides with the start of its excerpt, + // the hunk is expanded. When the start of a a hunk is earlier than + // the start of its excerpt, the hunk is not expanded. cx.assert_state_with_diff( " ˇaaa - bbb + BBB + - ddd + - eee + + DDD + EEE fff + + iii " .unindent(), ); @@ -13500,8 +13510,8 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) { cx.set_state(indoc! { " one - TWO - ˇthree + ˇTWO + three four five "}); @@ -13514,15 +13524,14 @@ async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) { indoc! { " one - two - + TWO - ˇthree + + ˇTWO + three four five "} .to_string(), ); cx.update_editor(|editor, window, cx| { - editor.move_up(&Default::default(), window, cx); editor.move_up(&Default::default(), window, cx); editor.toggle_selected_diff_hunks(&Default::default(), window, cx); }); @@ -14402,12 +14411,8 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContex editor.buffer().update(cx, |multibuffer, cx| { let buffer = multibuffer.as_singleton().unwrap(); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = - change_set.set_base_text(base_text.into(), buffer.read(cx).text_snapshot(), cx); - change_set - }); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); multibuffer.set_all_diff_hunks_expanded(cx); multibuffer.add_change_set(change_set, cx); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index bf3d9ab78d..632c81c6b1 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -5295,7 +5295,7 @@ impl EditorElement { if scrollbar_settings.diagnostics != ScrollbarDiagnostics::None { let diagnostics = snapshot .buffer_snapshot - .diagnostics_in_range::<_, Point>(Point::zero()..max_point) + .diagnostics_in_range::(Point::zero()..max_point) // Don't show diagnostics the user doesn't care about .filter(|diagnostic| { match ( diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index b3680c891f..d9c4926d33 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -697,7 +697,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: vec![ blame_entry("1b1b1b", 0..1), @@ -809,7 +809,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: vec![blame_entry("1b1b1b", 0..4)], ..Default::default() @@ -958,7 +958,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + "file.txt".into(), Blame { entries: blame_entries, ..Default::default() @@ -1000,7 +1000,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - Path::new("file.txt"), + 
"file.txt".into(), Blame { entries: blame_entries, ..Default::default() diff --git a/crates/editor/src/git/project_diff.rs b/crates/editor/src/git/project_diff.rs new file mode 100644 index 0000000000..8420aa9980 --- /dev/null +++ b/crates/editor/src/git/project_diff.rs @@ -0,0 +1,1296 @@ +use std::{ + any::{Any, TypeId}, + cmp::Ordering, + collections::HashSet, + ops::Range, + time::Duration, +}; + +use anyhow::{anyhow, Context as _}; +use collections::{BTreeMap, HashMap}; +use feature_flags::FeatureFlagAppExt; +use git::diff::{BufferDiff, DiffHunk}; +use gpui::{ + actions, AnyElement, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, + InteractiveElement, Render, Subscription, Task, WeakEntity, +}; +use language::{Buffer, BufferRow}; +use multi_buffer::{ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer}; +use project::{Project, ProjectEntryId, ProjectPath, WorktreeId}; +use text::{OffsetRangeExt, ToPoint}; +use theme::ActiveTheme; +use ui::prelude::*; +use util::{paths::compare_paths, ResultExt}; +use workspace::{ + item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams}, + ItemNavHistory, ToolbarItemLocation, Workspace, +}; + +use crate::{Editor, EditorEvent, DEFAULT_MULTIBUFFER_CONTEXT}; + +actions!(project_diff, [Deploy]); + +pub fn init(cx: &mut App) { + cx.observe_new(ProjectDiffEditor::register).detach(); +} + +const UPDATE_DEBOUNCE: Duration = Duration::from_millis(50); + +struct ProjectDiffEditor { + buffer_changes: BTreeMap>, + entry_order: HashMap>, + excerpts: Entity, + editor: Entity, + + project: Entity, + workspace: WeakEntity, + focus_handle: FocusHandle, + worktree_rescans: HashMap>, + _subscriptions: Vec, +} + +#[derive(Debug)] +struct Changes { + buffer: Entity, + hunks: Vec, +} + +impl ProjectDiffEditor { + fn register( + workspace: &mut Workspace, + _window: Option<&mut Window>, + _: &mut Context, + ) { + workspace.register_action(Self::deploy); + } + + fn deploy( + workspace: &mut Workspace, + _: &Deploy, + window: &mut Window, + cx: &mut Context, + ) { + if !cx.is_staff() { + return; + } + + if let Some(existing) = workspace.item_of_type::(cx) { + workspace.activate_item(&existing, true, true, window, cx); + } else { + let workspace_handle = cx.entity().downgrade(); + let project_diff = + cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx)); + workspace.add_item_to_active_pane(Box::new(project_diff), None, true, window, cx); + } + } + + fn new( + project: Entity, + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + // TODO diff change subscriptions. For that, needed: + // * `-20/+50` stats retrieval: some background process that reacts on file changes + let focus_handle = cx.focus_handle(); + let changed_entries_subscription = + cx.subscribe_in(&project, window, |project_diff_editor, _, e, window, cx| { + let mut worktree_to_rescan = None; + match e { + project::Event::WorktreeAdded(id) => { + worktree_to_rescan = Some(*id); + // project_diff_editor + // .buffer_changes + // .insert(*id, HashMap::default()); + } + project::Event::WorktreeRemoved(id) => { + project_diff_editor.buffer_changes.remove(id); + } + project::Event::WorktreeUpdatedEntries(id, _updated_entries) => { + // TODO cannot invalidate buffer entries without invalidating the corresponding excerpts and order entries. 
+ worktree_to_rescan = Some(*id); + // let entry_changes = + // project_diff_editor.buffer_changes.entry(*id).or_default(); + // for (_, entry_id, change) in updated_entries.iter() { + // let changes = entry_changes.entry(*entry_id); + // match change { + // project::PathChange::Removed => { + // if let hash_map::Entry::Occupied(entry) = changes { + // entry.remove(); + // } + // } + // // TODO understand the invalidation case better: now, we do that but still rescan the entire worktree + // // What if we already have the buffer loaded inside the diff multi buffer and it was edited there? We should not do anything. + // _ => match changes { + // hash_map::Entry::Occupied(mut o) => o.get_mut().invalidate(), + // hash_map::Entry::Vacant(v) => { + // v.insert(None); + // } + // }, + // } + // } + } + project::Event::WorktreeUpdatedGitRepositories(id) => { + worktree_to_rescan = Some(*id); + // project_diff_editor.buffer_changes.clear(); + } + project::Event::DeletedEntry(id, _entry_id) => { + worktree_to_rescan = Some(*id); + // if let Some(entries) = project_diff_editor.buffer_changes.get_mut(id) { + // entries.remove(entry_id); + // } + } + project::Event::Closed => { + project_diff_editor.buffer_changes.clear(); + } + _ => {} + } + + if let Some(worktree_to_rescan) = worktree_to_rescan { + project_diff_editor.schedule_worktree_rescan(worktree_to_rescan, window, cx); + } + }); + + let excerpts = cx.new(|cx| MultiBuffer::new(project.read(cx).capability())); + + let editor = cx.new(|cx| { + let mut diff_display_editor = + Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, window, cx); + diff_display_editor.set_expand_all_diff_hunks(cx); + diff_display_editor + }); + + let mut new_self = Self { + project, + workspace, + buffer_changes: BTreeMap::default(), + entry_order: HashMap::default(), + worktree_rescans: HashMap::default(), + focus_handle, + editor, + excerpts, + _subscriptions: vec![changed_entries_subscription], + }; + new_self.schedule_rescan_all(window, cx); + new_self + } + + fn schedule_rescan_all(&mut self, window: &mut Window, cx: &mut Context) { + let mut current_worktrees = HashSet::::default(); + for worktree in self.project.read(cx).worktrees(cx).collect::>() { + let worktree_id = worktree.read(cx).id(); + current_worktrees.insert(worktree_id); + self.schedule_worktree_rescan(worktree_id, window, cx); + } + + self.worktree_rescans + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + self.buffer_changes + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + self.entry_order + .retain(|worktree_id, _| current_worktrees.contains(worktree_id)); + } + + fn schedule_worktree_rescan( + &mut self, + id: WorktreeId, + window: &mut Window, + cx: &mut Context, + ) { + let project = self.project.clone(); + self.worktree_rescans.insert( + id, + cx.spawn_in(window, |project_diff_editor, mut cx| async move { + cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let open_tasks = project + .update(&mut cx, |project, cx| { + let worktree = project.worktree_for_id(id, cx)?; + let snapshot = worktree.read(cx).snapshot(); + let applicable_entries = snapshot + .repositories() + .iter() + .flat_map(|entry| { + entry + .status() + .map(|git_entry| entry.join(git_entry.repo_path)) + }) + .filter_map(|path| { + let id = snapshot.entry_for_path(&path)?.id; + Some(( + id, + ProjectPath { + worktree_id: snapshot.id(), + path: path.into(), + }, + )) + }) + .collect::>(); + Some( + applicable_entries + .into_iter() + .map(|(entry_id, entry_path)| { 
+ let open_task = project.open_path(entry_path.clone(), cx); + (entry_id, entry_path, open_task) + }) + .collect::>(), + ) + }) + .ok() + .flatten() + .unwrap_or_default(); + + let Some((buffers, mut new_entries, change_sets)) = cx + .spawn(|mut cx| async move { + let mut new_entries = Vec::new(); + let mut buffers = HashMap::< + ProjectEntryId, + (text::BufferSnapshot, Entity, BufferDiff), + >::default(); + let mut change_sets = Vec::new(); + for (entry_id, entry_path, open_task) in open_tasks { + let Some(buffer) = open_task + .await + .and_then(|(_, opened_model)| { + opened_model + .downcast::() + .map_err(|_| anyhow!("Unexpected non-buffer")) + }) + .with_context(|| { + format!("loading {:?} for git diff", entry_path.path) + }) + .log_err() + else { + continue; + }; + + let Some(change_set) = project + .update(&mut cx, |project, cx| { + project.open_unstaged_changes(buffer.clone(), cx) + })? + .await + .log_err() + else { + continue; + }; + + cx.update(|_, cx| { + buffers.insert( + entry_id, + ( + buffer.read(cx).text_snapshot(), + buffer, + change_set.read(cx).diff_to_buffer.clone(), + ), + ); + })?; + change_sets.push(change_set); + new_entries.push((entry_path, entry_id)); + } + + anyhow::Ok((buffers, new_entries, change_sets)) + }) + .await + .log_err() + else { + return; + }; + + let (new_changes, new_entry_order) = cx + .background_executor() + .spawn(async move { + let mut new_changes = HashMap::::default(); + for (entry_id, (buffer_snapshot, buffer, buffer_diff)) in buffers { + new_changes.insert( + entry_id, + Changes { + buffer, + hunks: buffer_diff + .hunks_in_row_range(0..BufferRow::MAX, &buffer_snapshot) + .collect::>(), + }, + ); + } + + new_entries.sort_by(|(project_path_a, _), (project_path_b, _)| { + compare_paths( + (project_path_a.path.as_ref(), true), + (project_path_b.path.as_ref(), true), + ) + }); + (new_changes, new_entries) + }) + .await; + + project_diff_editor + .update_in(&mut cx, |project_diff_editor, _window, cx| { + project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx); + project_diff_editor.editor.update(cx, |editor, cx| { + editor.buffer.update(cx, |buffer, cx| { + for change_set in change_sets { + buffer.add_change_set(change_set, cx) + } + }); + }); + }) + .ok(); + }), + ); + } + + fn update_excerpts( + &mut self, + worktree_id: WorktreeId, + new_changes: HashMap, + new_entry_order: Vec<(ProjectPath, ProjectEntryId)>, + + cx: &mut Context, + ) { + if let Some(current_order) = self.entry_order.get(&worktree_id) { + let current_entries = self.buffer_changes.entry(worktree_id).or_default(); + let mut new_order_entries = new_entry_order.iter().fuse().peekable(); + let mut excerpts_to_remove = Vec::new(); + let mut new_excerpt_hunks = BTreeMap::< + ExcerptId, + Vec<(ProjectPath, Entity, Vec>)>, + >::new(); + let mut excerpt_to_expand = + HashMap::<(u32, ExpandExcerptDirection), Vec>::default(); + let mut latest_excerpt_id = ExcerptId::min(); + + for (current_path, current_entry_id) in current_order { + let current_changes = match current_entries.get(current_entry_id) { + Some(current_changes) => { + if current_changes.hunks.is_empty() { + continue; + } + current_changes + } + None => continue, + }; + let buffer_excerpts = self + .excerpts + .read(cx) + .excerpts_for_buffer(¤t_changes.buffer, cx); + let last_current_excerpt_id = + buffer_excerpts.last().map(|(excerpt_id, _)| *excerpt_id); + let mut current_excerpts = buffer_excerpts.into_iter().fuse().peekable(); + loop { + match new_order_entries.peek() { + Some((new_path, new_entry)) 
=> { + match compare_paths( + (current_path.path.as_ref(), true), + (new_path.path.as_ref(), true), + ) { + Ordering::Less => { + excerpts_to_remove + .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); + break; + } + Ordering::Greater => { + if let Some(new_changes) = new_changes.get(new_entry) { + if !new_changes.hunks.is_empty() { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths( + (new_path.path.as_ref(), true), + (probe.path.as_ref(), true), + ) + }) { + Ok(i) => hunks[i].2.extend( + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + }; + let _ = new_order_entries.next(); + } + Ordering::Equal => { + match new_changes.get(new_entry) { + Some(new_changes) => { + let buffer_snapshot = + new_changes.buffer.read(cx).snapshot(); + let mut current_hunks = + current_changes.hunks.iter().fuse().peekable(); + let mut new_hunks_unchanged = + Vec::with_capacity(new_changes.hunks.len()); + let mut new_hunks_with_updates = + Vec::with_capacity(new_changes.hunks.len()); + 'new_changes: for new_hunk in &new_changes.hunks { + loop { + match current_hunks.peek() { + Some(current_hunk) => { + match ( + current_hunk + .buffer_range + .start + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ), + current_hunk.buffer_range.end.cmp( + &new_hunk.buffer_range.end, + &buffer_snapshot, + ), + ) { + ( + Ordering::Equal, + Ordering::Equal, + ) => { + new_hunks_unchanged + .push(new_hunk); + let _ = current_hunks.next(); + continue 'new_changes; + } + (Ordering::Equal, _) + | (_, Ordering::Equal) => { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } + ( + Ordering::Less, + Ordering::Greater, + ) + | ( + Ordering::Greater, + Ordering::Less, + ) => { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } + ( + Ordering::Less, + Ordering::Less, + ) => { + if current_hunk + .buffer_range + .start + .cmp( + &new_hunk + .buffer_range + .end, + &buffer_snapshot, + ) + .is_le() + { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } else { + let _ = + current_hunks.next(); + } + } + ( + Ordering::Greater, + Ordering::Greater, + ) => { + if current_hunk + .buffer_range + .end + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ) + .is_ge() + { + new_hunks_with_updates + .push(new_hunk); + continue 'new_changes; + } else { + let _ = + current_hunks.next(); + } + } + } + } + None => { + new_hunks_with_updates.push(new_hunk); + continue 'new_changes; + } + } + } + } + + let mut excerpts_with_new_changes = + HashSet::::default(); + 'new_hunks: for new_hunk in new_hunks_with_updates { + loop { + match current_excerpts.peek() { + Some(( + current_excerpt_id, + current_excerpt_range, + )) => { + match ( + current_excerpt_range + .context + .start + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ), + current_excerpt_range + .context + .end + .cmp( + &new_hunk.buffer_range.end, + &buffer_snapshot, + ), + ) { + ( + Ordering::Less + | Ordering::Equal, + Ordering::Greater + | Ordering::Equal, + ) => { + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } + ( + Ordering::Greater + | Ordering::Equal, + Ordering::Less + | Ordering::Equal, + ) => { + let expand_up = 
current_excerpt_range + .context + .start + .to_point(&buffer_snapshot) + .row + .saturating_sub( + new_hunk + .buffer_range + .start + .to_point(&buffer_snapshot) + .row, + ); + let expand_down = new_hunk + .buffer_range + .end + .to_point(&buffer_snapshot) + .row + .saturating_sub( + current_excerpt_range + .context + .end + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_up.max(expand_down).max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::UpAndDown)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } + ( + Ordering::Less, + Ordering::Less, + ) => { + if current_excerpt_range + .context + .start + .cmp( + &new_hunk + .buffer_range + .end, + &buffer_snapshot, + ) + .is_le() + { + let expand_up = current_excerpt_range + .context + .start + .to_point(&buffer_snapshot) + .row + .saturating_sub( + new_hunk.buffer_range + .start + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_up.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Up)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } else { + if !new_changes + .hunks + .is_empty() + { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths( + (new_path.path.as_ref(), true), + (probe.path.as_ref(), true), + ) + }) { + Ok(i) => hunks[i].2.extend( + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + continue 'new_hunks; + } + } + /* TODO remove or leave? 
+ [ ><<<<<<<--]----<-- + cur_s > cur_e < + > < + new_s>>>>>>>>< + */ + ( + Ordering::Greater, + Ordering::Greater, + ) => { + if current_excerpt_range + .context + .end + .cmp( + &new_hunk + .buffer_range + .start, + &buffer_snapshot, + ) + .is_ge() + { + let expand_down = new_hunk + .buffer_range + .end + .to_point(&buffer_snapshot) + .row + .saturating_sub( + current_excerpt_range + .context + .end + .to_point( + &buffer_snapshot, + ) + .row, + ); + excerpt_to_expand.entry((expand_down.max(DEFAULT_MULTIBUFFER_CONTEXT), ExpandExcerptDirection::Down)).or_default().push(*current_excerpt_id); + excerpts_with_new_changes + .insert( + *current_excerpt_id, + ); + continue 'new_hunks; + } else { + latest_excerpt_id = + *current_excerpt_id; + let _ = + current_excerpts.next(); + } + } + } + } + None => { + let hunks = new_excerpt_hunks + .entry(latest_excerpt_id) + .or_default(); + match hunks.binary_search_by( + |(probe, ..)| { + compare_paths( + ( + new_path.path.as_ref(), + true, + ), + (probe.path.as_ref(), true), + ) + }, + ) { + Ok(i) => hunks[i].2.extend( + new_changes.hunks.iter().map( + |hunk| { + hunk.buffer_range + .clone() + }, + ), + ), + Err(i) => hunks.insert( + i, + ( + new_path.clone(), + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| { + hunk.buffer_range + .clone() + }) + .collect(), + ), + ), + } + continue 'new_hunks; + } + } + } + } + + for (excerpt_id, excerpt_range) in current_excerpts { + if !excerpts_with_new_changes.contains(&excerpt_id) + && !new_hunks_unchanged.iter().any(|hunk| { + excerpt_range + .context + .start + .cmp( + &hunk.buffer_range.end, + &buffer_snapshot, + ) + .is_le() + && excerpt_range + .context + .end + .cmp( + &hunk.buffer_range.start, + &buffer_snapshot, + ) + .is_ge() + }) + { + excerpts_to_remove.push(excerpt_id); + } + latest_excerpt_id = excerpt_id; + } + } + None => excerpts_to_remove.extend( + current_excerpts.map(|(excerpt_id, _)| excerpt_id), + ), + } + let _ = new_order_entries.next(); + break; + } + } + } + None => { + excerpts_to_remove + .extend(current_excerpts.map(|(excerpt_id, _)| excerpt_id)); + break; + } + } + } + latest_excerpt_id = last_current_excerpt_id.unwrap_or(latest_excerpt_id); + } + + for (path, project_entry_id) in new_order_entries { + if let Some(changes) = new_changes.get(project_entry_id) { + if !changes.hunks.is_empty() { + let hunks = new_excerpt_hunks.entry(latest_excerpt_id).or_default(); + match hunks.binary_search_by(|(probe, ..)| { + compare_paths((path.path.as_ref(), true), (probe.path.as_ref(), true)) + }) { + Ok(i) => hunks[i] + .2 + .extend(changes.hunks.iter().map(|hunk| hunk.buffer_range.clone())), + Err(i) => hunks.insert( + i, + ( + path.clone(), + changes.buffer.clone(), + changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + ), + ), + } + } + } + } + + self.excerpts.update(cx, |multi_buffer, cx| { + for (mut after_excerpt_id, excerpts_to_add) in new_excerpt_hunks { + for (_, buffer, hunk_ranges) in excerpts_to_add { + let buffer_snapshot = buffer.read(cx).snapshot(); + let max_point = buffer_snapshot.max_point(); + let new_excerpts = multi_buffer.insert_excerpts_after( + after_excerpt_id, + buffer, + hunk_ranges.into_iter().map(|range| { + let mut extended_point_range = range.to_point(&buffer_snapshot); + extended_point_range.start.row = extended_point_range + .start + .row + .saturating_sub(DEFAULT_MULTIBUFFER_CONTEXT); + extended_point_range.end.row = (extended_point_range.end.row + + DEFAULT_MULTIBUFFER_CONTEXT) + .min(max_point.row); + 
ExcerptRange { + context: extended_point_range, + primary: None, + } + }), + cx, + ); + after_excerpt_id = new_excerpts.last().copied().unwrap_or(after_excerpt_id); + } + } + multi_buffer.remove_excerpts(excerpts_to_remove, cx); + for ((line_count, direction), excerpts) in excerpt_to_expand { + multi_buffer.expand_excerpts(excerpts, line_count, direction, cx); + } + }); + } else { + self.excerpts.update(cx, |multi_buffer, cx| { + for new_changes in new_entry_order + .iter() + .filter_map(|(_, entry_id)| new_changes.get(entry_id)) + { + multi_buffer.push_excerpts_with_context_lines( + new_changes.buffer.clone(), + new_changes + .hunks + .iter() + .map(|hunk| hunk.buffer_range.clone()) + .collect(), + DEFAULT_MULTIBUFFER_CONTEXT, + cx, + ); + } + }); + }; + + let mut new_changes = new_changes; + let mut new_entry_order = new_entry_order; + std::mem::swap( + self.buffer_changes.entry(worktree_id).or_default(), + &mut new_changes, + ); + std::mem::swap( + self.entry_order.entry(worktree_id).or_default(), + &mut new_entry_order, + ); + } +} + +impl EventEmitter for ProjectDiffEditor {} + +impl Focusable for ProjectDiffEditor { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Item for ProjectDiffEditor { + type Event = EditorEvent; + + fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) { + Editor::to_item_events(event, f) + } + + fn deactivated(&mut self, window: &mut Window, cx: &mut Context) { + self.editor + .update(cx, |editor, cx| editor.deactivated(window, cx)); + } + + fn navigate( + &mut self, + data: Box, + window: &mut Window, + cx: &mut Context, + ) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, window, cx)) + } + + fn tab_tooltip_text(&self, _: &App) -> Option { + Some("Project Diff".into()) + } + + fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement { + if self.buffer_changes.is_empty() { + Label::new("No changes") + .color(if params.selected { + Color::Default + } else { + Color::Muted + }) + .into_any_element() + } else { + h_flex() + .gap_1() + .when(true, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::XCircle).color(Color::Error)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .when(true, |then| { + then.child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Indicator).color(Color::Warning)) + .child(Label::new(self.buffer_changes.len().to_string()).color( + if params.selected { + Color::Default + } else { + Color::Muted + }, + )), + ) + }) + .into_any_element() + } + } + + fn telemetry_event_text(&self) -> Option<&'static str> { + Some("Project Diagnostics Opened") + } + + fn for_each_project_item( + &self, + cx: &App, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::ProjectItem), + ) { + self.editor.for_each_project_item(cx, f) + } + + fn is_singleton(&self, _: &App) -> bool { + false + } + + fn set_nav_history( + &mut self, + nav_history: ItemNavHistory, + _: &mut Window, + cx: &mut Context, + ) { + self.editor.update(cx, |editor, _| { + editor.set_nav_history(Some(nav_history)); + }); + } + + fn clone_on_split( + &self, + _workspace_id: Option, + window: &mut Window, + cx: &mut Context, + ) -> Option> + where + Self: Sized, + { + Some(cx.new(|cx| { + ProjectDiffEditor::new(self.project.clone(), self.workspace.clone(), window, cx) + })) + } + + fn is_dirty(&self, cx: &App) -> bool { + self.excerpts.read(cx).is_dirty(cx) 
+ } + + fn has_conflict(&self, cx: &App) -> bool { + self.excerpts.read(cx).has_conflict(cx) + } + + fn can_save(&self, _: &App) -> bool { + true + } + + fn save( + &mut self, + format: bool, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.editor.save(format, project, window, cx) + } + + fn save_as( + &mut self, + _: Entity, + _: ProjectPath, + _window: &mut Window, + _: &mut Context, + ) -> Task> { + unreachable!() + } + + fn reload( + &mut self, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Task> { + self.editor.reload(project, window, cx) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a Entity, + _: &'a App, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation { + ToolbarItemLocation::PrimaryLeft + } + + fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option> { + self.editor.breadcrumbs(theme, cx) + } + + fn added_to_workspace( + &mut self, + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) { + self.editor.update(cx, |editor, cx| { + editor.added_to_workspace(workspace, window, cx) + }); + } +} + +impl Render for ProjectDiffEditor { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let child = if self.buffer_changes.is_empty() { + div() + .bg(cx.theme().colors().editor_background) + .flex() + .items_center() + .justify_center() + .size_full() + .child(Label::new("No changes in the workspace")) + } else { + div().size_full().child(self.editor.clone()) + }; + + div() + .track_focus(&self.focus_handle) + .size_full() + .child(child) + } +} + +#[cfg(test)] +mod tests { + use git::status::{StatusCode, TrackedStatus}; + use gpui::{SemanticVersion, TestAppContext, VisualTestContext}; + use project::buffer_store::BufferChangeSet; + use serde_json::json; + use settings::SettingsStore; + use std::{ + ops::Deref as _, + path::{Path, PathBuf}, + }; + + use crate::test::editor_test_context::assert_state_with_diff; + + use super::*; + + // TODO finish + // #[gpui::test] + // async fn randomized_tests(cx: &mut TestAppContext) { + // // Create a new project (how?? 
temp fs?), + // let fs = FakeFs::new(cx.executor()); + // let project = Project::test(fs, [], cx).await; + + // // create random files with random content + + // // Commit it into git somehow (technically can do with "real" fs in a temp dir) + // // + // // Apply randomized changes to the project: select a random file, random change and apply to buffers + // } + + #[gpui::test(iterations = 30)] + async fn simple_edit_test(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + init_test(cx); + + let fs = fs::FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + ".git": {}, + "file_a": "This is file_a", + "file_b": "This is file_b", + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/root")], cx).await; + let workspace = + cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); + + let file_a_editor = workspace + .update(cx, |workspace, window, cx| { + let file_a_editor = + workspace.open_abs_path(PathBuf::from("/root/file_a"), true, window, cx); + ProjectDiffEditor::deploy(workspace, &Deploy, window, cx); + file_a_editor + }) + .unwrap() + .await + .expect("did not open an item at all") + .downcast::() + .expect("did not open an editor for file_a"); + let project_diff_editor = workspace + .update(cx, |workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + }) + .unwrap() + .expect("did not find a ProjectDiffEditor"); + project_diff_editor.update(cx, |project_diff_editor, cx| { + assert!( + project_diff_editor.editor.read(cx).text(cx).is_empty(), + "Should have no changes after opening the diff on no git changes" + ); + }); + + let old_text = file_a_editor.update(cx, |editor, cx| editor.text(cx)); + let change = "an edit after git add"; + file_a_editor + .update_in(cx, |file_a_editor, window, cx| { + file_a_editor.insert(change, window, cx); + file_a_editor.save(false, project.clone(), window, cx) + }) + .await + .expect("failed to save a file"); + file_a_editor.update_in(cx, |file_a_editor, _window, cx| { + let change_set = cx.new(|cx| { + BufferChangeSet::new_with_base_text( + &old_text, + &file_a_editor.buffer().read(cx).as_singleton().unwrap(), + cx, + ) + }); + file_a_editor.buffer.update(cx, |buffer, cx| { + buffer.add_change_set(change_set.clone(), cx) + }); + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.set_unstaged_change_set( + file_a_editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .remote_id(), + change_set, + ); + }); + }); + }); + fs.set_status_for_repo_via_git_operation( + Path::new("/root/.git"), + &[( + Path::new("file_a"), + TrackedStatus { + worktree_status: StatusCode::Modified, + index_status: StatusCode::Unmodified, + } + .into(), + )], + ); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + let editor = project_diff_editor.update(cx, |diff_editor, _| diff_editor.editor.clone()); + + assert_state_with_diff( + &editor, + cx, + indoc::indoc! 
{ + " + - This is file_a + + an edit after git addThis is file_aˇ", + }, + ); + } + + fn init_test(cx: &mut gpui::TestAppContext) { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } + + cx.update(|cx| { + assets::Assets.load_test_fonts(cx); + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + theme::init(theme::LoadThemes::JustBase, cx); + release_channel::init(SemanticVersion::default(), cx); + client::init_settings(cx); + language::init(cx); + Project::init_settings(cx); + workspace::init_settings(cx); + crate::init(cx); + cx.set_staff(true); + }); + } +} diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 2f1f00715e..dd37c34afe 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -279,9 +279,10 @@ fn show_hover( delay.await; } + let offset = anchor.to_offset(&snapshot.buffer_snapshot); let local_diagnostic = snapshot .buffer_snapshot - .diagnostics_in_range::<_, usize>(anchor..anchor) + .diagnostics_in_range::(offset..offset) // Find the entry with the most specific range .min_by_key(|entry| entry.range.len()); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index 4e3066692d..9a61656a58 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -111,11 +111,7 @@ impl ProposedChangesEditor { .read(cx) .change_set_for(buffer.remote_id())?; Some(change_set.update(cx, |change_set, cx| { - change_set.set_base_text( - base_buffer.read(cx).text(), - buffer, - cx, - ) + change_set.set_base_text(base_buffer.clone(), buffer, cx) })) }) .collect::>() @@ -192,7 +188,7 @@ impl ProposedChangesEditor { new_change_sets.push(cx.new(|cx| { let mut change_set = BufferChangeSet::new(&branch_buffer, cx); let _ = change_set.set_base_text( - location.buffer.read(cx).text(), + location.buffer.clone(), branch_buffer.read(cx).text_snapshot(), cx, ); diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index a4e6013400..6246ec14fb 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -292,7 +292,7 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_index_for_repo( &Self::root_path().join(".git"), - &[(path.as_ref(), diff_base.to_string())], + &[(path.into(), diff_base.to_string())], ); self.cx.run_until_parked(); } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 2c64388ec3..c65877145c 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -5,9 +5,9 @@ mod mac_watcher; pub mod fs_watcher; use anyhow::{anyhow, Context as _, Result}; -#[cfg(any(test, feature = "test-support"))] -use git::status::FileStatus; use git::GitHostingProviderRegistry; +#[cfg(any(test, feature = "test-support"))] +use git::{repository::RepoPath, status::FileStatus}; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use ashpd::desktop::trash; @@ -1270,25 +1270,32 @@ impl FakeFs { }) } - pub fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) { + pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) { self.with_git_state(dot_git, true, |state| { state.index_contents.clear(); state.index_contents.extend( - head_state + index_state .iter() - .map(|(path, content)| (path.to_path_buf(), content.clone())), + .map(|(path, content)| (path.clone(), 
content.clone())), ); }); } - pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) { + pub fn set_head_for_repo(&self, dot_git: &Path, head_state: &[(RepoPath, String)]) { + self.with_git_state(dot_git, true, |state| { + state.head_contents.clear(); + state.head_contents.extend( + head_state + .iter() + .map(|(path, content)| (path.clone(), content.clone())), + ); + }); + } + + pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) { self.with_git_state(dot_git, true, |state| { state.blames.clear(); - state.blames.extend( - blames - .into_iter() - .map(|(path, blame)| (path.to_path_buf(), blame)), - ); + state.blames.extend(blames); }); } diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 4dc835ddac..7fd6628a89 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -74,31 +74,34 @@ impl BufferDiff { } } - pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self { + pub fn build(diff_base: Option<&str>, buffer: &text::BufferSnapshot) -> Self { let mut tree = SumTree::new(buffer); - let buffer_text = buffer.as_rope().to_string(); - let patch = Self::diff(diff_base, &buffer_text); + if let Some(diff_base) = diff_base { + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(diff_base, &buffer_text); - // A common case in Zed is that the empty buffer is represented as just a newline, - // but if we just compute a naive diff you get a "preserved" line in the middle, - // which is a bit odd. - if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 { - tree.push( - InternalDiffHunk { - buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), - diff_base_byte_range: 0..diff_base.len() - 1, - }, - buffer, - ); - return Self { tree }; - } + // A common case in Zed is that the empty buffer is represented as just a newline, + // but if we just compute a naive diff you get a "preserved" line in the middle, + // which is a bit odd. 
+ if buffer_text == "\n" && diff_base.ends_with("\n") && diff_base.len() > 1 { + tree.push( + InternalDiffHunk { + buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0), + diff_base_byte_range: 0..diff_base.len() - 1, + }, + buffer, + ); + return Self { tree }; + } - if let Some(patch) = patch { - let mut divergence = 0; - for hunk_index in 0..patch.num_hunks() { - let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); - tree.push(hunk, buffer); + if let Some(patch) = patch { + let mut divergence = 0; + for hunk_index in 0..patch.num_hunks() { + let hunk = + Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); + tree.push(hunk, buffer); + } } } @@ -125,11 +128,14 @@ impl BufferDiff { range: Range, buffer: &'a BufferSnapshot, ) -> impl 'a + Iterator { + let range = range.to_offset(buffer); + let mut cursor = self .tree .filter::<_, DiffHunkSummary>(buffer, move |summary| { - let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + let summary_range = summary.buffer_range.to_offset(buffer); + let before_start = summary_range.end < range.start; + let after_end = summary_range.start > range.end; !before_start && !after_end }); @@ -151,21 +157,25 @@ impl BufferDiff { }); let mut summaries = buffer.summaries_for_anchors_with_payload::(anchor_iter); - iter::from_fn(move || { + iter::from_fn(move || loop { let (start_point, (start_anchor, start_base)) = summaries.next()?; let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?; + if !start_anchor.is_valid(buffer) { + continue; + } + if end_point.column > 0 { end_point.row += 1; end_point.column = 0; end_anchor = buffer.anchor_before(end_point); } - Some(DiffHunk { + return Some(DiffHunk { row_range: start_point.row..end_point.row, diff_base_byte_range: start_base..end_base, buffer_range: start_anchor..end_anchor, - }) + }); }) } @@ -270,7 +280,7 @@ impl BufferDiff { } pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { - *self = Self::build(&diff_base.to_string(), buffer); + *self = Self::build(Some(&diff_base.to_string()), buffer); } #[cfg(test)] @@ -536,7 +546,7 @@ mod tests { let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text_1); let empty_diff = BufferDiff::new(&buffer); - let diff_1 = BufferDiff::build(&base_text, &buffer); + let diff_1 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_1.compare(&empty_diff, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0)); @@ -554,7 +564,7 @@ mod tests { " .unindent(), ); - let diff_2 = BufferDiff::build(&base_text, &buffer); + let diff_2 = BufferDiff::build(Some(&base_text), &buffer); assert_eq!(None, diff_2.compare(&diff_1, &buffer)); // Edit turns a deletion hunk into a modification. 
@@ -571,7 +581,7 @@ mod tests { " .unindent(), ); - let diff_3 = BufferDiff::build(&base_text, &buffer); + let diff_3 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_3.compare(&diff_2, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0)); @@ -588,7 +598,7 @@ mod tests { " .unindent(), ); - let diff_4 = BufferDiff::build(&base_text, &buffer); + let diff_4 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_4.compare(&diff_3, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0)); @@ -606,7 +616,7 @@ mod tests { " .unindent(), ); - let diff_5 = BufferDiff::build(&base_text, &buffer); + let diff_5 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_5.compare(&diff_4, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0)); @@ -624,7 +634,7 @@ mod tests { " .unindent(), ); - let diff_6 = BufferDiff::build(&base_text, &buffer); + let diff_6 = BufferDiff::build(Some(&base_text), &buffer); let range = diff_6.compare(&diff_5, &buffer).unwrap(); assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0)); } diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 7b82c2571d..a377740152 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -29,9 +29,15 @@ pub struct Branch { pub trait GitRepository: Send + Sync { fn reload_index(&self); - /// Loads a git repository entry's contents. + /// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path. + /// /// Note that for symlink entries, this will return the contents of the symlink, not the target. - fn load_index_text(&self, relative_file_path: &Path) -> Option; + fn load_index_text(&self, path: &RepoPath) -> Option; + + /// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path. + /// + /// Note that for symlink entries, this will return the contents of the symlink, not the target. + fn load_committed_text(&self, path: &RepoPath) -> Option; /// Returns the URL of the remote with the given name. 
fn remote_url(&self, name: &str) -> Option; @@ -106,15 +112,15 @@ impl GitRepository for RealGitRepository { repo.path().into() } - fn load_index_text(&self, relative_file_path: &Path) -> Option { - fn logic(repo: &git2::Repository, relative_file_path: &Path) -> Result> { + fn load_index_text(&self, path: &RepoPath) -> Option { + fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { const STAGE_NORMAL: i32 = 0; let index = repo.index()?; // This check is required because index.get_path() unwraps internally :( - check_path_to_repo_path_errors(relative_file_path)?; + check_path_to_repo_path_errors(path)?; - let oid = match index.get_path(relative_file_path, STAGE_NORMAL) { + let oid = match index.get_path(path, STAGE_NORMAL) { Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, _ => return Ok(None), }; @@ -123,13 +129,22 @@ impl GitRepository for RealGitRepository { Ok(Some(String::from_utf8(content)?)) } - match logic(&self.repository.lock(), relative_file_path) { + match logic(&self.repository.lock(), path) { Ok(value) => return value, - Err(err) => log::error!("Error loading head text: {:?}", err), + Err(err) => log::error!("Error loading index text: {:?}", err), } None } + fn load_committed_text(&self, path: &RepoPath) -> Option { + let repo = self.repository.lock(); + let head = repo.head().ok()?.peel_to_tree().log_err()?; + let oid = head.get_path(path).ok()?.id(); + let content = repo.find_blob(oid).log_err()?.content().to_owned(); + let content = String::from_utf8(content).log_err()?; + Some(content) + } + fn remote_url(&self, name: &str) -> Option { let repo = self.repository.lock(); let remote = repo.find_remote(name).ok()?; @@ -325,8 +340,9 @@ pub struct FakeGitRepository { pub struct FakeGitRepositoryState { pub dot_git_dir: PathBuf, pub event_emitter: smol::channel::Sender, - pub index_contents: HashMap, - pub blames: HashMap, + pub head_contents: HashMap, + pub index_contents: HashMap, + pub blames: HashMap, pub statuses: HashMap, pub current_branch_name: Option, pub branches: HashSet, @@ -343,6 +359,7 @@ impl FakeGitRepositoryState { FakeGitRepositoryState { dot_git_dir, event_emitter, + head_contents: Default::default(), index_contents: Default::default(), blames: Default::default(), statuses: Default::default(), @@ -355,9 +372,14 @@ impl FakeGitRepositoryState { impl GitRepository for FakeGitRepository { fn reload_index(&self) {} - fn load_index_text(&self, path: &Path) -> Option { + fn load_index_text(&self, path: &RepoPath) -> Option { let state = self.state.lock(); - state.index_contents.get(path).cloned() + state.index_contents.get(path.as_ref()).cloned() + } + + fn load_committed_text(&self, path: &RepoPath) -> Option { + let state = self.state.lock(); + state.head_contents.get(path.as_ref()).cloned() } fn remote_url(&self, _name: &str) -> Option { @@ -529,6 +551,12 @@ impl From<&Path> for RepoPath { } } +impl From> for RepoPath { + fn from(value: Arc) -> Self { + RepoPath(value) + } +} + impl From for RepoPath { fn from(value: PathBuf) -> Self { RepoPath::new(value) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index dc6399122c..ceb387d2e1 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1001,6 +1001,34 @@ impl Buffer { } } + #[cfg(any(test, feature = "test-support"))] + pub fn build_snapshot_sync( + text: Rope, + language: Option>, + language_registry: Option>, + cx: &mut App, + ) -> BufferSnapshot { + let entity_id = cx.reserve_entity::().entity_id(); + let buffer_id = 
entity_id.as_non_zero_u64().into(); + let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot(); + let mut syntax = SyntaxMap::new(&text).snapshot(); + if let Some(language) = language.clone() { + let text = text.clone(); + let language = language.clone(); + let language_registry = language_registry.clone(); + syntax.reparse(&text, language_registry, language); + } + BufferSnapshot { + text, + syntax, + file: None, + diagnostics: Default::default(), + remote_selections: Default::default(), + language, + non_text_state_update_count: 0, + } + } + /// Retrieve a snapshot of the buffer's current state. This is computationally /// cheap, and allows reading from the buffer on a background thread. pub fn snapshot(&self) -> BufferSnapshot { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 46ae229142..cc8afcb234 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -28,7 +28,7 @@ use smol::future::yield_now; use std::{ any::type_name, borrow::Cow, - cell::{Ref, RefCell, RefMut}, + cell::{Ref, RefCell}, cmp, fmt, future::Future, io, @@ -290,6 +290,7 @@ impl ExcerptBoundary { #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub struct RowInfo { + pub buffer_id: Option, pub buffer_row: Option, pub multibuffer_row: Option, pub diff_status: Option, @@ -1742,7 +1743,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, vec![Edit { old: edit_start..edit_start, new: edit_start..edit_end, @@ -1775,7 +1776,7 @@ impl MultiBuffer { snapshot.has_conflict = false; self.sync_diff_transforms( - snapshot, + &mut snapshot, vec![Edit { old: start..prev_len, new: start..start, @@ -2053,7 +2054,7 @@ impl MultiBuffer { snapshot.trailing_excerpt_update_count += 1; } - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2218,7 +2219,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, @@ -2388,7 +2389,7 @@ impl MultiBuffer { cx: &mut Context, ) { self.sync(cx); - let snapshot = self.snapshot.borrow_mut(); + let mut snapshot = self.snapshot.borrow_mut(); let mut excerpt_edits = Vec::new(); for range in ranges.iter() { let end_excerpt_id = range.end.excerpt_id; @@ -2422,7 +2423,7 @@ impl MultiBuffer { } self.sync_diff_transforms( - snapshot, + &mut snapshot, excerpt_edits, DiffChangeKind::ExpandOrCollapseHunks { expand }, ); @@ -2491,7 +2492,7 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2592,7 +2593,7 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); cx.emit(Event::Edited { singleton_buffer_edited: false, edited_buffer: None, @@ -2705,12 +2706,12 @@ impl MultiBuffer { drop(cursor); snapshot.excerpts = new_excerpts; - self.sync_diff_transforms(snapshot, edits, DiffChangeKind::BufferEdited); + self.sync_diff_transforms(&mut snapshot, edits, 
DiffChangeKind::BufferEdited); } fn sync_diff_transforms( &self, - mut snapshot: RefMut, + snapshot: &mut MultiBufferSnapshot, excerpt_edits: Vec>, change_kind: DiffChangeKind, ) { @@ -2791,11 +2792,23 @@ impl MultiBuffer { if excerpt_edits.peek().map_or(true, |next_edit| { next_edit.old.start >= old_diff_transforms.end(&()).0 }) { + let keep_next_old_transform = (old_diff_transforms.start().0 >= edit.old.end) + && match old_diff_transforms.item() { + Some(DiffTransform::BufferContent { + inserted_hunk_anchor: Some(hunk_anchor), + .. + }) => excerpts + .item() + .is_some_and(|excerpt| hunk_anchor.1.is_valid(&excerpt.buffer)), + _ => true, + }; + let mut excerpt_offset = edit.new.end; - if old_diff_transforms.start().0 < edit.old.end { + if !keep_next_old_transform { excerpt_offset += old_diff_transforms.end(&()).0 - edit.old.end; old_diff_transforms.next(&()); } + old_expanded_hunks.clear(); self.push_buffer_content_transform( &snapshot, @@ -2894,12 +2907,14 @@ impl MultiBuffer { buffer.anchor_before(edit_buffer_start)..buffer.anchor_after(edit_buffer_end); for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) { + let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); + let hunk_anchor = (excerpt.id, hunk.buffer_range.start); - if !hunk_anchor.1.is_valid(buffer) { + if hunk_buffer_range.start < excerpt_buffer_start { + log::trace!("skipping hunk that starts before excerpt"); continue; } - let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); let hunk_excerpt_start = excerpt_start + ExcerptOffset::new( hunk_buffer_range.start.saturating_sub(excerpt_buffer_start), @@ -2941,8 +2956,9 @@ impl MultiBuffer { if should_expand_hunk { did_expand_hunks = true; log::trace!( - "expanding hunk {:?}", + "expanding hunk {:?}, excerpt:{:?}", hunk_excerpt_start.value..hunk_excerpt_end.value, + excerpt.id ); if !hunk.diff_base_byte_range.is_empty() @@ -3389,12 +3405,12 @@ impl MultiBufferSnapshot { self.diff_hunks_in_range(Anchor::min()..Anchor::max()) } - pub fn diff_hunks_in_range( + pub fn diff_hunks_in_range( &self, range: Range, ) -> impl Iterator + '_ { - let range = range.start.to_offset(self)..range.end.to_offset(self); - self.lift_buffer_metadata(range.clone(), move |buffer, buffer_range| { + let query_range = range.start.to_point(self)..range.end.to_point(self); + self.lift_buffer_metadata(query_range.clone(), move |buffer, buffer_range| { let diff = self.diffs.get(&buffer.remote_id())?; let buffer_start = buffer.anchor_before(buffer_range.start); let buffer_end = buffer.anchor_after(buffer_range.end); @@ -3409,19 +3425,25 @@ impl MultiBufferSnapshot { }), ) }) - .map(|(range, hunk, excerpt)| { + .filter_map(move |(range, hunk, excerpt)| { + if range.start != range.end + && range.end == query_range.start + && !hunk.row_range.is_empty() + { + return None; + } let end_row = if range.end.column == 0 { range.end.row } else { range.end.row + 1 }; - MultiBufferDiffHunk { + Some(MultiBufferDiffHunk { row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row), buffer_id: excerpt.buffer_id, excerpt_id: excerpt.id, buffer_range: hunk.buffer_range.clone(), diff_base_byte_range: hunk.diff_base_byte_range.clone(), - } + }) }) } @@ -3560,8 +3582,8 @@ impl MultiBufferSnapshot { /// multi-buffer coordinates. 
fn lift_buffer_metadata<'a, D, M, I>( &'a self, - range: Range, - get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range) -> Option, + query_range: Range, + get_buffer_metadata: impl 'a + Fn(&'a BufferSnapshot, Range) -> Option, ) -> impl Iterator, M, &'a Excerpt)> + 'a where I: Iterator, M)> + 'a, @@ -3569,18 +3591,19 @@ impl MultiBufferSnapshot { { let max_position = D::from_text_summary(&self.text_summary()); let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None; - let mut cursor = self.cursor::>(); + let mut cursor = self.cursor::(); // Find the excerpt and buffer offset where the given range ends. - cursor.seek(&DimensionPair { - key: range.end, - value: None, - }); + cursor.seek(&query_range.end); let mut range_end = None; while let Some(region) = cursor.region() { if region.is_main_buffer { - let mut buffer_end = region.buffer_range.start.key; - let overshoot = range.end.saturating_sub(region.range.start.key); + let mut buffer_end = region.buffer_range.start; + let overshoot = if query_range.end > region.range.start { + query_range.end - region.range.start + } else { + D::default() + }; buffer_end.add_assign(&overshoot); range_end = Some((region.excerpt.id, buffer_end)); break; @@ -3588,13 +3611,10 @@ impl MultiBufferSnapshot { cursor.next(); } - cursor.seek(&DimensionPair { - key: range.start, - value: None, - }); + cursor.seek(&query_range.start); if let Some(region) = cursor.region().filter(|region| !region.is_main_buffer) { - if region.range.start.key > 0 { + if region.range.start > D::zero(&()) { cursor.prev() } } @@ -3613,14 +3633,18 @@ impl MultiBufferSnapshot { // and retrieve the metadata for the resulting range. else { let region = cursor.region()?; - let buffer_start = if region.is_main_buffer { - let start_overshoot = range.start.saturating_sub(region.range.start.key); - (region.buffer_range.start.key + start_overshoot) - .min(region.buffer_range.end.key) + let mut buffer_start; + if region.is_main_buffer { + buffer_start = region.buffer_range.start; + if query_range.start > region.range.start { + let overshoot = query_range.start - region.range.start; + buffer_start.add_assign(&overshoot); + } + buffer_start = buffer_start.min(region.buffer_range.end); } else { - cursor.main_buffer_position()?.key + buffer_start = cursor.main_buffer_position()?; }; - let mut buffer_end = excerpt.range.context.end.to_offset(&excerpt.buffer); + let mut buffer_end = excerpt.range.context.end.summary::(&excerpt.buffer); if let Some((end_excerpt_id, end_buffer_offset)) = range_end { if excerpt.id == end_excerpt_id { buffer_end = buffer_end.min(end_buffer_offset); @@ -3637,53 +3661,56 @@ impl MultiBufferSnapshot { }; // Visit each metadata item. - if let Some((range, metadata)) = metadata_iter.and_then(Iterator::next) { + if let Some((metadata_buffer_range, metadata)) = metadata_iter.and_then(Iterator::next) + { // Find the multibuffer regions that contain the start and end of // the metadata item's range. 
- if range.start > D::default() { + if metadata_buffer_range.start > D::default() { while let Some(region) = cursor.region() { - if !region.is_main_buffer - || region.buffer.remote_id() == excerpt.buffer_id - && region.buffer_range.end.value.unwrap() < range.start + if region.is_main_buffer + && (region.buffer_range.end >= metadata_buffer_range.start + || cursor.is_at_end_of_excerpt()) { - cursor.next(); - } else { break; } + cursor.next(); } } let start_region = cursor.region()?; while let Some(region) = cursor.region() { - if !region.is_main_buffer - || region.buffer.remote_id() == excerpt.buffer_id - && region.buffer_range.end.value.unwrap() <= range.end + if region.is_main_buffer + && (region.buffer_range.end > metadata_buffer_range.end + || cursor.is_at_end_of_excerpt()) { - cursor.next(); - } else { break; } + cursor.next(); } - let end_region = cursor - .region() - .filter(|region| region.buffer.remote_id() == excerpt.buffer_id); + let end_region = cursor.region(); // Convert the metadata item's range into multibuffer coordinates. - let mut start = start_region.range.start.value.unwrap(); - let region_buffer_start = start_region.buffer_range.start.value.unwrap(); - if start_region.is_main_buffer && range.start > region_buffer_start { - start.add_assign(&(range.start - region_buffer_start)); - } - let mut end = max_position; - if let Some(end_region) = end_region { - end = end_region.range.start.value.unwrap(); - debug_assert!(end_region.is_main_buffer); - let region_buffer_start = end_region.buffer_range.start.value.unwrap(); - if range.end > region_buffer_start { - end.add_assign(&(range.end - region_buffer_start)); - } + let mut start_position = start_region.range.start; + let region_buffer_start = start_region.buffer_range.start; + if start_region.is_main_buffer && metadata_buffer_range.start > region_buffer_start + { + start_position.add_assign(&(metadata_buffer_range.start - region_buffer_start)); + start_position = start_position.min(start_region.range.end); } - return Some((start..end, metadata, excerpt)); + let mut end_position = max_position; + if let Some(end_region) = &end_region { + end_position = end_region.range.start; + debug_assert!(end_region.is_main_buffer); + let region_buffer_start = end_region.buffer_range.start; + if metadata_buffer_range.end > region_buffer_start { + end_position.add_assign(&(metadata_buffer_range.end - region_buffer_start)); + } + end_position = end_position.min(end_region.range.end); + } + + if start_position <= query_range.end && end_position >= query_range.start { + return Some((start_position..end_position, metadata, excerpt)); + } } // When there are no more metadata items for this excerpt, move to the next excerpt. 
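The coordinate translation above is easier to follow in isolation: a region maps a slice of an underlying buffer into the combined view, and a metadata range given in buffer coordinates is shifted by the region's offset and clamped to the region's end. A deliberately simplified, self-contained model (the `Region` type and `lift_range` helper are illustrative only, not Zed's types):

```rust
#[derive(Clone, Copy)]
struct Region {
    view_start: usize,   // where the region begins in the combined view
    view_end: usize,     // where the region ends in the combined view
    buffer_start: usize, // the buffer offset the region starts at
}

fn lift_range(meta: std::ops::Range<usize>, region: Region) -> std::ops::Range<usize> {
    let start =
        (region.view_start + meta.start.saturating_sub(region.buffer_start)).min(region.view_end);
    let end =
        (region.view_start + meta.end.saturating_sub(region.buffer_start)).min(region.view_end);
    start..end
}

fn main() {
    // A region showing buffer offsets 10..30 at view offsets 100..120.
    let region = Region { view_start: 100, view_end: 120, buffer_start: 10 };
    // A metadata range at buffer offsets 25..40 lands at 115..120, clamped to the region.
    assert_eq!(lift_range(25..40, region), 115..120);
}
```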
else { @@ -4509,7 +4536,16 @@ impl MultiBufferSnapshot { } let excerpt_start_position = D::from_text_summary(&cursor.start().text); - if let Some(excerpt) = cursor.item().filter(|excerpt| excerpt.id == excerpt_id) { + if let Some(excerpt) = cursor.item() { + if excerpt.id != excerpt_id { + let position = self.resolve_summary_for_anchor( + &Anchor::min(), + excerpt_start_position, + &mut diff_transforms_cursor, + ); + summaries.extend(excerpt_anchors.map(|_| position)); + continue; + } let excerpt_buffer_start = excerpt.range.context.start.summary::(&excerpt.buffer); let excerpt_buffer_end = excerpt.range.context.end.summary::(&excerpt.buffer); @@ -5525,7 +5561,7 @@ impl MultiBufferSnapshot { buffer_id: BufferId, group_id: usize, ) -> impl Iterator> + '_ { - self.lift_buffer_metadata(0..self.len(), move |buffer, _| { + self.lift_buffer_metadata(Point::zero()..self.max_point(), move |buffer, _| { if buffer.remote_id() != buffer_id { return None; }; @@ -5538,15 +5574,19 @@ impl MultiBufferSnapshot { .map(|(range, diagnostic, _)| DiagnosticEntry { diagnostic, range }) } - pub fn diagnostics_in_range<'a, T, O>( + pub fn diagnostics_in_range<'a, T>( &'a self, range: Range, - ) -> impl Iterator> + 'a + ) -> impl Iterator> + 'a where - T: 'a + ToOffset, - O: 'a + text::FromAnchor + Copy + TextDimension + Ord + Sub + fmt::Debug, + T: 'a + + text::ToOffset + + text::FromAnchor + + TextDimension + + Ord + + Sub + + fmt::Debug, { - let range = range.start.to_offset(self)..range.end.to_offset(self); self.lift_buffer_metadata(range, move |buffer, buffer_range| { Some( buffer @@ -6036,6 +6076,24 @@ where self.cached_region.clone() } + fn is_at_end_of_excerpt(&mut self) -> bool { + if self.diff_transforms.end(&()).1 < self.excerpts.end(&()) { + return false; + } else if self.diff_transforms.end(&()).1 > self.excerpts.end(&()) + || self.diff_transforms.item().is_none() + { + return true; + } + + self.diff_transforms.next(&()); + let next_transform = self.diff_transforms.item(); + self.diff_transforms.prev(&()); + + next_transform.map_or(true, |next_transform| { + matches!(next_transform, DiffTransform::BufferContent { .. 
}) + }) + } + fn main_buffer_position(&self) -> Option { let excerpt = self.excerpts.item()?; let buffer = &excerpt.buffer; @@ -6879,6 +6937,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { if self.is_empty && self.point.row == 0 { self.point += Point::new(1, 0); return Some(RowInfo { + buffer_id: None, buffer_row: Some(0), multibuffer_row: Some(MultiBufferRow(0)), diff_status: None, @@ -6906,6 +6965,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { .to_point(&last_excerpt.buffer) .row; return Some(RowInfo { + buffer_id: Some(last_excerpt.buffer_id), buffer_row: Some(last_row), multibuffer_row: Some(multibuffer_row), diff_status: None, @@ -6919,6 +6979,7 @@ impl<'a> Iterator for MultiBufferRows<'a> { let overshoot = self.point - region.range.start; let buffer_point = region.buffer_range.start + overshoot; let result = Some(RowInfo { + buffer_id: Some(region.buffer.remote_id()), buffer_row: Some(buffer_point.row), multibuffer_row: Some(MultiBufferRow(self.point.row)), diff_status: if region.is_inserted_hunk && self.point < region.range.end { diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index d85e5eba5d..61094a1b4f 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -19,12 +19,14 @@ fn init_logger() { #[gpui::test] fn test_empty_singleton(cx: &mut App) { let buffer = cx.new(|cx| Buffer::local("", cx)); + let buffer_id = buffer.read(cx).remote_id(); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot.text(), ""); assert_eq!( snapshot.row_infos(MultiBufferRow(0)).collect::>(), [RowInfo { + buffer_id: Some(buffer_id), buffer_row: Some(0), multibuffer_row: Some(MultiBufferRow(0)), diff_status: None @@ -359,13 +361,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\n"; let text = "one\nthree\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_change_set(change_set, cx) @@ -382,7 +378,7 @@ fn test_diff_boundary_anchors(cx: &mut TestAppContext) { let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let actual_text = snapshot.text(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default()); + let actual_diff = format_diff(&actual_text, &actual_row_infos, &Default::default(), None); pretty_assertions::assert_eq!( actual_diff, indoc! 
{ @@ -409,13 +405,7 @@ fn test_diff_hunks_in_range(cx: &mut TestAppContext) { let base_text = "one\ntwo\nthree\nfour\nfive\nsix\nseven\neight\n"; let text = "one\nfour\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let snapshot = buffer.read(cx).snapshot(); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { (multibuffer.snapshot(cx), multibuffer.subscribe()) @@ -508,13 +498,7 @@ fn test_editing_text_in_diff_hunks(cx: &mut TestAppContext) { let base_text = "one\ntwo\nfour\nfive\nsix\nseven\n"; let text = "one\ntwo\nTHREE\nfour\nfive\nseven\n"; let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = cx.new(|cx| { - let mut change_set = BufferChangeSet::new(&buffer, cx); - let snapshot = buffer.read(cx).text_snapshot(); - let _ = change_set.set_base_text(base_text.into(), snapshot, cx); - change_set - }); - cx.run_until_parked(); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let (mut snapshot, mut subscription) = multibuffer.update(cx, |multibuffer, cx| { @@ -995,12 +979,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { let buffer = cx.new(|cx| Buffer::local("", cx)); let base_text = "a\nb\nc"; - let change_set = cx.new(|cx| { - let snapshot = buffer.read(cx).snapshot(); - let mut change_set = BufferChangeSet::new(&buffer, cx); - let _ = change_set.set_base_text(base_text.into(), snapshot.text, cx); - change_set - }); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_all_diff_hunks_expanded(cx); multibuffer.add_change_set(change_set.clone(), cx); @@ -1040,7 +1019,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.edit([(0..0, "a\nb\nc")], None, cx); change_set.update(cx, |change_set, cx| { - let _ = change_set.recalculate_diff(buffer.snapshot().text, cx); + change_set.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "a\nb\nc") }); @@ -1052,7 +1031,7 @@ fn test_empty_diff_excerpt(cx: &mut TestAppContext) { buffer.update(cx, |buffer, cx| { buffer.undo(cx); change_set.update(cx, |change_set, cx| { - let _ = change_set.recalculate_diff(buffer.snapshot().text, cx); + change_set.recalculate_diff_sync(buffer.snapshot().text, cx); }); assert_eq!(buffer.text(), "") }); @@ -1294,8 +1273,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -1485,8 +1463,8 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); // Recalculate the diff, changing the first diff hunk. 
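The synchronous helpers these tests switch to (`BufferChangeSet::new_with_base_text` and `recalculate_diff_sync`, used just below) avoid awaiting a background diff task. A minimal sketch of the pattern, assuming a gpui test context and made-up buffer contents:

```rust
let buffer = cx.new(|cx| Buffer::local("one\nthree\n", cx));
let change_set =
    cx.new(|cx| BufferChangeSet::new_with_base_text("one\ntwo\nthree\n", &buffer, cx));

// Edit the buffer, then recompute the diff in place.
buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "TWO\n")], None, cx));
change_set.update(cx, |change_set, cx| {
    change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx);
});
```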
- let _ = change_set.update(cx, |change_set, cx| { - change_set.recalculate_diff(buffer.read(cx).text_snapshot(), cx) + change_set.update(cx, |change_set, cx| { + change_set.recalculate_diff_sync(buffer.read(cx).text_snapshot(), cx); }); cx.run_until_parked(); assert_new_snapshot( @@ -1538,8 +1516,7 @@ fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { ); let buffer = cx.new(|cx| Buffer::local(text, cx)); - let change_set = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text.to_string(), &buffer, cx)); + let change_set = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text, &buffer, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -1840,10 +1817,8 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let buffer_1 = cx.new(|cx| Buffer::local(text_1, cx)); let buffer_2 = cx.new(|cx| Buffer::local(text_2, cx)); - let change_set_1 = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1.to_string(), &buffer_1, cx)); - let change_set_2 = - cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2.to_string(), &buffer_2, cx)); + let change_set_1 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_1, &buffer_1, cx)); + let change_set_2 = cx.new(|cx| BufferChangeSet::new_with_base_text(base_text_2, &buffer_2, cx)); cx.run_until_parked(); let multibuffer = cx.new(|cx| { @@ -2028,6 +2003,7 @@ struct ReferenceMultibuffer { change_sets: HashMap>, } +#[derive(Debug)] struct ReferenceExcerpt { id: ExcerptId, buffer: Entity, @@ -2037,6 +2013,7 @@ struct ReferenceExcerpt { #[derive(Debug)] struct ReferenceRegion { + buffer_id: Option, range: Range, buffer_start: Option, status: Option, @@ -2117,37 +2094,26 @@ impl ReferenceMultibuffer { }; let diff = change_set.read(cx).diff_to_buffer.clone(); let excerpt_range = excerpt.range.to_offset(&buffer); - if excerpt_range.is_empty() { - return; - } for hunk in diff.hunks_intersecting_range(range, &buffer) { let hunk_range = hunk.buffer_range.to_offset(&buffer); - let hunk_precedes_excerpt = hunk - .buffer_range - .end - .cmp(&excerpt.range.start, &buffer) - .is_lt(); - let hunk_follows_excerpt = hunk - .buffer_range - .start - .cmp(&excerpt.range.end, &buffer) - .is_ge(); - if hunk_precedes_excerpt || hunk_follows_excerpt { + if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end { continue; } - if let Err(ix) = excerpt .expanded_diff_hunks .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer)) { log::info!( - "expanding diff hunk {:?}. excerpt: {:?}", + "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}", hunk_range, + excerpt_id, excerpt_range ); excerpt .expanded_diff_hunks .insert(ix, hunk.buffer_range.start); + } else { + log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}"); } } } @@ -2170,17 +2136,12 @@ impl ReferenceMultibuffer { .peekable(); while let Some(hunk) = hunks.next() { - if !hunk.buffer_range.start.is_valid(&buffer) { - continue; - } - // Ignore hunks that are outside the excerpt range. 
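Spelled out, the rule the reference model applies here is: a hunk is attributed to an excerpt when its start falls inside the excerpt's buffer range, and a hunk that runs past the end is clamped rather than dropped. A self-contained sketch with illustrative names (not the real types):

```rust
fn hunk_in_excerpt(
    hunk: std::ops::Range<usize>,
    excerpt: std::ops::Range<usize>,
) -> Option<std::ops::Range<usize>> {
    // Skip hunks that start outside the excerpt; keep and clamp the rest.
    if hunk.start < excerpt.start || hunk.start > excerpt.end {
        return None;
    }
    Some(hunk.start..hunk.end.min(excerpt.end))
}

fn main() {
    assert_eq!(hunk_in_excerpt(5..20, 0..10), Some(5..10)); // clamped to the excerpt
    assert_eq!(hunk_in_excerpt(12..20, 0..10), None); // starts after the excerpt ends
}
```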
let mut hunk_range = hunk.buffer_range.to_offset(buffer); + hunk_range.end = hunk_range.end.min(buffer_range.end); - if hunk_range.start > buffer_range.end - || hunk_range.end < buffer_range.start - || buffer_range.is_empty() - { + if hunk_range.start > buffer_range.end || hunk_range.start < buffer_range.start { + log::trace!("skipping hunk outside excerpt range"); continue; } @@ -2188,6 +2149,12 @@ impl ReferenceMultibuffer { expanded_anchor.to_offset(&buffer).max(buffer_range.start) == hunk_range.start.max(buffer_range.start) }) { + log::trace!("skipping a hunk that's not marked as expanded"); + continue; + } + + if !hunk.buffer_range.start.is_valid(&buffer) { + log::trace!("skipping hunk with deleted start: {:?}", hunk.row_range); continue; } @@ -2196,6 +2163,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.extend(buffer.text_for_range(offset..hunk_range.start)); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: None, @@ -2212,6 +2180,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.push_str(&base_text); regions.push(ReferenceRegion { + buffer_id: Some(base_buffer.remote_id()), range: len..text.len(), buffer_start: Some( base_buffer.offset_to_point(hunk.diff_base_byte_range.start), @@ -2228,6 +2197,7 @@ impl ReferenceMultibuffer { let len = text.len(); text.extend(buffer.text_for_range(offset..hunk_range.end)); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: Some(DiffHunkStatus::Added), @@ -2241,6 +2211,7 @@ impl ReferenceMultibuffer { text.extend(buffer.text_for_range(offset..buffer_range.end)); text.push('\n'); regions.push(ReferenceRegion { + buffer_id: Some(buffer.remote_id()), range: len..text.len(), buffer_start: Some(buffer.offset_to_point(offset)), status: None, @@ -2250,6 +2221,7 @@ impl ReferenceMultibuffer { // Remove final trailing newline. 
if self.excerpts.is_empty() { regions.push(ReferenceRegion { + buffer_id: None, range: 0..1, buffer_start: Some(Point::new(0, 0)), status: None, @@ -2273,6 +2245,7 @@ impl ReferenceMultibuffer { + text[region.range.start..ix].matches('\n').count() as u32 }); RowInfo { + buffer_id: region.buffer_id, diff_status: region.status, buffer_row, multibuffer_row: Some(MultiBufferRow( @@ -2348,6 +2321,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { buffer.update(cx, |buf, cx| { let edit_count = rng.gen_range(1..5); buf.randomly_edit(&mut rng, edit_count, cx); + log::info!("buffer text:\n{}", buf.text()); needs_diff_calculation = true; }); cx.update(|cx| reference.diffs_updated(cx)); @@ -2440,7 +2414,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap() ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap(); - log::info!("expanding diff hunks for excerpt {:?}", excerpt_ix); + log::info!( + "expanding diff hunks in range {:?} (excerpt id {:?}) index {excerpt_ix:?})", + range.to_offset(&snapshot), + excerpt.id + ); reference.expand_diff_hunks(excerpt.id, start..end, cx); multibuffer.expand_diff_hunks(vec![range], cx); }); @@ -2457,7 +2435,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { "recalculating diff for buffer {:?}", snapshot.remote_id(), ); - change_set.recalculate_diff(snapshot.text, cx) + change_set.recalculate_diff_sync(snapshot.text, cx); }); } reference.diffs_updated(cx); @@ -2471,14 +2449,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { .collect::(); let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx)); - let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx)); - change_set - .update(cx, |change_set, cx| { - let snapshot = buffer.read(cx).snapshot(); - change_set.set_base_text(base_text, snapshot.text, cx) - }) - .await - .unwrap(); + let change_set = + cx.new(|cx| BufferChangeSet::new_with_base_text(&base_text, &buffer, cx)); multibuffer.update(cx, |multibuffer, cx| { reference.add_change_set(change_set.clone(), cx); @@ -2553,12 +2525,28 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { .filter_map(|b| if b.next.is_some() { Some(b.row) } else { None }) .collect::>(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let actual_diff = format_diff(&actual_text, &actual_row_infos, &actual_boundary_rows); let (expected_text, expected_row_infos, expected_boundary_rows) = cx.update(|cx| reference.expected_content(cx)); - let expected_diff = - format_diff(&expected_text, &expected_row_infos, &expected_boundary_rows); + + let has_diff = actual_row_infos + .iter() + .any(|info| info.diff_status.is_some()) + || expected_row_infos + .iter() + .any(|info| info.diff_status.is_some()); + let actual_diff = format_diff( + &actual_text, + &actual_row_infos, + &actual_boundary_rows, + Some(has_diff), + ); + let expected_diff = format_diff( + &expected_text, + &expected_row_infos, + &expected_boundary_rows, + Some(has_diff), + ); log::info!("Multibuffer content:\n{}", actual_diff); @@ -2569,8 +2557,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { actual_text.split('\n').count() ); pretty_assertions::assert_eq!(actual_diff, expected_diff); - pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos); pretty_assertions::assert_eq!(actual_text, expected_text); + 
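The row-info assertions here compare per-row metadata that now includes the originating buffer id. A small sketch of reading it from a snapshot, assuming an `&mut App` test context and an existing `multibuffer` entity:

```rust
let snapshot = multibuffer.read(cx).snapshot(cx);
for info in snapshot.row_infos(MultiBufferRow(0)) {
    // Each rendered row reports the buffer it came from (if any), the row
    // within that buffer, and whether it belongs to an expanded diff hunk.
    println!(
        "{:?}: buffer {:?}, buffer row {:?}, diff status {:?}",
        info.multibuffer_row, info.buffer_id, info.buffer_row, info.diff_status
    );
}
```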
pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos); for _ in 0..5 { let start_row = rng.gen_range(0..=expected_row_infos.len()); @@ -2937,8 +2925,10 @@ fn format_diff( text: &str, row_infos: &Vec, boundary_rows: &HashSet, + has_diff: Option, ) -> String { - let has_diff = row_infos.iter().any(|info| info.diff_status.is_some()); + let has_diff = + has_diff.unwrap_or_else(|| row_infos.iter().any(|info| info.diff_status.is_some())); text.split('\n') .enumerate() .zip(row_infos) @@ -3002,7 +2992,7 @@ fn assert_new_snapshot( let line_infos = new_snapshot .row_infos(MultiBufferRow(0)) .collect::>(); - let actual_diff = format_diff(&actual_text, &line_infos, &Default::default()); + let actual_diff = format_diff(&actual_text, &line_infos, &Default::default(), None); pretty_assertions::assert_eq!(actual_diff, expected_diff); check_edits( snapshot, diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 921bc0adfd..4128990f8c 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -9,7 +9,11 @@ use anyhow::{anyhow, bail, Context as _, Result}; use client::Client; use collections::{hash_map, HashMap, HashSet}; use fs::Fs; -use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt}; +use futures::{ + channel::oneshot, + future::{OptionFuture, Shared}, + Future, FutureExt as _, StreamExt, +}; use git::{blame::Blame, diff::BufferDiff, repository::RepoPath}; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, @@ -34,18 +38,26 @@ use std::{ sync::Arc, time::Instant, }; -use text::{BufferId, LineEnding, Rope}; +use text::{BufferId, Rope}; use util::{debug_panic, maybe, ResultExt as _, TryFutureExt}; use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId}; +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +enum ChangeSetKind { + Unstaged, + Uncommitted, +} + /// A set of open buffers. 
pub struct BufferStore { state: BufferStoreState, #[allow(clippy::type_complexity)] loading_buffers: HashMap, Arc>>>>, #[allow(clippy::type_complexity)] - loading_change_sets: - HashMap, Arc>>>>, + loading_change_sets: HashMap< + (BufferId, ChangeSetKind), + Shared, Arc>>>, + >, worktree_store: Entity, opened_buffers: HashMap, downstream_client: Option<(AnyProtoClient, u64)>, @@ -55,18 +67,293 @@ pub struct BufferStore { #[derive(Hash, Eq, PartialEq, Clone)] struct SharedBuffer { buffer: Entity, - unstaged_changes: Option>, + change_set: Option>, lsp_handle: Option, } +#[derive(Default)] +struct BufferChangeSetState { + unstaged_changes: Option>, + uncommitted_changes: Option>, + recalculate_diff_task: Option>>, + language: Option>, + language_registry: Option>, + diff_updated_futures: Vec>, + buffer_subscription: Option, + + head_text: Option>, + index_text: Option>, + head_changed: bool, + index_changed: bool, +} + +#[derive(Clone, Debug)] +enum DiffBasesChange { + SetIndex(Option), + SetHead(Option), + SetEach { + index: Option, + head: Option, + }, + SetBoth(Option), +} + +impl BufferChangeSetState { + fn buffer_language_changed(&mut self, buffer: Entity, cx: &mut Context) { + self.language = buffer.read(cx).language().cloned(); + self.index_changed = self.index_text.is_some(); + self.head_changed = self.head_text.is_some(); + let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx); + } + + fn unstaged_changes(&self) -> Option> { + self.unstaged_changes.as_ref().and_then(|set| set.upgrade()) + } + + fn uncommitted_changes(&self) -> Option> { + self.uncommitted_changes + .as_ref() + .and_then(|set| set.upgrade()) + } + + fn handle_base_texts_updated( + &mut self, + buffer: text::BufferSnapshot, + message: proto::UpdateDiffBases, + cx: &mut Context, + ) { + use proto::update_diff_bases::Mode; + + let Some(mode) = Mode::from_i32(message.mode) else { + return; + }; + + let diff_bases_change = match mode { + Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text), + Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text), + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.staged_text), + Mode::IndexAndHead => DiffBasesChange::SetEach { + index: message.staged_text, + head: message.committed_text, + }, + }; + + let _ = self.diff_bases_changed(buffer, diff_bases_change, cx); + } + + fn diff_bases_changed( + &mut self, + buffer: text::BufferSnapshot, + diff_bases_change: DiffBasesChange, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + match diff_bases_change { + DiffBasesChange::SetIndex(index) => { + self.index_text = index.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.index_changed = true; + } + DiffBasesChange::SetHead(head) => { + self.head_text = head.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_changed = true; + } + DiffBasesChange::SetBoth(mut text) => { + if let Some(text) = text.as_mut() { + text::LineEnding::normalize(text); + } + self.head_text = text.map(Arc::new); + self.index_text = self.head_text.clone(); + self.head_changed = true; + self.index_changed = true; + } + DiffBasesChange::SetEach { index, head } => { + self.index_text = index.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_text = head.map(|mut text| { + text::LineEnding::normalize(&mut text); + Arc::new(text) + }); + self.head_changed = true; + self.index_changed = true; + } + } + + self.recalculate_diffs(buffer, cx) + } + + fn 
recalculate_diffs( + &mut self, + buffer: text::BufferSnapshot, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + let (tx, rx) = oneshot::channel(); + self.diff_updated_futures.push(tx); + + let language = self.language.clone(); + let language_registry = self.language_registry.clone(); + let unstaged_changes = self.unstaged_changes(); + let uncommitted_changes = self.uncommitted_changes(); + let head = self.head_text.clone(); + let index = self.index_text.clone(); + let index_changed = self.index_changed; + let head_changed = self.head_changed; + let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) { + (Some(index), Some(head)) => Arc::ptr_eq(index, head), + (None, None) => true, + _ => false, + }; + self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { + let snapshot = if index_changed { + let snapshot = cx.update(|cx| { + index.as_ref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + cx.background_executor() + .spawn(OptionFuture::from(snapshot)) + .await + } else if let Some(unstaged_changes) = &unstaged_changes { + unstaged_changes.read_with(&cx, |change_set, _| change_set.base_text.clone())? + } else if let Some(uncommitted_changes) = &uncommitted_changes { + uncommitted_changes + .read_with(&cx, |change_set, _| change_set.staged_text.clone())? + } else { + return Ok(()); + }; + + if let Some(unstaged_changes) = &unstaged_changes { + let diff = cx + .background_executor() + .spawn({ + let buffer = buffer.clone(); + async move { + BufferDiff::build(index.as_ref().map(|index| index.as_str()), &buffer) + } + }) + .await; + + unstaged_changes.update(&mut cx, |unstaged_changes, cx| { + unstaged_changes.set_state(snapshot.clone(), diff, &buffer, cx); + })?; + + if let Some(uncommitted_changes) = &uncommitted_changes { + uncommitted_changes.update(&mut cx, |uncommitted_changes, _| { + uncommitted_changes.staged_text = snapshot; + })?; + } + } + + if let Some(uncommitted_changes) = &uncommitted_changes { + let (snapshot, diff) = if let (Some(unstaged_changes), true) = + (&unstaged_changes, index_matches_head) + { + unstaged_changes.read_with(&cx, |change_set, _| { + ( + change_set.base_text.clone(), + change_set.diff_to_buffer.clone(), + ) + })? 
+ } else { + let snapshot = cx.update(|cx| { + head.as_deref().map(|head| { + language::Buffer::build_snapshot( + Rope::from(head.as_str()), + language.clone(), + language_registry.clone(), + cx, + ) + }) + })?; + let snapshot = cx.background_executor().spawn(OptionFuture::from(snapshot)); + let diff = cx.background_executor().spawn({ + let buffer = buffer.clone(); + let head = head.clone(); + async move { + BufferDiff::build(head.as_ref().map(|head| head.as_str()), &buffer) + } + }); + futures::join!(snapshot, diff) + }; + + uncommitted_changes.update(&mut cx, |change_set, cx| { + change_set.set_state(snapshot, diff, &buffer, cx); + })?; + + if index_changed || head_changed { + let staged_text = uncommitted_changes + .read_with(&cx, |change_set, _| change_set.staged_text.clone())?; + + let diff = if index_matches_head { + staged_text.as_ref().map(|buffer| BufferDiff::new(buffer)) + } else if let Some(staged_text) = staged_text { + Some( + cx.background_executor() + .spawn(async move { + BufferDiff::build( + head.as_ref().map(|head| head.as_str()), + &staged_text, + ) + }) + .await, + ) + } else { + None + }; + + uncommitted_changes.update(&mut cx, |change_set, _| { + change_set.staged_diff = diff; + })?; + } + } + + if let Some(this) = this.upgrade() { + this.update(&mut cx, |this, _| { + this.index_changed = false; + this.head_changed = false; + for tx in this.diff_updated_futures.drain(..) { + tx.send(()).ok(); + } + })?; + } + + Ok(()) + })); + + rx + } +} + pub struct BufferChangeSet { pub buffer_id: BufferId, pub base_text: Option, - pub language: Option>, - pub diff_to_buffer: git::diff::BufferDiff, - pub recalculate_diff_task: Option>>, - pub diff_updated_futures: Vec>, - pub language_registry: Option>, + pub diff_to_buffer: BufferDiff, + pub staged_text: Option, + // For an uncommitted changeset, this is the diff between HEAD and the index. + pub staged_diff: Option, +} + +impl std::fmt::Debug for BufferChangeSet { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BufferChangeSet") + .field("buffer_id", &self.buffer_id) + .field("base_text", &self.base_text.as_ref().map(|s| s.text())) + .field("diff_to_buffer", &self.diff_to_buffer) + .field("staged_text", &self.staged_text.as_ref().map(|s| s.text())) + .field("staged_diff", &self.staged_diff) + .finish() + } } pub enum BufferChangeSetEvent { @@ -98,7 +385,7 @@ struct LocalBufferStore { enum OpenBuffer { Complete { buffer: WeakEntity, - unstaged_changes: Option>, + change_set_state: Entity, }, Operations(Vec), } @@ -118,19 +405,48 @@ pub struct ProjectTransaction(pub HashMap, language::Transaction> impl EventEmitter for BufferStore {} impl RemoteBufferStore { - fn load_staged_text(&self, buffer_id: BufferId, cx: &App) -> Task>> { + fn open_unstaged_changes(&self, buffer_id: BufferId, cx: &App) -> Task>> { let project_id = self.project_id; let client = self.upstream_client.clone(); cx.background_executor().spawn(async move { - Ok(client - .request(proto::GetStagedText { + let response = client + .request(proto::OpenUnstagedChanges { project_id, buffer_id: buffer_id.to_proto(), }) - .await? 
- .staged_text) + .await?; + Ok(response.staged_text) }) } + + fn open_uncommitted_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Task> { + use proto::open_uncommitted_changes_response::Mode; + + let project_id = self.project_id; + let client = self.upstream_client.clone(); + cx.background_executor().spawn(async move { + let response = client + .request(proto::OpenUncommittedChanges { + project_id, + buffer_id: buffer_id.to_proto(), + }) + .await?; + let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?; + let bases = match mode { + Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.staged_text), + Mode::IndexAndHead => DiffBasesChange::SetEach { + head: response.committed_text, + index: response.staged_text, + }, + }; + Ok(bases) + }) + } + pub fn wait_for_remote_buffer( &mut self, id: BufferId, @@ -398,21 +714,39 @@ impl RemoteBufferStore { } impl LocalBufferStore { - fn load_staged_text(&self, buffer: &Entity, cx: &App) -> Task>> { - let Some(file) = buffer.read(cx).file() else { - return Task::ready(Ok(None)); - }; + fn worktree_for_buffer( + &self, + buffer: &Entity, + cx: &App, + ) -> Option<(Entity, Arc)> { + let file = buffer.read(cx).file()?; let worktree_id = file.worktree_id(cx); let path = file.path().clone(); - let Some(worktree) = self + let worktree = self .worktree_store .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Task::ready(Err(anyhow!("no such worktree"))); - }; + .worktree_for_id(worktree_id, cx)?; + Some((worktree, path)) + } - worktree.read(cx).load_staged_file(path.as_ref(), cx) + fn load_staged_text(&self, buffer: &Entity, cx: &App) -> Task>> { + if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) { + worktree.read(cx).load_staged_file(path.as_ref(), cx) + } else { + return Task::ready(Err(anyhow!("no such worktree"))); + } + } + + fn load_committed_text( + &self, + buffer: &Entity, + cx: &App, + ) -> Task>> { + if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) { + worktree.read(cx).load_committed_file(path.as_ref(), cx) + } else { + Task::ready(Err(anyhow!("no such worktree"))) + } } fn save_local_buffer( @@ -526,74 +860,145 @@ impl LocalBufferStore { ) { debug_assert!(worktree_handle.read(cx).is_local()); - let buffer_change_sets = this - .opened_buffers - .values() - .filter_map(|buffer| { - if let OpenBuffer::Complete { - buffer, - unstaged_changes, - } = buffer - { - let buffer = buffer.upgrade()?.read(cx); - let file = File::from_dyn(buffer.file())?; - if file.worktree != worktree_handle { - return None; - } - changed_repos - .iter() - .find(|(work_dir, _)| file.path.starts_with(work_dir))?; - let unstaged_changes = unstaged_changes.as_ref()?.upgrade()?; - let snapshot = buffer.text_snapshot(); - Some((unstaged_changes, snapshot, file.path.clone())) - } else { - None - } - }) - .collect::>(); + let mut change_set_state_updates = Vec::new(); + for buffer in this.opened_buffers.values() { + let OpenBuffer::Complete { + buffer, + change_set_state, + } = buffer + else { + continue; + }; + let Some(buffer) = buffer.upgrade() else { + continue; + }; + let buffer = buffer.read(cx); + let Some(file) = File::from_dyn(buffer.file()) else { + continue; + }; + if file.worktree != worktree_handle { + continue; + } + let change_set_state = change_set_state.read(cx); + if changed_repos + .iter() + .any(|(work_dir, _)| file.path.starts_with(work_dir)) + { + let snapshot = buffer.text_snapshot(); + change_set_state_updates.push(( + snapshot.clone(), + file.path.clone(), + 
change_set_state + .unstaged_changes + .as_ref() + .and_then(|set| set.upgrade()) + .is_some(), + change_set_state + .uncommitted_changes + .as_ref() + .and_then(|set| set.upgrade()) + .is_some(), + )) + } + } - if buffer_change_sets.is_empty() { + if change_set_state_updates.is_empty() { return; } cx.spawn(move |this, mut cx| async move { let snapshot = worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?; - let diff_bases_by_buffer = cx + let diff_bases_changes_by_buffer = cx .background_executor() .spawn(async move { - buffer_change_sets + change_set_state_updates .into_iter() - .filter_map(|(change_set, buffer_snapshot, path)| { - let local_repo = snapshot.local_repo_for_path(&path)?; - let relative_path = local_repo.relativize(&path).ok()?; - let base_text = local_repo.repo().load_index_text(&relative_path); - Some((change_set, buffer_snapshot, base_text)) - }) + .filter_map( + |(buffer_snapshot, path, needs_staged_text, needs_committed_text)| { + let local_repo = snapshot.local_repo_for_path(&path)?; + let relative_path = local_repo.relativize(&path).ok()?; + let staged_text = if needs_staged_text { + local_repo.repo().load_index_text(&relative_path) + } else { + None + }; + let committed_text = if needs_committed_text { + local_repo.repo().load_committed_text(&relative_path) + } else { + None + }; + let diff_bases_change = + match (needs_staged_text, needs_committed_text) { + (true, true) => Some(if staged_text == committed_text { + DiffBasesChange::SetBoth(staged_text) + } else { + DiffBasesChange::SetEach { + index: staged_text, + head: committed_text, + } + }), + (true, false) => { + Some(DiffBasesChange::SetIndex(staged_text)) + } + (false, true) => { + Some(DiffBasesChange::SetHead(committed_text)) + } + (false, false) => None, + }; + Some((buffer_snapshot, diff_bases_change)) + }, + ) .collect::>() }) .await; this.update(&mut cx, |this, cx| { - for (change_set, buffer_snapshot, staged_text) in diff_bases_by_buffer { - change_set.update(cx, |change_set, cx| { - if let Some(staged_text) = staged_text.clone() { - let _ = - change_set.set_base_text(staged_text, buffer_snapshot.clone(), cx); - } else { - change_set.unset_base_text(buffer_snapshot.clone(), cx); - } - }); + for (buffer_snapshot, diff_bases_change) in diff_bases_changes_by_buffer { + let Some(OpenBuffer::Complete { + change_set_state, .. 
+ }) = this.opened_buffers.get_mut(&buffer_snapshot.remote_id()) + else { + continue; + }; + let Some(diff_bases_change) = diff_bases_change else { + continue; + }; - if let Some((client, project_id)) = &this.downstream_client.clone() { - client - .send(proto::UpdateDiffBase { + change_set_state.update(cx, |change_set_state, cx| { + use proto::update_diff_bases::Mode; + + if let Some((client, project_id)) = this.downstream_client.as_ref() { + let buffer_id = buffer_snapshot.remote_id().to_proto(); + let (staged_text, committed_text, mode) = match diff_bases_change + .clone() + { + DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly), + DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly), + DiffBasesChange::SetEach { index, head } => { + (index, head, Mode::IndexAndHead) + } + DiffBasesChange::SetBoth(text) => { + (text, None, Mode::IndexMatchesHead) + } + }; + let message = proto::UpdateDiffBases { project_id: *project_id, - buffer_id: buffer_snapshot.remote_id().to_proto(), + buffer_id, staged_text, - }) - .log_err(); - } + committed_text, + mode: mode as i32, + }; + + client.send(message).log_err(); + } + + let _ = change_set_state.diff_bases_changed( + buffer_snapshot, + diff_bases_change, + cx, + ); + }); } }) }) @@ -898,8 +1303,9 @@ impl BufferStore { client.add_entity_request_handler(Self::handle_blame_buffer); client.add_entity_request_handler(Self::handle_reload_buffers); client.add_entity_request_handler(Self::handle_get_permalink_to_line); - client.add_entity_request_handler(Self::handle_get_staged_text); - client.add_entity_message_handler(Self::handle_update_diff_base); + client.add_entity_request_handler(Self::handle_open_unstaged_changes); + client.add_entity_request_handler(Self::handle_open_uncommitted_changes); + client.add_entity_message_handler(Self::handle_update_diff_bases); } /// Creates a buffer store, optionally retaining its buffers. 
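Before the host broadcasts `UpdateDiffBases`, it collapses the two base texts whenever they agree, so the `IndexMatchesHead` mode lets guests build one shared base-text snapshot instead of two. A sketch of that decision as a hypothetical free function (the real logic lives inline in the repo-change handler above and in `open_uncommitted_changes` below):

```rust
// Illustrative helper: pick the smallest DiffBasesChange that still lets the
// receiving side reconstruct both the unstaged and uncommitted diff bases.
fn choose_diff_bases(
    staged_text: Option<String>,
    committed_text: Option<String>,
) -> DiffBasesChange {
    if staged_text == committed_text {
        // One text (or None) stands in for both the index and HEAD.
        DiffBasesChange::SetBoth(staged_text)
    } else {
        DiffBasesChange::SetEach {
            index: staged_text,
            head: committed_text,
        }
    }
}
```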
@@ -1022,24 +1428,93 @@ impl BufferStore { cx: &mut Context, ) -> Task>> { let buffer_id = buffer.read(cx).remote_id(); - if let Some(change_set) = self.get_unstaged_changes(buffer_id) { + if let Some(change_set) = self.get_unstaged_changes(buffer_id, cx) { return Task::ready(Ok(change_set)); } - let task = match self.loading_change_sets.entry(buffer_id) { + let task = match self + .loading_change_sets + .entry((buffer_id, ChangeSetKind::Unstaged)) + { hash_map::Entry::Occupied(e) => e.get().clone(), hash_map::Entry::Vacant(entry) => { - let load = match &self.state { + let staged_text = match &self.state { BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx), - BufferStoreState::Remote(this) => this.load_staged_text(buffer_id, cx), + BufferStoreState::Remote(this) => this.open_unstaged_changes(buffer_id, cx), }; entry .insert( cx.spawn(move |this, cx| async move { - Self::open_unstaged_changes_internal(this, load.await, buffer, cx) - .await - .map_err(Arc::new) + Self::open_change_set_internal( + this, + ChangeSetKind::Unstaged, + staged_text.await.map(DiffBasesChange::SetIndex), + buffer, + cx, + ) + .await + .map_err(Arc::new) + }) + .shared(), + ) + .clone() + } + }; + + cx.background_executor() + .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) }) + } + + pub fn open_uncommitted_changes( + &mut self, + buffer: Entity, + cx: &mut Context, + ) -> Task>> { + let buffer_id = buffer.read(cx).remote_id(); + if let Some(change_set) = self.get_uncommitted_changes(buffer_id, cx) { + return Task::ready(Ok(change_set)); + } + + let task = match self + .loading_change_sets + .entry((buffer_id, ChangeSetKind::Uncommitted)) + { + hash_map::Entry::Occupied(e) => e.get().clone(), + hash_map::Entry::Vacant(entry) => { + let changes = match &self.state { + BufferStoreState::Local(this) => { + let committed_text = this.load_committed_text(&buffer, cx); + let staged_text = this.load_staged_text(&buffer, cx); + cx.background_executor().spawn(async move { + let committed_text = committed_text.await?; + let staged_text = staged_text.await?; + let diff_bases_change = if committed_text == staged_text { + DiffBasesChange::SetBoth(committed_text) + } else { + DiffBasesChange::SetEach { + index: staged_text, + head: committed_text, + } + }; + Ok(diff_bases_change) + }) + } + BufferStoreState::Remote(this) => this.open_uncommitted_changes(buffer_id, cx), + }; + + entry + .insert( + cx.spawn(move |this, cx| async move { + Self::open_change_set_internal( + this, + ChangeSetKind::Uncommitted, + changes.await, + buffer, + cx, + ) + .await + .map_err(Arc::new) }) .shared(), ) @@ -1052,52 +1527,83 @@ impl BufferStore { } #[cfg(any(test, feature = "test-support"))] - pub fn set_change_set(&mut self, buffer_id: BufferId, change_set: Entity) { - self.loading_change_sets - .insert(buffer_id, Task::ready(Ok(change_set)).shared()); + pub fn set_unstaged_change_set( + &mut self, + buffer_id: BufferId, + change_set: Entity, + ) { + self.loading_change_sets.insert( + (buffer_id, ChangeSetKind::Unstaged), + Task::ready(Ok(change_set)).shared(), + ); } - pub async fn open_unstaged_changes_internal( + async fn open_change_set_internal( this: WeakEntity, - text: Result>, + kind: ChangeSetKind, + texts: Result, buffer: Entity, mut cx: AsyncApp, ) -> Result> { - let text = match text { + let diff_bases_change = match texts { Err(e) => { this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&buffer_id); + this.loading_change_sets.remove(&(buffer_id, 
kind)); })?; return Err(e); } - Ok(text) => text, + Ok(change) => change, }; - let change_set = cx.new(|cx| BufferChangeSet::new(&buffer, cx)).unwrap(); - - if let Some(text) = text { - change_set - .update(&mut cx, |change_set, cx| { - let snapshot = buffer.read(cx).text_snapshot(); - change_set.set_base_text(text, snapshot, cx) - })? - .await - .ok(); - } - this.update(&mut cx, |this, cx| { let buffer_id = buffer.read(cx).remote_id(); - this.loading_change_sets.remove(&buffer_id); + this.loading_change_sets.remove(&(buffer_id, kind)); + if let Some(OpenBuffer::Complete { - unstaged_changes, .. + change_set_state, .. }) = this.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { - *unstaged_changes = Some(change_set.downgrade()); - } - })?; + change_set_state.update(cx, |change_set_state, cx| { + let buffer_id = buffer.read(cx).remote_id(); + change_set_state.buffer_subscription.get_or_insert_with(|| { + cx.subscribe(&buffer, |this, buffer, event, cx| match event { + BufferEvent::LanguageChanged => { + this.buffer_language_changed(buffer, cx) + } + _ => {} + }) + }); - Ok(change_set) + let change_set = cx.new(|cx| BufferChangeSet { + buffer_id, + base_text: None, + diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), + staged_text: None, + staged_diff: None, + }); + match kind { + ChangeSetKind::Unstaged => { + change_set_state.unstaged_changes = Some(change_set.downgrade()) + } + ChangeSetKind::Uncommitted => { + change_set_state.uncommitted_changes = Some(change_set.downgrade()) + } + }; + + let buffer = buffer.read(cx).text_snapshot(); + let rx = change_set_state.diff_bases_changed(buffer, diff_bases_change, cx); + + Ok(async move { + rx.await.ok(); + Ok(change_set) + }) + }) + } else { + Err(anyhow!("buffer was closed")) + } + })?? + .await } pub fn create_buffer(&mut self, cx: &mut Context) -> Task>> { @@ -1303,7 +1809,7 @@ impl BufferStore { let is_remote = buffer.read(cx).replica_id() != 0; let open_buffer = OpenBuffer::Complete { buffer: buffer.downgrade(), - unstaged_changes: None, + change_set_state: cx.new(|_| BufferChangeSetState::default()), }; let handle = cx.entity().downgrade(); @@ -1384,12 +1890,39 @@ impl BufferStore { }) } - pub fn get_unstaged_changes(&self, buffer_id: BufferId) -> Option> { + pub fn get_unstaged_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Option> { if let OpenBuffer::Complete { - unstaged_changes, .. + change_set_state, .. } = self.opened_buffers.get(&buffer_id)? { - unstaged_changes.as_ref()?.upgrade() + change_set_state + .read(cx) + .unstaged_changes + .as_ref()? + .upgrade() + } else { + None + } + } + + pub fn get_uncommitted_changes( + &self, + buffer_id: BufferId, + cx: &App, + ) -> Option> { + if let OpenBuffer::Complete { + change_set_state, .. + } = self.opened_buffers.get(&buffer_id)? + { + change_set_state + .read(cx) + .uncommitted_changes + .as_ref()? + .upgrade() } else { None } @@ -1509,21 +2042,14 @@ impl BufferStore { ) -> impl Future { let mut futures = Vec::new(); for buffer in buffers { - let buffer = buffer.read(cx).text_snapshot(); if let Some(OpenBuffer::Complete { - unstaged_changes, .. - }) = self.opened_buffers.get_mut(&buffer.remote_id()) + change_set_state, .. 
+ }) = self.opened_buffers.get_mut(&buffer.read(cx).remote_id()) { - if let Some(unstaged_changes) = unstaged_changes - .as_ref() - .and_then(|changes| changes.upgrade()) - { - unstaged_changes.update(cx, |unstaged_changes, cx| { - futures.push(unstaged_changes.recalculate_diff(buffer.clone(), cx)); - }); - } else { - unstaged_changes.take(); - } + let buffer = buffer.read(cx).text_snapshot(); + futures.push(change_set_state.update(cx, |change_set_state, cx| { + change_set_state.recalculate_diffs(buffer, cx) + })); } } async move { @@ -1632,7 +2158,7 @@ impl BufferStore { .entry(buffer_id) .or_insert_with(|| SharedBuffer { buffer: buffer.clone(), - unstaged_changes: None, + change_set: None, lsp_handle: None, }); @@ -1937,11 +2463,11 @@ impl BufferStore { }) } - pub async fn handle_get_staged_text( + pub async fn handle_open_unstaged_changes( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, mut cx: AsyncApp, - ) -> Result { + ) -> Result { let buffer_id = BufferId::new(request.payload.buffer_id)?; let change_set = this .update(&mut cx, |this, cx| { @@ -1957,43 +2483,92 @@ impl BufferStore { .or_default(); debug_assert!(shared_buffers.contains_key(&buffer_id)); if let Some(shared) = shared_buffers.get_mut(&buffer_id) { - shared.unstaged_changes = Some(change_set.clone()); + shared.change_set = Some(change_set.clone()); } })?; let staged_text = change_set.read_with(&cx, |change_set, _| { change_set.base_text.as_ref().map(|buffer| buffer.text()) })?; - Ok(proto::GetStagedTextResponse { staged_text }) + Ok(proto::OpenUnstagedChangesResponse { staged_text }) } - pub async fn handle_update_diff_base( + pub async fn handle_open_uncommitted_changes( this: Entity, - request: TypedEnvelope, + request: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let buffer_id = BufferId::new(request.payload.buffer_id)?; + let change_set = this + .update(&mut cx, |this, cx| { + let buffer = this.get(buffer_id)?; + Some(this.open_uncommitted_changes(buffer, cx)) + })? + .ok_or_else(|| anyhow!("no such buffer"))? 
+ .await?; + this.update(&mut cx, |this, _| { + let shared_buffers = this + .shared_buffers + .entry(request.original_sender_id.unwrap_or(request.sender_id)) + .or_default(); + debug_assert!(shared_buffers.contains_key(&buffer_id)); + if let Some(shared) = shared_buffers.get_mut(&buffer_id) { + shared.change_set = Some(change_set.clone()); + } + })?; + change_set.read_with(&cx, |change_set, _| { + use proto::open_uncommitted_changes_response::Mode; + + let mode; + let staged_text; + let committed_text; + if let Some(committed_buffer) = &change_set.base_text { + committed_text = Some(committed_buffer.text()); + if let Some(staged_buffer) = &change_set.staged_text { + if staged_buffer.remote_id() == committed_buffer.remote_id() { + mode = Mode::IndexMatchesHead; + staged_text = None; + } else { + mode = Mode::IndexAndHead; + staged_text = Some(staged_buffer.text()); + } + } else { + mode = Mode::IndexAndHead; + staged_text = None; + } + } else { + mode = Mode::IndexAndHead; + committed_text = None; + staged_text = change_set.staged_text.as_ref().map(|buffer| buffer.text()); + } + + proto::OpenUncommittedChangesResponse { + committed_text, + staged_text, + mode: mode.into(), + } + }) + } + + pub async fn handle_update_diff_bases( + this: Entity, + request: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { let buffer_id = BufferId::new(request.payload.buffer_id)?; - let Some((buffer, change_set)) = this.update(&mut cx, |this, _| { - if let OpenBuffer::Complete { - unstaged_changes, + this.update(&mut cx, |this, cx| { + if let Some(OpenBuffer::Complete { + change_set_state, buffer, - } = this.opened_buffers.get(&buffer_id)? + }) = this.opened_buffers.get_mut(&buffer_id) { - Some((buffer.upgrade()?, unstaged_changes.as_ref()?.upgrade()?)) - } else { - None + if let Some(buffer) = buffer.upgrade() { + let buffer = buffer.read(cx).text_snapshot(); + change_set_state.update(cx, |change_set_state, cx| { + change_set_state.handle_base_texts_updated(buffer, request.payload, cx); + }) + } } - })? 
- else { - return Ok(()); - }; - change_set.update(&mut cx, |change_set, cx| { - if let Some(staged_text) = request.payload.staged_text { - let _ = change_set.set_base_text(staged_text, buffer.read(cx).text_snapshot(), cx); - } else { - change_set.unset_base_text(buffer.read(cx).text_snapshot(), cx) - } - })?; - Ok(()) + }) } pub fn reload_buffers( @@ -2050,7 +2625,7 @@ impl BufferStore { buffer_id, SharedBuffer { buffer: buffer.clone(), - unstaged_changes: None, + change_set: None, lsp_handle: None, }, ); @@ -2208,54 +2783,27 @@ impl BufferStore { impl EventEmitter for BufferChangeSet {} impl BufferChangeSet { - pub fn new(buffer: &Entity, cx: &mut Context) -> Self { - cx.subscribe(buffer, |this, buffer, event, cx| match event { - BufferEvent::LanguageChanged => { - this.language = buffer.read(cx).language().cloned(); - if let Some(base_text) = &this.base_text { - let snapshot = language::Buffer::build_snapshot( - base_text.as_rope().clone(), - this.language.clone(), - this.language_registry.clone(), - cx, - ); - this.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - let base_text = cx.background_executor().spawn(snapshot).await; - this.update(&mut cx, |this, cx| { - this.base_text = Some(base_text); - cx.emit(BufferChangeSetEvent::DiffChanged { - changed_range: text::Anchor::MIN..text::Anchor::MAX, - }); - }) - })); - } - } - _ => {} - }) - .detach(); - - let buffer = buffer.read(cx); - - Self { - buffer_id: buffer.remote_id(), - base_text: None, - diff_to_buffer: git::diff::BufferDiff::new(buffer), - recalculate_diff_task: None, - diff_updated_futures: Vec::new(), - language: buffer.language().cloned(), - language_registry: buffer.language_registry(), - } - } - - #[cfg(any(test, feature = "test-support"))] - pub fn new_with_base_text( - base_text: String, - buffer: &Entity, + fn set_state( + &mut self, + base_text: Option, + diff: BufferDiff, + buffer: &text::BufferSnapshot, cx: &mut Context, - ) -> Self { - let mut this = Self::new(&buffer, cx); - let _ = this.set_base_text(base_text, buffer.read(cx).text_snapshot(), cx); - this + ) { + if let Some(base_text) = base_text.as_ref() { + let changed_range = if Some(base_text.remote_id()) + != self.base_text.as_ref().map(|buffer| buffer.remote_id()) + { + Some(text::Anchor::MIN..text::Anchor::MAX) + } else { + diff.compare(&self.diff_to_buffer, buffer) + }; + if let Some(changed_range) = changed_range { + cx.emit(BufferChangeSetEvent::DiffChanged { changed_range }); + } + } + self.base_text = base_text; + self.diff_to_buffer = diff; } pub fn diff_hunks_intersecting_range<'a>( @@ -2276,102 +2824,81 @@ impl BufferChangeSet { .hunks_intersecting_range_rev(range, buffer_snapshot) } + /// Used in cases where the change set isn't derived from git. 
+ pub fn set_base_text( + &mut self, + base_buffer: Entity, + buffer: text::BufferSnapshot, + cx: &mut Context, + ) -> oneshot::Receiver<()> { + let (tx, rx) = oneshot::channel(); + let this = cx.weak_entity(); + let base_buffer = base_buffer.read(cx).snapshot(); + cx.spawn(|_, mut cx| async move { + let diff = cx + .background_executor() + .spawn({ + let base_buffer = base_buffer.clone(); + let buffer = buffer.clone(); + async move { BufferDiff::build(Some(&base_buffer.text()), &buffer) } + }) + .await; + let Some(this) = this.upgrade() else { + tx.send(()).ok(); + return; + }; + this.update(&mut cx, |this, cx| { + this.set_state(Some(base_buffer), diff, &buffer, cx); + }) + .log_err(); + tx.send(()).ok(); + }) + .detach(); + rx + } + #[cfg(any(test, feature = "test-support"))] pub fn base_text_string(&self) -> Option { self.base_text.as_ref().map(|buffer| buffer.text()) } - pub fn set_base_text( - &mut self, - mut base_text: String, - buffer_snapshot: text::BufferSnapshot, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - LineEnding::normalize(&mut base_text); - self.recalculate_diff_internal(base_text, buffer_snapshot, true, cx) + pub fn new(buffer: &Entity, cx: &mut App) -> Self { + BufferChangeSet { + buffer_id: buffer.read(cx).remote_id(), + base_text: None, + diff_to_buffer: BufferDiff::new(&buffer.read(cx).text_snapshot()), + staged_text: None, + staged_diff: None, + } } - pub fn unset_base_text( + #[cfg(any(test, feature = "test-support"))] + pub fn new_with_base_text(base_text: &str, buffer: &Entity, cx: &mut App) -> Self { + let mut base_text = base_text.to_owned(); + text::LineEnding::normalize(&mut base_text); + let diff_to_buffer = BufferDiff::build(Some(&base_text), &buffer.read(cx).text_snapshot()); + let base_text = language::Buffer::build_snapshot_sync(base_text.into(), None, None, cx); + BufferChangeSet { + buffer_id: buffer.read(cx).remote_id(), + base_text: Some(base_text), + diff_to_buffer, + staged_text: None, + staged_diff: None, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn recalculate_diff_sync( &mut self, - buffer_snapshot: text::BufferSnapshot, + snapshot: text::BufferSnapshot, cx: &mut Context, ) { - if self.base_text.is_some() { - self.base_text = None; - self.diff_to_buffer = BufferDiff::new(&buffer_snapshot); - self.recalculate_diff_task.take(); - cx.notify(); + let mut base_text = self.base_text.as_ref().map(|buffer| buffer.text()); + if let Some(base_text) = base_text.as_mut() { + text::LineEnding::normalize(base_text); } - } - - pub fn recalculate_diff( - &mut self, - buffer_snapshot: text::BufferSnapshot, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - if let Some(base_text) = self.base_text.clone() { - self.recalculate_diff_internal(base_text.text(), buffer_snapshot, false, cx) - } else { - oneshot::channel().1 - } - } - - fn recalculate_diff_internal( - &mut self, - base_text: String, - buffer_snapshot: text::BufferSnapshot, - base_text_changed: bool, - cx: &mut Context, - ) -> oneshot::Receiver<()> { - let (tx, rx) = oneshot::channel(); - self.diff_updated_futures.push(tx); - self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { - let (old_diff, new_base_text) = this.update(&mut cx, |this, cx| { - let new_base_text = if base_text_changed { - let base_text_rope: Rope = base_text.as_str().into(); - let snapshot = language::Buffer::build_snapshot( - base_text_rope, - this.language.clone(), - this.language_registry.clone(), - cx, - ); - cx.background_executor() - .spawn(async move { Some(snapshot.await) }) - } 
else { - Task::ready(None) - }; - (this.diff_to_buffer.clone(), new_base_text) - })?; - - let diff = cx.background_executor().spawn(async move { - let new_diff = BufferDiff::build(&base_text, &buffer_snapshot); - let changed_range = if base_text_changed { - Some(text::Anchor::MIN..text::Anchor::MAX) - } else { - new_diff.compare(&old_diff, &buffer_snapshot) - }; - (new_diff, changed_range) - }); - - let (new_base_text, (diff, changed_range)) = futures::join!(new_base_text, diff); - - this.update(&mut cx, |this, cx| { - if let Some(new_base_text) = new_base_text { - this.base_text = Some(new_base_text) - } - this.diff_to_buffer = diff; - - this.recalculate_diff_task.take(); - for tx in this.diff_updated_futures.drain(..) { - tx.send(()).ok(); - } - if let Some(changed_range) = changed_range { - cx.emit(BufferChangeSetEvent::DiffChanged { changed_range }); - } - })?; - Ok(()) - })); - rx + let diff_to_buffer = BufferDiff::build(base_text.as_deref(), &snapshot); + self.set_state(self.base_text.clone(), diff_to_buffer, &snapshot, cx); } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 252fab9eaf..069044bbc4 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1970,6 +1970,20 @@ impl Project { }) } + pub fn open_uncommitted_changes( + &mut self, + buffer: Entity, + cx: &mut Context, + ) -> Task>> { + if self.is_disconnected(cx) { + return Task::ready(Err(anyhow!(ErrorCode::Disconnected))); + } + + self.buffer_store.update(cx, |buffer_store, cx| { + buffer_store.open_uncommitted_changes(buffer, cx) + }) + } + pub fn open_buffer_by_id( &mut self, id: BufferId, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 85280ae1a4..97dbc3bd24 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -5624,7 +5624,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { fs.set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("src/main.rs"), staged_contents)], + &[("src/main.rs".into(), staged_contents)], ); let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; @@ -5669,7 +5669,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { fs.set_index_for_repo( Path::new("/dir/.git"), - &[(Path::new("src/main.rs"), staged_contents)], + &[("src/main.rs".into(), staged_contents)], ); cx.run_until_parked(); @@ -5684,6 +5684,108 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_uncommitted_changes_for_buffer(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let committed_contents = r#" + fn main() { + println!("hello world"); + } + "# + .unindent(); + let staged_contents = r#" + fn main() { + println!("goodbye world"); + } + "# + .unindent(); + let file_contents = r#" + // print goodbye + fn main() { + println!("goodbye world"); + } + "# + .unindent(); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/dir", + json!({ + ".git": {}, + "src": { + "main.rs": file_contents, + } + }), + ) + .await; + + fs.set_index_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), staged_contents)], + ); + fs.set_head_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), committed_contents)], + ); + + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/dir/src/main.rs", cx) + }) + .await + .unwrap(); + let 
uncommitted_changes = project + .update(cx, |project, cx| { + project.open_uncommitted_changes(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + uncommitted_changes.update(cx, |uncommitted_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &uncommitted_changes.base_text.as_ref().unwrap().text(), + &[ + (0..1, "", "// print goodbye\n"), + ( + 2..3, + " println!(\"hello world\");\n", + " println!(\"goodbye world\");\n", + ), + ], + ); + }); + + let committed_contents = r#" + // print goodbye + fn main() { + } + "# + .unindent(); + + fs.set_head_for_repo( + Path::new("/dir/.git"), + &[("src/main.rs".into(), committed_contents)], + ); + + cx.run_until_parked(); + uncommitted_changes.update(cx, |uncommitted_changes, cx| { + let snapshot = buffer.read(cx).snapshot(); + assert_hunks( + uncommitted_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + &snapshot, + &uncommitted_changes.base_text.as_ref().unwrap().text(), + &[(2..3, "", " println!(\"goodbye world\");\n")], + ); + }); +} + async fn search( project: &Entity, query: SearchQuery, diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index c719038921..976e1e73fd 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -129,7 +129,7 @@ message Envelope { GetPrivateUserInfo get_private_user_info = 102; GetPrivateUserInfoResponse get_private_user_info_response = 103; UpdateUserPlan update_user_plan = 234; - UpdateDiffBase update_diff_base = 104; + UpdateDiffBases update_diff_bases = 104; AcceptTermsOfService accept_terms_of_service = 239; AcceptTermsOfServiceResponse accept_terms_of_service_response = 240; @@ -304,15 +304,18 @@ message Envelope { SyncExtensionsResponse sync_extensions_response = 286; InstallExtension install_extension = 287; - GetStagedText get_staged_text = 288; - GetStagedTextResponse get_staged_text_response = 289; + OpenUnstagedChanges open_unstaged_changes = 288; + OpenUnstagedChangesResponse open_unstaged_changes_response = 289; RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; Stage stage = 293; Unstage unstage = 294; Commit commit = 295; - OpenCommitMessageBuffer open_commit_message_buffer = 296; // current max + OpenCommitMessageBuffer open_commit_message_buffer = 296; + + OpenUncommittedChanges open_uncommitted_changes = 297; + OpenUncommittedChangesResponse open_uncommitted_changes_response = 298; // current max } reserved 87 to 88; @@ -2035,21 +2038,53 @@ message WorktreeMetadata { string abs_path = 4; } -message UpdateDiffBase { +message UpdateDiffBases { uint64 project_id = 1; uint64 buffer_id = 2; + + enum Mode { + // No collaborator is using the unstaged diff. + HEAD_ONLY = 0; + // No collaborator is using the diff from HEAD. + INDEX_ONLY = 1; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD are the same for this path. + INDEX_MATCHES_HEAD = 2; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD differ for this path, + // where None means the path doesn't exist in that state of the repo. 
+ INDEX_AND_HEAD = 3; + } + optional string staged_text = 3; + optional string committed_text = 4; + Mode mode = 5; } -message GetStagedText { +message OpenUnstagedChanges { uint64 project_id = 1; uint64 buffer_id = 2; } -message GetStagedTextResponse { +message OpenUnstagedChangesResponse { optional string staged_text = 1; } +message OpenUncommittedChanges { + uint64 project_id = 1; + uint64 buffer_id = 2; +} + +message OpenUncommittedChangesResponse { + enum Mode { + INDEX_MATCHES_HEAD = 0; + INDEX_AND_HEAD = 1; + } + optional string staged_text = 1; + optional string committed_text = 2; + Mode mode = 3; +} + message GetNotifications { optional uint64 before_id = 1; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index eabd0d3ec7..ec35aef44e 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -219,8 +219,10 @@ messages!( (GetImplementationResponse, Background), (GetLlmToken, Background), (GetLlmTokenResponse, Background), - (GetStagedText, Foreground), - (GetStagedTextResponse, Foreground), + (OpenUnstagedChanges, Foreground), + (OpenUnstagedChangesResponse, Foreground), + (OpenUncommittedChanges, Foreground), + (OpenUncommittedChangesResponse, Foreground), (GetUsers, Foreground), (Hello, Foreground), (IncomingCall, Foreground), @@ -309,7 +311,7 @@ messages!( (UpdateUserChannels, Foreground), (UpdateContacts, Foreground), (UpdateDiagnosticSummary, Foreground), - (UpdateDiffBase, Foreground), + (UpdateDiffBases, Foreground), (UpdateFollowers, Foreground), (UpdateInviteInfo, Foreground), (UpdateLanguageServer, Foreground), @@ -422,7 +424,8 @@ request_messages!( (GetProjectSymbols, GetProjectSymbolsResponse), (GetReferences, GetReferencesResponse), (GetSignatureHelp, GetSignatureHelpResponse), - (GetStagedText, GetStagedTextResponse), + (OpenUnstagedChanges, OpenUnstagedChangesResponse), + (OpenUncommittedChanges, OpenUncommittedChangesResponse), (GetSupermavenApiKey, GetSupermavenApiKeyResponse), (GetTypeDefinition, GetTypeDefinitionResponse), (LinkedEditingRange, LinkedEditingRangeResponse), @@ -543,7 +546,8 @@ entity_messages!( GetProjectSymbols, GetReferences, GetSignatureHelp, - GetStagedText, + OpenUnstagedChanges, + OpenUncommittedChanges, GetTypeDefinition, InlayHints, JoinProject, @@ -575,7 +579,7 @@ entity_messages!( UpdateBuffer, UpdateBufferFile, UpdateDiagnosticSummary, - UpdateDiffBase, + UpdateDiffBases, UpdateLanguageServer, UpdateProject, UpdateProjectCollaborator, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 06391fcffc..be18bad293 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -46,7 +46,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test .await; fs.set_index_for_repo( Path::new("/code/project1/.git"), - &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + &[("src/lib.rs".into(), "fn one() -> usize { 0 }".into())], ); let (project, _headless) = init_test(&fs, cx, server_cx).await; @@ -147,7 +147,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test fs.set_index_for_repo( Path::new("/code/project1/.git"), - &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())], + &[("src/lib2.rs".into(), "fn one() -> usize { 100 }".into())], ); cx.executor().run_until_parked(); change_set.update(cx, |change_set, _| { diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index b384b2fc5c..7f9f8f7503 100644 
--- a/crates/rope/src/rope.rs
+++ b/crates/rope/src/rope.rs
@@ -450,6 +450,10 @@ impl Rope {
         self.clip_point(Point::new(row, u32::MAX), Bias::Left)
             .column
     }
+
+    pub fn ptr_eq(&self, other: &Self) -> bool {
+        self.chunks.ptr_eq(&other.chunks)
+    }
 }
 
 impl<'a> From<&'a str> for Rope {
diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs
index 5c89daa8ff..58ca7dbfa9 100644
--- a/crates/sum_tree/src/sum_tree.rs
+++ b/crates/sum_tree/src/sum_tree.rs
@@ -516,6 +516,10 @@ impl<T: Item> SumTree<T> {
         }
     }
 
+    pub fn ptr_eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.0, &other.0)
+    }
+
     fn push_tree_recursive(
         &mut self,
         other: SumTree<T>,
diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs
index d848271976..955afbe209 100644
--- a/crates/worktree/src/worktree.rs
+++ b/crates/worktree/src/worktree.rs
@@ -895,6 +895,30 @@ impl Worktree {
         }
     }
 
+    pub fn load_committed_file(&self, path: &Path, cx: &App) -> Task<Result<Option<String>>> {
+        match self {
+            Worktree::Local(this) => {
+                let path = Arc::from(path);
+                let snapshot = this.snapshot();
+                cx.background_executor().spawn(async move {
+                    if let Some(repo) = snapshot.repository_for_path(&path) {
+                        if let Some(repo_path) = repo.relativize(&path).log_err() {
+                            if let Some(git_repo) =
+                                snapshot.git_repositories.get(&repo.work_directory_id)
+                            {
+                                return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
+                            }
+                        }
+                    }
+                    Ok(None)
+                })
+            }
+            Worktree::Remote(_) => Task::ready(Err(anyhow!(
+                "remote worktrees can't yet load committed files"
+            ))),
+        }
+    }
+
     pub fn load_binary_file(
         &self,
         path: &Path,