Rework diff rendering to allow putting the cursor into deleted text, soft-wrapping and scrolling deleted text correctly (#22994)

Closes #12553

* [x] Fix `diff_hunk_before`
* [x] Fix failure to show deleted text when expanding hunk w/ cursor on
second line of the hunk
* [x] Fix failure to expand a diff hunk below the cursor.
* [x] Fix a panic triggered by deleting the whole file, expanding the
diff, and then backspacing over the deleted hunk.
* [x] Go-to-line now counts the diff hunks, but it should not
* [x] Fix backspace at the beginning of a deleted hunk deleting too much text
* [x] Indent guides are rendered incorrectly 
* [ ] Fix randomized multi buffer tests

Maybe:
* [ ] Buffer search should include deleted text (in vim mode, it turns
out I use `/x` all the time to jump to the next visible `x`).
* [ ] vim: should refuse to switch into insert mode if selection is
fully within a diff.
* [ ] vim `o` command when cursor is on last line of deleted hunk.
* [ ] vim `shift-o` on first line of deleted hunk moves cursor but
doesn't insert line
* [x] `enter` at end of diff hunk inserts a new line but doesn't move
cursor
* [x] (`shift-enter` at start of diff hunk does nothing)
* [ ] Inserting a line just before an expanded hunk collapses it

Release Notes:


- Improved diff rendering, allowing you to navigate with your cursor
inside of deleted text in diff hunks.

---------

Co-authored-by: Conrad <conrad@zed.dev>
Co-authored-by: Cole <cole@zed.dev>
Co-authored-by: Mikayla <mikayla@zed.dev>
Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Michael <michael@zed.dev>
Co-authored-by: Agus <agus@zed.dev>
Co-authored-by: João <joao@zed.dev>
This commit is contained in:
Max Brunsfeld 2025-01-24 13:18:22 -08:00 committed by GitHub
parent 1fdae4bae0
commit d2c55cbe3d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
64 changed files with 7653 additions and 5495 deletions

6
Cargo.lock generated
View file

@ -7754,15 +7754,21 @@ dependencies = [
"ctor", "ctor",
"env_logger 0.11.6", "env_logger 0.11.6",
"futures 0.3.31", "futures 0.3.31",
"git",
"gpui", "gpui",
"indoc",
"itertools 0.14.0", "itertools 0.14.0",
"language", "language",
"log", "log",
"parking_lot", "parking_lot",
"pretty_assertions",
"project",
"rand 0.8.5", "rand 0.8.5",
"rope",
"serde", "serde",
"settings", "settings",
"smallvec", "smallvec",
"smol",
"sum_tree", "sum_tree",
"text", "text",
"theme", "theme",

View file

@ -117,7 +117,7 @@
"ctrl-alt-space": "editor::ShowCharacterPalette", "ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers", "ctrl-;": "editor::ToggleLineNumbers",
"ctrl-k ctrl-r": "editor::RevertSelectedHunks", "ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-'": "editor::ToggleHunkDiff", "ctrl-'": "editor::ToggleSelectedDiffHunks",
"ctrl-\"": "editor::ExpandAllHunkDiffs", "ctrl-\"": "editor::ExpandAllHunkDiffs",
"ctrl-i": "editor::ShowSignatureHelp", "ctrl-i": "editor::ShowSignatureHelp",
"alt-g b": "editor::ToggleGitBlame", "alt-g b": "editor::ToggleGitBlame",

View file

@ -127,7 +127,7 @@
"ctrl-cmd-space": "editor::ShowCharacterPalette", "ctrl-cmd-space": "editor::ShowCharacterPalette",
"cmd-;": "editor::ToggleLineNumbers", "cmd-;": "editor::ToggleLineNumbers",
"cmd-alt-z": "editor::RevertSelectedHunks", "cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-'": "editor::ToggleHunkDiff", "cmd-'": "editor::ToggleSelectedDiffHunks",
"cmd-\"": "editor::ExpandAllHunkDiffs", "cmd-\"": "editor::ExpandAllHunkDiffs",
"cmd-alt-g b": "editor::ToggleGitBlame", "cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp", "cmd-i": "editor::ShowSignatureHelp",

View file

@ -436,7 +436,7 @@
"bindings": { "bindings": {
"d": "vim::CurrentLine", "d": "vim::CurrentLine",
"s": ["vim::PushOperator", "DeleteSurrounds"], "s": ["vim::PushOperator", "DeleteSurrounds"],
"o": "editor::ToggleHunkDiff", // "d o" "o": "editor::ToggleSelectedDiffHunks", // "d o"
"p": "editor::RevertSelectedHunks" // "d p" "p": "editor::RevertSelectedHunks" // "d p"
} }
}, },

View file

@ -250,22 +250,19 @@ impl InlineAssistant {
let newest_selection = newest_selection.unwrap(); let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new(); let mut codegen_ranges = Vec::new();
for (excerpt_id, buffer, buffer_range) in for (buffer, buffer_range, excerpt_id) in
snapshot.excerpts_in_ranges(selections.iter().map(|selection| { snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| {
snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end) snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end)
})) }))
{ {
let start = Anchor { let start = buffer.anchor_before(buffer_range.start);
buffer_id: Some(buffer.remote_id()), let end = buffer.anchor_after(buffer_range.end);
codegen_ranges.push(Anchor::range_in_buffer(
excerpt_id, excerpt_id,
text_anchor: buffer.anchor_before(buffer_range.start), buffer.remote_id(),
}; start..end,
let end = Anchor { ));
buffer_id: Some(buffer.remote_id()),
excerpt_id,
text_anchor: buffer.anchor_after(buffer_range.end),
};
codegen_ranges.push(start..end);
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
self.telemetry.report_assistant_event(AssistantEvent { self.telemetry.report_assistant_event(AssistantEvent {
@ -823,7 +820,7 @@ impl InlineAssistant {
let ranges = multibuffer_snapshot.range_to_buffer_ranges(assist.range.clone()); let ranges = multibuffer_snapshot.range_to_buffer_ranges(assist.range.clone());
ranges ranges
.first() .first()
.and_then(|(excerpt, _)| excerpt.buffer().language()) .and_then(|(buffer, _, _)| buffer.language())
.map(|language| language.name()) .map(|language| language.name())
}); });
report_assistant_event( report_assistant_event(
@ -2648,17 +2645,17 @@ impl CodegenAlternative {
) -> Self { ) -> Self {
let snapshot = multi_buffer.read(cx).snapshot(cx); let snapshot = multi_buffer.read(cx).snapshot(cx);
let (old_excerpt, _) = snapshot let (buffer, _, _) = snapshot
.range_to_buffer_ranges(range.clone()) .range_to_buffer_ranges(range.clone())
.pop() .pop()
.unwrap(); .unwrap();
let old_buffer = cx.new_model(|cx| { let old_buffer = cx.new_model(|cx| {
let text = old_excerpt.buffer().as_rope().clone(); let text = buffer.as_rope().clone();
let line_ending = old_excerpt.buffer().line_ending(); let line_ending = buffer.line_ending();
let language = old_excerpt.buffer().language().cloned(); let language = buffer.language().cloned();
let language_registry = multi_buffer let language_registry = multi_buffer
.read(cx) .read(cx)
.buffer(old_excerpt.buffer_id()) .buffer(buffer.remote_id())
.unwrap() .unwrap()
.read(cx) .read(cx)
.language_registry(); .language_registry();
@ -2898,7 +2895,7 @@ impl CodegenAlternative {
let ranges = snapshot.range_to_buffer_ranges(self.range.clone()); let ranges = snapshot.range_to_buffer_ranges(self.range.clone());
ranges ranges
.first() .first()
.and_then(|(excerpt, _)| excerpt.buffer().language()) .and_then(|(buffer, _, _)| buffer.language())
.map(|language| language.name()) .map(|language| language.name())
}; };

View file

@ -255,17 +255,17 @@ impl CodegenAlternative {
) -> Self { ) -> Self {
let snapshot = buffer.read(cx).snapshot(cx); let snapshot = buffer.read(cx).snapshot(cx);
let (old_excerpt, _) = snapshot let (old_buffer, _, _) = snapshot
.range_to_buffer_ranges(range.clone()) .range_to_buffer_ranges(range.clone())
.pop() .pop()
.unwrap(); .unwrap();
let old_buffer = cx.new_model(|cx| { let old_buffer = cx.new_model(|cx| {
let text = old_excerpt.buffer().as_rope().clone(); let text = old_buffer.as_rope().clone();
let line_ending = old_excerpt.buffer().line_ending(); let line_ending = old_buffer.line_ending();
let language = old_excerpt.buffer().language().cloned(); let language = old_buffer.language().cloned();
let language_registry = buffer let language_registry = buffer
.read(cx) .read(cx)
.buffer(old_excerpt.buffer_id()) .buffer(old_buffer.remote_id())
.unwrap() .unwrap()
.read(cx) .read(cx)
.language_registry(); .language_registry();
@ -475,7 +475,7 @@ impl CodegenAlternative {
let ranges = snapshot.range_to_buffer_ranges(self.range.clone()); let ranges = snapshot.range_to_buffer_ranges(self.range.clone());
ranges ranges
.first() .first()
.and_then(|(excerpt, _)| excerpt.buffer().language()) .and_then(|(buffer, _, _)| buffer.language())
.map(|language| language.name()) .map(|language| language.name())
}; };

View file

@ -320,22 +320,18 @@ impl InlineAssistant {
let newest_selection = newest_selection.unwrap(); let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new(); let mut codegen_ranges = Vec::new();
for (excerpt_id, buffer, buffer_range) in for (buffer, buffer_range, excerpt_id) in
snapshot.excerpts_in_ranges(selections.iter().map(|selection| { snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| {
snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end) snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end)
})) }))
{ {
let start = Anchor { let anchor_range = Anchor::range_in_buffer(
buffer_id: Some(buffer.remote_id()),
excerpt_id, excerpt_id,
text_anchor: buffer.anchor_before(buffer_range.start), buffer.remote_id(),
}; buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end),
let end = Anchor { );
buffer_id: Some(buffer.remote_id()),
excerpt_id, codegen_ranges.push(anchor_range);
text_anchor: buffer.anchor_after(buffer_range.end),
};
codegen_ranges.push(start..end);
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
self.telemetry.report_assistant_event(AssistantEvent { self.telemetry.report_assistant_event(AssistantEvent {
@ -901,7 +897,7 @@ impl InlineAssistant {
let ranges = snapshot.range_to_buffer_ranges(assist.range.clone()); let ranges = snapshot.range_to_buffer_ranges(assist.range.clone());
ranges ranges
.first() .first()
.and_then(|(excerpt, _)| excerpt.buffer().language()) .and_then(|(buffer, _, _)| buffer.language())
.map(|language| language.name()) .map(|language| language.name())
}); });
report_assistant_event( report_assistant_event(

View file

@ -10,7 +10,7 @@ use editor::{
ToggleCodeActions, Undo, ToggleCodeActions, Undo,
}, },
test::editor_test_context::{AssertionContextManager, EditorTestContext}, test::editor_test_context::{AssertionContextManager, EditorTestContext},
Editor, Editor, RowInfo,
}; };
use fs::Fs; use fs::Fs;
use futures::StreamExt; use futures::StreamExt;
@ -20,7 +20,6 @@ use language::{
language_settings::{AllLanguageSettings, InlayHintSettings}, language_settings::{AllLanguageSettings, InlayHintSettings},
FakeLspAdapter, FakeLspAdapter,
}; };
use multi_buffer::MultiBufferRow;
use project::{ use project::{
project_settings::{InlineBlameSettings, ProjectSettings}, project_settings::{InlineBlameSettings, ProjectSettings},
SERVER_PROGRESS_THROTTLE_TIMEOUT, SERVER_PROGRESS_THROTTLE_TIMEOUT,
@ -2019,7 +2018,15 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
let blame = editor_b.blame().expect("editor_b should have blame now"); let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| { let entries = blame.update(cx, |blame, cx| {
blame blame
.blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(0..4)
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
cx,
)
.collect::<Vec<_>>() .collect::<Vec<_>>()
}); });
@ -2058,7 +2065,15 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
let blame = editor_b.blame().expect("editor_b should have blame now"); let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| { let entries = blame.update(cx, |blame, cx| {
blame blame
.blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(0..4)
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
cx,
)
.collect::<Vec<_>>() .collect::<Vec<_>>()
}); });
@ -2085,7 +2100,15 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
let blame = editor_b.blame().expect("editor_b should have blame now"); let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| { let entries = blame.update(cx, |blame, cx| {
blame blame
.blame_for_rows((0..4).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(0..4)
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
cx,
)
.collect::<Vec<_>>() .collect::<Vec<_>>()
}); });

View file

@ -2593,7 +2593,7 @@ async fn test_git_diff_base_change(
change_set_local_a.read_with(cx_a, |change_set, cx| { change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx); let buffer = buffer_local_a.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(diff_base.as_str()) Some(diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2621,7 +2621,7 @@ async fn test_git_diff_base_change(
change_set_remote_a.read_with(cx_b, |change_set, cx| { change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx); let buffer = buffer_remote_a.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(diff_base.as_str()) Some(diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2643,7 +2643,7 @@ async fn test_git_diff_base_change(
change_set_local_a.read_with(cx_a, |change_set, cx| { change_set_local_a.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_a.read(cx); let buffer = buffer_local_a.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str()) Some(new_diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2657,7 +2657,7 @@ async fn test_git_diff_base_change(
change_set_remote_a.read_with(cx_b, |change_set, cx| { change_set_remote_a.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_a.read(cx); let buffer = buffer_remote_a.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str()) Some(new_diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2703,7 +2703,7 @@ async fn test_git_diff_base_change(
change_set_local_b.read_with(cx_a, |change_set, cx| { change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx); let buffer = buffer_local_b.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(diff_base.as_str()) Some(diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2730,7 +2730,7 @@ async fn test_git_diff_base_change(
change_set_remote_b.read_with(cx_b, |change_set, cx| { change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx); let buffer = buffer_remote_b.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(diff_base.as_str()) Some(diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2752,7 +2752,7 @@ async fn test_git_diff_base_change(
change_set_local_b.read_with(cx_a, |change_set, cx| { change_set_local_b.read_with(cx_a, |change_set, cx| {
let buffer = buffer_local_b.read(cx); let buffer = buffer_local_b.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str()) Some(new_diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(
@ -2766,7 +2766,7 @@ async fn test_git_diff_base_change(
change_set_remote_b.read_with(cx_b, |change_set, cx| { change_set_remote_b.read_with(cx_b, |change_set, cx| {
let buffer = buffer_remote_b.read(cx); let buffer = buffer_remote_b.read(cx);
assert_eq!( assert_eq!(
change_set.base_text_string(cx).as_deref(), change_set.base_text_string().as_deref(),
Some(new_diff_base.as_str()) Some(new_diff_base.as_str())
); );
git::diff::assert_hunks( git::diff::assert_hunks(

View file

@ -1342,7 +1342,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.get_unstaged_changes(host_buffer.read(cx).remote_id()) .get_unstaged_changes(host_buffer.read(cx).remote_id())
.unwrap() .unwrap()
.read(cx) .read(cx)
.base_text_string(cx) .base_text_string()
}); });
let guest_diff_base = guest_project.read_with(client_cx, |project, cx| { let guest_diff_base = guest_project.read_with(client_cx, |project, cx| {
project project
@ -1351,7 +1351,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.get_unstaged_changes(guest_buffer.read(cx).remote_id()) .get_unstaged_changes(guest_buffer.read(cx).remote_id())
.unwrap() .unwrap()
.read(cx) .read(cx)
.base_text_string(cx) .base_text_string()
}); });
assert_eq!( assert_eq!(
guest_diff_base, host_diff_base, guest_diff_base, host_diff_base,

View file

@ -1,11 +1,11 @@
use std::time::Duration; use std::time::Duration;
use editor::{AnchorRangeExt, Editor}; use editor::Editor;
use gpui::{ use gpui::{
EventEmitter, IntoElement, ParentElement, Render, Styled, Subscription, Task, View, EventEmitter, IntoElement, ParentElement, Render, Styled, Subscription, Task, View,
ViewContext, WeakView, ViewContext, WeakView,
}; };
use language::{Diagnostic, DiagnosticEntry}; use language::Diagnostic;
use ui::{h_flex, prelude::*, Button, ButtonLike, Color, Icon, IconName, Label, Tooltip}; use ui::{h_flex, prelude::*, Button, ButtonLike, Color, Icon, IconName, Label, Tooltip};
use workspace::{item::ItemHandle, StatusItemView, ToolbarItemEvent, Workspace}; use workspace::{item::ItemHandle, StatusItemView, ToolbarItemEvent, Workspace};
@ -148,11 +148,7 @@ impl DiagnosticIndicator {
(buffer, cursor_position) (buffer, cursor_position)
}); });
let new_diagnostic = buffer let new_diagnostic = buffer
.diagnostics_in_range(cursor_position..cursor_position, false) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
.map(|DiagnosticEntry { diagnostic, range }| DiagnosticEntry {
diagnostic,
range: range.to_offset(&buffer),
})
.filter(|entry| !entry.range.is_empty()) .filter(|entry| !entry.range.is_empty())
.min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
.map(|entry| entry.diagnostic); .map(|entry| entry.diagnostic);

View file

@ -372,7 +372,7 @@ gpui::actions!(
ToggleAutoSignatureHelp, ToggleAutoSignatureHelp,
ToggleGitBlame, ToggleGitBlame,
ToggleGitBlameInline, ToggleGitBlameInline,
ToggleHunkDiff, ToggleSelectedDiffHunks,
ToggleIndentGuides, ToggleIndentGuides,
ToggleInlayHints, ToggleInlayHints,
ToggleInlineCompletions, ToggleInlineCompletions,

View file

@ -30,8 +30,8 @@ use crate::{
hover_links::InlayHighlight, movement::TextLayoutDetails, EditorStyle, InlayId, RowExt, hover_links::InlayHighlight, movement::TextLayoutDetails, EditorStyle, InlayId, RowExt,
}; };
pub use block_map::{ pub use block_map::{
Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap, Block, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap, BlockPlacement,
BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, BlockPoint, BlockProperties, BlockRows, BlockStyle, CustomBlockId, RenderBlock,
StickyHeaderExcerpt, StickyHeaderExcerpt,
}; };
use block_map::{BlockRow, BlockSnapshot}; use block_map::{BlockRow, BlockSnapshot};
@ -54,7 +54,7 @@ use language::{
use lsp::DiagnosticSeverity; use lsp::DiagnosticSeverity;
use multi_buffer::{ use multi_buffer::{
Anchor, AnchorRangeExt, MultiBuffer, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, Anchor, AnchorRangeExt, MultiBuffer, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot,
ToOffset, ToPoint, RowInfo, ToOffset, ToPoint,
}; };
use serde::Deserialize; use serde::Deserialize;
use std::{ use std::{
@ -68,7 +68,7 @@ use std::{
}; };
use sum_tree::{Bias, TreeMap}; use sum_tree::{Bias, TreeMap};
use tab_map::{TabMap, TabSnapshot}; use tab_map::{TabMap, TabSnapshot};
use text::LineIndent; use text::{BufferId, LineIndent};
use ui::{px, SharedString, WindowContext}; use ui::{px, SharedString, WindowContext};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use wrap_map::{WrapMap, WrapSnapshot}; use wrap_map::{WrapMap, WrapSnapshot};
@ -367,10 +367,14 @@ impl DisplayMap {
block_map.unfold_buffer(buffer_id, self.buffer.read(cx), cx) block_map.unfold_buffer(buffer_id, self.buffer.read(cx), cx)
} }
pub(crate) fn buffer_folded(&self, buffer_id: language::BufferId) -> bool { pub(crate) fn is_buffer_folded(&self, buffer_id: language::BufferId) -> bool {
self.block_map.folded_buffers.contains(&buffer_id) self.block_map.folded_buffers.contains(&buffer_id)
} }
pub(crate) fn folded_buffers(&self) -> &HashSet<BufferId> {
&self.block_map.folded_buffers
}
pub fn insert_creases( pub fn insert_creases(
&mut self, &mut self,
creases: impl IntoIterator<Item = Crease<Anchor>>, creases: impl IntoIterator<Item = Crease<Anchor>>,
@ -716,13 +720,8 @@ impl DisplaySnapshot {
self.buffer_snapshot.len() == 0 self.buffer_snapshot.len() == 0
} }
pub fn buffer_rows( pub fn row_infos(&self, start_row: DisplayRow) -> impl Iterator<Item = RowInfo> + '_ {
&self, self.block_snapshot.row_infos(BlockRow(start_row.0))
start_row: DisplayRow,
) -> impl Iterator<Item = Option<MultiBufferRow>> + '_ {
self.block_snapshot
.buffer_rows(BlockRow(start_row.0))
.map(|row| row.map(MultiBufferRow))
} }
pub fn widest_line_number(&self) -> u32 { pub fn widest_line_number(&self) -> u32 {

View file

@ -7,8 +7,8 @@ use collections::{Bound, HashMap, HashSet};
use gpui::{AnyElement, AppContext, EntityId, Pixels, WindowContext}; use gpui::{AnyElement, AppContext, EntityId, Pixels, WindowContext};
use language::{Chunk, Patch, Point}; use language::{Chunk, Patch, Point};
use multi_buffer::{ use multi_buffer::{
Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToOffset, Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, RowInfo,
ToPoint as _, ToOffset, ToPoint as _,
}; };
use parking_lot::Mutex; use parking_lot::Mutex;
use std::{ use std::{
@ -399,9 +399,9 @@ pub struct BlockChunks<'a> {
} }
#[derive(Clone)] #[derive(Clone)]
pub struct BlockBufferRows<'a> { pub struct BlockRows<'a> {
transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>, transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>,
input_buffer_rows: wrap_map::WrapBufferRows<'a>, input_rows: wrap_map::WrapRows<'a>,
output_row: BlockRow, output_row: BlockRow,
started: bool, started: bool,
} }
@ -777,14 +777,12 @@ impl BlockMap {
if let Some(new_buffer_id) = new_buffer_id { if let Some(new_buffer_id) = new_buffer_id {
let first_excerpt = excerpt_boundary.next.clone().unwrap(); let first_excerpt = excerpt_boundary.next.clone().unwrap();
if folded_buffers.contains(&new_buffer_id) { if folded_buffers.contains(&new_buffer_id) {
let mut buffer_end = Point::new(excerpt_boundary.row.0, 0) let mut last_excerpt_end_row = first_excerpt.end_row;
+ excerpt_boundary.next.as_ref().unwrap().text_summary.lines;
while let Some(next_boundary) = boundaries.peek() { while let Some(next_boundary) = boundaries.peek() {
if let Some(next_excerpt_boundary) = &next_boundary.next { if let Some(next_excerpt_boundary) = &next_boundary.next {
if next_excerpt_boundary.buffer_id == new_buffer_id { if next_excerpt_boundary.buffer_id == new_buffer_id {
buffer_end = Point::new(next_boundary.row.0, 0) last_excerpt_end_row = next_excerpt_boundary.end_row;
+ next_excerpt_boundary.text_summary.lines;
} else { } else {
break; break;
} }
@ -793,7 +791,15 @@ impl BlockMap {
boundaries.next(); boundaries.next();
} }
let wrap_end_row = wrap_snapshot.make_wrap_point(buffer_end, Bias::Right).row(); let wrap_end_row = wrap_snapshot
.make_wrap_point(
Point::new(
last_excerpt_end_row.0,
buffer.line_len(last_excerpt_end_row),
),
Bias::Right,
)
.row();
return Some(( return Some((
BlockPlacement::Replace(WrapRow(wrap_row)..=WrapRow(wrap_end_row)), BlockPlacement::Replace(WrapRow(wrap_row)..=WrapRow(wrap_end_row)),
@ -1360,7 +1366,7 @@ impl BlockSnapshot {
} }
} }
pub(super) fn buffer_rows(&self, start_row: BlockRow) -> BlockBufferRows { pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &()); cursor.seek(&start_row, Bias::Right, &());
let (output_start, input_start) = cursor.start(); let (output_start, input_start) = cursor.start();
@ -1373,9 +1379,9 @@ impl BlockSnapshot {
0 0
}; };
let input_start_row = input_start.0 + overshoot; let input_start_row = input_start.0 + overshoot;
BlockBufferRows { BlockRows {
transforms: cursor, transforms: cursor,
input_buffer_rows: self.wrap_snapshot.buffer_rows(input_start_row), input_rows: self.wrap_snapshot.row_infos(input_start_row),
output_row: start_row, output_row: start_row,
started: false, started: false,
} }
@ -1480,7 +1486,7 @@ impl BlockSnapshot {
} }
BlockId::ExcerptBoundary(next_excerpt_id) => { BlockId::ExcerptBoundary(next_excerpt_id) => {
if let Some(next_excerpt_id) = next_excerpt_id { if let Some(next_excerpt_id) = next_excerpt_id {
let excerpt_range = buffer.range_for_excerpt::<Point>(next_excerpt_id)?; let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?;
self.wrap_snapshot self.wrap_snapshot
.make_wrap_point(excerpt_range.start, Bias::Left) .make_wrap_point(excerpt_range.start, Bias::Left)
} else { } else {
@ -1488,10 +1494,9 @@ impl BlockSnapshot {
.make_wrap_point(buffer.max_point(), Bias::Left) .make_wrap_point(buffer.max_point(), Bias::Left)
} }
} }
BlockId::FoldedBuffer(excerpt_id) => self.wrap_snapshot.make_wrap_point( BlockId::FoldedBuffer(excerpt_id) => self
buffer.range_for_excerpt::<Point>(excerpt_id)?.start, .wrap_snapshot
Bias::Left, .make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left),
),
}; };
let wrap_row = WrapRow(wrap_point.row()); let wrap_row = WrapRow(wrap_point.row());
@ -1832,8 +1837,8 @@ impl<'a> Iterator for BlockChunks<'a> {
} }
} }
impl<'a> Iterator for BlockBufferRows<'a> { impl<'a> Iterator for BlockRows<'a> {
type Item = Option<u32>; type Item = RowInfo;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
if self.started { if self.started {
@ -1862,7 +1867,7 @@ impl<'a> Iterator for BlockBufferRows<'a> {
.as_ref() .as_ref()
.map_or(true, |block| block.is_replacement()) .map_or(true, |block| block.is_replacement())
{ {
self.input_buffer_rows.seek(self.transforms.start().1 .0); self.input_rows.seek(self.transforms.start().1 .0);
} }
} }
@ -1870,15 +1875,15 @@ impl<'a> Iterator for BlockBufferRows<'a> {
if let Some(block) = transform.block.as_ref() { if let Some(block) = transform.block.as_ref() {
if block.is_replacement() && self.transforms.start().0 == self.output_row { if block.is_replacement() && self.transforms.start().0 == self.output_row {
if matches!(block, Block::FoldedBuffer { .. }) { if matches!(block, Block::FoldedBuffer { .. }) {
Some(None) Some(RowInfo::default())
} else { } else {
Some(self.input_buffer_rows.next().unwrap()) Some(self.input_rows.next().unwrap())
} }
} else { } else {
Some(None) Some(RowInfo::default())
} }
} else { } else {
Some(self.input_buffer_rows.next().unwrap()) Some(self.input_rows.next().unwrap())
} }
} }
} }
@ -2153,7 +2158,10 @@ mod tests {
); );
assert_eq!( assert_eq!(
snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), snapshot
.row_infos(BlockRow(0))
.map(|row_info| row_info.buffer_row)
.collect::<Vec<_>>(),
&[ &[
Some(0), Some(0),
None, None,
@ -2603,7 +2611,10 @@ mod tests {
"\n\n\n111\n\n\n\n\n222\n\n\n333\n\n\n444\n\n\n\n\n555\n\n\n666\n" "\n\n\n111\n\n\n\n\n222\n\n\n333\n\n\n444\n\n\n\n\n555\n\n\n666\n"
); );
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2679,7 +2690,10 @@ mod tests {
"\n\n\n111\n\n\n\n\n\n222\n\n\n\n333\n\n\n444\n\n\n\n\n\n\n555\n\n\n666\n\n" "\n\n\n111\n\n\n\n\n\n222\n\n\n\n333\n\n\n444\n\n\n\n\n\n\n555\n\n\n666\n\n"
); );
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2754,7 +2768,10 @@ mod tests {
"\n\n\n\n\n\n222\n\n\n\n333\n\n\n444\n\n\n\n\n\n\n555\n\n\n666\n\n" "\n\n\n\n\n\n222\n\n\n\n333\n\n\n444\n\n\n\n\n\n\n555\n\n\n666\n\n"
); );
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2819,7 +2836,10 @@ mod tests {
); );
assert_eq!(blocks_snapshot.text(), "\n\n\n\n\n\n\n\n555\n\n\n666\n\n"); assert_eq!(blocks_snapshot.text(), "\n\n\n\n\n\n\n\n555\n\n\n666\n\n");
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2873,7 +2893,10 @@ mod tests {
"Should have extra newline for 111 buffer, due to a new block added when it was folded" "Should have extra newline for 111 buffer, due to a new block added when it was folded"
); );
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2927,7 +2950,10 @@ mod tests {
"Should have a single, first buffer left after folding" "Should have a single, first buffer left after folding"
); );
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![ vec![
None, None,
None, None,
@ -2997,7 +3023,10 @@ mod tests {
); );
assert_eq!(blocks_snapshot.text(), "\n"); assert_eq!(blocks_snapshot.text(), "\n");
assert_eq!( assert_eq!(
blocks_snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(), blocks_snapshot
.row_infos(BlockRow(0))
.map(|i| i.buffer_row)
.collect::<Vec<_>>(),
vec![None, None], vec![None, None],
"When fully folded, should be no buffer rows" "When fully folded, should be no buffer rows"
); );
@ -3295,7 +3324,8 @@ mod tests {
let mut sorted_blocks_iter = expected_blocks.into_iter().peekable(); let mut sorted_blocks_iter = expected_blocks.into_iter().peekable();
let input_buffer_rows = buffer_snapshot let input_buffer_rows = buffer_snapshot
.buffer_rows(MultiBufferRow(0)) .row_infos(MultiBufferRow(0))
.map(|row| row.buffer_row)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut expected_buffer_rows = Vec::new(); let mut expected_buffer_rows = Vec::new();
let mut expected_text = String::new(); let mut expected_text = String::new();
@ -3450,7 +3480,8 @@ mod tests {
); );
assert_eq!( assert_eq!(
blocks_snapshot blocks_snapshot
.buffer_rows(BlockRow(start_row as u32)) .row_infos(BlockRow(start_row as u32))
.map(|row_info| row_info.buffer_row)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
&expected_buffer_rows[start_row..], &expected_buffer_rows[start_row..],
"incorrect buffer_rows starting at row {:?}", "incorrect buffer_rows starting at row {:?}",

View file

@ -4,7 +4,9 @@ use super::{
}; };
use gpui::{AnyElement, ElementId, WindowContext}; use gpui::{AnyElement, ElementId, WindowContext};
use language::{Chunk, ChunkRenderer, Edit, Point, TextSummary}; use language::{Chunk, ChunkRenderer, Edit, Point, TextSummary};
use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToOffset}; use multi_buffer::{
Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset,
};
use std::{ use std::{
any::TypeId, any::TypeId,
cmp::{self, Ordering}, cmp::{self, Ordering},
@ -336,9 +338,7 @@ impl FoldMap {
let mut folds = self.snapshot.folds.iter().peekable(); let mut folds = self.snapshot.folds.iter().peekable();
while let Some(fold) = folds.next() { while let Some(fold) = folds.next() {
if let Some(next_fold) = folds.peek() { if let Some(next_fold) = folds.peek() {
let comparison = fold let comparison = fold.range.cmp(&next_fold.range, self.snapshot.buffer());
.range
.cmp(&next_fold.range, &self.snapshot.inlay_snapshot.buffer);
assert!(comparison.is_le()); assert!(comparison.is_le());
} }
} }
@ -578,6 +578,10 @@ pub struct FoldSnapshot {
} }
impl FoldSnapshot { impl FoldSnapshot {
pub fn buffer(&self) -> &MultiBufferSnapshot {
&self.inlay_snapshot.buffer
}
#[cfg(test)] #[cfg(test)]
pub fn text(&self) -> String { pub fn text(&self) -> String {
self.chunks(FoldOffset(0)..self.len(), false, Highlights::default()) self.chunks(FoldOffset(0)..self.len(), false, Highlights::default())
@ -673,7 +677,7 @@ impl FoldSnapshot {
(line_end - line_start) as u32 (line_end - line_start) as u32
} }
pub fn buffer_rows(&self, start_row: u32) -> FoldBufferRows { pub fn row_infos(&self, start_row: u32) -> FoldRows {
if start_row > self.transforms.summary().output.lines.row { if start_row > self.transforms.summary().output.lines.row {
panic!("invalid display row {}", start_row); panic!("invalid display row {}", start_row);
} }
@ -684,11 +688,11 @@ impl FoldSnapshot {
let overshoot = fold_point.0 - cursor.start().0 .0; let overshoot = fold_point.0 - cursor.start().0 .0;
let inlay_point = InlayPoint(cursor.start().1 .0 + overshoot); let inlay_point = InlayPoint(cursor.start().1 .0 + overshoot);
let input_buffer_rows = self.inlay_snapshot.buffer_rows(inlay_point.row()); let input_rows = self.inlay_snapshot.row_infos(inlay_point.row());
FoldBufferRows { FoldRows {
fold_point, fold_point,
input_buffer_rows, input_rows,
cursor, cursor,
} }
} }
@ -843,8 +847,8 @@ fn push_isomorphic(transforms: &mut SumTree<Transform>, summary: TextSummary) {
transforms.update_last( transforms.update_last(
|last| { |last| {
if !last.is_fold() { if !last.is_fold() {
last.summary.input += summary.clone(); last.summary.input += summary;
last.summary.output += summary.clone(); last.summary.output += summary;
did_merge = true; did_merge = true;
} }
}, },
@ -854,7 +858,7 @@ fn push_isomorphic(transforms: &mut SumTree<Transform>, summary: TextSummary) {
transforms.push( transforms.push(
Transform { Transform {
summary: TransformSummary { summary: TransformSummary {
input: summary.clone(), input: summary,
output: summary, output: summary,
}, },
placeholder: None, placeholder: None,
@ -1134,25 +1138,25 @@ impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize {
} }
#[derive(Clone)] #[derive(Clone)]
pub struct FoldBufferRows<'a> { pub struct FoldRows<'a> {
cursor: Cursor<'a, Transform, (FoldPoint, InlayPoint)>, cursor: Cursor<'a, Transform, (FoldPoint, InlayPoint)>,
input_buffer_rows: InlayBufferRows<'a>, input_rows: InlayBufferRows<'a>,
fold_point: FoldPoint, fold_point: FoldPoint,
} }
impl<'a> FoldBufferRows<'a> { impl<'a> FoldRows<'a> {
pub(crate) fn seek(&mut self, row: u32) { pub(crate) fn seek(&mut self, row: u32) {
let fold_point = FoldPoint::new(row, 0); let fold_point = FoldPoint::new(row, 0);
self.cursor.seek(&fold_point, Bias::Left, &()); self.cursor.seek(&fold_point, Bias::Left, &());
let overshoot = fold_point.0 - self.cursor.start().0 .0; let overshoot = fold_point.0 - self.cursor.start().0 .0;
let inlay_point = InlayPoint(self.cursor.start().1 .0 + overshoot); let inlay_point = InlayPoint(self.cursor.start().1 .0 + overshoot);
self.input_buffer_rows.seek(inlay_point.row()); self.input_rows.seek(inlay_point.row());
self.fold_point = fold_point; self.fold_point = fold_point;
} }
} }
impl<'a> Iterator for FoldBufferRows<'a> { impl<'a> Iterator for FoldRows<'a> {
type Item = Option<u32>; type Item = RowInfo;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let mut traversed_fold = false; let mut traversed_fold = false;
@ -1166,11 +1170,11 @@ impl<'a> Iterator for FoldBufferRows<'a> {
if self.cursor.item().is_some() { if self.cursor.item().is_some() {
if traversed_fold { if traversed_fold {
self.input_buffer_rows.seek(self.cursor.start().1.row()); self.input_rows.seek(self.cursor.start().1 .0.row);
self.input_buffer_rows.next(); self.input_rows.next();
} }
*self.fold_point.row_mut() += 1; *self.fold_point.row_mut() += 1;
self.input_buffer_rows.next() self.input_rows.next()
} else { } else {
None None
} }
@ -1683,12 +1687,12 @@ mod tests {
.row(); .row();
expected_buffer_rows.extend( expected_buffer_rows.extend(
inlay_snapshot inlay_snapshot
.buffer_rows(prev_row) .row_infos(prev_row)
.take((1 + fold_start - prev_row) as usize), .take((1 + fold_start - prev_row) as usize),
); );
prev_row = 1 + fold_end; prev_row = 1 + fold_end;
} }
expected_buffer_rows.extend(inlay_snapshot.buffer_rows(prev_row)); expected_buffer_rows.extend(inlay_snapshot.row_infos(prev_row));
assert_eq!( assert_eq!(
expected_buffer_rows.len(), expected_buffer_rows.len(),
@ -1777,7 +1781,7 @@ mod tests {
let mut fold_row = 0; let mut fold_row = 0;
while fold_row < expected_buffer_rows.len() as u32 { while fold_row < expected_buffer_rows.len() as u32 {
assert_eq!( assert_eq!(
snapshot.buffer_rows(fold_row).collect::<Vec<_>>(), snapshot.row_infos(fold_row).collect::<Vec<_>>(),
expected_buffer_rows[(fold_row as usize)..], expected_buffer_rows[(fold_row as usize)..],
"wrong buffer rows starting at fold row {}", "wrong buffer rows starting at fold row {}",
fold_row, fold_row,
@ -1892,10 +1896,19 @@ mod tests {
let (snapshot, _) = map.read(inlay_snapshot, vec![]); let (snapshot, _) = map.read(inlay_snapshot, vec![]);
assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee\nffffff\n"); assert_eq!(snapshot.text(), "aa⋯cccc\nd⋯eeeee\nffffff\n");
assert_eq!( assert_eq!(
snapshot.buffer_rows(0).collect::<Vec<_>>(), snapshot
.row_infos(0)
.map(|info| info.buffer_row)
.collect::<Vec<_>>(),
[Some(0), Some(3), Some(5), Some(6)] [Some(0), Some(3), Some(5), Some(6)]
); );
assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [Some(6)]); assert_eq!(
snapshot
.row_infos(3)
.map(|info| info.buffer_row)
.collect::<Vec<_>>(),
[Some(6)]
);
} }
fn init_test(cx: &mut gpui::AppContext) { fn init_test(cx: &mut gpui::AppContext) {

View file

@ -1,7 +1,9 @@
use crate::{HighlightStyles, InlayId}; use crate::{HighlightStyles, InlayId};
use collections::BTreeSet; use collections::BTreeSet;
use language::{Chunk, Edit, Point, TextSummary}; use language::{Chunk, Edit, Point, TextSummary};
use multi_buffer::{Anchor, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, ToOffset}; use multi_buffer::{
Anchor, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot, RowInfo, ToOffset,
};
use std::{ use std::{
cmp, cmp,
ops::{Add, AddAssign, Range, Sub, SubAssign}, ops::{Add, AddAssign, Range, Sub, SubAssign},
@ -67,11 +69,11 @@ impl Inlay {
impl sum_tree::Item for Transform { impl sum_tree::Item for Transform {
type Summary = TransformSummary; type Summary = TransformSummary;
fn summary(&self, _cx: &()) -> Self::Summary { fn summary(&self, _: &()) -> Self::Summary {
match self { match self {
Transform::Isomorphic(summary) => TransformSummary { Transform::Isomorphic(summary) => TransformSummary {
input: summary.clone(), input: *summary,
output: summary.clone(), output: *summary,
}, },
Transform::Inlay(inlay) => TransformSummary { Transform::Inlay(inlay) => TransformSummary {
input: TextSummary::default(), input: TextSummary::default(),
@ -362,14 +364,14 @@ impl<'a> InlayBufferRows<'a> {
} }
impl<'a> Iterator for InlayBufferRows<'a> { impl<'a> Iterator for InlayBufferRows<'a> {
type Item = Option<u32>; type Item = RowInfo;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let buffer_row = if self.inlay_row == 0 { let buffer_row = if self.inlay_row == 0 {
self.buffer_rows.next().unwrap() self.buffer_rows.next().unwrap()
} else { } else {
match self.transforms.item()? { match self.transforms.item()? {
Transform::Inlay(_) => None, Transform::Inlay(_) => Default::default(),
Transform::Isomorphic(_) => self.buffer_rows.next().unwrap(), Transform::Isomorphic(_) => self.buffer_rows.next().unwrap(),
} }
}; };
@ -448,7 +450,7 @@ impl InlayMap {
new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &()); new_transforms.append(cursor.slice(&buffer_edit.old.start, Bias::Left, &()), &());
if let Some(Transform::Isomorphic(transform)) = cursor.item() { if let Some(Transform::Isomorphic(transform)) = cursor.item() {
if cursor.end(&()).0 == buffer_edit.old.start { if cursor.end(&()).0 == buffer_edit.old.start {
push_isomorphic(&mut new_transforms, transform.clone()); push_isomorphic(&mut new_transforms, *transform);
cursor.next(&()); cursor.next(&());
} }
} }
@ -892,7 +894,7 @@ impl InlaySnapshot {
} }
pub fn text_summary(&self) -> TextSummary { pub fn text_summary(&self) -> TextSummary {
self.transforms.summary().output.clone() self.transforms.summary().output
} }
pub fn text_summary_for_range(&self, range: Range<InlayOffset>) -> TextSummary { pub fn text_summary_for_range(&self, range: Range<InlayOffset>) -> TextSummary {
@ -945,7 +947,7 @@ impl InlaySnapshot {
summary summary
} }
pub fn buffer_rows(&self, row: u32) -> InlayBufferRows<'_> { pub fn row_infos(&self, row: u32) -> InlayBufferRows<'_> {
let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&()); let mut cursor = self.transforms.cursor::<(InlayPoint, Point)>(&());
let inlay_point = InlayPoint::new(row, 0); let inlay_point = InlayPoint::new(row, 0);
cursor.seek(&inlay_point, Bias::Left, &()); cursor.seek(&inlay_point, Bias::Left, &());
@ -967,7 +969,7 @@ impl InlaySnapshot {
InlayBufferRows { InlayBufferRows {
transforms: cursor, transforms: cursor,
inlay_row: inlay_point.row(), inlay_row: inlay_point.row(),
buffer_rows: self.buffer.buffer_rows(buffer_row), buffer_rows: self.buffer.row_infos(buffer_row),
max_buffer_row, max_buffer_row,
} }
} }
@ -1477,7 +1479,10 @@ mod tests {
); );
assert_eq!(inlay_snapshot.text(), "|123|\nabc\n|456|def\n|567|\n\nghi"); assert_eq!(inlay_snapshot.text(), "|123|\nabc\n|456|def\n|567|\n\nghi");
assert_eq!( assert_eq!(
inlay_snapshot.buffer_rows(0).collect::<Vec<_>>(), inlay_snapshot
.row_infos(0)
.map(|info| info.buffer_row)
.collect::<Vec<_>>(),
vec![Some(0), None, Some(1), None, None, Some(2)] vec![Some(0), None, Some(1), None, None, Some(2)]
); );
} }
@ -1548,7 +1553,7 @@ mod tests {
} }
assert_eq!(inlay_snapshot.text(), expected_text.to_string()); assert_eq!(inlay_snapshot.text(), expected_text.to_string());
let expected_buffer_rows = inlay_snapshot.buffer_rows(0).collect::<Vec<_>>(); let expected_buffer_rows = inlay_snapshot.row_infos(0).collect::<Vec<_>>();
assert_eq!( assert_eq!(
expected_buffer_rows.len() as u32, expected_buffer_rows.len() as u32,
expected_text.max_point().row + 1 expected_text.max_point().row + 1
@ -1556,7 +1561,7 @@ mod tests {
for row_start in 0..expected_buffer_rows.len() { for row_start in 0..expected_buffer_rows.len() {
assert_eq!( assert_eq!(
inlay_snapshot inlay_snapshot
.buffer_rows(row_start as u32) .row_infos(row_start as u32)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
&expected_buffer_rows[row_start..], &expected_buffer_rows[row_start..],
"incorrect buffer rows starting at {}", "incorrect buffer rows starting at {}",

View file

@ -272,8 +272,8 @@ impl TabSnapshot {
} }
} }
pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows<'_> { pub fn rows(&self, row: u32) -> fold_map::FoldRows<'_> {
self.fold_snapshot.buffer_rows(row) self.fold_snapshot.row_infos(row)
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,11 +1,11 @@
use super::{ use super::{
fold_map::FoldBufferRows, fold_map::FoldRows,
tab_map::{self, TabEdit, TabPoint, TabSnapshot}, tab_map::{self, TabEdit, TabPoint, TabSnapshot},
Highlights, Highlights,
}; };
use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task}; use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task};
use language::{Chunk, Point}; use language::{Chunk, Point};
use multi_buffer::MultiBufferSnapshot; use multi_buffer::{MultiBufferSnapshot, RowInfo};
use smol::future::yield_now; use smol::future::yield_now;
use std::sync::LazyLock; use std::sync::LazyLock;
use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration}; use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
@ -60,16 +60,16 @@ pub struct WrapChunks<'a> {
} }
#[derive(Clone)] #[derive(Clone)]
pub struct WrapBufferRows<'a> { pub struct WrapRows<'a> {
input_buffer_rows: FoldBufferRows<'a>, input_buffer_rows: FoldRows<'a>,
input_buffer_row: Option<u32>, input_buffer_row: RowInfo,
output_row: u32, output_row: u32,
soft_wrapped: bool, soft_wrapped: bool,
max_output_row: u32, max_output_row: u32,
transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>, transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
} }
impl<'a> WrapBufferRows<'a> { impl<'a> WrapRows<'a> {
pub(crate) fn seek(&mut self, start_row: u32) { pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms self.transforms
.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); .seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
@ -717,7 +717,7 @@ impl WrapSnapshot {
self.transforms.summary().output.longest_row self.transforms.summary().output.longest_row
} }
pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { pub fn row_infos(&self, start_row: u32) -> WrapRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row(); let mut input_row = transforms.start().1.row();
@ -725,9 +725,9 @@ impl WrapSnapshot {
input_row += start_row - transforms.start().0.row(); input_row += start_row - transforms.start().0.row();
} }
let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic()); let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row); let mut input_buffer_rows = self.tab_snapshot.rows(input_row);
let input_buffer_row = input_buffer_rows.next().unwrap(); let input_buffer_row = input_buffer_rows.next().unwrap();
WrapBufferRows { WrapRows {
transforms, transforms,
input_buffer_row, input_buffer_row,
input_buffer_rows, input_buffer_rows,
@ -847,7 +847,7 @@ impl WrapSnapshot {
} }
let text = language::Rope::from(self.text().as_str()); let text = language::Rope::from(self.text().as_str());
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0); let mut input_buffer_rows = self.tab_snapshot.rows(0);
let mut expected_buffer_rows = Vec::new(); let mut expected_buffer_rows = Vec::new();
let mut prev_tab_row = 0; let mut prev_tab_row = 0;
for display_row in 0..=self.max_point().row() { for display_row in 0..=self.max_point().row() {
@ -855,7 +855,7 @@ impl WrapSnapshot {
if tab_point.row() == prev_tab_row && display_row != 0 { if tab_point.row() == prev_tab_row && display_row != 0 {
expected_buffer_rows.push(None); expected_buffer_rows.push(None);
} else { } else {
expected_buffer_rows.push(input_buffer_rows.next().unwrap()); expected_buffer_rows.push(input_buffer_rows.next().unwrap().buffer_row);
} }
prev_tab_row = tab_point.row(); prev_tab_row = tab_point.row();
@ -864,7 +864,8 @@ impl WrapSnapshot {
for start_display_row in 0..expected_buffer_rows.len() { for start_display_row in 0..expected_buffer_rows.len() {
assert_eq!( assert_eq!(
self.buffer_rows(start_display_row as u32) self.row_infos(start_display_row as u32)
.map(|row_info| row_info.buffer_row)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
&expected_buffer_rows[start_display_row..], &expected_buffer_rows[start_display_row..],
"invalid buffer_rows({}..)", "invalid buffer_rows({}..)",
@ -958,8 +959,8 @@ impl<'a> Iterator for WrapChunks<'a> {
} }
} }
impl<'a> Iterator for WrapBufferRows<'a> { impl<'a> Iterator for WrapRows<'a> {
type Item = Option<u32>; type Item = RowInfo;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
if self.output_row > self.max_output_row { if self.output_row > self.max_output_row {
@ -968,6 +969,7 @@ impl<'a> Iterator for WrapBufferRows<'a> {
let buffer_row = self.input_buffer_row; let buffer_row = self.input_buffer_row;
let soft_wrapped = self.soft_wrapped; let soft_wrapped = self.soft_wrapped;
let diff_status = self.input_buffer_row.diff_status;
self.output_row += 1; self.output_row += 1;
self.transforms self.transforms
@ -979,7 +981,15 @@ impl<'a> Iterator for WrapBufferRows<'a> {
self.soft_wrapped = true; self.soft_wrapped = true;
} }
Some(if soft_wrapped { None } else { buffer_row }) Some(if soft_wrapped {
RowInfo {
buffer_row: None,
multibuffer_row: None,
diff_status,
}
} else {
buffer_row
})
} }
} }

File diff suppressed because it is too large Load diff

View file

@ -19,11 +19,11 @@ use language::{
}, },
BracketPairConfig, BracketPairConfig,
Capability::ReadWrite, Capability::ReadWrite,
FakeLspAdapter, IndentGuide, LanguageConfig, LanguageConfigOverride, LanguageMatcher, FakeLspAdapter, LanguageConfig, LanguageConfigOverride, LanguageMatcher, LanguageName,
LanguageName, Override, ParsedMarkdown, Point, Override, ParsedMarkdown, Point,
}; };
use language_settings::{Formatter, FormatterList, IndentGuideSettings}; use language_settings::{Formatter, FormatterList, IndentGuideSettings};
use multi_buffer::MultiBufferIndentGuide; use multi_buffer::IndentGuide;
use parking_lot::Mutex; use parking_lot::Mutex;
use pretty_assertions::{assert_eq, assert_ne}; use pretty_assertions::{assert_eq, assert_ne};
use project::{buffer_store::BufferChangeSet, FakeFs}; use project::{buffer_store::BufferChangeSet, FakeFs};
@ -3363,8 +3363,8 @@ async fn test_custom_newlines_cause_no_false_positive_diffs(
let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
assert_eq!( assert_eq!(
snapshot snapshot
.diff_map .buffer_snapshot
.diff_hunks_in_range(0..snapshot.buffer_snapshot.len(), &snapshot.buffer_snapshot) .diff_hunks_in_range(0..snapshot.buffer_snapshot.len())
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
Vec::new(), Vec::new(),
"Should not have any diffs for files with custom newlines" "Should not have any diffs for files with custom newlines"
@ -5480,6 +5480,109 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
}); });
} }
#[gpui::test]
async fn test_fold_function_bodies(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let base_text = r#"
impl A {
// this is an unstaged comment
fn b() {
c();
}
// this is another unstaged comment
fn d() {
// e
// f
}
}
fn g() {
// h
}
"#
.unindent();
let text = r#"
ˇimpl A {
fn b() {
c();
}
fn d() {
// e
// f
}
}
fn g() {
// h
}
"#
.unindent();
let mut cx = EditorLspTestContext::new_rust(Default::default(), cx).await;
cx.set_state(&text);
cx.set_diff_base(&base_text);
cx.update_editor(|editor, cx| {
editor.expand_all_diff_hunks(&Default::default(), cx);
});
cx.assert_state_with_diff(
"
ˇimpl A {
- // this is an unstaged comment
fn b() {
c();
}
- // this is another unstaged comment
-
fn d() {
// e
// f
}
}
fn g() {
// h
}
"
.unindent(),
);
let expected_display_text = "
impl A {
// this is an unstaged comment
fn b() {
}
// this is another unstaged comment
fn d() {
}
}
fn g() {
}
"
.unindent();
cx.update_editor(|editor, cx| {
editor.fold_function_bodies(&FoldFunctionBodies, cx);
assert_eq!(editor.display_text(cx), expected_display_text);
});
}
#[gpui::test] #[gpui::test]
async fn test_autoindent(cx: &mut gpui::TestAppContext) { async fn test_autoindent(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -10319,7 +10422,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
} }
#[gpui::test] #[gpui::test]
async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { async fn test_go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await; let mut cx = EditorTestContext::new(cx).await;
@ -10420,7 +10523,26 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext)
); );
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
for _ in 0..3 { editor.go_to_prev_hunk(&GoToPrevHunk, cx);
});
cx.assert_editor_state(
&r#"
ˇuse some::modified;
fn main() {
println!("hello there");
println!("around the");
println!("world");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
for _ in 0..2 {
editor.go_to_prev_hunk(&GoToPrevHunk, cx); editor.go_to_prev_hunk(&GoToPrevHunk, cx);
} }
}); });
@ -10442,11 +10564,10 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext)
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.fold(&Fold, cx); editor.fold(&Fold, cx);
});
//Make sure that the fold only gets one hunk cx.update_editor(|editor, cx| {
for _ in 0..4 { editor.go_to_next_hunk(&GoToHunk, cx);
editor.go_to_next_hunk(&GoToHunk, cx);
}
}); });
cx.assert_editor_state( cx.assert_editor_state(
@ -11815,6 +11936,39 @@ async fn test_modification_reverts(cx: &mut gpui::TestAppContext) {
); );
} }
#[gpui::test]
async fn test_deleting_over_diff_hunk(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
let base_text = indoc! {r#"
one
two
three
"#};
cx.set_diff_base(base_text);
cx.set_state("\nˇ\n");
cx.executor().run_until_parked();
cx.update_editor(|editor, cx| {
editor.expand_selected_diff_hunks(cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, cx| {
editor.backspace(&Default::default(), cx);
});
cx.run_until_parked();
cx.assert_state_with_diff(
indoc! {r#"
- two
- threeˇ
+
"#}
.to_string(),
);
}
#[gpui::test] #[gpui::test]
async fn test_deletion_reverts(cx: &mut gpui::TestAppContext) { async fn test_deletion_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -12019,13 +12173,11 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
(buffer_3.clone(), base_text_3), (buffer_3.clone(), base_text_3),
] { ] {
let change_set = cx.new_model(|cx| { let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text( BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
}); });
editor.diff_map.add_change_set(change_set, cx) editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
} }
}); });
cx.executor().run_until_parked(); cx.executor().run_until_parked();
@ -12385,7 +12537,10 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) {
} }
#[gpui::test] #[gpui::test]
async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) { async fn test_toggle_selected_diff_hunks(
executor: BackgroundExecutor,
cx: &mut gpui::TestAppContext,
) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await; let mut cx = EditorTestContext::new(cx).await;
@ -12423,7 +12578,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.go_to_next_hunk(&GoToHunk, cx); editor.go_to_next_hunk(&GoToHunk, cx);
editor.toggle_hunk_diff(&ToggleHunkDiff, cx); editor.toggle_selected_diff_hunks(&ToggleSelectedDiffHunks, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
@ -12443,12 +12598,34 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
); );
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
for _ in 0..3 { for _ in 0..2 {
editor.go_to_next_hunk(&GoToHunk, cx); editor.go_to_next_hunk(&GoToHunk, cx);
editor.toggle_hunk_diff(&ToggleHunkDiff, cx); editor.toggle_selected_diff_hunks(&ToggleSelectedDiffHunks, cx);
} }
}); });
executor.run_until_parked(); executor.run_until_parked();
cx.assert_state_with_diff(
r#"
- use some::mod;
+ ˇuse some::modified;
fn main() {
- println!("hello");
+ println!("hello there");
+ println!("around the");
println!("world");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
editor.go_to_next_hunk(&GoToHunk, cx);
editor.toggle_selected_diff_hunks(&ToggleSelectedDiffHunks, cx);
});
executor.run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
r#" r#"
- use some::mod; - use some::mod;
@ -12534,7 +12711,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
executor.run_until_parked(); executor.run_until_parked();
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
@ -12579,7 +12756,7 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
); );
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
@ -12602,170 +12779,6 @@ async fn test_diff_base_change_with_expanded_diff_hunks(
); );
} }
#[gpui::test]
async fn test_fold_unfold_diff_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let diff_base = r#"
use some::mod1;
use some::mod2;
const A: u32 = 42;
const B: u32 = 42;
const C: u32 = 42;
fn main() {
println!("hello");
println!("world");
}
fn another() {
println!("another");
}
fn another2() {
println!("another2");
}
"#
.unindent();
cx.set_state(
&r#"
«use some::mod2;
const A: u32 = 42;
const C: u32 = 42;
fn main() {
//println!("hello");
println!("world");
//
//ˇ»
}
fn another() {
println!("another");
println!("another");
}
println!("another2");
}
"#
.unindent(),
);
cx.set_diff_base(&diff_base);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx);
});
executor.run_until_parked();
cx.assert_state_with_diff(
r#"
- use some::mod1;
«use some::mod2;
const A: u32 = 42;
- const B: u32 = 42;
const C: u32 = 42;
fn main() {
- println!("hello");
+ //println!("hello");
println!("world");
+ //
+ //ˇ»
}
fn another() {
println!("another");
+ println!("another");
}
- fn another2() {
println!("another2");
}
"#
.unindent(),
);
// Fold across some of the diff hunks. They should no longer appear expanded.
cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx));
cx.executor().run_until_parked();
// Hunks are not shown if their position is within a fold
cx.assert_state_with_diff(
r#"
«use some::mod2;
const A: u32 = 42;
const C: u32 = 42;
fn main() {
//println!("hello");
println!("world");
//
//ˇ»
}
fn another() {
println!("another");
+ println!("another");
}
- fn another2() {
println!("another2");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
editor.select_all(&SelectAll, cx);
editor.unfold_lines(&UnfoldLines, cx);
});
cx.executor().run_until_parked();
// The deletions reappear when unfolding.
cx.assert_state_with_diff(
r#"
- use some::mod1;
«use some::mod2;
const A: u32 = 42;
- const B: u32 = 42;
const C: u32 = 42;
fn main() {
- println!("hello");
+ //println!("hello");
println!("world");
+ //
+ //
}
fn another() {
println!("another");
+ println!("another");
}
- fn another2() {
println!("another2");
}
ˇ»"#
.unindent(),
);
}
#[gpui::test] #[gpui::test]
async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) { async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -12849,13 +12862,11 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
(buffer_3.clone(), file_3_old), (buffer_3.clone(), file_3_old),
] { ] {
let change_set = cx.new_model(|cx| { let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text( BufferChangeSet::new_with_base_text(diff_base.to_string(), &buffer, cx)
diff_base.to_string(),
buffer.read(cx).text_snapshot(),
cx,
)
}); });
editor.diff_map.add_change_set(change_set, cx) editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx));
} }
}) })
.unwrap(); .unwrap();
@ -12895,7 +12906,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.select_all(&SelectAll, cx); editor.select_all(&SelectAll, cx);
editor.toggle_hunk_diff(&ToggleHunkDiff, cx); editor.toggle_selected_diff_hunks(&ToggleSelectedDiffHunks, cx);
}); });
cx.executor().run_until_parked(); cx.executor().run_until_parked();
@ -12962,17 +12973,18 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx)); let editor = cx.add_window(|cx| Editor::new(EditorMode::Full, multi_buffer, None, true, cx));
editor editor
.update(cx, |editor, cx| { .update(cx, |editor, cx| {
let buffer = buffer.read(cx).text_snapshot();
let change_set = cx let change_set = cx
.new_model(|cx| BufferChangeSet::new_with_base_text(base.to_string(), buffer, cx)); .new_model(|cx| BufferChangeSet::new_with_base_text(base.to_string(), &buffer, cx));
editor.diff_map.add_change_set(change_set, cx) editor
.buffer
.update(cx, |buffer, cx| buffer.add_change_set(change_set, cx))
}) })
.unwrap(); .unwrap();
let mut cx = EditorTestContext::for_editor(editor, cx).await; let mut cx = EditorTestContext::for_editor(editor, cx).await;
cx.run_until_parked(); cx.run_until_parked();
cx.update_editor(|editor, cx| editor.expand_all_hunk_diffs(&Default::default(), cx)); cx.update_editor(|editor, cx| editor.expand_all_diff_hunks(&Default::default(), cx));
cx.executor().run_until_parked(); cx.executor().run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
@ -12981,8 +12993,6 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut gpui::TestAppContext
- bbb - bbb
+ BBB + BBB
- ddd
- eee
+ EEE + EEE
fff fff
" "
@ -13036,7 +13046,7 @@ async fn test_edits_around_expanded_insertion_hunks(
executor.run_until_parked(); executor.run_until_parked();
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
@ -13055,7 +13065,7 @@ async fn test_edits_around_expanded_insertion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13078,7 +13088,7 @@ async fn test_edits_around_expanded_insertion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13102,7 +13112,7 @@ async fn test_edits_around_expanded_insertion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13127,7 +13137,7 @@ async fn test_edits_around_expanded_insertion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13153,7 +13163,7 @@ async fn test_edits_around_expanded_insertion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13164,21 +13174,63 @@ async fn test_edits_around_expanded_insertion_hunks(
executor.run_until_parked(); executor.run_until_parked();
cx.assert_state_with_diff( cx.assert_state_with_diff(
r#" r#"
use some::mod1;
- use some::mod2;
-
- const A: u32 = 42;
ˇ ˇ
fn main() { fn main() {
println!("hello"); println!("hello");
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
} }
#[gpui::test]
async fn test_toggling_adjacent_diff_hunks(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_diff_base(indoc! { "
one
two
three
four
five
"
});
cx.set_state(indoc! { "
one
ˇthree
five
"});
cx.run_until_parked();
cx.update_editor(|editor, cx| {
editor.toggle_selected_diff_hunks(&Default::default(), cx);
});
cx.assert_state_with_diff(
indoc! { "
one
- two
ˇthree
- four
five
"}
.to_string(),
);
cx.update_editor(|editor, cx| {
editor.toggle_selected_diff_hunks(&Default::default(), cx);
});
cx.assert_state_with_diff(
indoc! { "
one
ˇthree
five
"}
.to_string(),
);
}
#[gpui::test] #[gpui::test]
async fn test_edits_around_expanded_deletion_hunks( async fn test_edits_around_expanded_deletion_hunks(
executor: BackgroundExecutor, executor: BackgroundExecutor,
@ -13227,7 +13279,7 @@ async fn test_edits_around_expanded_deletion_hunks(
executor.run_until_parked(); executor.run_until_parked();
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
@ -13246,7 +13298,7 @@ async fn test_edits_around_expanded_deletion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13269,7 +13321,7 @@ async fn test_edits_around_expanded_deletion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13292,7 +13344,7 @@ async fn test_edits_around_expanded_deletion_hunks(
println!("world"); println!("world");
} }
"# "#
.unindent(), .unindent(),
); );
@ -13316,6 +13368,71 @@ async fn test_edits_around_expanded_deletion_hunks(
println!("world"); println!("world");
} }
"#
.unindent(),
);
}
#[gpui::test]
async fn test_backspace_after_deletion_hunk(
executor: BackgroundExecutor,
cx: &mut gpui::TestAppContext,
) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let base_text = r#"
one
two
three
four
five
"#
.unindent();
executor.run_until_parked();
cx.set_state(
&r#"
one
two
fˇour
five
"#
.unindent(),
);
cx.set_diff_base(&base_text);
executor.run_until_parked();
cx.update_editor(|editor, cx| {
editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
});
executor.run_until_parked();
cx.assert_state_with_diff(
r#"
one
two
- three
fˇour
five
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
editor.backspace(&Backspace, cx);
editor.backspace(&Backspace, cx);
});
executor.run_until_parked();
cx.assert_state_with_diff(
r#"
one
two
- threeˇ
- four
+ our
five
"# "#
.unindent(), .unindent(),
); );
@ -13369,7 +13486,7 @@ async fn test_edit_after_expanded_modification_hunk(
cx.set_diff_base(&diff_base); cx.set_diff_base(&diff_base);
executor.run_until_parked(); executor.run_until_parked();
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.expand_all_hunk_diffs(&ExpandAllHunkDiffs, cx); editor.expand_all_diff_hunks(&ExpandAllHunkDiffs, cx);
}); });
executor.run_until_parked(); executor.run_until_parked();
@ -13478,22 +13595,14 @@ fn assert_indent_guides(
); );
} }
let expected: Vec<_> = expected
.into_iter()
.map(|guide| MultiBufferIndentGuide {
multibuffer_row_range: MultiBufferRow(guide.start_row)..MultiBufferRow(guide.end_row),
buffer: guide,
})
.collect();
assert_eq!(indent_guides, expected, "Indent guides do not match"); assert_eq!(indent_guides, expected, "Indent guides do not match");
} }
fn indent_guide(buffer_id: BufferId, start_row: u32, end_row: u32, depth: u32) -> IndentGuide { fn indent_guide(buffer_id: BufferId, start_row: u32, end_row: u32, depth: u32) -> IndentGuide {
IndentGuide { IndentGuide {
buffer_id, buffer_id,
start_row, start_row: MultiBufferRow(start_row),
end_row, end_row: MultiBufferRow(end_row),
depth, depth,
tab_size: 4, tab_size: 4,
settings: IndentGuideSettings { settings: IndentGuideSettings {
@ -13945,6 +14054,105 @@ async fn test_active_indent_guide_non_matching_indent(cx: &mut gpui::TestAppCont
); );
} }
#[gpui::test]
async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let text = indoc! {
"
impl A {
fn b() {
0;
3;
5;
6;
7;
}
}
"
};
let base_text = indoc! {
"
impl A {
fn b() {
0;
1;
2;
3;
4;
}
fn c() {
5;
6;
7;
}
}
"
};
cx.update_editor(|editor, cx| {
editor.set_text(text, cx);
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = multibuffer.as_singleton().unwrap();
let change_set = cx.new_model(|cx| {
let mut change_set = BufferChangeSet::new(&buffer, cx);
change_set.recalculate_diff_sync(
base_text.into(),
buffer.read(cx).text_snapshot(),
true,
cx,
);
change_set
});
multibuffer.set_all_diff_hunks_expanded(cx);
multibuffer.add_change_set(change_set, cx);
buffer.read(cx).remote_id()
})
});
cx.assert_state_with_diff(
indoc! { "
impl A {
fn b() {
0;
- 1;
- 2;
3;
- 4;
- }
- fn c() {
5;
6;
7;
}
}
ˇ"
}
.to_string(),
);
let mut actual_guides = cx.update_editor(|editor, cx| {
editor
.snapshot(cx)
.buffer_snapshot
.indent_guides_in_range(Anchor::min()..Anchor::max(), false, cx)
.map(|guide| (guide.start_row..=guide.end_row, guide.depth))
.collect::<Vec<_>>()
});
actual_guides.sort_by_key(|item| (*item.0.start(), item.1));
assert_eq!(
actual_guides,
vec![
(MultiBufferRow(1)..=MultiBufferRow(12), 0),
(MultiBufferRow(2)..=MultiBufferRow(6), 1),
(MultiBufferRow(9)..=MultiBufferRow(11), 1),
]
);
}
#[gpui::test] #[gpui::test]
fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) { fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -15229,7 +15437,7 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
#[track_caller] #[track_caller]
fn assert_hunk_revert( fn assert_hunk_revert(
not_reverted_text_with_selections: &str, not_reverted_text_with_selections: &str,
expected_not_reverted_hunk_statuses: Vec<DiffHunkStatus>, expected_hunk_statuses_before: Vec<DiffHunkStatus>,
expected_reverted_text_with_selections: &str, expected_reverted_text_with_selections: &str,
base_text: &str, base_text: &str,
cx: &mut EditorLspTestContext, cx: &mut EditorLspTestContext,
@ -15238,12 +15446,12 @@ fn assert_hunk_revert(
cx.set_diff_base(base_text); cx.set_diff_base(base_text);
cx.executor().run_until_parked(); cx.executor().run_until_parked();
let reverted_hunk_statuses = cx.update_editor(|editor, cx| { let actual_hunk_statuses_before = cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
let reverted_hunk_statuses = snapshot let reverted_hunk_statuses = snapshot
.diff_map .buffer_snapshot
.diff_hunks_in_range(0..snapshot.buffer_snapshot.len(), &snapshot.buffer_snapshot) .diff_hunks_in_range(0..snapshot.buffer_snapshot.len())
.map(|hunk| hunk_status(&hunk)) .map(|hunk| hunk.status())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
editor.revert_selected_hunks(&RevertSelectedHunks, cx); editor.revert_selected_hunks(&RevertSelectedHunks, cx);
@ -15251,5 +15459,5 @@ fn assert_hunk_revert(
}); });
cx.executor().run_until_parked(); cx.executor().run_until_parked();
cx.assert_editor_state(expected_reverted_text_with_selections); cx.assert_editor_state(expected_reverted_text_with_selections);
assert_eq!(reverted_hunk_statuses, expected_not_reverted_hunk_statuses); assert_eq!(actual_hunk_statuses_before, expected_hunk_statuses_before);
} }

File diff suppressed because it is too large Load diff

View file

@ -1,5 +1,3 @@
use std::{sync::Arc, time::Duration};
use anyhow::Result; use anyhow::Result;
use collections::HashMap; use collections::HashMap;
use git::{ use git::{
@ -9,9 +7,10 @@ use git::{
use gpui::{AppContext, Model, ModelContext, Subscription, Task}; use gpui::{AppContext, Model, ModelContext, Subscription, Task};
use http_client::HttpClient; use http_client::HttpClient;
use language::{markdown, Bias, Buffer, BufferSnapshot, Edit, LanguageRegistry, ParsedMarkdown}; use language::{markdown, Bias, Buffer, BufferSnapshot, Edit, LanguageRegistry, ParsedMarkdown};
use multi_buffer::MultiBufferRow; use multi_buffer::RowInfo;
use project::{Project, ProjectItem}; use project::{Project, ProjectItem};
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{sync::Arc, time::Duration};
use sum_tree::SumTree; use sum_tree::SumTree;
use url::Url; use url::Url;
@ -194,15 +193,15 @@ impl GitBlame {
pub fn blame_for_rows<'a>( pub fn blame_for_rows<'a>(
&'a mut self, &'a mut self,
rows: impl 'a + IntoIterator<Item = Option<MultiBufferRow>>, rows: &'a [RowInfo],
cx: &AppContext, cx: &AppContext,
) -> impl 'a + Iterator<Item = Option<BlameEntry>> { ) -> impl 'a + Iterator<Item = Option<BlameEntry>> {
self.sync(cx); self.sync(cx);
let mut cursor = self.entries.cursor::<u32>(&()); let mut cursor = self.entries.cursor::<u32>(&());
rows.into_iter().map(move |row| { rows.into_iter().map(move |info| {
let row = row?; let row = info.buffer_row?;
cursor.seek_forward(&row.0, Bias::Right, &()); cursor.seek_forward(&row, Bias::Right, &());
cursor.item()?.blame.clone() cursor.item()?.blame.clone()
}) })
} }
@ -563,15 +562,38 @@ mod tests {
use unindent::Unindent as _; use unindent::Unindent as _;
use util::RandomCharIter; use util::RandomCharIter;
macro_rules! assert_blame_rows { // macro_rules! assert_blame_rows {
($blame:expr, $rows:expr, $expected:expr, $cx:expr) => { // ($blame:expr, $rows:expr, $expected:expr, $cx:expr) => {
assert_eq!( // assert_eq!(
$blame // $blame
.blame_for_rows($rows.map(MultiBufferRow).map(Some), $cx) // .blame_for_rows($rows.map(MultiBufferRow).map(Some), $cx)
.collect::<Vec<_>>(), // .collect::<Vec<_>>(),
$expected // $expected
); // );
}; // };
// }
#[track_caller]
fn assert_blame_rows(
blame: &mut GitBlame,
rows: Range<u32>,
expected: Vec<Option<BlameEntry>>,
cx: &mut ModelContext<GitBlame>,
) {
assert_eq!(
blame
.blame_for_rows(
&rows
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
cx
)
.collect::<Vec<_>>(),
expected
);
} }
fn init_test(cx: &mut gpui::TestAppContext) { fn init_test(cx: &mut gpui::TestAppContext) {
@ -634,7 +656,15 @@ mod tests {
blame.update(cx, |blame, cx| { blame.update(cx, |blame, cx| {
assert_eq!( assert_eq!(
blame blame
.blame_for_rows((0..1).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(0..1)
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
cx
)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![None] vec![None]
); );
@ -698,7 +728,15 @@ mod tests {
// All lines // All lines
assert_eq!( assert_eq!(
blame blame
.blame_for_rows((0..8).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(0..8)
.map(|buffer_row| RowInfo {
buffer_row: Some(buffer_row),
..Default::default()
})
.collect::<Vec<_>>(),
cx
)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![ vec![
Some(blame_entry("1b1b1b", 0..1)), Some(blame_entry("1b1b1b", 0..1)),
@ -714,7 +752,15 @@ mod tests {
// Subset of lines // Subset of lines
assert_eq!( assert_eq!(
blame blame
.blame_for_rows((1..4).map(MultiBufferRow).map(Some), cx) .blame_for_rows(
&(1..4)
.map(|buffer_row| RowInfo {
buffer_row: Some(buffer_row),
..Default::default()
})
.collect::<Vec<_>>(),
cx
)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![ vec![
Some(blame_entry("0d0d0d", 1..2)), Some(blame_entry("0d0d0d", 1..2)),
@ -725,7 +771,17 @@ mod tests {
// Subset of lines, with some not displayed // Subset of lines, with some not displayed
assert_eq!( assert_eq!(
blame blame
.blame_for_rows(vec![Some(MultiBufferRow(1)), None, None], cx) .blame_for_rows(
&[
RowInfo {
buffer_row: Some(1),
..Default::default()
},
Default::default(),
Default::default(),
],
cx
)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
vec![Some(blame_entry("0d0d0d", 1..2)), None, None] vec![Some(blame_entry("0d0d0d", 1..2)), None, None]
); );
@ -777,16 +833,16 @@ mod tests {
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
// Sanity check before edits: make sure that we get the same blame entry for all // Sanity check before edits: make sure that we get the same blame entry for all
// lines. // lines.
assert_blame_rows!( assert_blame_rows(
blame, blame,
(0..4), 0..4,
vec![ vec![
Some(blame_entry("1b1b1b", 0..4)), Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)), Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)), Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)), Some(blame_entry("1b1b1b", 0..4)),
], ],
cx cx,
); );
}); });
@ -795,11 +851,11 @@ mod tests {
buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "X")], None, cx); buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "X")], None, cx);
}); });
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!( assert_blame_rows(
blame, blame,
(0..2), 0..2,
vec![None, Some(blame_entry("1b1b1b", 0..4))], vec![None, Some(blame_entry("1b1b1b", 0..4))],
cx cx,
); );
}); });
// Modify a single line, in the middle of the line // Modify a single line, in the middle of the line
@ -807,21 +863,21 @@ mod tests {
buffer.edit([(Point::new(1, 2)..Point::new(1, 2), "X")], None, cx); buffer.edit([(Point::new(1, 2)..Point::new(1, 2), "X")], None, cx);
}); });
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!( assert_blame_rows(
blame, blame,
(1..4), 1..4,
vec![ vec![
None, None,
Some(blame_entry("1b1b1b", 0..4)), Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)) Some(blame_entry("1b1b1b", 0..4)),
], ],
cx cx,
); );
}); });
// Before we insert a newline at the end, sanity check: // Before we insert a newline at the end, sanity check:
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (3..4), vec![Some(blame_entry("1b1b1b", 0..4))], cx); assert_blame_rows(blame, 3..4, vec![Some(blame_entry("1b1b1b", 0..4))], cx);
}); });
// Insert a newline at the end // Insert a newline at the end
buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
@ -829,17 +885,17 @@ mod tests {
}); });
// Only the new line is marked as edited: // Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!( assert_blame_rows(
blame, blame,
(3..5), 3..5,
vec![Some(blame_entry("1b1b1b", 0..4)), None], vec![Some(blame_entry("1b1b1b", 0..4)), None],
cx cx,
); );
}); });
// Before we insert a newline at the start, sanity check: // Before we insert a newline at the start, sanity check:
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (2..3), vec![Some(blame_entry("1b1b1b", 0..4)),], cx); assert_blame_rows(blame, 2..3, vec![Some(blame_entry("1b1b1b", 0..4))], cx);
}); });
// Usage example // Usage example
@ -849,11 +905,11 @@ mod tests {
}); });
// Only the new line is marked as edited: // Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| { git_blame.update(cx, |blame, cx| {
assert_blame_rows!( assert_blame_rows(
blame, blame,
(2..4), 2..4,
vec![None, Some(blame_entry("1b1b1b", 0..4)),], vec![None, Some(blame_entry("1b1b1b", 0..4))],
cx cx,
); );
}); });
} }

View file

@ -146,7 +146,7 @@ impl ProjectDiffEditor {
let editor = cx.new_view(|cx| { let editor = cx.new_view(|cx| {
let mut diff_display_editor = let mut diff_display_editor =
Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, cx); Editor::for_multibuffer(excerpts.clone(), Some(project.clone()), true, cx);
diff_display_editor.set_expand_all_diff_hunks(); diff_display_editor.set_expand_all_diff_hunks(cx);
diff_display_editor diff_display_editor
}); });
@ -310,9 +310,11 @@ impl ProjectDiffEditor {
.update(&mut cx, |project_diff_editor, cx| { .update(&mut cx, |project_diff_editor, cx| {
project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx); project_diff_editor.update_excerpts(id, new_changes, new_entry_order, cx);
project_diff_editor.editor.update(cx, |editor, cx| { project_diff_editor.editor.update(cx, |editor, cx| {
for change_set in change_sets { editor.buffer.update(cx, |buffer, cx| {
editor.diff_map.add_change_set(change_set, cx) for change_set in change_sets {
} buffer.add_change_set(change_set, cx)
}
});
}); });
}) })
.ok(); .ok();
@ -1105,6 +1107,8 @@ mod tests {
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use crate::test::editor_test_context::assert_state_with_diff;
use super::*; use super::*;
// TODO finish // TODO finish
@ -1183,19 +1187,13 @@ mod tests {
let change_set = cx.new_model(|cx| { let change_set = cx.new_model(|cx| {
BufferChangeSet::new_with_base_text( BufferChangeSet::new_with_base_text(
old_text.clone(), old_text.clone(),
file_a_editor &file_a_editor.buffer().read(cx).as_singleton().unwrap(),
.buffer()
.read(cx)
.as_singleton()
.unwrap()
.read(cx)
.text_snapshot(),
cx, cx,
) )
}); });
file_a_editor file_a_editor.buffer.update(cx, |buffer, cx| {
.diff_map buffer.add_change_set(change_set.clone(), cx)
.add_change_set(change_set.clone(), cx); });
project.update(cx, |project, cx| { project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| { project.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.set_change_set( buffer_store.set_change_set(
@ -1225,15 +1223,17 @@ mod tests {
cx.executor() cx.executor()
.advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100));
cx.run_until_parked(); cx.run_until_parked();
let editor = project_diff_editor.update(cx, |view, _| view.editor.clone());
project_diff_editor.update(cx, |project_diff_editor, cx| { assert_state_with_diff(
assert_eq!( &editor,
// TODO assert it better: extract added text (based on the background changes) and deleted text (based on the deleted blocks added) cx,
project_diff_editor.editor.read(cx).text(cx), indoc::indoc! {
format!("{change}{old_text}"), "
"Should have a new change shown in the beginning, and the old text shown as deleted text afterwards" - This is file_a
); + an edit after git addThis is file_aˇ",
}); },
);
} }
fn init_test(cx: &mut gpui::TestAppContext) { fn init_test(cx: &mut gpui::TestAppContext) {

View file

@ -265,12 +265,9 @@ fn show_hover(
let local_diagnostic = snapshot let local_diagnostic = snapshot
.buffer_snapshot .buffer_snapshot
.diagnostics_in_range(anchor..anchor, false) .diagnostics_in_range::<_, usize>(anchor..anchor)
// Find the entry with the most specific range // Find the entry with the most specific range
.min_by_key(|entry| { .min_by_key(|entry| entry.range.len());
let range = entry.range.to_offset(&snapshot.buffer_snapshot);
range.end - range.start
});
let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic { let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic {
let text = match local_diagnostic.diagnostic.source { let text = match local_diagnostic.diagnostic.source {
@ -279,6 +276,15 @@ fn show_hover(
} }
None => local_diagnostic.diagnostic.message.clone(), None => local_diagnostic.diagnostic.message.clone(),
}; };
let local_diagnostic = DiagnosticEntry {
diagnostic: local_diagnostic.diagnostic,
range: snapshot
.buffer_snapshot
.anchor_before(local_diagnostic.range.start)
..snapshot
.buffer_snapshot
.anchor_after(local_diagnostic.range.end),
};
let mut border_color: Option<Hsla> = None; let mut border_color: Option<Hsla> = None;
let mut background_color: Option<Hsla> = None; let mut background_color: Option<Hsla> = None;
@ -770,7 +776,7 @@ impl InfoPopover {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct DiagnosticPopover { pub struct DiagnosticPopover {
local_diagnostic: DiagnosticEntry<Anchor>, pub(crate) local_diagnostic: DiagnosticEntry<Anchor>,
parsed_content: Option<View<Markdown>>, parsed_content: Option<View<Markdown>>,
border_color: Option<Hsla>, border_color: Option<Hsla>,
background_color: Option<Hsla>, background_color: Option<Hsla>,
@ -823,10 +829,6 @@ impl DiagnosticPopover {
diagnostic_div.into_any_element() diagnostic_div.into_any_element()
} }
pub fn group_id(&self) -> usize {
self.local_diagnostic.diagnostic.group_id
}
} }
#[cfg(test)] #[cfg(test)]

File diff suppressed because it is too large Load diff

View file

@ -2,17 +2,16 @@ use std::{ops::Range, time::Duration};
use collections::HashSet; use collections::HashSet;
use gpui::{AppContext, Task}; use gpui::{AppContext, Task};
use language::{language_settings::language_settings, BufferRow}; use language::language_settings::language_settings;
use multi_buffer::{MultiBufferIndentGuide, MultiBufferRow}; use multi_buffer::{IndentGuide, MultiBufferRow};
use text::{BufferId, LineIndent, Point}; use text::{LineIndent, Point};
use ui::ViewContext; use ui::ViewContext;
use util::ResultExt; use util::ResultExt;
use crate::{DisplaySnapshot, Editor}; use crate::{DisplaySnapshot, Editor};
struct ActiveIndentedRange { struct ActiveIndentedRange {
buffer_id: BufferId, row_range: Range<MultiBufferRow>,
row_range: Range<BufferRow>,
indent: LineIndent, indent: LineIndent,
} }
@ -36,7 +35,7 @@ impl Editor {
visible_buffer_range: Range<MultiBufferRow>, visible_buffer_range: Range<MultiBufferRow>,
snapshot: &DisplaySnapshot, snapshot: &DisplaySnapshot,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) -> Option<Vec<MultiBufferIndentGuide>> { ) -> Option<Vec<IndentGuide>> {
let show_indent_guides = self.should_show_indent_guides().unwrap_or_else(|| { let show_indent_guides = self.should_show_indent_guides().unwrap_or_else(|| {
if let Some(buffer) = self.buffer().read(cx).as_singleton() { if let Some(buffer) = self.buffer().read(cx).as_singleton() {
language_settings( language_settings(
@ -66,7 +65,7 @@ impl Editor {
pub fn find_active_indent_guide_indices( pub fn find_active_indent_guide_indices(
&mut self, &mut self,
indent_guides: &[MultiBufferIndentGuide], indent_guides: &[IndentGuide],
snapshot: &DisplaySnapshot, snapshot: &DisplaySnapshot,
cx: &mut ViewContext<Editor>, cx: &mut ViewContext<Editor>,
) -> Option<HashSet<usize>> { ) -> Option<HashSet<usize>> {
@ -134,9 +133,7 @@ impl Editor {
.iter() .iter()
.enumerate() .enumerate()
.filter(|(_, indent_guide)| { .filter(|(_, indent_guide)| {
indent_guide.buffer_id == active_indent_range.buffer_id indent_guide.indent_level() == active_indent_range.indent.len(indent_guide.tab_size)
&& indent_guide.indent_level()
== active_indent_range.indent.len(indent_guide.tab_size)
}); });
let mut matches = HashSet::default(); let mut matches = HashSet::default();
@ -158,7 +155,7 @@ pub fn indent_guides_in_range(
ignore_disabled_for_language: bool, ignore_disabled_for_language: bool,
snapshot: &DisplaySnapshot, snapshot: &DisplaySnapshot,
cx: &AppContext, cx: &AppContext,
) -> Vec<MultiBufferIndentGuide> { ) -> Vec<IndentGuide> {
let start_anchor = snapshot let start_anchor = snapshot
.buffer_snapshot .buffer_snapshot
.anchor_before(Point::new(visible_buffer_range.start.0, 0)); .anchor_before(Point::new(visible_buffer_range.start.0, 0));
@ -169,14 +166,12 @@ pub fn indent_guides_in_range(
snapshot snapshot
.buffer_snapshot .buffer_snapshot
.indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx) .indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx)
.into_iter()
.filter(|indent_guide| { .filter(|indent_guide| {
if editor.buffer_folded(indent_guide.buffer_id, cx) { if editor.is_buffer_folded(indent_guide.buffer_id, cx) {
return false; return false;
} }
let start = let start = MultiBufferRow(indent_guide.start_row.0.saturating_sub(1));
MultiBufferRow(indent_guide.multibuffer_row_range.start.0.saturating_sub(1));
// Filter out indent guides that are inside a fold // Filter out indent guides that are inside a fold
// All indent guides that are starting "offscreen" have a start value of the first visible row minus one // All indent guides that are starting "offscreen" have a start value of the first visible row minus one
// Therefore checking if a line is folded at first visible row minus one causes the other indent guides that are not related to the fold to disappear as well // Therefore checking if a line is folded at first visible row minus one causes the other indent guides that are not related to the fold to disappear as well
@ -193,24 +188,11 @@ async fn resolve_indented_range(
snapshot: DisplaySnapshot, snapshot: DisplaySnapshot,
buffer_row: MultiBufferRow, buffer_row: MultiBufferRow,
) -> Option<ActiveIndentedRange> { ) -> Option<ActiveIndentedRange> {
let (buffer_row, buffer_snapshot, buffer_id) = snapshot
if let Some((_, buffer_id, snapshot)) = snapshot.buffer_snapshot.as_singleton() { .buffer_snapshot
(buffer_row.0, snapshot, buffer_id)
} else {
let (snapshot, point) = snapshot.buffer_snapshot.buffer_line_for_row(buffer_row)?;
let buffer_id = snapshot.remote_id();
(point.start.row, snapshot, buffer_id)
};
buffer_snapshot
.enclosing_indent(buffer_row) .enclosing_indent(buffer_row)
.await .await
.map(|(row_range, indent)| ActiveIndentedRange { .map(|(row_range, indent)| ActiveIndentedRange { row_range, indent })
row_range,
indent,
buffer_id,
})
} }
fn should_recalculate_indented_range( fn should_recalculate_indented_range(
@ -222,23 +204,23 @@ fn should_recalculate_indented_range(
if prev_row.0 == new_row.0 { if prev_row.0 == new_row.0 {
return false; return false;
} }
if let Some((_, _, snapshot)) = snapshot.buffer_snapshot.as_singleton() { if snapshot.buffer_snapshot.is_singleton() {
if !current_indent_range.row_range.contains(&new_row.0) { if !current_indent_range.row_range.contains(&new_row) {
return true; return true;
} }
let old_line_indent = snapshot.line_indent_for_row(prev_row.0); let old_line_indent = snapshot.buffer_snapshot.line_indent_for_row(prev_row);
let new_line_indent = snapshot.line_indent_for_row(new_row.0); let new_line_indent = snapshot.buffer_snapshot.line_indent_for_row(new_row);
if old_line_indent.is_line_empty() if old_line_indent.is_line_empty()
|| new_line_indent.is_line_empty() || new_line_indent.is_line_empty()
|| old_line_indent != new_line_indent || old_line_indent != new_line_indent
|| snapshot.max_point().row == new_row.0 || snapshot.buffer_snapshot.max_point().row == new_row.0
{ {
return true; return true;
} }
let next_line_indent = snapshot.line_indent_for_row(new_row.0 + 1); let next_line_indent = snapshot.buffer_snapshot.line_indent_for_row(new_row + 1);
next_line_indent.is_line_empty() || next_line_indent != old_line_indent next_line_indent.is_line_empty() || next_line_indent != old_line_indent
} else { } else {
true true

View file

@ -20,7 +20,6 @@ use language::{
SelectionGoal, SelectionGoal,
}; };
use lsp::DiagnosticSeverity; use lsp::DiagnosticSeverity;
use multi_buffer::AnchorRangeExt;
use project::{ use project::{
lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Project, lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Project,
ProjectItem as _, ProjectPath, ProjectItem as _, ProjectPath,
@ -528,6 +527,7 @@ fn deserialize_anchor(buffer: &MultiBufferSnapshot, anchor: proto::EditorAnchor)
excerpt_id, excerpt_id,
text_anchor: language::proto::deserialize_anchor(anchor.anchor?)?, text_anchor: language::proto::deserialize_anchor(anchor.anchor?)?,
buffer_id: buffer.buffer_id_for_excerpt(excerpt_id), buffer_id: buffer.buffer_id_for_excerpt(excerpt_id),
diff_base_anchor: None,
}) })
} }
@ -1435,59 +1435,34 @@ impl SearchableItem for Editor {
cx.background_executor().spawn(async move { cx.background_executor().spawn(async move {
let mut ranges = Vec::new(); let mut ranges = Vec::new();
if let Some((_, _, excerpt_buffer)) = buffer.as_singleton() { let search_within_ranges = if search_within_ranges.is_empty() {
let search_within_ranges = if search_within_ranges.is_empty() { vec![buffer.anchor_before(0)..buffer.anchor_after(buffer.len())]
vec![None]
} else {
search_within_ranges
.into_iter()
.map(|range| Some(range.to_offset(&buffer)))
.collect::<Vec<_>>()
};
for range in search_within_ranges {
let buffer = &buffer;
ranges.extend(
query
.search(excerpt_buffer, range.clone())
.await
.into_iter()
.map(|matched_range| {
let offset = range.clone().map(|r| r.start).unwrap_or(0);
buffer.anchor_after(matched_range.start + offset)
..buffer.anchor_before(matched_range.end + offset)
}),
);
}
} else { } else {
let search_within_ranges = if search_within_ranges.is_empty() { search_within_ranges
vec![buffer.anchor_before(0)..buffer.anchor_after(buffer.len())]
} else {
search_within_ranges
};
for (excerpt_id, search_buffer, search_range) in
buffer.excerpts_in_ranges(search_within_ranges)
{
if !search_range.is_empty() {
ranges.extend(
query
.search(search_buffer, Some(search_range.clone()))
.await
.into_iter()
.map(|match_range| {
let start = search_buffer
.anchor_after(search_range.start + match_range.start);
let end = search_buffer
.anchor_before(search_range.start + match_range.end);
buffer.anchor_in_excerpt(excerpt_id, start).unwrap()
..buffer.anchor_in_excerpt(excerpt_id, end).unwrap()
}),
);
}
}
}; };
for (search_buffer, search_range, excerpt_id) in
buffer.ranges_to_buffer_ranges(search_within_ranges.into_iter())
{
ranges.extend(
query
.search(search_buffer, Some(search_range.clone()))
.await
.into_iter()
.map(|match_range| {
let start =
search_buffer.anchor_after(search_range.start + match_range.start);
let end =
search_buffer.anchor_before(search_range.start + match_range.end);
Anchor::range_in_buffer(
excerpt_id,
search_buffer.remote_id(),
start..end,
)
}),
);
}
ranges ranges
}) })
} }

View file

@ -61,7 +61,7 @@ impl ProposedChangesEditor {
let mut this = Self { let mut this = Self {
editor: cx.new_view(|cx| { editor: cx.new_view(|cx| {
let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, true, cx); let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, true, cx);
editor.set_expand_all_diff_hunks(); editor.set_expand_all_diff_hunks(cx);
editor.set_completion_provider(None); editor.set_completion_provider(None);
editor.clear_code_action_providers(); editor.clear_code_action_providers();
editor.set_semantics_provider( editor.set_semantics_provider(
@ -104,16 +104,10 @@ impl ProposedChangesEditor {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let base_buffer = buffer.base_buffer()?; let base_buffer = buffer.base_buffer()?;
let buffer = buffer.text_snapshot(); let buffer = buffer.text_snapshot();
let change_set = this.editor.update(cx, |editor, _| { let change_set = this
Some( .multibuffer
editor .read(cx)
.diff_map .change_set_for(buffer.remote_id())?;
.diff_bases
.get(&buffer.remote_id())?
.change_set
.clone(),
)
})?;
Some(change_set.update(cx, |change_set, cx| { Some(change_set.update(cx, |change_set, cx| {
change_set.set_base_text( change_set.set_base_text(
base_buffer.read(cx).text(), base_buffer.read(cx).text(),
@ -193,7 +187,7 @@ impl ProposedChangesEditor {
} else { } else {
branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx)); branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
new_change_sets.push(cx.new_model(|cx| { new_change_sets.push(cx.new_model(|cx| {
let mut change_set = BufferChangeSet::new(branch_buffer.read(cx)); let mut change_set = BufferChangeSet::new(&branch_buffer, cx);
let _ = change_set.set_base_text( let _ = change_set.set_base_text(
location.buffer.read(cx).text(), location.buffer.read(cx).text(),
branch_buffer.read(cx).text_snapshot(), branch_buffer.read(cx).text_snapshot(),
@ -223,9 +217,11 @@ impl ProposedChangesEditor {
self.buffer_entries = buffer_entries; self.buffer_entries = buffer_entries;
self.editor.update(cx, |editor, cx| { self.editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |selections| selections.refresh()); editor.change_selections(None, cx, |selections| selections.refresh());
for change_set in new_change_sets { editor.buffer.update(cx, |buffer, cx| {
editor.diff_map.add_change_set(change_set, cx) for change_set in new_change_sets {
} buffer.add_change_set(change_set, cx)
}
})
}); });
} }

View file

@ -323,8 +323,7 @@ impl SelectionsCollection {
self.all(cx).last().unwrap().clone() self.all(cx).last().unwrap().clone()
} }
#[cfg(any(test, feature = "test-support"))] pub fn ranges<D: TextDimension + Ord + Sub<D, Output = D>>(
pub fn ranges<D: TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug>(
&self, &self,
cx: &mut AppContext, cx: &mut AppContext,
) -> Vec<Range<D>> { ) -> Vec<Range<D>> {
@ -332,9 +331,9 @@ impl SelectionsCollection {
.iter() .iter()
.map(|s| { .map(|s| {
if s.reversed { if s.reversed {
s.end.clone()..s.start.clone() s.end..s.start
} else { } else {
s.start.clone()..s.end.clone() s.start..s.end
} }
}) })
.collect() .collect()
@ -921,7 +920,7 @@ pub(crate) fn resolve_selections<'a, D, I>(
map: &'a DisplaySnapshot, map: &'a DisplaySnapshot,
) -> impl 'a + Iterator<Item = Selection<D>> ) -> impl 'a + Iterator<Item = Selection<D>>
where where
D: TextDimension + Clone + Ord + Sub<D, Output = D>, D: TextDimension + Ord + Sub<D, Output = D>,
I: 'a + IntoIterator<Item = &'a Selection<Anchor>>, I: 'a + IntoIterator<Item = &'a Selection<Anchor>>,
{ {
let (to_convert, selections) = resolve_selections_display(selections, map).tee(); let (to_convert, selections) = resolve_selections_display(selections, map).tee();

View file

@ -5,6 +5,7 @@ use crate::actions::ShowSignatureHelp;
use crate::{Editor, EditorSettings, ToggleAutoSignatureHelp}; use crate::{Editor, EditorSettings, ToggleAutoSignatureHelp};
use gpui::{AppContext, ViewContext}; use gpui::{AppContext, ViewContext};
use language::markdown::parse_markdown; use language::markdown::parse_markdown;
use language::BufferSnapshot;
use multi_buffer::{Anchor, ToOffset}; use multi_buffer::{Anchor, ToOffset};
use settings::Settings; use settings::Settings;
use std::ops::Range; use std::ops::Range;
@ -94,13 +95,14 @@ impl Editor {
(a, b) if b <= buffer_snapshot.len() => a - 1..b, (a, b) if b <= buffer_snapshot.len() => a - 1..b,
(a, b) => a - 1..b - 1, (a, b) => a - 1..b - 1,
}; };
let not_quote_like_brackets = |start: Range<usize>, end: Range<usize>| { let not_quote_like_brackets =
let text = buffer_snapshot.text(); |buffer: &BufferSnapshot, start: Range<usize>, end: Range<usize>| {
let (text_start, text_end) = (text.get(start), text.get(end)); let text_start = buffer.text_for_range(start).collect::<String>();
QUOTE_PAIRS let text_end = buffer.text_for_range(end).collect::<String>();
.into_iter() QUOTE_PAIRS
.all(|(start, end)| text_start != Some(start) && text_end != Some(end)) .into_iter()
}; .all(|(start, end)| text_start != start && text_end != end)
};
let previous_position = old_cursor_position.to_offset(&buffer_snapshot); let previous_position = old_cursor_position.to_offset(&buffer_snapshot);
let previous_brackets_range = bracket_range(previous_position); let previous_brackets_range = bracket_range(previous_position);

View file

@ -15,7 +15,7 @@ fn task_context_with_editor(
}; };
let (selection, buffer, editor_snapshot) = { let (selection, buffer, editor_snapshot) = {
let selection = editor.selections.newest_adjusted(cx); let selection = editor.selections.newest_adjusted(cx);
let Some((buffer, _, _)) = editor let Some((buffer, _)) = editor
.buffer() .buffer()
.read(cx) .read(cx)
.point_to_buffer_offset(selection.start, cx) .point_to_buffer_offset(selection.start, cx)

View file

@ -67,6 +67,13 @@ pub(crate) fn rust_lang() -> Arc<Language> {
("<" @open ">" @close) ("<" @open ">" @close)
("\"" @open "\"" @close) ("\"" @open "\"" @close)
(closure_parameters "|" @open "|" @close)"#})), (closure_parameters "|" @open "|" @close)"#})),
text_objects: Some(Cow::from(indoc! {r#"
(function_item
body: (_
"{"
(_)* @function.inside
"}" )) @function.around
"#})),
..Default::default() ..Default::default()
}) })
.expect("Could not parse queries"); .expect("Could not parse queries");

View file

@ -1,6 +1,6 @@
use crate::{ use crate::{
display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DiffRowHighlight, DisplayPoint, display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer,
Editor, MultiBuffer, RowExt, RowExt,
}; };
use collections::BTreeMap; use collections::BTreeMap;
use futures::Future; use futures::Future;
@ -11,7 +11,7 @@ use gpui::{
}; };
use itertools::Itertools; use itertools::Itertools;
use language::{Buffer, BufferSnapshot, LanguageRegistry}; use language::{Buffer, BufferSnapshot, LanguageRegistry};
use multi_buffer::{ExcerptRange, ToPoint}; use multi_buffer::{ExcerptRange, MultiBufferRow};
use parking_lot::RwLock; use parking_lot::RwLock;
use project::{FakeFs, Project}; use project::{FakeFs, Project};
use std::{ use std::{
@ -333,85 +333,8 @@ impl EditorTestContext {
/// ///
/// Diff hunks are indicated by lines starting with `+` and `-`. /// Diff hunks are indicated by lines starting with `+` and `-`.
#[track_caller] #[track_caller]
pub fn assert_state_with_diff(&mut self, expected_diff: String) { pub fn assert_state_with_diff(&mut self, expected_diff_text: String) {
let has_diff_markers = expected_diff assert_state_with_diff(&self.editor, &mut self.cx, &expected_diff_text);
.lines()
.any(|line| line.starts_with("+") || line.starts_with("-"));
let expected_diff_text = expected_diff
.split('\n')
.map(|line| {
let trimmed = line.trim();
if trimmed.is_empty() {
String::new()
} else if has_diff_markers {
line.to_string()
} else {
format!(" {line}")
}
})
.join("\n");
let actual_selections = self.editor_selections();
let actual_marked_text =
generate_marked_text(&self.buffer_text(), &actual_selections, true);
// Read the actual diff from the editor's row highlights and block
// decorations.
let actual_diff = self.editor.update(&mut self.cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let insertions = editor
.highlighted_rows::<DiffRowHighlight>()
.map(|(range, _)| {
let start = range.start.to_point(&snapshot.buffer_snapshot);
let end = range.end.to_point(&snapshot.buffer_snapshot);
start.row..end.row
})
.collect::<Vec<_>>();
let deletions = editor
.diff_map
.hunks
.iter()
.filter_map(|hunk| {
if hunk.blocks.is_empty() {
return None;
}
let row = hunk
.hunk_range
.start
.to_point(&snapshot.buffer_snapshot)
.row;
let (_, buffer, _) = editor
.buffer()
.read(cx)
.excerpt_containing(hunk.hunk_range.start, cx)
.expect("no excerpt for expanded buffer's hunk start");
let buffer_id = buffer.read(cx).remote_id();
let change_set = &editor
.diff_map
.diff_bases
.get(&buffer_id)
.expect("should have a diff base for expanded hunk")
.change_set;
let deleted_text = change_set
.read(cx)
.base_text
.as_ref()
.expect("no base text for expanded hunk")
.read(cx)
.as_rope()
.slice(hunk.diff_base_byte_range.clone())
.to_string();
if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status {
Some((row, deleted_text))
} else {
None
}
})
.collect::<Vec<_>>();
format_diff(actual_marked_text, deletions, insertions)
});
pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state");
} }
/// Make an assertion about the editor's text and the ranges and directions /// Make an assertion about the editor's text and the ranges and directions
@ -504,44 +427,49 @@ impl EditorTestContext {
} }
} }
fn format_diff( #[track_caller]
text: String, pub fn assert_state_with_diff(
actual_deletions: Vec<(u32, String)>, editor: &View<Editor>,
actual_insertions: Vec<Range<u32>>, cx: &mut VisualTestContext,
) -> String { expected_diff_text: &str,
let mut diff = String::new(); ) {
for (row, line) in text.split('\n').enumerate() { let (snapshot, selections) = editor.update(cx, |editor, cx| {
let row = row as u32; (
if row > 0 { editor.snapshot(cx).buffer_snapshot.clone(),
diff.push('\n'); editor.selections.ranges::<usize>(cx),
} )
if let Some(text) = actual_deletions });
.iter()
.find_map(|(deletion_row, deleted_text)| { let actual_marked_text = generate_marked_text(&snapshot.text(), &selections, true);
if *deletion_row == row {
Some(deleted_text) // Read the actual diff.
} else { let line_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
None let has_diff = line_infos.iter().any(|info| info.diff_status.is_some());
let actual_diff = actual_marked_text
.split('\n')
.zip(line_infos)
.map(|(line, info)| {
let mut marker = match info.diff_status {
Some(DiffHunkStatus::Added) => "+ ",
Some(DiffHunkStatus::Removed) => "- ",
Some(DiffHunkStatus::Modified) => unreachable!(),
None => {
if has_diff {
" "
} else {
""
}
} }
}) };
{ if line.is_empty() {
for line in text.lines() { marker = marker.trim();
diff.push('-');
if !line.is_empty() {
diff.push(' ');
diff.push_str(line);
}
diff.push('\n');
} }
} format!("{marker}{line}")
let marker = if actual_insertions.iter().any(|range| range.contains(&row)) { })
"+ " .collect::<Vec<_>>()
} else { .join("\n");
" "
}; pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state");
diff.push_str(format!("{marker}{line}").trim_end());
}
diff
} }
impl Deref for EditorTestContext { impl Deref for EditorTestContext {

View file

@ -9,7 +9,7 @@ use futures::future::join_all;
pub use open_path_prompt::OpenPathDelegate; pub use open_path_prompt::OpenPathDelegate;
use collections::HashMap; use collections::HashMap;
use editor::{scroll::Autoscroll, Bias, Editor}; use editor::Editor;
use file_finder_settings::{FileFinderSettings, FileFinderWidth}; use file_finder_settings::{FileFinderSettings, FileFinderWidth};
use file_icons::FileIcons; use file_icons::FileIcons;
use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
@ -1162,13 +1162,7 @@ impl PickerDelegate for FileFinderDelegate {
active_editor active_editor
.downgrade() .downgrade()
.update(&mut cx, |editor, cx| { .update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot; editor.go_to_singleton_buffer_point(Point::new(row, col), cx);
let point = snapshot
.buffer_snapshot
.clip_point(Point::new(row, col), Bias::Left);
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([point..point])
});
}) })
.log_err(); .log_err();
} }

View file

@ -74,7 +74,7 @@ impl BufferDiff {
} }
} }
pub async fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self { pub fn build(diff_base: &str, buffer: &text::BufferSnapshot) -> Self {
let mut tree = SumTree::new(buffer); let mut tree = SumTree::new(buffer);
let buffer_text = buffer.as_rope().to_string(); let buffer_text = buffer.as_rope().to_string();
@ -119,32 +119,38 @@ impl BufferDiff {
!before_start && !after_end !before_start && !after_end
}); });
let anchor_iter = std::iter::from_fn(move || { let anchor_iter = iter::from_fn(move || {
cursor.next(buffer); cursor.next(buffer);
cursor.item() cursor.item()
}) })
.flat_map(move |hunk| { .flat_map(move |hunk| {
[ [
(&hunk.buffer_range.start, hunk.diff_base_byte_range.start), (
(&hunk.buffer_range.end, hunk.diff_base_byte_range.end), &hunk.buffer_range.start,
(hunk.buffer_range.start, hunk.diff_base_byte_range.start),
),
(
&hunk.buffer_range.end,
(hunk.buffer_range.end, hunk.diff_base_byte_range.end),
),
] ]
.into_iter()
}); });
let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter); let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
iter::from_fn(move || { iter::from_fn(move || {
let (start_point, start_base) = summaries.next()?; let (start_point, (start_anchor, start_base)) = summaries.next()?;
let (mut end_point, end_base) = summaries.next()?; let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?;
if end_point.column > 0 { if end_point.column > 0 {
end_point.row += 1; end_point.row += 1;
end_point.column = 0; end_point.column = 0;
end_anchor = buffer.anchor_before(end_point);
} }
Some(DiffHunk { Some(DiffHunk {
row_range: start_point.row..end_point.row, row_range: start_point.row..end_point.row,
diff_base_byte_range: start_base..end_base, diff_base_byte_range: start_base..end_base,
buffer_range: buffer.anchor_before(start_point)..buffer.anchor_after(end_point), buffer_range: start_anchor..end_anchor,
}) })
}) })
} }
@ -162,7 +168,7 @@ impl BufferDiff {
!before_start && !after_end !before_start && !after_end
}); });
std::iter::from_fn(move || { iter::from_fn(move || {
cursor.prev(buffer); cursor.prev(buffer);
let hunk = cursor.item()?; let hunk = cursor.item()?;
@ -186,8 +192,8 @@ impl BufferDiff {
self.tree = SumTree::new(buffer); self.tree = SumTree::new(buffer);
} }
pub async fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) { pub fn update(&mut self, diff_base: &Rope, buffer: &text::BufferSnapshot) {
*self = Self::build(&diff_base.to_string(), buffer).await; *self = Self::build(&diff_base.to_string(), buffer);
} }
#[cfg(test)] #[cfg(test)]
@ -346,7 +352,7 @@ mod tests {
let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
let mut diff = BufferDiff::new(&buffer); let mut diff = BufferDiff::new(&buffer);
smol::block_on(diff.update(&diff_base_rope, &buffer)); diff.update(&diff_base_rope, &buffer);
assert_hunks( assert_hunks(
diff.hunks(&buffer), diff.hunks(&buffer),
&buffer, &buffer,
@ -355,7 +361,7 @@ mod tests {
); );
buffer.edit([(0..0, "point five\n")]); buffer.edit([(0..0, "point five\n")]);
smol::block_on(diff.update(&diff_base_rope, &buffer)); diff.update(&diff_base_rope, &buffer);
assert_hunks( assert_hunks(
diff.hunks(&buffer), diff.hunks(&buffer),
&buffer, &buffer,
@ -407,7 +413,7 @@ mod tests {
let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text); let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text);
let mut diff = BufferDiff::new(&buffer); let mut diff = BufferDiff::new(&buffer);
smol::block_on(diff.update(&diff_base_rope, &buffer)); diff.update(&diff_base_rope, &buffer);
assert_eq!(diff.hunks(&buffer).count(), 8); assert_eq!(diff.hunks(&buffer).count(), 8);
assert_hunks( assert_hunks(

View file

@ -16,6 +16,7 @@ doctest = false
anyhow.workspace = true anyhow.workspace = true
editor.workspace = true editor.workspace = true
gpui.workspace = true gpui.workspace = true
language.workspace = true
menu.workspace = true menu.workspace = true
schemars.workspace = true schemars.workspace = true
serde.workspace = true serde.workspace = true

View file

@ -20,7 +20,7 @@ pub(crate) struct SelectionStats {
} }
pub struct CursorPosition { pub struct CursorPosition {
position: Option<Point>, position: Option<(Point, bool)>,
selected_count: SelectionStats, selected_count: SelectionStats,
context: Option<FocusHandle>, context: Option<FocusHandle>,
workspace: WeakView<Workspace>, workspace: WeakView<Workspace>,
@ -97,8 +97,11 @@ impl CursorPosition {
} }
} }
} }
cursor_position.position = cursor_position.position = last_selection.and_then(|s| {
last_selection.map(|s| s.head().to_point(&buffer)); buffer
.point_to_buffer_point(s.head().to_point(&buffer))
.map(|(_, point, is_main_buffer)| (point, is_main_buffer))
});
cursor_position.context = Some(editor.focus_handle(cx)); cursor_position.context = Some(editor.focus_handle(cx));
} }
} }
@ -163,9 +166,10 @@ impl CursorPosition {
impl Render for CursorPosition { impl Render for CursorPosition {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement { fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
div().when_some(self.position, |el, position| { div().when_some(self.position, |el, (position, is_main_buffer)| {
let mut text = format!( let mut text = format!(
"{}{FILE_ROW_COLUMN_DELIMITER}{}", "{}{}{FILE_ROW_COLUMN_DELIMITER}{}",
if is_main_buffer { "" } else { "(deleted) " },
position.row + 1, position.row + 1,
position.column + 1 position.column + 1
); );
@ -183,8 +187,12 @@ impl Render for CursorPosition {
.active_item(cx) .active_item(cx)
.and_then(|item| item.act_as::<Editor>(cx)) .and_then(|item| item.act_as::<Editor>(cx))
{ {
workspace if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx)
.toggle_modal(cx, |cx| crate::GoToLine::new(editor, cx)) {
workspace.toggle_modal(cx, |cx| {
crate::GoToLine::new(editor, buffer, cx)
})
}
} }
}); });
} }

View file

@ -1,13 +1,15 @@
pub mod cursor_position; pub mod cursor_position;
use cursor_position::LineIndicatorFormat; use cursor_position::LineIndicatorFormat;
use editor::{scroll::Autoscroll, Editor}; use editor::{scroll::Autoscroll, Anchor, Editor, MultiBuffer, ToPoint};
use gpui::{ use gpui::{
div, prelude::*, AnyWindowHandle, AppContext, DismissEvent, EventEmitter, FocusHandle, div, prelude::*, AnyWindowHandle, AppContext, DismissEvent, EventEmitter, FocusHandle,
FocusableView, Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, FocusableView, Model, Render, SharedString, Styled, Subscription, View, ViewContext,
VisualContext,
}; };
use language::Buffer;
use settings::Settings; use settings::Settings;
use text::{Bias, Point}; use text::Point;
use theme::ActiveTheme; use theme::ActiveTheme;
use ui::prelude::*; use ui::prelude::*;
use util::paths::FILE_ROW_COLUMN_DELIMITER; use util::paths::FILE_ROW_COLUMN_DELIMITER;
@ -21,6 +23,7 @@ pub fn init(cx: &mut AppContext) {
pub struct GoToLine { pub struct GoToLine {
line_editor: View<Editor>, line_editor: View<Editor>,
active_editor: View<Editor>, active_editor: View<Editor>,
active_buffer: Model<Buffer>,
current_text: SharedString, current_text: SharedString,
prev_scroll_position: Option<gpui::Point<f32>>, prev_scroll_position: Option<gpui::Point<f32>>,
_subscriptions: Vec<Subscription>, _subscriptions: Vec<Subscription>,
@ -42,22 +45,43 @@ impl GoToLine {
let handle = cx.view().downgrade(); let handle = cx.view().downgrade();
editor editor
.register_action(move |_: &editor::actions::ToggleGoToLine, cx| { .register_action(move |_: &editor::actions::ToggleGoToLine, cx| {
let Some(editor) = handle.upgrade() else { let Some(editor_handle) = handle.upgrade() else {
return; return;
}; };
let Some(workspace) = editor.read(cx).workspace() else { let Some(workspace) = editor_handle.read(cx).workspace() else {
return;
};
let editor = editor_handle.read(cx);
let Some((_, buffer, _)) = editor.active_excerpt(cx) else {
return; return;
}; };
workspace.update(cx, |workspace, cx| { workspace.update(cx, |workspace, cx| {
workspace.toggle_modal(cx, move |cx| GoToLine::new(editor, cx)); workspace.toggle_modal(cx, move |cx| GoToLine::new(editor_handle, buffer, cx));
}) })
}) })
.detach(); .detach();
} }
pub fn new(active_editor: View<Editor>, cx: &mut ViewContext<Self>) -> Self { pub fn new(
let cursor = active_editor: View<Editor>,
active_editor.update(cx, |editor, cx| editor.selections.last::<Point>(cx).head()); active_buffer: Model<Buffer>,
cx: &mut ViewContext<Self>,
) -> Self {
let (cursor, last_line, scroll_position) = active_editor.update(cx, |editor, cx| {
let cursor = editor.selections.last::<Point>(cx).head();
let snapshot = active_buffer.read(cx).snapshot();
let last_line = editor
.buffer()
.read(cx)
.excerpts_for_buffer(&active_buffer, cx)
.into_iter()
.map(move |(_, range)| text::ToPoint::to_point(&range.context.end, &snapshot).row)
.max()
.unwrap_or(0);
(cursor, last_line, editor.scroll_position(cx))
});
let line = cursor.row + 1; let line = cursor.row + 1;
let column = cursor.column + 1; let column = cursor.column + 1;
@ -69,15 +93,17 @@ impl GoToLine {
}); });
let line_editor_change = cx.subscribe(&line_editor, Self::on_line_editor_event); let line_editor_change = cx.subscribe(&line_editor, Self::on_line_editor_event);
let editor = active_editor.read(cx); let current_text = format!(
let last_line = editor.buffer().read(cx).snapshot(cx).max_point().row; "Current Line: {} of {} (column {})",
let scroll_position = active_editor.update(cx, |editor, cx| editor.scroll_position(cx)); line,
last_line + 1,
let current_text = format!("{} of {} (column {})", line, last_line + 1, column); column
);
Self { Self {
line_editor, line_editor,
active_editor, active_editor,
active_buffer,
current_text: current_text.into(), current_text: current_text.into(),
prev_scroll_position: Some(scroll_position), prev_scroll_position: Some(scroll_position),
_subscriptions: vec![line_editor_change, cx.on_release(Self::release)], _subscriptions: vec![line_editor_change, cx.on_release(Self::release)],
@ -113,35 +139,40 @@ impl GoToLine {
} }
fn highlight_current_line(&mut self, cx: &mut ViewContext<Self>) { fn highlight_current_line(&mut self, cx: &mut ViewContext<Self>) {
if let Some(point) = self.point_from_query(cx) { self.active_editor.update(cx, |editor, cx| {
self.active_editor.update(cx, |active_editor, cx| { editor.clear_row_highlights::<GoToLineRowHighlights>();
let snapshot = active_editor.snapshot(cx).display_snapshot; let multibuffer = editor.buffer().read(cx);
let start = snapshot.buffer_snapshot.clip_point(point, Bias::Left); let snapshot = multibuffer.snapshot(cx);
let end = start + Point::new(1, 0); let Some(start) = self.anchor_from_query(&multibuffer, cx) else {
let start = snapshot.buffer_snapshot.anchor_before(start); return;
let end = snapshot.buffer_snapshot.anchor_after(end); };
active_editor.clear_row_highlights::<GoToLineRowHighlights>(); let start_point = start.to_point(&snapshot);
active_editor.highlight_rows::<GoToLineRowHighlights>( let end_point = start_point + Point::new(1, 0);
start..end, let end = snapshot.anchor_after(end_point);
cx.theme().colors().editor_highlighted_line_background, editor.highlight_rows::<GoToLineRowHighlights>(
true, start..end,
cx, cx.theme().colors().editor_highlighted_line_background,
); true,
active_editor.request_autoscroll(Autoscroll::center(), cx); cx,
}); );
cx.notify(); editor.request_autoscroll(Autoscroll::center(), cx);
} });
cx.notify();
} }
fn point_from_query(&self, cx: &ViewContext<Self>) -> Option<Point> { fn anchor_from_query(
let (row, column) = self.line_column_from_query(cx); &self,
Some(Point::new( multibuffer: &MultiBuffer,
row?.saturating_sub(1), cx: &ViewContext<Editor>,
column.unwrap_or(0).saturating_sub(1), ) -> Option<Anchor> {
)) let (Some(row), column) = self.line_column_from_query(cx) else {
return None;
};
let point = Point::new(row.saturating_sub(1), column.unwrap_or(0).saturating_sub(1));
multibuffer.buffer_point_to_anchor(&self.active_buffer, point, cx)
} }
fn line_column_from_query(&self, cx: &ViewContext<Self>) -> (Option<u32>, Option<u32>) { fn line_column_from_query(&self, cx: &AppContext) -> (Option<u32>, Option<u32>) {
let input = self.line_editor.read(cx).text(cx); let input = self.line_editor.read(cx).text(cx);
let mut components = input let mut components = input
.splitn(2, FILE_ROW_COLUMN_DELIMITER) .splitn(2, FILE_ROW_COLUMN_DELIMITER)
@ -157,18 +188,18 @@ impl GoToLine {
} }
fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) { fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
if let Some(point) = self.point_from_query(cx) { self.active_editor.update(cx, |editor, cx| {
self.active_editor.update(cx, |editor, cx| { let multibuffer = editor.buffer().read(cx);
let snapshot = editor.snapshot(cx).display_snapshot; let Some(start) = self.anchor_from_query(&multibuffer, cx) else {
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left); return;
editor.change_selections(Some(Autoscroll::center()), cx, |s| { };
s.select_ranges([point..point]) editor.change_selections(Some(Autoscroll::center()), cx, |s| {
}); s.select_anchor_ranges([start..start])
editor.focus(cx);
cx.notify();
}); });
self.prev_scroll_position.take(); editor.focus(cx);
} cx.notify()
});
self.prev_scroll_position.take();
cx.emit(DismissEvent); cx.emit(DismissEvent);
} }
@ -205,7 +236,6 @@ impl Render for GoToLine {
.px_2() .px_2()
.py_1() .py_1()
.gap_1() .gap_1()
.child(Label::new("Current Line:").color(Color::Muted))
.child(Label::new(help_text).color(Color::Muted)), .child(Label::new(help_text).color(Color::Muted)),
) )
} }

View file

@ -6,7 +6,7 @@ pub use crate::{
}; };
use crate::{ use crate::{
diagnostic_set::{DiagnosticEntry, DiagnosticGroup}, diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
language_settings::{language_settings, IndentGuideSettings, LanguageSettings}, language_settings::{language_settings, LanguageSettings},
markdown::parse_markdown, markdown::parse_markdown,
outline::OutlineItem, outline::OutlineItem,
syntax_map::{ syntax_map::{
@ -144,7 +144,7 @@ struct BufferBranchState {
/// An immutable, cheaply cloneable representation of a fixed /// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer. /// state of a buffer.
pub struct BufferSnapshot { pub struct BufferSnapshot {
text: text::BufferSnapshot, pub text: text::BufferSnapshot,
pub(crate) syntax: SyntaxSnapshot, pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>, file: Option<Arc<dyn File>>,
diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>, diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
@ -587,22 +587,6 @@ pub struct Runnable {
pub buffer: BufferId, pub buffer: BufferId,
} }
#[derive(Clone, Debug, PartialEq)]
pub struct IndentGuide {
pub buffer_id: BufferId,
pub start_row: BufferRow,
pub end_row: BufferRow,
pub depth: u32,
pub tab_size: u32,
pub settings: IndentGuideSettings,
}
impl IndentGuide {
pub fn indent_level(&self) -> u32 {
self.depth * self.tab_size
}
}
#[derive(Clone)] #[derive(Clone)]
pub struct EditPreview { pub struct EditPreview {
applied_edits_snapshot: text::BufferSnapshot, applied_edits_snapshot: text::BufferSnapshot,
@ -937,6 +921,36 @@ impl Buffer {
} }
} }
pub fn build_snapshot(
text: Rope,
language: Option<Arc<Language>>,
language_registry: Option<Arc<LanguageRegistry>>,
cx: &mut AppContext,
) -> impl Future<Output = BufferSnapshot> {
let entity_id = cx.reserve_model::<Self>().entity_id();
let buffer_id = entity_id.as_non_zero_u64().into();
async move {
let text =
TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
let mut syntax = SyntaxMap::new(&text).snapshot();
if let Some(language) = language.clone() {
let text = text.clone();
let language = language.clone();
let language_registry = language_registry.clone();
syntax.reparse(&text, language_registry, language);
}
BufferSnapshot {
text,
syntax,
file: None,
diagnostics: Default::default(),
remote_selections: Default::default(),
language,
non_text_state_update_count: 0,
}
}
}
/// Retrieve a snapshot of the buffer's current state. This is computationally /// Retrieve a snapshot of the buffer's current state. This is computationally
/// cheap, and allows reading from the buffer on a background thread. /// cheap, and allows reading from the buffer on a background thread.
pub fn snapshot(&self) -> BufferSnapshot { pub fn snapshot(&self) -> BufferSnapshot {
@ -2633,7 +2647,8 @@ impl Buffer {
last_end = Some(range.end); last_end = Some(range.end);
let new_text_len = rng.gen_range(0..10); let new_text_len = rng.gen_range(0..10);
let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
new_text = new_text.to_uppercase();
edits.push((range, new_text)); edits.push((range, new_text));
} }
@ -3730,10 +3745,8 @@ impl BufferSnapshot {
pub fn runnable_ranges( pub fn runnable_ranges(
&self, &self,
range: Range<Anchor>, offset_range: Range<usize>,
) -> impl Iterator<Item = RunnableRange> + '_ { ) -> impl Iterator<Item = RunnableRange> + '_ {
let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| { let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
grammar.runnable_config.as_ref().map(|config| &config.query) grammar.runnable_config.as_ref().map(|config| &config.query)
}); });
@ -3833,245 +3846,6 @@ impl BufferSnapshot {
}) })
} }
pub fn indent_guides_in_range(
&self,
range: Range<Anchor>,
ignore_disabled_for_language: bool,
cx: &AppContext,
) -> Vec<IndentGuide> {
let language_settings =
language_settings(self.language().map(|l| l.name()), self.file.as_ref(), cx);
let settings = language_settings.indent_guides;
if !ignore_disabled_for_language && !settings.enabled {
return Vec::new();
}
let tab_size = language_settings.tab_size.get() as u32;
let start_row = range.start.to_point(self).row;
let end_row = range.end.to_point(self).row;
let row_range = start_row..end_row + 1;
let mut row_indents = self.line_indents_in_row_range(row_range.clone());
let mut result_vec = Vec::new();
let mut indent_stack = SmallVec::<[IndentGuide; 8]>::new();
while let Some((first_row, mut line_indent)) = row_indents.next() {
let current_depth = indent_stack.len() as u32;
// When encountering empty, continue until found useful line indent
// then add to the indent stack with the depth found
let mut found_indent = false;
let mut last_row = first_row;
if line_indent.is_line_empty() {
let mut trailing_row = end_row;
while !found_indent {
let (target_row, new_line_indent) =
if let Some(display_row) = row_indents.next() {
display_row
} else {
// This means we reached the end of the given range and found empty lines at the end.
// We need to traverse further until we find a non-empty line to know if we need to add
// an indent guide for the last visible indent.
trailing_row += 1;
const TRAILING_ROW_SEARCH_LIMIT: u32 = 25;
if trailing_row > self.max_point().row
|| trailing_row > end_row + TRAILING_ROW_SEARCH_LIMIT
{
break;
}
let new_line_indent = self.line_indent_for_row(trailing_row);
(trailing_row, new_line_indent)
};
if new_line_indent.is_line_empty() {
continue;
}
last_row = target_row.min(end_row);
line_indent = new_line_indent;
found_indent = true;
break;
}
} else {
found_indent = true
}
let depth = if found_indent {
line_indent.len(tab_size) / tab_size
+ ((line_indent.len(tab_size) % tab_size) > 0) as u32
} else {
current_depth
};
match depth.cmp(&current_depth) {
Ordering::Less => {
for _ in 0..(current_depth - depth) {
let mut indent = indent_stack.pop().unwrap();
if last_row != first_row {
// In this case, we landed on an empty row, had to seek forward,
// and discovered that the indent we where on is ending.
// This means that the last display row must
// be on line that ends this indent range, so we
// should display the range up to the first non-empty line
indent.end_row = first_row.saturating_sub(1);
}
result_vec.push(indent)
}
}
Ordering::Greater => {
for next_depth in current_depth..depth {
indent_stack.push(IndentGuide {
buffer_id: self.remote_id(),
start_row: first_row,
end_row: last_row,
depth: next_depth,
tab_size,
settings,
});
}
}
_ => {}
}
for indent in indent_stack.iter_mut() {
indent.end_row = last_row;
}
}
result_vec.extend(indent_stack);
result_vec
}
/// Finds the row range of the indented "block" that encloses `buffer_row`,
/// together with the indentation of that block.
///
/// Returns `None` if `buffer_row` is at or past the last row, or if no
/// less-indented line bounding the block is found within the search limits.
/// Async so the row scans can yield to the executor periodically on large
/// buffers.
pub async fn enclosing_indent(
&self,
mut buffer_row: BufferRow,
) -> Option<(Range<BufferRow>, LineIndent)> {
let max_row = self.max_point().row;
if buffer_row >= max_row {
return None;
}
let mut target_indent = self.line_indent_for_row(buffer_row);
// If the current row is at the start of an indented block, we want to return this
// block as the enclosing indent.
if !target_indent.is_line_empty() && buffer_row < max_row {
let next_line_indent = self.line_indent_for_row(buffer_row + 1);
if !next_line_indent.is_line_empty()
&& target_indent.raw_len() < next_line_indent.raw_len()
{
target_indent = next_line_indent;
buffer_row += 1;
}
}
// Hard caps on how many rows each scan may visit, bounding worst-case
// work on huge buffers; YIELD_INTERVAL is how often we yield mid-scan.
const SEARCH_ROW_LIMIT: u32 = 25000;
const SEARCH_WHITESPACE_ROW_LIMIT: u32 = 2500;
const YIELD_INTERVAL: u32 = 100;
let mut accessed_row_counter = 0;
// If there is a blank line at the current row, search for the next non indented lines
if target_indent.is_line_empty() {
let start = buffer_row.saturating_sub(SEARCH_WHITESPACE_ROW_LIMIT);
let end = (max_row + 1).min(buffer_row + SEARCH_WHITESPACE_ROW_LIMIT);
// Nearest non-empty line above the blank row, if any.
let mut non_empty_line_above = None;
for (row, indent) in self
.text
.reversed_line_indents_in_row_range(start..buffer_row)
{
accessed_row_counter += 1;
if accessed_row_counter == YIELD_INTERVAL {
accessed_row_counter = 0;
yield_now().await;
}
if !indent.is_line_empty() {
non_empty_line_above = Some((row, indent));
break;
}
}
// Nearest non-empty line below the blank row, if any.
let mut non_empty_line_below = None;
for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
accessed_row_counter += 1;
if accessed_row_counter == YIELD_INTERVAL {
accessed_row_counter = 0;
yield_now().await;
}
if !indent.is_line_empty() {
non_empty_line_below = Some((row, indent));
break;
}
}
// Re-anchor the search on whichever neighbor is more deeply
// indented, so a blank line inside a block adopts that block's
// indent; ties prefer the line above.
let (row, indent) = match (non_empty_line_above, non_empty_line_below) {
(Some((above_row, above_indent)), Some((below_row, below_indent))) => {
if above_indent.raw_len() >= below_indent.raw_len() {
(above_row, above_indent)
} else {
(below_row, below_indent)
}
}
(Some(above), None) => above,
(None, Some(below)) => below,
// Only blank lines within range: no enclosing indent exists.
_ => return None,
};
target_indent = indent;
buffer_row = row;
}
let start = buffer_row.saturating_sub(SEARCH_ROW_LIMIT);
let end = (max_row + 1).min(buffer_row + SEARCH_ROW_LIMIT);
// Walk upward to the first non-empty line that is less indented than the
// target; that line opens the enclosing block. Bail out (`?`) if none.
let mut start_indent = None;
for (row, indent) in self
.text
.reversed_line_indents_in_row_range(start..buffer_row)
{
accessed_row_counter += 1;
if accessed_row_counter == YIELD_INTERVAL {
accessed_row_counter = 0;
yield_now().await;
}
if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
start_indent = Some((row, indent));
break;
}
}
let (start_row, start_indent_size) = start_indent?;
// Walk downward to the first less-indented non-empty line; the block
// ends on the row just before it. If none is found, the block is taken
// to extend to the search limit `end`.
let mut end_indent = (end, None);
for (row, indent) in self.text.line_indents_in_row_range((buffer_row + 1)..end) {
accessed_row_counter += 1;
if accessed_row_counter == YIELD_INTERVAL {
accessed_row_counter = 0;
yield_now().await;
}
if !indent.is_line_empty() && indent.raw_len() < target_indent.raw_len() {
end_indent = (row.saturating_sub(1), Some(indent));
break;
}
}
let (end_row, end_indent_size) = end_indent;
// Report the deeper of the two bounding indents as the block's indent.
let indent = if let Some(end_indent_size) = end_indent_size {
if start_indent_size.raw_len() > end_indent_size.raw_len() {
start_indent_size
} else {
end_indent_size
}
} else {
start_indent_size
};
Some((start_row..end_row, indent))
}
/// Returns selections for remote peers intersecting the given range. /// Returns selections for remote peers intersecting the given range.
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
pub fn selections_in_range( pub fn selections_in_range(
@ -4395,6 +4169,10 @@ impl<'a> BufferChunks<'a> {
self.range.start self.range.start
} }
/// The offset range over which this iterator yields chunks.
/// Returns a copy of the stored range; it does not shrink as chunks are
/// consumed — see `offset()` for the current position.
pub fn range(&self) -> Range<usize> {
self.range.clone()
}
fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) { fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
let depth = match endpoint.severity { let depth = match endpoint.severity {
DiagnosticSeverity::ERROR => &mut self.error_depth, DiagnosticSeverity::ERROR => &mut self.error_depth,

View file

@ -21,7 +21,7 @@ use std::{
}; };
use syntax_map::TreeSitterOptions; use syntax_map::TreeSitterOptions;
use text::network::Network; use text::network::Network;
use text::{BufferId, LineEnding, LineIndent}; use text::{BufferId, LineEnding};
use text::{Point, ToPoint}; use text::{Point, ToPoint};
use unindent::Unindent as _; use unindent::Unindent as _;
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter}; use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
@ -2475,92 +2475,6 @@ fn test_serialization(cx: &mut gpui::AppContext) {
assert_eq!(buffer2.read(cx).text(), "abcDF"); assert_eq!(buffer2.read(cx).text(), "abcDF");
} }
// Exercises `enclosing_indent` on small code samples, checking both the
// reported row range and the reported `LineIndent`.
#[gpui::test]
async fn test_find_matching_indent(cx: &mut TestAppContext) {
cx.update(|cx| init_settings(cx, |_| {}));
// Helper: build a local buffer from `text` and query the enclosing
// indent for `buffer_row` on its snapshot.
async fn enclosing_indent(
text: impl Into<String>,
buffer_row: u32,
cx: &mut TestAppContext,
) -> Option<(Range<u32>, LineIndent)> {
let buffer = cx.new_model(|cx| Buffer::local(text, cx));
let snapshot = cx.read(|cx| buffer.read(cx).snapshot());
snapshot.enclosing_indent(buffer_row).await
}
// Row 1 (`if c {`): the enclosing block is rows 1..2 at 4 spaces.
assert_eq!(
enclosing_indent(
"
fn b() {
if c {
let d = 2;
}
}"
.unindent(),
1,
cx,
)
.await,
Some((
1..2,
LineIndent {
tabs: 0,
spaces: 4,
line_blank: false,
}
))
);
// Row 2 (inside the block) reports the same enclosing range.
assert_eq!(
enclosing_indent(
"
fn b() {
if c {
let d = 2;
}
}"
.unindent(),
2,
cx,
)
.await,
Some((
1..2,
LineIndent {
tabs: 0,
spaces: 4,
line_blank: false,
}
))
);
// A longer block (two statements) widens the reported range to 1..4.
assert_eq!(
enclosing_indent(
"
fn b() {
if c {
let d = 2;
let e = 5;
}
}"
.unindent(),
3,
cx,
)
.await,
Some((
1..4,
LineIndent {
tabs: 0,
spaces: 4,
line_blank: false,
}
))
);
}
#[gpui::test] #[gpui::test]
fn test_branch_and_merge(cx: &mut TestAppContext) { fn test_branch_and_merge(cx: &mut TestAppContext) {
cx.update(|cx| init_settings(cx, |_| {})); cx.update(|cx| init_settings(cx, |_| {}));

View file

@ -131,15 +131,15 @@ impl SyntaxTreeView {
let snapshot = editor_state let snapshot = editor_state
.editor .editor
.update(cx, |editor, cx| editor.snapshot(cx)); .update(cx, |editor, cx| editor.snapshot(cx));
let (excerpt, buffer, range) = editor_state.editor.update(cx, |editor, cx| { let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| {
let selection_range = editor.selections.last::<usize>(cx).range(); let selection_range = editor.selections.last::<usize>(cx).range();
let multi_buffer = editor.buffer().read(cx); let multi_buffer = editor.buffer().read(cx);
let (excerpt, range) = snapshot let (buffer, range, excerpt_id) = snapshot
.buffer_snapshot .buffer_snapshot
.range_to_buffer_ranges(selection_range) .range_to_buffer_ranges(selection_range)
.pop()?; .pop()?;
let buffer = multi_buffer.buffer(excerpt.buffer_id()).unwrap().clone(); let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap().clone();
Some((excerpt, buffer, range)) Some((buffer, range, excerpt_id))
})?; })?;
// If the cursor has moved into a different excerpt, retrieve a new syntax layer // If the cursor has moved into a different excerpt, retrieve a new syntax layer
@ -148,16 +148,16 @@ impl SyntaxTreeView {
.active_buffer .active_buffer
.get_or_insert_with(|| BufferState { .get_or_insert_with(|| BufferState {
buffer: buffer.clone(), buffer: buffer.clone(),
excerpt_id: excerpt.id(), excerpt_id,
active_layer: None, active_layer: None,
}); });
let mut prev_layer = None; let mut prev_layer = None;
if did_reparse { if did_reparse {
prev_layer = buffer_state.active_layer.take(); prev_layer = buffer_state.active_layer.take();
} }
if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt.id() { if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id {
buffer_state.buffer = buffer.clone(); buffer_state.buffer = buffer.clone();
buffer_state.excerpt_id = excerpt.id(); buffer_state.excerpt_id = excerpt_id;
buffer_state.active_layer = None; buffer_state.active_layer = None;
} }

View file

@ -27,12 +27,16 @@ collections.workspace = true
ctor.workspace = true ctor.workspace = true
env_logger.workspace = true env_logger.workspace = true
futures.workspace = true futures.workspace = true
git.workspace = true
gpui.workspace = true gpui.workspace = true
itertools.workspace = true itertools.workspace = true
language.workspace = true language.workspace = true
log.workspace = true log.workspace = true
parking_lot.workspace = true parking_lot.workspace = true
project.workspace = true
rand.workspace = true rand.workspace = true
rope.workspace = true
smol.workspace = true
settings.workspace = true settings.workspace = true
serde.workspace = true serde.workspace = true
smallvec.workspace = true smallvec.workspace = true
@ -45,7 +49,10 @@ util.workspace = true
[dev-dependencies] [dev-dependencies]
gpui = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true rand.workspace = true
settings = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] }
text = { workspace = true, features = ["test-support"] } text = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
indoc.workspace = true

View file

@ -12,14 +12,38 @@ pub struct Anchor {
pub buffer_id: Option<BufferId>, pub buffer_id: Option<BufferId>,
pub excerpt_id: ExcerptId, pub excerpt_id: ExcerptId,
pub text_anchor: text::Anchor, pub text_anchor: text::Anchor,
pub diff_base_anchor: Option<text::Anchor>,
} }
impl Anchor { impl Anchor {
/// Creates a multi-buffer anchor that resolves `text_anchor` within the
/// given excerpt of the given buffer, with no diff-base association.
pub fn in_buffer(
excerpt_id: ExcerptId,
buffer_id: BufferId,
text_anchor: text::Anchor,
) -> Self {
Self {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor,
diff_base_anchor: None,
}
}
/// Lifts a range of buffer-local anchors into a range of multi-buffer
/// anchors, both endpoints inside the same excerpt/buffer.
pub fn range_in_buffer(
excerpt_id: ExcerptId,
buffer_id: BufferId,
range: Range<text::Anchor>,
) -> Range<Self> {
Self::in_buffer(excerpt_id, buffer_id, range.start)
..Self::in_buffer(excerpt_id, buffer_id, range.end)
}
pub fn min() -> Self { pub fn min() -> Self {
Self { Self {
buffer_id: None, buffer_id: None,
excerpt_id: ExcerptId::min(), excerpt_id: ExcerptId::min(),
text_anchor: text::Anchor::MIN, text_anchor: text::Anchor::MIN,
diff_base_anchor: None,
} }
} }
@ -28,22 +52,47 @@ impl Anchor {
buffer_id: None, buffer_id: None,
excerpt_id: ExcerptId::max(), excerpt_id: ExcerptId::max(),
text_anchor: text::Anchor::MAX, text_anchor: text::Anchor::MAX,
diff_base_anchor: None,
} }
} }
pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot); let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
if excerpt_id_cmp.is_eq() { if excerpt_id_cmp.is_ne() {
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() { return excerpt_id_cmp;
Ordering::Equal
} else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer)
} else {
Ordering::Equal
}
} else {
excerpt_id_cmp
} }
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
return Ordering::Equal;
}
if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer);
if text_cmp.is_ne() {
return text_cmp;
}
if self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some() {
if let Some(diff_base) = snapshot.diffs.get(&excerpt.buffer_id) {
let self_anchor = self
.diff_base_anchor
.filter(|a| diff_base.base_text.can_resolve(a));
let other_anchor = other
.diff_base_anchor
.filter(|a| diff_base.base_text.can_resolve(a));
return match (self_anchor, other_anchor) {
(Some(a), Some(b)) => a.cmp(&b, &diff_base.base_text),
(Some(_), None) => match other.text_anchor.bias {
Bias::Left => Ordering::Greater,
Bias::Right => Ordering::Less,
},
(None, Some(_)) => match self.text_anchor.bias {
Bias::Left => Ordering::Less,
Bias::Right => Ordering::Greater,
},
(None, None) => Ordering::Equal,
};
}
}
}
Ordering::Equal
} }
pub fn bias(&self) -> Bias { pub fn bias(&self) -> Bias {
@ -57,6 +106,14 @@ impl Anchor {
buffer_id: self.buffer_id, buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id, excerpt_id: self.excerpt_id,
text_anchor: self.text_anchor.bias_left(&excerpt.buffer), text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
diff_base_anchor: self.diff_base_anchor.map(|a| {
if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) {
if a.buffer_id == Some(base.base_text.remote_id()) {
return a.bias_left(&base.base_text);
}
}
a
}),
}; };
} }
} }
@ -70,6 +127,14 @@ impl Anchor {
buffer_id: self.buffer_id, buffer_id: self.buffer_id,
excerpt_id: self.excerpt_id, excerpt_id: self.excerpt_id,
text_anchor: self.text_anchor.bias_right(&excerpt.buffer), text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
diff_base_anchor: self.diff_base_anchor.map(|a| {
if let Some(base) = snapshot.diffs.get(&excerpt.buffer_id) {
if a.buffer_id == Some(base.base_text.remote_id()) {
return a.bias_right(&base.base_text);
}
}
a
}),
}; };
} }
} }

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,264 @@
use std::{
fmt::{Debug, Display},
marker::PhantomData,
ops::{Add, AddAssign, Sub, SubAssign},
};
use text::Point;
/// A `usize` offset tagged with a zero-sized marker type `T`, so offsets
/// belonging to different coordinate spaces cannot be mixed up at compile
/// time. `#[repr(transparent)]` keeps the layout identical to a bare `usize`.
#[repr(transparent)]
pub struct TypedOffset<T> {
pub value: usize,
// Zero-sized marker tying the wrapper to coordinate space `T`.
_marker: PhantomData<T>,
}
/// A row/column `Point` tagged with a marker type `T`.
#[repr(transparent)]
pub struct TypedPoint<T> {
pub value: Point,
_marker: PhantomData<T>,
}
/// A `u32` row index tagged with a marker type `T`.
#[repr(transparent)]
pub struct TypedRow<T> {
pub value: u32,
_marker: PhantomData<T>,
}
impl<T> TypedOffset<T> {
    /// Wraps a raw `usize` offset in the typed wrapper.
    pub fn new(offset: usize) -> Self {
        Self {
            value: offset,
            _marker: PhantomData,
        }
    }

    /// The zero offset.
    pub fn zero() -> Self {
        Self::new(0)
    }

    /// Whether this is the zero offset.
    pub fn is_zero(&self) -> bool {
        self.value == 0
    }

    /// Subtracts `n` from this offset, clamping at zero instead of
    /// underflowing.
    pub fn saturating_sub(self, n: TypedOffset<T>) -> Self {
        Self::new(self.value.saturating_sub(n.value))
    }
}
impl<T> TypedPoint<T> {
    /// Tags an existing `Point` with the marker type `T`.
    pub fn wrap(point: Point) -> Self {
        Self {
            value: point,
            _marker: PhantomData,
        }
    }

    /// Builds a typed point from raw row/column coordinates.
    pub fn new(row: u32, column: u32) -> Self {
        Self::wrap(Point::new(row, column))
    }

    /// The typed origin point.
    pub fn zero() -> Self {
        Self::wrap(Point::zero())
    }

    /// The row component of the underlying point.
    pub fn row(&self) -> u32 {
        self.value.row
    }

    /// The column component of the underlying point.
    pub fn column(&self) -> u32 {
        self.value.column
    }

    /// Whether the underlying point is the origin.
    pub fn is_zero(&self) -> bool {
        self.value.is_zero()
    }
}
impl<T> TypedRow<T> {
    /// Wraps a raw `u32` row index in the typed wrapper.
    pub fn new(row: u32) -> Self {
        Self {
            _marker: PhantomData,
            value: row,
        }
    }
}
// The standard traits below are implemented by hand rather than derived:
// `derive` would add a `T: Trait` bound on the marker parameter, but these
// wrappers should be copyable/comparable regardless of `T`, which only ever
// appears inside `PhantomData`.
impl<T> Copy for TypedOffset<T> {}
impl<T> Copy for TypedPoint<T> {}
impl<T> Copy for TypedRow<T> {}
// `Clone` simply delegates to the `Copy` impls above.
impl<T> Clone for TypedOffset<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T> Clone for TypedPoint<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T> Clone for TypedRow<T> {
fn clone(&self) -> Self {
*self
}
}
// Defaults are the zero offset, the default `Point`, and row zero.
impl<T> Default for TypedOffset<T> {
fn default() -> Self {
Self::new(0)
}
}
impl<T> Default for TypedPoint<T> {
fn default() -> Self {
Self::wrap(Point::default())
}
}
impl<T> Default for TypedRow<T> {
fn default() -> Self {
Self::new(0)
}
}
// Ordering compares only the wrapped value; the marker type plays no part.
// `partial_cmp` always returns `Some` because the inner types are totally
// ordered (it is defined via `Ord::cmp` on the inner value).
impl<T> PartialOrd for TypedOffset<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.value.cmp(&other.value))
}
}
impl<T> PartialOrd for TypedPoint<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.value.cmp(&other.value))
}
}
impl<T> PartialOrd for TypedRow<T> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.value.cmp(&other.value))
}
}
impl<T> Ord for TypedOffset<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.value.cmp(&other.value)
}
}
impl<T> Ord for TypedPoint<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.value.cmp(&other.value)
}
}
impl<T> Ord for TypedRow<T> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.value.cmp(&other.value)
}
}
// Equality likewise compares only the wrapped value.
impl<T> PartialEq for TypedOffset<T> {
fn eq(&self, other: &Self) -> bool {
self.value == other.value
}
}
impl<T> PartialEq for TypedPoint<T> {
fn eq(&self, other: &Self) -> bool {
self.value == other.value
}
}
impl<T> PartialEq for TypedRow<T> {
fn eq(&self, other: &Self) -> bool {
self.value == other.value
}
}
impl<T> Eq for TypedOffset<T> {}
impl<T> Eq for TypedPoint<T> {}
impl<T> Eq for TypedRow<T> {}
// `Debug` output includes the marker type's short name (e.g. `FooOffset(3)`),
// so mixed-up coordinate spaces are visible in logs and test failures.
impl<T> Debug for TypedOffset<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}Offset({})", type_name::<T>(), self.value)
}
}
impl<T> Debug for TypedPoint<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}Point({}, {})",
type_name::<T>(),
self.value.row,
self.value.column
)
}
}
impl<T> Debug for TypedRow<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}Row({})", type_name::<T>(), self.value)
}
}
// `Display` forwards to the raw inner value with no type tag.
// NOTE(review): `TypedPoint` has no `Display` impl — confirm intentional.
impl<T> Display for TypedOffset<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt(&self.value, f)
}
}
impl<T> Display for TypedRow<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt(&self.value, f)
}
}
/// Returns the last `::`-separated segment of `T`'s type name, used to tag
/// `Debug` output (e.g. `foo::bar::Baz` becomes `Baz`).
fn type_name<T>() -> &'static str {
    // `rsplit` always yields at least one segment (the whole string when
    // "::" never occurs), so `next()` is guaranteed to be `Some`.
    std::any::type_name::<T>().rsplit("::").next().unwrap()
}
// Arithmetic delegates to the wrapped value; subtraction uses plain `-`, so
// it behaves exactly like the underlying type on underflow (use
// `TypedOffset::saturating_sub` to clamp at zero instead).
// NOTE(review): `TypedRow` gets only `SubAssign` (no `Add`/`Sub`/`AddAssign`)
// and `TypedPoint` gets no `SubAssign` — presumably only what call sites
// need; confirm before relying on symmetry.
impl<T> Add<TypedOffset<T>> for TypedOffset<T> {
type Output = Self;
fn add(self, other: Self) -> Self {
TypedOffset::new(self.value + other.value)
}
}
impl<T> Add<TypedPoint<T>> for TypedPoint<T> {
type Output = Self;
fn add(self, other: Self) -> Self {
TypedPoint::wrap(self.value + other.value)
}
}
impl<T> Sub<TypedOffset<T>> for TypedOffset<T> {
type Output = Self;
fn sub(self, other: Self) -> Self {
TypedOffset::new(self.value - other.value)
}
}
impl<T> Sub<TypedPoint<T>> for TypedPoint<T> {
type Output = Self;
fn sub(self, other: Self) -> Self {
TypedPoint::wrap(self.value - other.value)
}
}
impl<T> AddAssign<TypedOffset<T>> for TypedOffset<T> {
fn add_assign(&mut self, other: Self) {
self.value += other.value;
}
}
impl<T> AddAssign<TypedPoint<T>> for TypedPoint<T> {
fn add_assign(&mut self, other: Self) {
self.value += other.value;
}
}
impl<T> SubAssign<Self> for TypedOffset<T> {
fn sub_assign(&mut self, other: Self) {
self.value -= other.value;
}
}
impl<T> SubAssign<Self> for TypedRow<T> {
fn sub_assign(&mut self, other: Self) {
self.value -= other.value;
}
}

View file

@ -1042,7 +1042,7 @@ impl OutlinePanel {
.show_excerpt_controls(); .show_excerpt_controls();
let expand_excerpt_control_height = 1.0; let expand_excerpt_control_height = 1.0;
if let Some(buffer_id) = scroll_to_buffer { if let Some(buffer_id) = scroll_to_buffer {
let current_folded = active_editor.read(cx).buffer_folded(buffer_id, cx); let current_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx);
if current_folded { if current_folded {
if show_excerpt_controls { if show_excerpt_controls {
let previous_buffer_id = self let previous_buffer_id = self
@ -1059,7 +1059,9 @@ impl OutlinePanel {
.skip_while(|id| *id != buffer_id) .skip_while(|id| *id != buffer_id)
.nth(1); .nth(1);
if let Some(previous_buffer_id) = previous_buffer_id { if let Some(previous_buffer_id) = previous_buffer_id {
if !active_editor.read(cx).buffer_folded(previous_buffer_id, cx) if !active_editor
.read(cx)
.is_buffer_folded(previous_buffer_id, cx)
{ {
offset.y += expand_excerpt_control_height; offset.y += expand_excerpt_control_height;
} }
@ -1418,7 +1420,7 @@ impl OutlinePanel {
}; };
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
buffers_to_unfold.retain(|buffer_id| editor.buffer_folded(*buffer_id, cx)); buffers_to_unfold.retain(|buffer_id| editor.is_buffer_folded(*buffer_id, cx));
}); });
self.select_entry(selected_entry, true, cx); self.select_entry(selected_entry, true, cx);
if buffers_to_unfold.is_empty() { if buffers_to_unfold.is_empty() {
@ -1504,7 +1506,7 @@ impl OutlinePanel {
if collapsed { if collapsed {
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
buffers_to_fold.retain(|buffer_id| !editor.buffer_folded(*buffer_id, cx)); buffers_to_fold.retain(|buffer_id| !editor.is_buffer_folded(*buffer_id, cx));
}); });
self.select_entry(selected_entry, true, cx); self.select_entry(selected_entry, true, cx);
if buffers_to_fold.is_empty() { if buffers_to_fold.is_empty() {
@ -1569,7 +1571,7 @@ impl OutlinePanel {
self.collapsed_entries self.collapsed_entries
.retain(|entry| !expanded_entries.contains(entry)); .retain(|entry| !expanded_entries.contains(entry));
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
buffers_to_unfold.retain(|buffer_id| editor.buffer_folded(*buffer_id, cx)); buffers_to_unfold.retain(|buffer_id| editor.is_buffer_folded(*buffer_id, cx));
}); });
if buffers_to_unfold.is_empty() { if buffers_to_unfold.is_empty() {
self.update_cached_entries(None, cx); self.update_cached_entries(None, cx);
@ -1617,7 +1619,7 @@ impl OutlinePanel {
self.collapsed_entries.extend(new_entries); self.collapsed_entries.extend(new_entries);
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
buffers_to_fold.retain(|buffer_id| !editor.buffer_folded(*buffer_id, cx)); buffers_to_fold.retain(|buffer_id| !editor.is_buffer_folded(*buffer_id, cx));
}); });
if buffers_to_fold.is_empty() { if buffers_to_fold.is_empty() {
self.update_cached_entries(None, cx); self.update_cached_entries(None, cx);
@ -1707,7 +1709,7 @@ impl OutlinePanel {
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
buffers_to_toggle.retain(|buffer_id| { buffers_to_toggle.retain(|buffer_id| {
let folded = editor.buffer_folded(*buffer_id, cx); let folded = editor.is_buffer_folded(*buffer_id, cx);
if fold { if fold {
!folded !folded
} else { } else {
@ -2471,7 +2473,7 @@ impl OutlinePanel {
let worktree = file.map(|file| file.worktree.read(cx).snapshot()); let worktree = file.map(|file| file.worktree.read(cx).snapshot());
let is_new = new_entries.contains(&excerpt_id) let is_new = new_entries.contains(&excerpt_id)
|| !outline_panel.excerpts.contains_key(&buffer_id); || !outline_panel.excerpts.contains_key(&buffer_id);
let is_folded = active_editor.read(cx).buffer_folded(buffer_id, cx); let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx);
buffer_excerpts buffer_excerpts
.entry(buffer_id) .entry(buffer_id)
.or_insert_with(|| (is_new, is_folded, Vec::new(), entry_id, worktree)) .or_insert_with(|| (is_new, is_folded, Vec::new(), entry_id, worktree))
@ -2875,7 +2877,7 @@ impl OutlinePanel {
.excerpt_containing(selection, cx)?; .excerpt_containing(selection, cx)?;
let buffer_id = buffer.read(cx).remote_id(); let buffer_id = buffer.read(cx).remote_id();
if editor.read(cx).buffer_folded(buffer_id, cx) { if editor.read(cx).is_buffer_folded(buffer_id, cx) {
return self return self
.fs_entries .fs_entries
.iter() .iter()
@ -3593,7 +3595,7 @@ impl OutlinePanel {
None None
}; };
if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider { if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider {
if !active_editor.read(cx).buffer_folded(buffer_id, cx) { if !active_editor.read(cx).is_buffer_folded(buffer_id, cx) {
outline_panel.add_excerpt_entries( outline_panel.add_excerpt_entries(
&mut generation_state, &mut generation_state,
buffer_id, buffer_id,
@ -4004,12 +4006,12 @@ impl OutlinePanel {
.filter(|(match_range, _)| { .filter(|(match_range, _)| {
let editor = active_editor.read(cx); let editor = active_editor.read(cx);
if let Some(buffer_id) = match_range.start.buffer_id { if let Some(buffer_id) = match_range.start.buffer_id {
if editor.buffer_folded(buffer_id, cx) { if editor.is_buffer_folded(buffer_id, cx) {
return false; return false;
} }
} }
if let Some(buffer_id) = match_range.start.buffer_id { if let Some(buffer_id) = match_range.start.buffer_id {
if editor.buffer_folded(buffer_id, cx) { if editor.is_buffer_folded(buffer_id, cx) {
return false; return false;
} }
} }
@ -4883,7 +4885,7 @@ fn subscribe_for_editor_events(
} }
}) })
.map(|buffer_id| { .map(|buffer_id| {
if editor.read(cx).buffer_folded(*buffer_id, cx) { if editor.read(cx).is_buffer_folded(*buffer_id, cx) {
latest_folded_buffer_id = Some(*buffer_id); latest_folded_buffer_id = Some(*buffer_id);
false false
} else { } else {

View file

@ -21,7 +21,7 @@ use language::{
deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version, deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
split_operations, split_operations,
}, },
Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation, Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
}; };
use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope}; use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope};
use serde::Deserialize; use serde::Deserialize;
@ -60,14 +60,14 @@ struct SharedBuffer {
lsp_handle: Option<OpenLspBufferHandle>, lsp_handle: Option<OpenLspBufferHandle>,
} }
#[derive(Debug)]
pub struct BufferChangeSet { pub struct BufferChangeSet {
pub buffer_id: BufferId, pub buffer_id: BufferId,
pub base_text: Option<Model<Buffer>>, pub base_text: Option<language::BufferSnapshot>,
pub language: Option<Arc<Language>>,
pub diff_to_buffer: git::diff::BufferDiff, pub diff_to_buffer: git::diff::BufferDiff,
pub recalculate_diff_task: Option<Task<Result<()>>>, pub recalculate_diff_task: Option<Task<Result<()>>>,
pub diff_updated_futures: Vec<oneshot::Sender<()>>, pub diff_updated_futures: Vec<oneshot::Sender<()>>,
pub base_text_version: usize, pub language_registry: Option<Arc<LanguageRegistry>>,
} }
enum BufferStoreState { enum BufferStoreState {
@ -1080,9 +1080,9 @@ impl BufferStore {
Ok(text) => text, Ok(text) => text,
}; };
let change_set = buffer.update(&mut cx, |buffer, cx| { let change_set = cx
cx.new_model(|_| BufferChangeSet::new(buffer)) .new_model(|cx| BufferChangeSet::new(&buffer, cx))
})?; .unwrap();
if let Some(text) = text { if let Some(text) = text {
change_set change_set
@ -1976,11 +1976,8 @@ impl BufferStore {
shared.unstaged_changes = Some(change_set.clone()); shared.unstaged_changes = Some(change_set.clone());
} }
})?; })?;
let staged_text = change_set.read_with(&cx, |change_set, cx| { let staged_text = change_set.read_with(&cx, |change_set, _| {
change_set change_set.base_text.as_ref().map(|buffer| buffer.text())
.base_text
.as_ref()
.map(|buffer| buffer.read(cx).text())
})?; })?;
Ok(proto::GetStagedTextResponse { staged_text }) Ok(proto::GetStagedTextResponse { staged_text })
} }
@ -2225,25 +2222,51 @@ impl BufferStore {
} }
impl BufferChangeSet { impl BufferChangeSet {
pub fn new(buffer: &text::BufferSnapshot) -> Self { pub fn new(buffer: &Model<Buffer>, cx: &mut ModelContext<Self>) -> Self {
cx.subscribe(buffer, |this, buffer, event, cx| match event {
BufferEvent::LanguageChanged => {
this.language = buffer.read(cx).language().cloned();
if let Some(base_text) = &this.base_text {
let snapshot = language::Buffer::build_snapshot(
base_text.as_rope().clone(),
this.language.clone(),
this.language_registry.clone(),
cx,
);
this.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
let base_text = cx.background_executor().spawn(snapshot).await;
this.update(&mut cx, |this, cx| {
this.base_text = Some(base_text);
cx.notify();
})
}));
}
}
_ => {}
})
.detach();
let buffer = buffer.read(cx);
Self { Self {
buffer_id: buffer.remote_id(), buffer_id: buffer.remote_id(),
base_text: None, base_text: None,
diff_to_buffer: git::diff::BufferDiff::new(buffer), diff_to_buffer: git::diff::BufferDiff::new(buffer),
recalculate_diff_task: None, recalculate_diff_task: None,
diff_updated_futures: Vec::new(), diff_updated_futures: Vec::new(),
base_text_version: 0, language: buffer.language().cloned(),
language_registry: buffer.language_registry(),
} }
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn new_with_base_text( pub fn new_with_base_text(
base_text: String, base_text: String,
buffer: text::BufferSnapshot, buffer: &Model<Buffer>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Self { ) -> Self {
let mut this = Self::new(&buffer); let mut this = Self::new(&buffer, cx);
let _ = this.set_base_text(base_text, buffer, cx); let _ = this.set_base_text(base_text, buffer.read(cx).text_snapshot(), cx);
this this
} }
@ -2266,8 +2289,8 @@ impl BufferChangeSet {
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn base_text_string(&self, cx: &AppContext) -> Option<String> { pub fn base_text_string(&self) -> Option<String> {
self.base_text.as_ref().map(|buffer| buffer.read(cx).text()) self.base_text.as_ref().map(|buffer| buffer.text())
} }
pub fn set_base_text( pub fn set_base_text(
@ -2289,7 +2312,6 @@ impl BufferChangeSet {
self.base_text = None; self.base_text = None;
self.diff_to_buffer = BufferDiff::new(&buffer_snapshot); self.diff_to_buffer = BufferDiff::new(&buffer_snapshot);
self.recalculate_diff_task.take(); self.recalculate_diff_task.take();
self.base_text_version += 1;
cx.notify(); cx.notify();
} }
} }
@ -2300,7 +2322,7 @@ impl BufferChangeSet {
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> oneshot::Receiver<()> { ) -> oneshot::Receiver<()> {
if let Some(base_text) = self.base_text.clone() { if let Some(base_text) = self.base_text.clone() {
self.recalculate_diff_internal(base_text.read(cx).text(), buffer_snapshot, false, cx) self.recalculate_diff_internal(base_text.text(), buffer_snapshot, false, cx)
} else { } else {
oneshot::channel().1 oneshot::channel().1
} }
@ -2316,19 +2338,30 @@ impl BufferChangeSet {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
self.diff_updated_futures.push(tx); self.diff_updated_futures.push(tx);
self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move { self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
let (base_text, diff) = cx let new_base_text = if base_text_changed {
let base_text_rope: Rope = base_text.as_str().into();
let snapshot = this.update(&mut cx, |this, cx| {
language::Buffer::build_snapshot(
base_text_rope,
this.language.clone(),
this.language_registry.clone(),
cx,
)
})?;
Some(cx.background_executor().spawn(snapshot).await)
} else {
None
};
let diff = cx
.background_executor() .background_executor()
.spawn(async move { .spawn({
let diff = BufferDiff::build(&base_text, &buffer_snapshot).await; let buffer_snapshot = buffer_snapshot.clone();
(base_text, diff) async move { BufferDiff::build(&base_text, &buffer_snapshot) }
}) })
.await; .await;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
if base_text_changed { if let Some(new_base_text) = new_base_text {
this.base_text_version += 1; this.base_text = Some(new_base_text)
this.base_text = Some(cx.new_model(|cx| {
Buffer::local_normalized(Rope::from(base_text), LineEnding::default(), cx)
}));
} }
this.diff_to_buffer = diff; this.diff_to_buffer = diff;
this.recalculate_diff_task.take(); this.recalculate_diff_task.take();
@ -2341,6 +2374,33 @@ impl BufferChangeSet {
})); }));
rx rx
} }
/// Test-only synchronous variant of diff recalculation: rebuilds the diff
/// (and, when `base_text_changed`, the base-text snapshot) by blocking the
/// current thread instead of spawning a background task.
#[cfg(any(test, feature = "test-support"))]
pub fn recalculate_diff_sync(
&mut self,
mut base_text: String,
buffer_snapshot: text::BufferSnapshot,
base_text_changed: bool,
cx: &mut ModelContext<Self>,
) {
// Normalize line endings in the base text before diffing.
LineEnding::normalize(&mut base_text);
let diff = BufferDiff::build(&base_text, &buffer_snapshot);
if base_text_changed {
// Blocking on snapshot construction is acceptable here because this
// path is only compiled for tests / test support.
self.base_text = Some(
cx.background_executor()
.clone()
.block(Buffer::build_snapshot(
base_text.into(),
self.language.clone(),
self.language_registry.clone(),
cx,
)),
);
}
self.diff_to_buffer = diff;
// Drop any in-flight async recalculation; this sync result supersedes it.
self.recalculate_diff_task.take();
cx.notify();
}
} }
impl OpenBuffer { impl OpenBuffer {

View file

@ -1851,14 +1851,11 @@ impl LocalLspStore {
let edits_since_save = std::cell::LazyCell::new(|| { let edits_since_save = std::cell::LazyCell::new(|| {
let saved_version = buffer.read(cx).saved_version(); let saved_version = buffer.read(cx).saved_version();
Patch::new( Patch::new(snapshot.edits_since::<PointUtf16>(saved_version).collect())
snapshot
.edits_since::<Unclipped<PointUtf16>>(saved_version)
.collect(),
)
}); });
let mut sanitized_diagnostics = Vec::new(); let mut sanitized_diagnostics = Vec::new();
for entry in diagnostics { for entry in diagnostics {
let start; let start;
let end; let end;
@ -1866,8 +1863,8 @@ impl LocalLspStore {
// Some diagnostics are based on files on disk instead of buffers' // Some diagnostics are based on files on disk instead of buffers'
// current contents. Adjust these diagnostics' ranges to reflect // current contents. Adjust these diagnostics' ranges to reflect
// any unsaved edits. // any unsaved edits.
start = (*edits_since_save).old_to_new(entry.range.start); start = Unclipped((*edits_since_save).old_to_new(entry.range.start.0));
end = (*edits_since_save).old_to_new(entry.range.end); end = Unclipped((*edits_since_save).old_to_new(entry.range.end.0));
} else { } else {
start = entry.range.start; start = entry.range.start;
end = entry.range.end; end = entry.range.end;

View file

@ -5651,7 +5651,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
assert_hunks( assert_hunks(
unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot, &snapshot,
&unstaged_changes.base_text.as_ref().unwrap().read(cx).text(), &unstaged_changes.base_text.as_ref().unwrap().text(),
&[ &[
(0..1, "", "// print goodbye\n"), (0..1, "", "// print goodbye\n"),
( (
@ -5681,7 +5681,7 @@ async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
assert_hunks( assert_hunks(
unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
&snapshot, &snapshot,
&unstaged_changes.base_text.as_ref().unwrap().read(cx).text(), &unstaged_changes.base_text.as_ref().unwrap().text(),
&[(2..3, "", " println!(\"goodbye world\");\n")], &[(2..3, "", " println!(\"goodbye world\");\n")],
); );
}); });

View file

@ -12,7 +12,7 @@ use language::{
use rpc::{proto, AnyProtoClient, TypedEnvelope}; use rpc::{proto, AnyProtoClient, TypedEnvelope};
use settings::{watch_config_file, SettingsLocation}; use settings::{watch_config_file, SettingsLocation};
use task::{TaskContext, TaskVariables, VariableName}; use task::{TaskContext, TaskVariables, VariableName};
use text::BufferId; use text::{BufferId, OffsetRangeExt};
use util::ResultExt; use util::ResultExt;
use crate::{ use crate::{
@ -125,12 +125,10 @@ impl TaskStore {
.filter_map(|(k, v)| Some((k.parse().log_err()?, v))), .filter_map(|(k, v)| Some((k.parse().log_err()?, v))),
); );
for range in location let snapshot = location.buffer.read(cx).snapshot();
.buffer let range = location.range.to_offset(&snapshot);
.read(cx)
.snapshot() for range in snapshot.runnable_ranges(range) {
.runnable_ranges(location.range.clone())
{
for (capture_name, value) in range.extra_captures { for (capture_name, value) in range.extra_captures {
variables.insert(VariableName::Custom(capture_name.into()), value); variables.insert(VariableName::Custom(capture_name.into()), value);
} }

View file

@ -86,9 +86,9 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
.await .await
.unwrap(); .unwrap();
change_set.update(cx, |change_set, cx| { change_set.update(cx, |change_set, _| {
assert_eq!( assert_eq!(
change_set.base_text_string(cx).unwrap(), change_set.base_text_string().unwrap(),
"fn one() -> usize { 0 }" "fn one() -> usize { 0 }"
); );
}); });
@ -150,9 +150,9 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
&[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())], &[(Path::new("src/lib2.rs"), "fn one() -> usize { 100 }".into())],
); );
cx.executor().run_until_parked(); cx.executor().run_until_parked();
change_set.update(cx, |change_set, cx| { change_set.update(cx, |change_set, _| {
assert_eq!( assert_eq!(
change_set.base_text_string(cx).unwrap(), change_set.base_text_string().unwrap(),
"fn one() -> usize { 100 }" "fn one() -> usize { 100 }"
); );
}); });

View file

@ -4,16 +4,17 @@ mod point;
mod point_utf16; mod point_utf16;
mod unclipped; mod unclipped;
use chunk::{Chunk, ChunkSlice}; use chunk::Chunk;
use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{ use std::{
cmp, fmt, io, mem, cmp, fmt, io, mem,
ops::{AddAssign, Range}, ops::{self, AddAssign, Range},
str, str,
}; };
use sum_tree::{Bias, Dimension, SumTree}; use sum_tree::{Bias, Dimension, SumTree};
pub use chunk::ChunkSlice;
pub use offset_utf16::OffsetUtf16; pub use offset_utf16::OffsetUtf16;
pub use point::Point; pub use point::Point;
pub use point_utf16::PointUtf16; pub use point_utf16::PointUtf16;
@ -221,7 +222,7 @@ impl Rope {
} }
pub fn summary(&self) -> TextSummary { pub fn summary(&self) -> TextSummary {
self.chunks.summary().text.clone() self.chunks.summary().text
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
@ -962,7 +963,7 @@ impl sum_tree::Summary for ChunkSummary {
} }
/// Summary of a string of text. /// Summary of a string of text.
#[derive(Clone, Debug, Default, Eq, PartialEq)] #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
pub struct TextSummary { pub struct TextSummary {
/// Length in UTF-8 /// Length in UTF-8
pub len: usize, pub len: usize,
@ -989,6 +990,27 @@ impl TextSummary {
column: self.last_line_len_utf16, column: self.last_line_len_utf16,
} }
} }
pub fn newline() -> Self {
Self {
len: 1,
len_utf16: OffsetUtf16(1),
first_line_chars: 0,
last_line_chars: 0,
last_line_len_utf16: 0,
lines: Point::new(1, 0),
longest_row: 0,
longest_row_chars: 0,
}
}
pub fn add_newline(&mut self) {
self.len += 1;
self.len_utf16 += OffsetUtf16(self.len_utf16.0 + 1);
self.last_line_chars = 0;
self.last_line_len_utf16 = 0;
self.lines += Point::new(1, 0);
}
} }
impl<'a> From<&'a str> for TextSummary { impl<'a> From<&'a str> for TextSummary {
@ -1048,7 +1070,7 @@ impl sum_tree::Summary for TextSummary {
} }
} }
impl std::ops::Add<Self> for TextSummary { impl ops::Add<Self> for TextSummary {
type Output = Self; type Output = Self;
fn add(mut self, rhs: Self) -> Self::Output { fn add(mut self, rhs: Self) -> Self::Output {
@ -1057,7 +1079,7 @@ impl std::ops::Add<Self> for TextSummary {
} }
} }
impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { impl<'a> ops::AddAssign<&'a Self> for TextSummary {
fn add_assign(&mut self, other: &'a Self) { fn add_assign(&mut self, other: &'a Self) {
let joined_chars = self.last_line_chars + other.first_line_chars; let joined_chars = self.last_line_chars + other.first_line_chars;
if joined_chars > self.longest_row_chars { if joined_chars > self.longest_row_chars {
@ -1087,13 +1109,15 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
} }
} }
impl std::ops::AddAssign<Self> for TextSummary { impl ops::AddAssign<Self> for TextSummary {
fn add_assign(&mut self, other: Self) { fn add_assign(&mut self, other: Self) {
*self += &other; *self += &other;
} }
} }
pub trait TextDimension: 'static + for<'a> Dimension<'a, ChunkSummary> { pub trait TextDimension:
'static + Clone + Copy + Default + for<'a> Dimension<'a, ChunkSummary> + std::fmt::Debug
{
fn from_text_summary(summary: &TextSummary) -> Self; fn from_text_summary(summary: &TextSummary) -> Self;
fn from_chunk(chunk: ChunkSlice) -> Self; fn from_chunk(chunk: ChunkSlice) -> Self;
fn add_assign(&mut self, other: &Self); fn add_assign(&mut self, other: &Self);
@ -1129,7 +1153,7 @@ impl<'a> sum_tree::Dimension<'a, ChunkSummary> for TextSummary {
impl TextDimension for TextSummary { impl TextDimension for TextSummary {
fn from_text_summary(summary: &TextSummary) -> Self { fn from_text_summary(summary: &TextSummary) -> Self {
summary.clone() *summary
} }
fn from_chunk(chunk: ChunkSlice) -> Self { fn from_chunk(chunk: ChunkSlice) -> Self {
@ -1240,6 +1264,118 @@ impl TextDimension for PointUtf16 {
} }
} }
/// A pair of text dimensions in which only the first dimension is used for comparison,
/// but both dimensions are updated during addition and subtraction.
#[derive(Clone, Copy, Debug)]
pub struct DimensionPair<K, V> {
pub key: K,
pub value: Option<V>,
}
impl<K: Default, V: Default> Default for DimensionPair<K, V> {
fn default() -> Self {
Self {
key: Default::default(),
value: Some(Default::default()),
}
}
}
impl<K, V> cmp::Ord for DimensionPair<K, V>
where
K: cmp::Ord,
{
fn cmp(&self, other: &Self) -> cmp::Ordering {
self.key.cmp(&other.key)
}
}
impl<K, V> cmp::PartialOrd for DimensionPair<K, V>
where
K: cmp::PartialOrd,
{
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
self.key.partial_cmp(&other.key)
}
}
impl<K, V> cmp::PartialEq for DimensionPair<K, V>
where
K: cmp::PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.key.eq(&other.key)
}
}
impl<K, V> ops::Sub for DimensionPair<K, V>
where
K: ops::Sub<K, Output = K>,
V: ops::Sub<V, Output = V>,
{
type Output = Self;
fn sub(self, rhs: Self) -> Self::Output {
Self {
key: self.key - rhs.key,
value: self.value.zip(rhs.value).map(|(a, b)| a - b),
}
}
}
impl<K, V> cmp::Eq for DimensionPair<K, V> where K: cmp::Eq {}
impl<'a, K, V> sum_tree::Dimension<'a, ChunkSummary> for DimensionPair<K, V>
where
K: sum_tree::Dimension<'a, ChunkSummary>,
V: sum_tree::Dimension<'a, ChunkSummary>,
{
fn zero(_cx: &()) -> Self {
Self {
key: K::zero(_cx),
value: Some(V::zero(_cx)),
}
}
fn add_summary(&mut self, summary: &'a ChunkSummary, _cx: &()) {
self.key.add_summary(summary, _cx);
if let Some(value) = &mut self.value {
value.add_summary(summary, _cx);
}
}
}
impl<K, V> TextDimension for DimensionPair<K, V>
where
K: TextDimension,
V: TextDimension,
{
fn add_assign(&mut self, other: &Self) {
self.key.add_assign(&other.key);
if let Some(value) = &mut self.value {
if let Some(other_value) = other.value.as_ref() {
value.add_assign(other_value);
} else {
self.value.take();
}
}
}
fn from_chunk(chunk: ChunkSlice) -> Self {
Self {
key: K::from_chunk(chunk),
value: Some(V::from_chunk(chunk)),
}
}
fn from_text_summary(summary: &TextSummary) -> Self {
Self {
key: K::from_text_summary(summary),
value: Some(V::from_text_summary(summary)),
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -1,4 +1,4 @@
use crate::{chunk::ChunkSlice, ChunkSummary, TextDimension, TextSummary}; use crate::ChunkSummary;
use std::ops::{Add, AddAssign, Sub, SubAssign}; use std::ops::{Add, AddAssign, Sub, SubAssign};
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -22,20 +22,6 @@ impl<'a, T: sum_tree::Dimension<'a, ChunkSummary>> sum_tree::Dimension<'a, Chunk
} }
} }
impl<T: TextDimension> TextDimension for Unclipped<T> {
fn from_text_summary(summary: &TextSummary) -> Self {
Unclipped(T::from_text_summary(summary))
}
fn from_chunk(chunk: ChunkSlice) -> Self {
Unclipped(T::from_chunk(chunk))
}
fn add_assign(&mut self, other: &Self) {
TextDimension::add_assign(&mut self.0, &other.0);
}
}
impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> { impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> {
type Output = Unclipped<T>; type Output = Unclipped<T>;

View file

@ -115,14 +115,29 @@ impl<'a, T: Summary, D1: Dimension<'a, T>, D2: Dimension<'a, T>> Dimension<'a, T
} }
} }
impl<'a, S: Summary, D1: SeekTarget<'a, S, D1> + Dimension<'a, S>, D2: Dimension<'a, S>> impl<'a, S, D1, D2> SeekTarget<'a, S, (D1, D2)> for D1
SeekTarget<'a, S, (D1, D2)> for D1 where
S: Summary,
D1: SeekTarget<'a, S, D1> + Dimension<'a, S>,
D2: Dimension<'a, S>,
{ {
fn cmp(&self, cursor_location: &(D1, D2), cx: &S::Context) -> Ordering { fn cmp(&self, cursor_location: &(D1, D2), cx: &S::Context) -> Ordering {
self.cmp(&cursor_location.0, cx) self.cmp(&cursor_location.0, cx)
} }
} }
impl<'a, S, D1, D2, D3> SeekTarget<'a, S, ((D1, D2), D3)> for D1
where
S: Summary,
D1: SeekTarget<'a, S, D1> + Dimension<'a, S>,
D2: Dimension<'a, S>,
D3: Dimension<'a, S>,
{
fn cmp(&self, cursor_location: &((D1, D2), D3), cx: &S::Context) -> Ordering {
self.cmp(&cursor_location.0 .0, cx)
}
}
struct End<D>(PhantomData<D>); struct End<D>(PhantomData<D>);
impl<D> End<D> { impl<D> End<D> {

View file

@ -5,11 +5,7 @@ pub mod terminal_scrollbar;
pub mod terminal_tab_tooltip; pub mod terminal_tab_tooltip;
use collections::HashSet; use collections::HashSet;
use editor::{ use editor::{actions::SelectAll, scroll::ScrollbarAutoHide, Editor, EditorSettings};
actions::SelectAll,
scroll::{Autoscroll, ScrollbarAutoHide},
Editor, EditorSettings,
};
use futures::{stream::FuturesUnordered, StreamExt}; use futures::{stream::FuturesUnordered, StreamExt};
use gpui::{ use gpui::{
anchored, deferred, div, impl_actions, AnyElement, AppContext, DismissEvent, EventEmitter, anchored, deferred, div, impl_actions, AnyElement, AppContext, DismissEvent, EventEmitter,
@ -17,7 +13,6 @@ use gpui::{
MouseDownEvent, Pixels, Render, ScrollWheelEvent, Stateful, Styled, Subscription, Task, View, MouseDownEvent, Pixels, Render, ScrollWheelEvent, Stateful, Styled, Subscription, Task, View,
VisualContext, WeakModel, WeakView, VisualContext, WeakModel, WeakView,
}; };
use language::Bias;
use persistence::TERMINAL_DB; use persistence::TERMINAL_DB;
use project::{search::SearchQuery, terminals::TerminalKind, Fs, Metadata, Project}; use project::{search::SearchQuery, terminals::TerminalKind, Fs, Metadata, Project};
use schemars::JsonSchema; use schemars::JsonSchema;
@ -885,19 +880,13 @@ fn subscribe_for_terminal_events(
active_editor active_editor
.downgrade() .downgrade()
.update(&mut cx, |editor, cx| { .update(&mut cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot; editor.go_to_singleton_buffer_point(
let point = snapshot.buffer_snapshot.clip_point(
language::Point::new( language::Point::new(
row.saturating_sub(1), row.saturating_sub(1),
col.saturating_sub(1), col.saturating_sub(1),
), ),
Bias::Left,
);
editor.change_selections(
Some(Autoscroll::center()),
cx, cx,
|s| s.select_ranges([point..point]), )
);
}) })
.log_err(); .log_err();
} }

View file

@ -42,6 +42,7 @@ where
self.0 self.0
} }
#[must_use]
pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self { pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
let mut old_edits_iter = self.0.iter().cloned().peekable(); let mut old_edits_iter = self.0.iter().cloned().peekable();
let mut new_edits_iter = new_edits_iter.into_iter().peekable(); let mut new_edits_iter = new_edits_iter.into_iter().peekable();

View file

@ -1507,9 +1507,9 @@ impl Buffer {
let mut rope_cursor = self.visible_text.cursor(0); let mut rope_cursor = self.visible_text.cursor(0);
disjoint_ranges.map(move |range| { disjoint_ranges.map(move |range| {
position.add_assign(&rope_cursor.summary(range.start)); position.add_assign(&rope_cursor.summary(range.start));
let start = position.clone(); let start = position;
position.add_assign(&rope_cursor.summary(range.end)); position.add_assign(&rope_cursor.summary(range.end));
let end = position.clone(); let end = position;
start..end start..end
}) })
} }
@ -2029,11 +2029,11 @@ impl BufferSnapshot {
row_range: Range<u32>, row_range: Range<u32>,
) -> impl Iterator<Item = (u32, LineIndent)> + '_ { ) -> impl Iterator<Item = (u32, LineIndent)> + '_ {
let start = Point::new(row_range.start, 0).to_offset(self); let start = Point::new(row_range.start, 0).to_offset(self);
let end = Point::new(row_range.end - 1, self.line_len(row_range.end - 1)).to_offset(self); let end = Point::new(row_range.end, self.line_len(row_range.end)).to_offset(self);
let mut chunks = self.as_rope().chunks_in_range(start..end); let mut chunks = self.as_rope().chunks_in_range(start..end);
let mut row = row_range.start; let mut row = row_range.start;
let mut done = start == end; let mut done = false;
std::iter::from_fn(move || { std::iter::from_fn(move || {
if done { if done {
None None
@ -2071,7 +2071,7 @@ impl BufferSnapshot {
} }
let mut row = end_point.row; let mut row = end_point.row;
let mut done = start == end; let mut done = false;
std::iter::from_fn(move || { std::iter::from_fn(move || {
if done { if done {
None None
@ -2168,7 +2168,7 @@ impl BufferSnapshot {
} }
position.add_assign(&text_cursor.summary(fragment_offset)); position.add_assign(&text_cursor.summary(fragment_offset));
(position.clone(), payload) (position, payload)
}) })
} }
@ -2176,10 +2176,14 @@ impl BufferSnapshot {
where where
D: TextDimension, D: TextDimension,
{ {
self.text_summary_for_range(0..self.offset_for_anchor(anchor))
}
pub fn offset_for_anchor(&self, anchor: &Anchor) -> usize {
if *anchor == Anchor::MIN { if *anchor == Anchor::MIN {
D::zero(&()) 0
} else if *anchor == Anchor::MAX { } else if *anchor == Anchor::MAX {
D::from_text_summary(&self.visible_text.summary()) self.visible_text.len()
} else { } else {
let anchor_key = InsertionFragmentKey { let anchor_key = InsertionFragmentKey {
timestamp: anchor.timestamp, timestamp: anchor.timestamp,
@ -2217,7 +2221,7 @@ impl BufferSnapshot {
if fragment.visible { if fragment.visible {
fragment_offset += anchor.offset - insertion.split_offset; fragment_offset += anchor.offset - insertion.split_offset;
} }
self.text_summary_for_range(0..fragment_offset) fragment_offset
} }
} }
@ -2580,16 +2584,16 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
} }
let fragment_summary = self.visible_cursor.summary(visible_end); let fragment_summary = self.visible_cursor.summary(visible_end);
let mut new_end = self.new_end.clone(); let mut new_end = self.new_end;
new_end.add_assign(&fragment_summary); new_end.add_assign(&fragment_summary);
if let Some((edit, range)) = pending_edit.as_mut() { if let Some((edit, range)) = pending_edit.as_mut() {
edit.new.end = new_end.clone(); edit.new.end = new_end;
range.end = end_anchor; range.end = end_anchor;
} else { } else {
pending_edit = Some(( pending_edit = Some((
Edit { Edit {
old: self.old_end.clone()..self.old_end.clone(), old: self.old_end..self.old_end,
new: self.new_end.clone()..new_end.clone(), new: self.new_end..new_end,
}, },
start_anchor..end_anchor, start_anchor..end_anchor,
)); ));
@ -2609,16 +2613,16 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
self.deleted_cursor.seek_forward(cursor.start().deleted); self.deleted_cursor.seek_forward(cursor.start().deleted);
} }
let fragment_summary = self.deleted_cursor.summary(deleted_end); let fragment_summary = self.deleted_cursor.summary(deleted_end);
let mut old_end = self.old_end.clone(); let mut old_end = self.old_end;
old_end.add_assign(&fragment_summary); old_end.add_assign(&fragment_summary);
if let Some((edit, range)) = pending_edit.as_mut() { if let Some((edit, range)) = pending_edit.as_mut() {
edit.old.end = old_end.clone(); edit.old.end = old_end;
range.end = end_anchor; range.end = end_anchor;
} else { } else {
pending_edit = Some(( pending_edit = Some((
Edit { Edit {
old: self.old_end.clone()..old_end.clone(), old: self.old_end..old_end,
new: self.new_end.clone()..self.new_end.clone(), new: self.new_end..self.new_end,
}, },
start_anchor..end_anchor, start_anchor..end_anchor,
)); ));

View file

@ -138,22 +138,27 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext<Vim>) {
Vim::action(editor, cx, |vim, action: &GoToLine, cx| { Vim::action(editor, cx, |vim, action: &GoToLine, cx| {
vim.switch_mode(Mode::Normal, false, cx); vim.switch_mode(Mode::Normal, false, cx);
let result = vim.update_editor(cx, |vim, editor, cx| { let result = vim.update_editor(cx, |vim, editor, cx| {
action.range.head().buffer_row(vim, editor, cx) let snapshot = editor.snapshot(cx);
let buffer_row = action.range.head().buffer_row(vim, editor, cx)?;
let current = editor.selections.newest::<Point>(cx);
let target = snapshot
.buffer_snapshot
.clip_point(Point::new(buffer_row.0, current.head().column), Bias::Left);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([target..target]);
});
anyhow::Ok(())
}); });
let buffer_row = match result { if let Some(e @ Err(_)) = result {
None => return, let Some(workspace) = vim.workspace(cx) else {
Some(e @ Err(_)) => {
let Some(workspace) = vim.workspace(cx) else {
return;
};
workspace.update(cx, |workspace, cx| {
e.notify_err(workspace, cx);
});
return; return;
} };
Some(Ok(result)) => result, workspace.update(cx, |workspace, cx| {
}; e.notify_err(workspace, cx);
vim.move_cursor(Motion::StartOfDocument, Some(buffer_row.0 as usize + 1), cx); });
return;
}
}); });
Vim::action(editor, cx, |vim, action: &YankCommand, cx| { Vim::action(editor, cx, |vim, action: &YankCommand, cx| {
@ -462,7 +467,22 @@ impl Position {
) -> Result<MultiBufferRow> { ) -> Result<MultiBufferRow> {
let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
let target = match self { let target = match self {
Position::Line { row, offset } => row.saturating_add_signed(offset.saturating_sub(1)), Position::Line { row, offset } => {
if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| {
editor.buffer().read(cx).buffer_point_to_anchor(
&buffer,
Point::new(row.saturating_sub(1), 0),
cx,
)
}) {
anchor
.to_point(&snapshot.buffer_snapshot)
.row
.saturating_add_signed(*offset)
} else {
row.saturating_add_signed(offset.saturating_sub(1))
}
}
Position::Mark { name, offset } => { Position::Mark { name, offset } => {
let Some(mark) = vim.marks.get(&name.to_string()).and_then(|vec| vec.last()) else { let Some(mark) = vim.marks.get(&name.to_string()).and_then(|vec| vec.last()) else {
return Err(anyhow!("mark {} not set", name)); return Err(anyhow!("mark {} not set", name));
@ -697,7 +717,8 @@ fn generate_commands(_: &AppContext) -> Vec<VimCommand> {
VimCommand::new(("foldc", "lose"), editor::actions::Fold) VimCommand::new(("foldc", "lose"), editor::actions::Fold)
.bang(editor::actions::FoldRecursive) .bang(editor::actions::FoldRecursive)
.range(act_on_range), .range(act_on_range),
VimCommand::new(("dif", "fupdate"), editor::actions::ToggleHunkDiff).range(act_on_range), VimCommand::new(("dif", "fupdate"), editor::actions::ToggleSelectedDiffHunks)
.range(act_on_range),
VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(act_on_range), VimCommand::new(("rev", "ert"), editor::actions::RevertSelectedHunks).range(act_on_range),
VimCommand::new(("d", "elete"), VisualDeleteLine).range(select_range), VimCommand::new(("d", "elete"), VisualDeleteLine).range(select_range),
VimCommand::new(("y", "ank"), gpui::NoAction).range(|_, range| { VimCommand::new(("y", "ank"), gpui::NoAction).range(|_, range| {

View file

@ -4,7 +4,7 @@ use editor::{
self, find_boundary, find_preceding_boundary_display_point, FindRange, TextLayoutDetails, self, find_boundary, find_preceding_boundary_display_point, FindRange, TextLayoutDetails,
}, },
scroll::Autoscroll, scroll::Autoscroll,
Anchor, Bias, DisplayPoint, Editor, RowExt, ToOffset, Anchor, Bias, DisplayPoint, Editor, RowExt, ToOffset, ToPoint,
}; };
use gpui::{actions, impl_actions, px, ViewContext}; use gpui::{actions, impl_actions, px, ViewContext};
use language::{CharKind, Point, Selection, SelectionGoal}; use language::{CharKind, Point, Selection, SelectionGoal};
@ -847,7 +847,10 @@ impl Motion {
SelectionGoal::None, SelectionGoal::None,
), ),
CurrentLine => (next_line_end(map, point, times), SelectionGoal::None), CurrentLine => (next_line_end(map, point, times), SelectionGoal::None),
StartOfDocument => (start_of_document(map, point, times), SelectionGoal::None), StartOfDocument => (
start_of_document(map, point, maybe_times),
SelectionGoal::None,
),
EndOfDocument => ( EndOfDocument => (
end_of_document(map, point, maybe_times), end_of_document(map, point, maybe_times),
SelectionGoal::None, SelectionGoal::None,
@ -1956,25 +1959,96 @@ fn start_of_next_sentence(map: &DisplaySnapshot, end_of_sentence: usize) -> Opti
Some(map.buffer_snapshot.len()) Some(map.buffer_snapshot.len())
} }
fn start_of_document(map: &DisplaySnapshot, point: DisplayPoint, line: usize) -> DisplayPoint { fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint {
let mut new_point = Point::new((line - 1) as u32, 0).to_display_point(map); let point = map.display_point_to_point(display_point, Bias::Left);
*new_point.column_mut() = point.column(); let Some(mut excerpt) = map.buffer_snapshot.excerpt_containing(point..point) else {
map.clip_point(new_point, Bias::Left) return display_point;
};
let offset = excerpt.buffer().point_to_offset(
excerpt
.buffer()
.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left),
);
let buffer_range = excerpt.buffer_range();
if offset >= buffer_range.start && offset <= buffer_range.end {
let point = map
.buffer_snapshot
.offset_to_point(excerpt.map_offset_from_buffer(offset));
return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left);
}
let mut last_position = None;
for (excerpt, buffer, range) in map.buffer_snapshot.excerpts() {
let excerpt_range = language::ToOffset::to_offset(&range.context.start, &buffer)
..language::ToOffset::to_offset(&range.context.end, &buffer);
if offset >= excerpt_range.start && offset <= excerpt_range.end {
let text_anchor = buffer.anchor_after(offset);
let anchor = Anchor::in_buffer(excerpt, buffer.remote_id(), text_anchor);
return anchor.to_display_point(map);
} else if offset <= excerpt_range.start {
let anchor = Anchor::in_buffer(excerpt, buffer.remote_id(), range.context.start);
return anchor.to_display_point(map);
} else {
last_position = Some(Anchor::in_buffer(
excerpt,
buffer.remote_id(),
range.context.end,
));
}
}
let mut last_point = last_position.unwrap().to_point(&map.buffer_snapshot);
last_point.column = point.column;
map.clip_point(
map.point_to_display_point(
map.buffer_snapshot.clip_point(point, Bias::Left),
Bias::Left,
),
Bias::Left,
)
}
fn start_of_document(
map: &DisplaySnapshot,
display_point: DisplayPoint,
maybe_times: Option<usize>,
) -> DisplayPoint {
if let Some(times) = maybe_times {
return go_to_line(map, display_point, times);
}
let point = map.display_point_to_point(display_point, Bias::Left);
let mut first_point = Point::zero();
first_point.column = point.column;
map.clip_point(
map.point_to_display_point(
map.buffer_snapshot.clip_point(first_point, Bias::Left),
Bias::Left,
),
Bias::Left,
)
} }
fn end_of_document( fn end_of_document(
map: &DisplaySnapshot, map: &DisplaySnapshot,
point: DisplayPoint, display_point: DisplayPoint,
line: Option<usize>, maybe_times: Option<usize>,
) -> DisplayPoint { ) -> DisplayPoint {
let new_row = if let Some(line) = line { if let Some(times) = maybe_times {
(line - 1) as u32 return go_to_line(map, display_point, times);
} else {
map.buffer_snapshot.max_row().0
}; };
let point = map.display_point_to_point(display_point, Bias::Left);
let mut last_point = map.buffer_snapshot.max_point();
last_point.column = point.column;
let new_point = Point::new(new_row, point.column()); map.clip_point(
map.clip_point(new_point.to_display_point(map), Bias::Left) map.point_to_display_point(
map.buffer_snapshot.clip_point(last_point, Bias::Left),
Bias::Left,
),
Bias::Left,
)
} }
fn matching_tag(map: &DisplaySnapshot, head: DisplayPoint) -> Option<DisplayPoint> { fn matching_tag(map: &DisplaySnapshot, head: DisplayPoint) -> Option<DisplayPoint> {
@ -2545,7 +2619,7 @@ fn section_motion(
direction: Direction, direction: Direction,
is_start: bool, is_start: bool,
) -> DisplayPoint { ) -> DisplayPoint {
if let Some((_, _, buffer)) = map.buffer_snapshot.as_singleton() { if map.buffer_snapshot.as_singleton().is_some() {
for _ in 0..times { for _ in 0..times {
let offset = map let offset = map
.display_point_to_point(display_point, Bias::Left) .display_point_to_point(display_point, Bias::Left)
@ -2553,13 +2627,14 @@ fn section_motion(
let range = if direction == Direction::Prev { let range = if direction == Direction::Prev {
0..offset 0..offset
} else { } else {
offset..buffer.len() offset..map.buffer_snapshot.len()
}; };
// we set a max start depth here because we want a section to only be "top level" // we set a max start depth here because we want a section to only be "top level"
// similar to vim's default of '{' in the first column. // similar to vim's default of '{' in the first column.
// (and without it, ]] at the start of editor.rs is -very- slow) // (and without it, ]] at the start of editor.rs is -very- slow)
let mut possibilities = buffer let mut possibilities = map
.buffer_snapshot
.text_object_ranges(range, language::TreeSitterOptions::max_start_depth(3)) .text_object_ranges(range, language::TreeSitterOptions::max_start_depth(3))
.filter(|(_, object)| { .filter(|(_, object)| {
matches!( matches!(
@ -2591,7 +2666,7 @@ fn section_motion(
let offset = if direction == Direction::Prev { let offset = if direction == Direction::Prev {
possibilities.max().unwrap_or(0) possibilities.max().unwrap_or(0)
} else { } else {
possibilities.min().unwrap_or(buffer.len()) possibilities.min().unwrap_or(map.buffer_snapshot.len())
}; };
let new_point = map.clip_point(offset.to_display_point(&map), Bias::Left); let new_point = map.clip_point(offset.to_display_point(&map), Bias::Left);

View file

@ -494,7 +494,7 @@ pub fn surrounding_html_tag(
let snapshot = &map.buffer_snapshot; let snapshot = &map.buffer_snapshot;
let offset = head.to_offset(map, Bias::Left); let offset = head.to_offset(map, Bias::Left);
let excerpt = snapshot.excerpt_containing(offset..offset)?; let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
let buffer = excerpt.buffer(); let buffer = excerpt.buffer();
let offset = excerpt.map_offset_to_buffer(offset); let offset = excerpt.map_offset_to_buffer(offset);
@ -664,7 +664,7 @@ fn text_object(
let snapshot = &map.buffer_snapshot; let snapshot = &map.buffer_snapshot;
let offset = relative_to.to_offset(map, Bias::Left); let offset = relative_to.to_offset(map, Bias::Left);
let excerpt = snapshot.excerpt_containing(offset..offset)?; let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
let buffer = excerpt.buffer(); let buffer = excerpt.buffer();
let offset = excerpt.map_offset_to_buffer(offset); let offset = excerpt.map_offset_to_buffer(offset);
@ -710,7 +710,7 @@ fn argument(
let offset = relative_to.to_offset(map, Bias::Left); let offset = relative_to.to_offset(map, Bias::Left);
// The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level
let excerpt = snapshot.excerpt_containing(offset..offset)?; let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
let buffer = excerpt.buffer(); let buffer = excerpt.buffer();
fn comma_delimited_range_at( fn comma_delimited_range_at(

View file

@ -102,6 +102,8 @@ use crate::persistence::{
SerializedAxis, SerializedAxis,
}; };
pub const SERIALIZATION_THROTTLE_TIME: Duration = Duration::from_millis(200);
static ZED_WINDOW_SIZE: LazyLock<Option<Size<Pixels>>> = LazyLock::new(|| { static ZED_WINDOW_SIZE: LazyLock<Option<Size<Pixels>>> = LazyLock::new(|| {
env::var("ZED_WINDOW_SIZE") env::var("ZED_WINDOW_SIZE")
.ok() .ok()
@ -4344,7 +4346,6 @@ impl Workspace {
cx: &mut AsyncWindowContext, cx: &mut AsyncWindowContext,
) -> Result<()> { ) -> Result<()> {
const CHUNK_SIZE: usize = 200; const CHUNK_SIZE: usize = 200;
const THROTTLE_TIME: Duration = Duration::from_millis(200);
let mut serializable_items = items_rx.ready_chunks(CHUNK_SIZE); let mut serializable_items = items_rx.ready_chunks(CHUNK_SIZE);
@ -4369,7 +4370,9 @@ impl Workspace {
} }
} }
cx.background_executor().timer(THROTTLE_TIME).await; cx.background_executor()
.timer(SERIALIZATION_THROTTLE_TIME)
.await;
} }
Ok(()) Ok(())

View file

@ -1469,7 +1469,7 @@ mod tests {
use workspace::{ use workspace::{
item::{Item, ItemHandle}, item::{Item, ItemHandle},
open_new, open_paths, pane, NewFile, OpenVisible, SaveIntent, SplitDirection, open_new, open_paths, pane, NewFile, OpenVisible, SaveIntent, SplitDirection,
WorkspaceHandle, WorkspaceHandle, SERIALIZATION_THROTTLE_TIME,
}; };
#[gpui::test] #[gpui::test]
@ -2866,7 +2866,9 @@ mod tests {
}) })
.unwrap(); .unwrap();
cx.run_until_parked(); cx.background_executor
.advance_clock(SERIALIZATION_THROTTLE_TIME);
cx.update(|_| {});
editor_1.assert_released(); editor_1.assert_released();
editor_2.assert_released(); editor_2.assert_released();
buffer.assert_released(); buffer.assert_released();

View file

@ -6,7 +6,6 @@ use cli::{ipc::IpcSender, CliRequest, CliResponse};
use client::parse_zed_link; use client::parse_zed_link;
use collections::HashMap; use collections::HashMap;
use db::kvp::KEY_VALUE_STORE; use db::kvp::KEY_VALUE_STORE;
use editor::scroll::Autoscroll;
use editor::Editor; use editor::Editor;
use fs::Fs; use fs::Fs;
use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender};
@ -14,7 +13,7 @@ use futures::channel::{mpsc, oneshot};
use futures::future::join_all; use futures::future::join_all;
use futures::{FutureExt, SinkExt, StreamExt}; use futures::{FutureExt, SinkExt, StreamExt};
use gpui::{AppContext, AsyncAppContext, Global, WindowHandle}; use gpui::{AppContext, AsyncAppContext, Global, WindowHandle};
use language::{Bias, Point}; use language::Point;
use recent_projects::{open_ssh_project, SshSettings}; use recent_projects::{open_ssh_project, SshSettings};
use remote::SshConnectionOptions; use remote::SshConnectionOptions;
use settings::Settings; use settings::Settings;
@ -236,11 +235,7 @@ pub async fn open_paths_with_positions(
workspace workspace
.update(cx, |_, cx| { .update(cx, |_, cx| {
active_editor.update(cx, |editor, cx| { active_editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx).display_snapshot; editor.go_to_singleton_buffer_point(point, cx);
let point = snapshot.buffer_snapshot.clip_point(point, Bias::Left);
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
s.select_ranges([point..point])
});
}); });
}) })
.log_err(); .log_err();