Merge branch 'main' into multi-server-completions-tailwind
commit ff3865a4ad
427 changed files with 43123 additions and 12861 deletions
@@ -14,8 +14,7 @@ use crate::{
    CodeLabel, LanguageScope, Outline,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use fs::LineEnding;
pub use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
use lsp::LanguageServerId;
@@ -348,13 +347,17 @@ impl CharKind {
}

impl Buffer {
    pub fn new<T: Into<String>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
    pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
        Self::build(
            TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
            TextBuffer::new(replica_id, id, base_text.into()),
            None,
            None,
        )
    }

    pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text),
            None,
            None,
        )
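The hunk above drops the `cx: &mut ModelContext<Self>` argument from `Buffer::new` and makes the buffer id an explicit `id: u64` parameter instead of reading `cx.model_id()` inside the constructor. A minimal, self-contained sketch of the same refactor, using toy stand-in types (`Context`, `Buffer`, and `build_in_context` here are hypothetical, not the types in the diff):

// Toy stand-ins for illustration only; none of these are the real gpui or Buffer types.
struct Context {
    model_id: usize,
}

struct Buffer {
    id: u64,
    text: String,
}

impl Buffer {
    // After the change: the caller supplies the id instead of the constructor
    // reading it from a context object.
    fn new(replica_id: u16, id: u64, base_text: impl Into<String>) -> Self {
        let _ = replica_id; // kept only to mirror the shape of the real signature
        Buffer {
            id,
            text: base_text.into(),
        }
    }
}

fn build_in_context(cx: &Context) -> Buffer {
    // Call sites that used to rely on the context now pass the id explicitly.
    Buffer::new(0, cx.model_id as u64, "hello")
}

fn main() {
    let cx = Context { model_id: 7 };
    let buffer = build_in_context(&cx);
    assert_eq!(buffer.id, 7);
    println!("buffer {} holds {:?}", buffer.id, buffer.text);
}

Passing the id explicitly also allows a buffer to be constructed in places where no model context is available at all.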
@@ -1295,6 +1298,10 @@ impl Buffer {
        self.text.forget_transaction(transaction_id);
    }

    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }

    pub fn wait_for_edits(
        &mut self,
        edit_ids: impl IntoIterator<Item = clock::Local>,
@@ -1661,6 +1668,22 @@ impl Buffer {
        }
    }

    pub fn undo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut ModelContext<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        if let Some(operation) = self.text.undo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }

    pub fn undo_to_transaction(
        &mut self,
        transaction_id: TransactionId,
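The hunks above add `merge_transactions` and `undo_transaction`, both thin wrappers that delegate to the underlying text buffer and, for undo, broadcast the resulting operation. A rough, self-contained sketch of the "undo a specific transaction by id" idea, with a toy history type that is not Zed's implementation:

use std::collections::HashMap;

type TransactionId = usize;

// Hypothetical toy history: each transaction records the insertions it made.
#[derive(Default)]
struct History {
    transactions: HashMap<TransactionId, Vec<(usize, String)>>,
}

impl History {
    // Returns true only if the transaction was known and actually reverted,
    // mirroring the bool returned by `undo_transaction` in the diff above.
    fn undo_transaction(&mut self, id: TransactionId, text: &mut String) -> bool {
        let Some(edits) = self.transactions.remove(&id) else {
            return false;
        };
        // Revert in reverse order so earlier offsets remain valid.
        for (offset, inserted) in edits.into_iter().rev() {
            text.replace_range(offset..offset + inserted.len(), "");
        }
        true
    }
}

fn main() {
    let mut text = String::from("hello world");
    let mut history = History::default();
    history.transactions.insert(1, vec![(5, " brave".to_string())]);
    text.insert_str(5, " brave");
    assert!(history.undo_transaction(1, &mut text));
    assert_eq!(text, "hello world");
    assert!(!history.undo_transaction(1, &mut text));
}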
@@ -2146,27 +2169,46 @@ impl BufferSnapshot {

    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut range = 0..self.len();
        let mut scope = self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        });
        let mut scope = None;
        let mut smallest_range: Option<Range<usize>> = None;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
            let mut cursor = layer.node().walk();
            while cursor.goto_first_child_for_byte(offset).is_some() {}
            let node_range = cursor.node().byte_range();
            if node_range.to_inclusive().contains(&offset) && node_range.len() < range.len() {
                range = node_range;
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });

            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.to_inclusive().contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            if let Some(range) = range {
                if smallest_range
                    .as_ref()
                    .map_or(true, |smallest_range| range.len() < smallest_range.len())
                {
                    smallest_range = Some(range);
                    scope = Some(LanguageScope {
                        language: layer.language.clone(),
                        override_id: layer.override_id(offset, &self.text),
                    });
                }
            }
        }

        scope
        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }

    pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
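The rewritten `language_scope_at` above descends each syntax layer to the node containing the offset, remembers the layer whose containing range is smallest, and only falls back to the buffer's base language when no layer matched. A self-contained sketch of just that "smallest containing range wins" selection, using plain std ranges rather than the real syntax-layer types:

use std::ops::Range;

// Given one candidate range per layer, pick the smallest one that contains `offset`
// (inclusive at the end, matching the `to_inclusive().contains(&offset)` check above).
fn smallest_containing(candidates: &[Range<usize>], offset: usize) -> Option<Range<usize>> {
    let mut smallest: Option<Range<usize>> = None;
    for range in candidates {
        if range.start <= offset && offset <= range.end {
            if smallest
                .as_ref()
                .map_or(true, |s| range.len() < s.len())
            {
                smallest = Some(range.clone());
            }
        }
    }
    smallest
}

fn main() {
    // Two "layers": an outer document range and a narrower embedded range.
    let layers = [0..100, 40..60];
    assert_eq!(smallest_containing(&layers, 50), Some(40..60));
    assert_eq!(smallest_containing(&layers, 10), Some(0..100));
    assert_eq!(smallest_containing(&layers, 200), None);
}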
@@ -2458,7 +2500,9 @@ impl BufferSnapshot {

            matches.advance();

            let Some((open, close)) = open.zip(close) else { continue };
            let Some((open, close)) = open.zip(close) else {
                continue;
            };

            let bracket_range = open.start..=close.end;
            if !bracket_range.overlaps(&range) {
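This hunk, like several later ones, only reformats a `let ... else` binding: the one-line `else { continue }` tail becomes a braced, multi-line block, presumably the output of a newer rustfmt that understands `let ... else`. A small self-contained example of the construct in that style:

fn sum_of_integers(lines: &[&str]) -> i64 {
    let mut total = 0;
    for line in lines {
        // The else branch of `let ... else` must diverge (continue, break, return, panic).
        let Ok(value) = line.trim().parse::<i64>() else {
            continue;
        };
        total += value;
    }
    total
}

fn main() {
    assert_eq!(sum_of_integers(&["1", "two", " 3 "]), 4);
    println!("ok");
}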
@@ -5,7 +5,6 @@ use crate::language_settings::{
use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use fs::LineEnding;
use gpui::{AppContext, ModelHandle};
use indoc::indoc;
use proto::deserialize_operation;
@@ -20,6 +19,7 @@ use std::{
    time::{Duration, Instant},
};
use text::network::Network;
use text::LineEnding;
use unindent::Unindent as _;
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};

@@ -43,8 +43,8 @@ fn test_line_endings(cx: &mut gpui::AppContext) {
    init_settings(cx, |_| {});

    cx.add_model(|cx| {
        let mut buffer =
            Buffer::new(0, "one\r\ntwo\rthree", cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
            .with_language(Arc::new(rust_lang()), cx);
        assert_eq!(buffer.text(), "one\ntwo\nthree");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);

@@ -138,8 +138,8 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
    let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
    buffer1.update(cx, {
        let buffer1_ops = buffer1_ops.clone();
@@ -222,7 +222,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
    let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
    let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));

    let text = "a\nccc\ndddd\nffffff\n";
@@ -254,7 +254,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
    ]
    .join("\n");

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
    let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));

    // Spawn a task to format the buffer's whitespace.
    // Pause so that the foratting task starts running.
@@ -318,8 +318,9 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
    });

    // Wait for the initial text to parse
    buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
@@ -443,7 +444,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
#[gpui::test]
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
    let buffer = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "{}", cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
        buffer.set_sync_parse_timeout(Duration::ZERO);
        buffer
    });
@@ -491,8 +493,9 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
    "#
    .unindent();

    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
    });
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();
@@ -576,8 +579,9 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
    "#
    .unindent();

    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
    });
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();
@@ -613,7 +617,9 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(language), cx));
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
    });
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // extra context nodes are included in the outline.
@@ -655,8 +661,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
    "#
    .unindent();

    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
    });
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // point is at the start of an item
@@ -877,7 +884,8 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
    cx.add_model(|cx| {
        let text = "fn a() { b(|c| {}) }";
        let buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
        let snapshot = buffer.snapshot();

        assert_eq!(
@@ -917,7 +925,8 @@ fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {

    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);

        buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");
@@ -959,7 +968,8 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {

    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);

        buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
        assert_eq!(buffer.text(), "fn a() {\n\t\n}");
@@ -1000,6 +1010,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
    cx.add_model(|cx| {
        let mut buffer = Buffer::new(
            0,
            cx.model_id() as u64,
            "
            fn a() {
                c;
@@ -1007,7 +1018,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
            }
            "
            .unindent(),
            cx,
        )
        .with_language(Arc::new(rust_lang()), cx);

@@ -1073,6 +1083,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
    cx.add_model(|cx| {
        let mut buffer = Buffer::new(
            0,
            cx.model_id() as u64,
            "
            fn a() {
                b();
@@ -1080,7 +1091,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
            "
            .replace("|", "") // marker to preserve trailing whitespace
            .unindent(),
            cx,
        )
        .with_language(Arc::new(rust_lang()), cx);

@@ -1136,13 +1146,13 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
    cx.add_model(|cx| {
        let mut buffer = Buffer::new(
            0,
            cx.model_id() as u64,
            "
            fn a() {
                i
            }
            "
            .unindent(),
            cx,
        )
        .with_language(Arc::new(rust_lang()), cx);

@@ -1198,11 +1208,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
    cx.add_model(|cx| {
        let mut buffer = Buffer::new(
            0,
            cx.model_id() as u64,
            "
            fn a() {}
            "
            .unindent(),
            cx,
        )
        .with_language(Arc::new(rust_lang()), cx);

@@ -1254,7 +1264,8 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {

    cx.add_model(|cx| {
        let text = "a\nb";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
        buffer.edit(
            [(0..1, "\n"), (2..3, "\n")],
            Some(AutoindentMode::EachLine),
@@ -1280,7 +1291,8 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
        buffer.edit(
            [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
            Some(AutoindentMode::EachLine),
@@ -1317,7 +1329,8 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
            }
        "#
        .unindent();
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);

        // When this text was copied, both of the quotation marks were at the same
        // indent level, but the indentation of the first line was not included in
@@ -1402,7 +1415,8 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
            }
        "#
        .unindent();
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);

        // The original indent columns are not known, so this text is
        // auto-indented in a block as if the first line was copied in
@@ -1481,7 +1495,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(
        let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Markdown".into(),
@@ -1557,7 +1571,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
            false,
        );

        let mut buffer = Buffer::new(0, text, cx);
        let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
        buffer.set_language_registry(language_registry);
        buffer.set_language(Some(html_language), cx);
        buffer.edit(
@@ -1593,7 +1607,8 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
    });

    cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(ruby_lang()), cx);
        let mut buffer =
            Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);

        let text = r#"
            class C
@@ -1631,7 +1646,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
}

#[gpui::test]
fn test_language_scope_at(cx: &mut AppContext) {
fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
    init_settings(cx, |_| {});

    cx.add_model(|cx| {
@@ -1683,7 +1698,8 @@ fn test_language_scope_at(cx: &mut AppContext) {

        let text = r#"a["b"] = <C d="e"></C>;"#;

        let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx);
        let buffer =
            Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx);
        let snapshot = buffer.snapshot();

        let config = snapshot.language_scope_at(0).unwrap();
@@ -1718,6 +1734,74 @@ fn test_language_scope_at(cx: &mut AppContext) {
    });
}

#[gpui::test]
fn test_language_scope_at_with_rust(cx: &mut AppContext) {
    init_settings(cx, |_| {});

    cx.add_model(|cx| {
        let language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                brackets: BracketPairConfig {
                    pairs: vec![
                        BracketPair {
                            start: "{".into(),
                            end: "}".into(),
                            close: true,
                            newline: false,
                        },
                        BracketPair {
                            start: "'".into(),
                            end: "'".into(),
                            close: true,
                            newline: false,
                        },
                    ],
                    disabled_scopes_by_bracket_ix: vec![
                        Vec::new(), //
                        vec!["string".into()],
                    ],
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )
        .with_override_query(
            r#"
            (string_literal) @string
            "#,
        )
        .unwrap();

        let text = r#"
            const S: &'static str = "hello";
        "#
        .unindent();

        let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
            .with_language(Arc::new(language), cx);
        let snapshot = buffer.snapshot();

        // By default, all brackets are enabled
        let config = snapshot.language_scope_at(0).unwrap();
        assert_eq!(
            config.brackets().map(|e| e.1).collect::<Vec<_>>(),
            &[true, true]
        );

        // Within a string, the quotation brackets are disabled.
        let string_config = snapshot
            .language_scope_at(text.find("ello").unwrap())
            .unwrap();
        assert_eq!(
            string_config.brackets().map(|e| e.1).collect::<Vec<_>>(),
            &[true, false]
        );

        buffer
    });
}

#[gpui::test]
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
    init_settings(cx, |_| {});
@@ -1739,7 +1823,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
        language_registry.add(Arc::new(html_lang()));
        language_registry.add(Arc::new(erb_lang()));

        let mut buffer = Buffer::new(0, text, cx);
        let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
        buffer.set_language_registry(language_registry.clone());
        buffer.set_language(
            language_registry
@@ -1771,7 +1855,7 @@ fn test_serialization(cx: &mut gpui::AppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
        buffer.edit([(3..3, "D")], None, cx);

        now += Duration::from_secs(1);
@@ -1826,7 +1910,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let network = Rc::new(RefCell::new(Network::new(rng.clone())));
    let base_buffer = cx.add_model(|cx| Buffer::new(0, base_text.as_str(), cx));
    let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
@@ -2327,7 +2411,8 @@ fn assert_bracket_pairs(
) {
    let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, expected_text.clone(), cx).with_language(Arc::new(language), cx)
        Buffer::new(0, cx.model_id() as u64, expected_text.clone())
            .with_language(Arc::new(language), cx)
    });
    let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());

@@ -18,7 +18,7 @@ use futures::{
    FutureExt, TryFutureExt as _,
};
use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
use highlight_map::HighlightMap;
pub use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinary};
use parking_lot::{Mutex, RwLock};
@@ -57,6 +57,7 @@ pub use diagnostic_set::DiagnosticEntry;
pub use lsp::LanguageServerId;
pub use outline::{Outline, OutlineItem};
pub use syntax_map::{OwnedSyntaxLayerInfo, SyntaxLayerInfo};
pub use text::LineEnding;
pub use tree_sitter::{Parser, Tree};

pub fn init(cx: &mut AppContext) {
@@ -20,17 +20,17 @@ pub fn deserialize_fingerprint(fingerprint: &str) -> Result<RopeFingerprint> {
        .map_err(|error| anyhow!("invalid fingerprint: {}", error))
}

pub fn deserialize_line_ending(message: proto::LineEnding) -> fs::LineEnding {
pub fn deserialize_line_ending(message: proto::LineEnding) -> text::LineEnding {
    match message {
        proto::LineEnding::Unix => fs::LineEnding::Unix,
        proto::LineEnding::Windows => fs::LineEnding::Windows,
        proto::LineEnding::Unix => text::LineEnding::Unix,
        proto::LineEnding::Windows => text::LineEnding::Windows,
    }
}

pub fn serialize_line_ending(message: fs::LineEnding) -> proto::LineEnding {
pub fn serialize_line_ending(message: text::LineEnding) -> proto::LineEnding {
    match message {
        fs::LineEnding::Unix => proto::LineEnding::Unix,
        fs::LineEnding::Windows => proto::LineEnding::Windows,
        text::LineEnding::Unix => proto::LineEnding::Unix,
        text::LineEnding::Windows => proto::LineEnding::Windows,
    }
}

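The only change in the hunk above is where `LineEnding` comes from (the `text` crate instead of `fs`); the Unix/Windows mapping is unchanged. A self-contained sketch of the round-trip property the two functions preserve, using stand-in enums rather than the real `proto` and `text` types:

// Stand-in enums for illustration; the real types live in the proto and text crates.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ProtoLineEnding {
    Unix,
    Windows,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TextLineEnding {
    Unix,
    Windows,
}

fn deserialize_line_ending(message: ProtoLineEnding) -> TextLineEnding {
    match message {
        ProtoLineEnding::Unix => TextLineEnding::Unix,
        ProtoLineEnding::Windows => TextLineEnding::Windows,
    }
}

fn serialize_line_ending(line_ending: TextLineEnding) -> ProtoLineEnding {
    match line_ending {
        TextLineEnding::Unix => ProtoLineEnding::Unix,
        TextLineEnding::Windows => ProtoLineEnding::Windows,
    }
}

fn main() {
    for ending in [ProtoLineEnding::Unix, ProtoLineEnding::Windows] {
        // serialize(deserialize(x)) == x for every protocol value.
        assert_eq!(serialize_line_ending(deserialize_line_ending(ending)), ending);
    }
    println!("round trip ok");
}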
@@ -207,6 +207,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
    }
}

// This behavior is currently copied in the collab database, for snapshotting channel notes
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
    Ok(
        match message
@@ -72,7 +72,7 @@ pub struct SyntaxMapMatch<'a> {

struct SyntaxMapCapturesLayer<'a> {
    depth: usize,
    captures: QueryCaptures<'a, 'a, TextProvider<'a>>,
    captures: QueryCaptures<'a, 'a, TextProvider<'a>, &'a [u8]>,
    next_capture: Option<QueryCapture<'a>>,
    grammar_index: usize,
    _query_cursor: QueryCursorHandle,
@@ -83,7 +83,7 @@ struct SyntaxMapMatchesLayer<'a> {
    next_pattern_index: usize,
    next_captures: Vec<QueryCapture<'a>>,
    has_next: bool,
    matches: QueryMatches<'a, 'a, TextProvider<'a>>,
    matches: QueryMatches<'a, 'a, TextProvider<'a>, &'a [u8]>,
    grammar_index: usize,
    _query_cursor: QueryCursorHandle,
}
@@ -310,7 +310,9 @@ impl SyntaxSnapshot {
            // Ignore edits that end before the start of this layer, and don't consider them
            // for any subsequent layers at this same depth.
            loop {
                let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else { continue 'outer };
                let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
                    continue 'outer;
                };
                if edit_range.end.cmp(&layer.range.start, text).is_le() {
                    first_edit_ix_for_depth += 1;
                } else {
@@ -391,7 +393,9 @@ impl SyntaxSnapshot {
            .filter::<_, ()>(|summary| summary.contains_unknown_injections);
        cursor.next(text);
        while let Some(layer) = cursor.item() {
            let SyntaxLayerContent::Pending { language_name } = &layer.content else { unreachable!() };
            let SyntaxLayerContent::Pending { language_name } = &layer.content else {
                unreachable!()
            };
            if registry
                .language_for_name_or_extension(language_name)
                .now_or_never()
@@ -533,7 +537,9 @@ impl SyntaxSnapshot {

            let content = match step.language {
                ParseStepLanguage::Loaded { language } => {
                    let Some(grammar) = language.grammar() else { continue };
                    let Some(grammar) = language.grammar() else {
                        continue;
                    };
                    let tree;
                    let changed_ranges;

@@ -1279,7 +1285,9 @@ fn get_injections(
    }

    for (language, mut included_ranges) in combined_injection_ranges.drain() {
        included_ranges.sort_unstable();
        included_ranges.sort_unstable_by(|a, b| {
            Ord::cmp(&a.start_byte, &b.start_byte).then_with(|| Ord::cmp(&a.end_byte, &b.end_byte))
        });
        queue.push(ParseStep {
            depth,
            language: ParseStepLanguage::Loaded { language },
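The last change above replaces `sort_unstable()` with an explicit comparator that orders the injected ranges by start byte and then by end byte. The same two-key comparison over plain std ranges, as a self-contained sketch:

use std::ops::Range;

fn sort_ranges(ranges: &mut [Range<usize>]) {
    // Order by start, breaking ties by end, mirroring the comparator in the diff.
    ranges.sort_unstable_by(|a, b| {
        Ord::cmp(&a.start, &b.start).then_with(|| Ord::cmp(&a.end, &b.end))
    });
}

fn main() {
    let mut ranges = vec![5..9, 0..10, 5..7, 0..3];
    sort_ranges(&mut ranges);
    assert_eq!(ranges, vec![0..3, 0..10, 5..7, 5..9]);
    println!("{ranges:?}");
}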
@@ -1697,7 +1705,7 @@ impl std::fmt::Debug for SyntaxLayer {
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
impl<'a> tree_sitter::TextProvider<&'a [u8]> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
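The two struct-field hunks earlier and the `impl` above track a tree-sitter API change: `QueryCaptures`, `QueryMatches`, and the `TextProvider` trait gained a type parameter naming the text chunk type, here `&'a [u8]`. A hedged sketch of implementing the parameterized trait for a trivial whole-string provider, assuming a tree-sitter crate version with this trait shape:

// Assumes a tree-sitter version whose TextProvider trait is generic over the
// chunk type, matching the `TextProvider<&'a [u8]>` signature in the diff above.
struct WholeText<'a>(&'a str);

impl<'a> tree_sitter::TextProvider<&'a [u8]> for WholeText<'a> {
    type I = std::iter::Once<&'a [u8]>;

    fn text(&mut self, _node: tree_sitter::Node) -> Self::I {
        // Hand the query engine the entire source as a single chunk.
        std::iter::once(self.0.as_bytes())
    }
}

Any chunk type that implements `AsRef<[u8]>` can fill the parameter; `&[u8]` is the simplest choice and is what the diff uses.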
@@ -932,8 +932,12 @@ fn check_interpolation(
        .zip(new_syntax_map.layers.iter())
    {
        assert_eq!(old_layer.range, new_layer.range);
        let Some(old_tree) = old_layer.content.tree() else { continue };
        let Some(new_tree) = new_layer.content.tree() else { continue };
        let Some(old_tree) = old_layer.content.tree() else {
            continue;
        };
        let Some(new_tree) = new_layer.content.tree() else {
            continue;
        };
        let old_start_byte = old_layer.range.start.to_offset(old_buffer);
        let new_start_byte = new_layer.range.start.to_offset(new_buffer);
        let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();