Remove 2 suffix for lsp, language, fuzzy
Co-authored-by: Mikayla <mikayla@zed.dev>
parent c5a1950522
commit 9f99e58834
73 changed files with 652 additions and 15589 deletions
@@ -18,7 +18,8 @@ use crate::{
 use anyhow::{anyhow, Result};
 pub use clock::ReplicaId;
 use futures::channel::oneshot;
-use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
+use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
+use lazy_static::lazy_static;
 use lsp::LanguageServerId;
 use parking_lot::Mutex;
 use similar::{ChangeTag, TextDiff};
@@ -52,14 +53,23 @@ pub use {tree_sitter_rust, tree_sitter_typescript};
 
 pub use lsp::DiagnosticSeverity;
 
+lazy_static! {
+    pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
+}
+
 pub struct Buffer {
     text: TextBuffer,
     diff_base: Option<String>,
     git_diff: git::diff::BufferDiff,
     file: Option<Arc<dyn File>>,
-    saved_version: clock::Global,
-    saved_version_fingerprint: RopeFingerprint,
+    /// The mtime of the file when this buffer was last loaded from
+    /// or saved to disk.
     saved_mtime: SystemTime,
+    /// The version vector when this buffer was last loaded from
+    /// or saved to disk.
+    saved_version: clock::Global,
+    /// A hash of the current contents of the buffer's file.
+    file_fingerprint: RopeFingerprint,
    transaction_depth: usize,
     was_dirty_before_starting_transaction: Option<bool>,
     reload_task: Option<Task<Result<()>>>,
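For context, a minimal hypothetical sketch (not part of this commit) of how a TaskLabel like BUFFER_DIFF_TASK above is defined and used: spawn_labeled behaves like spawn but tags the task so it can be identified later, for example by a test executor. The EXAMPLE_TASK name and the computed value are illustrative.

use gpui::{AppContext, Task, TaskLabel};
use lazy_static::lazy_static;

lazy_static! {
    // Same shape as BUFFER_DIFF_TASK above; the name is made up.
    pub static ref EXAMPLE_TASK: TaskLabel = TaskLabel::new();
}

fn spawn_labeled_work(cx: &AppContext) -> Task<usize> {
    // Runs the future on the background executor, tagged with EXAMPLE_TASK.
    cx.background_executor()
        .spawn_labeled(*EXAMPLE_TASK, async move { 6 * 7 })
}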
@@ -190,8 +200,8 @@ pub struct Completion {
     pub old_range: Range<Anchor>,
     pub new_text: String,
     pub label: CodeLabel,
-    pub documentation: Option<Documentation>,
     pub server_id: LanguageServerId,
+    pub documentation: Option<Documentation>,
     pub lsp_completion: lsp::CompletionItem,
 }
 
@@ -422,8 +432,7 @@ impl Buffer {
                 .ok_or_else(|| anyhow!("missing line_ending"))?,
         ));
         this.saved_version = proto::deserialize_version(&message.saved_version);
-        this.saved_version_fingerprint =
-            proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
+        this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
         this.saved_mtime = message
             .saved_mtime
             .ok_or_else(|| anyhow!("invalid saved_mtime"))?
@@ -439,7 +448,7 @@ impl Buffer {
             diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
             line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
             saved_version: proto::serialize_version(&self.saved_version),
-            saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
+            saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
             saved_mtime: Some(self.saved_mtime.into()),
         }
     }
@@ -477,7 +486,7 @@ impl Buffer {
         ));
 
         let text_operations = self.text.operations().clone();
-        cx.background().spawn(async move {
+        cx.background_executor().spawn(async move {
             let since = since.unwrap_or_default();
             operations.extend(
                 text_operations
@@ -509,7 +518,7 @@ impl Buffer {
         Self {
             saved_mtime,
             saved_version: buffer.version(),
-            saved_version_fingerprint: buffer.as_rope().fingerprint(),
+            file_fingerprint: buffer.as_rope().fingerprint(),
             reload_task: None,
             transaction_depth: 0,
             was_dirty_before_starting_transaction: None,
@@ -576,7 +585,7 @@ impl Buffer {
     }
 
     pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
-        self.saved_version_fingerprint
+        self.file_fingerprint
     }
 
     pub fn saved_mtime(&self) -> SystemTime {
@@ -604,7 +613,7 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) {
         self.saved_version = version;
-        self.saved_version_fingerprint = fingerprint;
+        self.file_fingerprint = fingerprint;
         self.saved_mtime = mtime;
         cx.emit(Event::Saved);
         cx.notify();
@@ -620,13 +629,14 @@ impl Buffer {
             let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
                 let file = this.file.as_ref()?.as_local()?;
                 Some((file.mtime(), file.load(cx)))
-            }) else {
+            })?
+            else {
                 return Ok(());
             };
 
             let new_text = new_text.await?;
             let diff = this
-                .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))
+                .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
                 .await;
             this.update(&mut cx, |this, cx| {
                 if this.version() == diff.base_version {
@ -652,8 +662,7 @@ impl Buffer {
|
|||
}
|
||||
|
||||
this.reload_task.take();
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
}));
|
||||
rx
|
||||
}
|
||||
|
@@ -667,14 +676,14 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) {
         self.saved_version = version;
-        self.saved_version_fingerprint = fingerprint;
+        self.file_fingerprint = fingerprint;
         self.text.set_line_ending(line_ending);
         self.saved_mtime = mtime;
         if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
             file.buffer_reloaded(
                 self.remote_id(),
                 &self.saved_version,
-                self.saved_version_fingerprint,
+                self.file_fingerprint,
                 self.line_ending(),
                 self.saved_mtime,
                 cx,
@@ -736,20 +745,18 @@ impl Buffer {
         let snapshot = self.snapshot();
 
         let mut diff = self.git_diff.clone();
-        let diff = cx.background().spawn(async move {
+        let diff = cx.background_executor().spawn(async move {
             diff.update(&diff_base, &snapshot).await;
             diff
         });
 
-        let handle = cx.weak_handle();
-        Some(cx.spawn_weak(|_, mut cx| async move {
+        Some(cx.spawn(|this, mut cx| async move {
             let buffer_diff = diff.await;
-            if let Some(this) = handle.upgrade(&mut cx) {
-                this.update(&mut cx, |this, _| {
-                    this.git_diff = buffer_diff;
-                    this.git_diff_update_count += 1;
-                })
-            }
+            this.update(&mut cx, |this, _| {
+                this.git_diff = buffer_diff;
+                this.git_diff_update_count += 1;
+            })
+            .ok();
         }))
     }
 
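The hunk above replaces the gpui 1 weak-handle dance (cx.weak_handle, spawn_weak, upgrade) with gpui 2's cx.spawn, whose closure receives a weak handle directly; update on that handle returns a Result that can be discarded with .ok() once the model is gone. A hypothetical model illustrating the same pattern (Counter and count are made-up names):

use gpui::{ModelContext, Task};

struct Counter {
    count: usize,
}

impl Counter {
    fn bump_later(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
        cx.spawn(|this, mut cx| async move {
            // `this` is a weak handle; update() fails if the model was dropped.
            this.update(&mut cx, |this, cx| {
                this.count += 1;
                cx.notify();
            })
            .ok();
        })
    }
}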
@ -847,7 +854,7 @@ impl Buffer {
|
|||
let mut syntax_snapshot = syntax_map.snapshot();
|
||||
drop(syntax_map);
|
||||
|
||||
let parse_task = cx.background().spawn({
|
||||
let parse_task = cx.background_executor().spawn({
|
||||
let language = language.clone();
|
||||
let language_registry = language_registry.clone();
|
||||
async move {
|
||||
|
@ -857,7 +864,7 @@ impl Buffer {
|
|||
});
|
||||
|
||||
match cx
|
||||
.background()
|
||||
.background_executor()
|
||||
.block_with_timeout(self.sync_parse_timeout, parse_task)
|
||||
{
|
||||
Ok(new_syntax_snapshot) => {
|
||||
|
@ -886,7 +893,8 @@ impl Buffer {
|
|||
if parse_again {
|
||||
this.reparse(cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
@ -919,9 +927,9 @@ impl Buffer {
|
|||
|
||||
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if let Some(indent_sizes) = self.compute_autoindents() {
|
||||
let indent_sizes = cx.background().spawn(indent_sizes);
|
||||
let indent_sizes = cx.background_executor().spawn(indent_sizes);
|
||||
match cx
|
||||
.background()
|
||||
.background_executor()
|
||||
.block_with_timeout(Duration::from_micros(500), indent_sizes)
|
||||
{
|
||||
Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
|
||||
|
@ -930,7 +938,8 @@ impl Buffer {
|
|||
let indent_sizes = indent_sizes.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.apply_autoindents(indent_sizes, cx);
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
@ -1169,36 +1178,72 @@ impl Buffer {
|
|||
pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
|
||||
let old_text = self.as_rope().clone();
|
||||
let base_version = self.version();
|
||||
cx.background().spawn(async move {
|
||||
let old_text = old_text.to_string();
|
||||
let line_ending = LineEnding::detect(&new_text);
|
||||
LineEnding::normalize(&mut new_text);
|
||||
let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
|
||||
let mut edits = Vec::new();
|
||||
let mut offset = 0;
|
||||
let empty: Arc<str> = "".into();
|
||||
for change in diff.iter_all_changes() {
|
||||
let value = change.value();
|
||||
let end_offset = offset + value.len();
|
||||
match change.tag() {
|
||||
ChangeTag::Equal => {
|
||||
offset = end_offset;
|
||||
cx.background_executor()
|
||||
.spawn_labeled(*BUFFER_DIFF_TASK, async move {
|
||||
let old_text = old_text.to_string();
|
||||
let line_ending = LineEnding::detect(&new_text);
|
||||
LineEnding::normalize(&mut new_text);
|
||||
|
||||
let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
|
||||
let empty: Arc<str> = "".into();
|
||||
|
||||
let mut edits = Vec::new();
|
||||
let mut old_offset = 0;
|
||||
let mut new_offset = 0;
|
||||
let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
|
||||
for change in diff.iter_all_changes().map(Some).chain([None]) {
|
||||
if let Some(change) = &change {
|
||||
let len = change.value().len();
|
||||
match change.tag() {
|
||||
ChangeTag::Equal => {
|
||||
old_offset += len;
|
||||
new_offset += len;
|
||||
}
|
||||
ChangeTag::Delete => {
|
||||
let old_end_offset = old_offset + len;
|
||||
if let Some((last_old_range, _)) = &mut last_edit {
|
||||
last_old_range.end = old_end_offset;
|
||||
} else {
|
||||
last_edit =
|
||||
Some((old_offset..old_end_offset, new_offset..new_offset));
|
||||
}
|
||||
old_offset = old_end_offset;
|
||||
}
|
||||
ChangeTag::Insert => {
|
||||
let new_end_offset = new_offset + len;
|
||||
if let Some((_, last_new_range)) = &mut last_edit {
|
||||
last_new_range.end = new_end_offset;
|
||||
} else {
|
||||
last_edit =
|
||||
Some((old_offset..old_offset, new_offset..new_end_offset));
|
||||
}
|
||||
new_offset = new_end_offset;
|
||||
}
|
||||
}
|
||||
}
|
||||
ChangeTag::Delete => {
|
||||
edits.push((offset..end_offset, empty.clone()));
|
||||
offset = end_offset;
|
||||
}
|
||||
ChangeTag::Insert => {
|
||||
edits.push((offset..offset, value.into()));
|
||||
|
||||
if let Some((old_range, new_range)) = &last_edit {
|
||||
if old_offset > old_range.end
|
||||
|| new_offset > new_range.end
|
||||
|| change.is_none()
|
||||
{
|
||||
let text = if new_range.is_empty() {
|
||||
empty.clone()
|
||||
} else {
|
||||
new_text[new_range.clone()].into()
|
||||
};
|
||||
edits.push((old_range.clone(), text));
|
||||
last_edit.take();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Diff {
|
||||
base_version,
|
||||
line_ending,
|
||||
edits,
|
||||
}
|
||||
})
|
||||
|
||||
Diff {
|
||||
base_version,
|
||||
line_ending,
|
||||
edits,
|
||||
}
|
||||
})
|
||||
}
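As a standalone reference for the hunk above, here is a simplified, hypothetical helper (no task spawning, and without the run-coalescing the new code adds) showing how a character diff from the similar crate is turned into (old_range, new_text) edits; it mirrors the logic of the removed version:

use similar::{ChangeTag, TextDiff};
use std::{ops::Range, sync::Arc};

fn char_edits(old: &str, new: &str) -> Vec<(Range<usize>, Arc<str>)> {
    let diff = TextDiff::from_chars(old, new);
    let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
    let mut offset = 0; // current position in the old text
    for change in diff.iter_all_changes() {
        let value = change.value();
        match change.tag() {
            // Unchanged characters only advance the offset.
            ChangeTag::Equal => offset += value.len(),
            // Deleted characters become an empty replacement of their range.
            ChangeTag::Delete => {
                let end = offset + value.len();
                edits.push((offset..end, "".into()));
                offset = end;
            }
            // Inserted characters become an insertion at the current offset.
            ChangeTag::Insert => edits.push((offset..offset, value.into())),
        }
    }
    edits
}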
|
||||
|
||||
/// Spawn a background task that searches the buffer for any whitespace
|
||||
|
@ -1207,7 +1252,7 @@ impl Buffer {
|
|||
let old_text = self.as_rope().clone();
|
||||
let line_ending = self.line_ending();
|
||||
let base_version = self.version();
|
||||
cx.background().spawn(async move {
|
||||
cx.background_executor().spawn(async move {
|
||||
let ranges = trailing_whitespace_ranges(&old_text);
|
||||
let empty = Arc::<str>::from("");
|
||||
Diff {
|
||||
|
@ -1282,12 +1327,12 @@ impl Buffer {
|
|||
}
|
||||
|
||||
pub fn is_dirty(&self) -> bool {
|
||||
self.saved_version_fingerprint != self.as_rope().fingerprint()
|
||||
self.file_fingerprint != self.as_rope().fingerprint()
|
||||
|| self.file.as_ref().map_or(false, |file| file.is_deleted())
|
||||
}
|
||||
|
||||
pub fn has_conflict(&self) -> bool {
|
||||
self.saved_version_fingerprint != self.as_rope().fingerprint()
|
||||
self.file_fingerprint != self.as_rope().fingerprint()
|
||||
&& self
|
||||
.file
|
||||
.as_ref()
|
||||
|
@ -1458,95 +1503,82 @@ impl Buffer {
|
|||
return None;
|
||||
}
|
||||
|
||||
// Non-generic part hoisted out to reduce LLVM IR size.
|
||||
fn tail(
|
||||
this: &mut Buffer,
|
||||
edits: Vec<(Range<usize>, Arc<str>)>,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
cx: &mut ModelContext<Buffer>,
|
||||
) -> Option<clock::Lamport> {
|
||||
this.start_transaction();
|
||||
this.pending_autoindent.take();
|
||||
let autoindent_request = autoindent_mode
|
||||
.and_then(|mode| this.language.as_ref().map(|_| (this.snapshot(), mode)));
|
||||
self.start_transaction();
|
||||
self.pending_autoindent.take();
|
||||
let autoindent_request = autoindent_mode
|
||||
.and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
|
||||
|
||||
let edit_operation = this.text.edit(edits.iter().cloned());
|
||||
let edit_id = edit_operation.timestamp();
|
||||
let edit_operation = self.text.edit(edits.iter().cloned());
|
||||
let edit_id = edit_operation.timestamp();
|
||||
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.zip(&edit_operation.as_edit().unwrap().new_text)
|
||||
.map(|((ix, (range, _)), new_text)| {
|
||||
let new_text_length = new_text.len();
|
||||
let old_start = range.start.to_point(&before_edit);
|
||||
let new_start = (delta + range.start as isize) as usize;
|
||||
delta +=
|
||||
new_text_length as isize - (range.end as isize - range.start as isize);
|
||||
if let Some((before_edit, mode)) = autoindent_request {
|
||||
let mut delta = 0isize;
|
||||
let entries = edits
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.zip(&edit_operation.as_edit().unwrap().new_text)
|
||||
.map(|((ix, (range, _)), new_text)| {
|
||||
let new_text_length = new_text.len();
|
||||
let old_start = range.start.to_point(&before_edit);
|
||||
let new_start = (delta + range.start as isize) as usize;
|
||||
delta += new_text_length as isize - (range.end as isize - range.start as isize);
|
||||
|
||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||
let mut first_line_is_new = false;
|
||||
let mut original_indent_column = None;
|
||||
let mut range_of_insertion_to_indent = 0..new_text_length;
|
||||
let mut first_line_is_new = false;
|
||||
let mut original_indent_column = None;
|
||||
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
if new_text.contains('\n')
|
||||
&& old_start.column
|
||||
<= before_edit.indent_size_for_line(old_start.row).len
|
||||
{
|
||||
first_line_is_new = true;
|
||||
// When inserting an entire line at the beginning of an existing line,
|
||||
// treat the insertion as new.
|
||||
if new_text.contains('\n')
|
||||
&& old_start.column <= before_edit.indent_size_for_line(old_start.row).len
|
||||
{
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column =
|
||||
Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}));
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
// When inserting text starting with a newline, avoid auto-indenting the
|
||||
// previous line.
|
||||
if new_text.starts_with('\n') {
|
||||
range_of_insertion_to_indent.start += 1;
|
||||
first_line_is_new = true;
|
||||
}
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Avoid auto-indenting after the insertion.
|
||||
if let AutoindentMode::Block {
|
||||
original_indent_columns,
|
||||
} = &mode
|
||||
{
|
||||
original_indent_column = Some(
|
||||
original_indent_columns.get(ix).copied().unwrap_or_else(|| {
|
||||
indent_size_for_text(
|
||||
new_text[range_of_insertion_to_indent.clone()].chars(),
|
||||
)
|
||||
.len
|
||||
}),
|
||||
);
|
||||
if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
|
||||
range_of_insertion_to_indent.end -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
AutoindentRequestEntry {
|
||||
first_line_is_new,
|
||||
original_indent_column,
|
||||
indent_size: before_edit.language_indent_size_at(range.start, cx),
|
||||
range: this
|
||||
.anchor_before(new_start + range_of_insertion_to_indent.start)
|
||||
..this.anchor_after(new_start + range_of_insertion_to_indent.end),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
this.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
}
|
||||
|
||||
this.end_transaction(cx);
|
||||
this.send_operation(Operation::Buffer(edit_operation), cx);
|
||||
Some(edit_id)
|
||||
self.autoindent_requests.push(Arc::new(AutoindentRequest {
|
||||
before_edit,
|
||||
entries,
|
||||
is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
|
||||
}));
|
||||
}
|
||||
tail(self, edits, autoindent_mode, cx)
|
||||
|
||||
self.end_transaction(cx);
|
||||
self.send_operation(Operation::Buffer(edit_operation), cx);
|
||||
Some(edit_id)
|
||||
}
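For reference, a small usage sketch of the edit call whose internals changed above, matching the shape the tests later in this commit use (the offsets and inserted text are arbitrary):

buffer.update(cx, |buffer, cx| {
    // Insert two newlines at offset 8 and auto-indent each new line.
    buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
});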
|
||||
|
||||
fn did_edit(
|
||||
|
@ -1879,9 +1911,7 @@ impl Buffer {
|
|||
}
|
||||
}
|
||||
|
||||
impl Entity for Buffer {
|
||||
type Event = Event;
|
||||
}
|
||||
impl EventEmitter<Event> for Buffer {}
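The hunk above swaps gpui 1's Entity implementation for gpui 2's EventEmitter trait, where the event type becomes a trait parameter. A hypothetical model making the same change (MyModel and MyEvent are illustrative names):

use gpui::{EventEmitter, ModelContext};

enum MyEvent {
    Saved,
}

struct MyModel;

// gpui 1 equivalent:  impl Entity for MyModel { type Event = MyEvent; }
impl EventEmitter<MyEvent> for MyModel {}

fn mark_saved(_model: &mut MyModel, cx: &mut ModelContext<MyModel>) {
    // Emitting still goes through the ModelContext, as in the did_save hunk earlier.
    cx.emit(MyEvent::Saved);
}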
|
||||
|
||||
impl Deref for Buffer {
|
||||
type Target = TextBuffer;
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
use super::*;
|
||||
use crate::language_settings::{
|
||||
AllLanguageSettings, AllLanguageSettingsContent, LanguageSettingsContent,
|
||||
};
|
||||
|
||||
use super::*;
|
||||
use crate::Buffer;
|
||||
use clock::ReplicaId;
|
||||
use collections::BTreeMap;
|
||||
use gpui::{AppContext, ModelHandle};
|
||||
use gpui::{AppContext, Model};
|
||||
use gpui::{Context, TestAppContext};
|
||||
use indoc::indoc;
|
||||
use proto::deserialize_operation;
|
||||
use rand::prelude::*;
|
||||
use regex::RegexBuilder;
|
||||
use settings::SettingsStore;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
env,
|
||||
ops::Range,
|
||||
rc::Rc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use text::network::Network;
|
||||
use text::LineEnding;
|
||||
use text::{Point, ToPoint};
|
||||
use unindent::Unindent as _;
|
||||
use util::{assert_set_eq, post_inc, test::marked_text_ranges, RandomCharIter};
|
||||
|
||||
|
@ -42,8 +42,8 @@ fn init_logger() {
|
|||
fn test_line_endings(cx: &mut gpui::AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
|
||||
cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "one\r\ntwo\rthree")
|
||||
.with_language(Arc::new(rust_lang()), cx);
|
||||
assert_eq!(buffer.text(), "one\ntwo\nthree");
|
||||
assert_eq!(buffer.line_ending(), LineEnding::Windows);
|
||||
|
@ -135,24 +135,24 @@ fn test_select_language() {
|
|||
#[gpui::test]
|
||||
fn test_edit_events(cx: &mut gpui::AppContext) {
|
||||
let mut now = Instant::now();
|
||||
let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer_1_events = Arc::new(Mutex::new(Vec::new()));
|
||||
let buffer_2_events = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
|
||||
let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
|
||||
let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
|
||||
let buffer1 = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcdef"));
|
||||
let buffer2 = cx.new_model(|cx| Buffer::new(1, cx.entity_id().as_u64(), "abcdef"));
|
||||
let buffer1_ops = Arc::new(Mutex::new(Vec::new()));
|
||||
buffer1.update(cx, {
|
||||
let buffer1_ops = buffer1_ops.clone();
|
||||
|buffer, cx| {
|
||||
let buffer_1_events = buffer_1_events.clone();
|
||||
cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
|
||||
Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
|
||||
event => buffer_1_events.borrow_mut().push(event),
|
||||
Event::Operation(op) => buffer1_ops.lock().push(op),
|
||||
event => buffer_1_events.lock().push(event),
|
||||
})
|
||||
.detach();
|
||||
let buffer_2_events = buffer_2_events.clone();
|
||||
cx.subscribe(&buffer2, move |_, _, event, _| {
|
||||
buffer_2_events.borrow_mut().push(event.clone())
|
||||
buffer_2_events.lock().push(event.clone())
|
||||
})
|
||||
.detach();
|
||||
|
||||
|
@ -179,12 +179,10 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||
// Incorporating a set of remote ops emits a single edited event,
|
||||
// followed by a dirty changed event.
|
||||
buffer2.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
|
||||
.unwrap();
|
||||
buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
|
||||
});
|
||||
assert_eq!(
|
||||
mem::take(&mut *buffer_1_events.borrow_mut()),
|
||||
mem::take(&mut *buffer_1_events.lock()),
|
||||
vec![
|
||||
Event::Edited,
|
||||
Event::DirtyChanged,
|
||||
|
@ -193,7 +191,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||
]
|
||||
);
|
||||
assert_eq!(
|
||||
mem::take(&mut *buffer_2_events.borrow_mut()),
|
||||
mem::take(&mut *buffer_2_events.lock()),
|
||||
vec![Event::Edited, Event::DirtyChanged]
|
||||
);
|
||||
|
||||
|
@ -205,28 +203,26 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
|
|||
// Incorporating the remote ops again emits a single edited event,
|
||||
// followed by a dirty changed event.
|
||||
buffer2.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
|
||||
.unwrap();
|
||||
buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap();
|
||||
});
|
||||
assert_eq!(
|
||||
mem::take(&mut *buffer_1_events.borrow_mut()),
|
||||
mem::take(&mut *buffer_1_events.lock()),
|
||||
vec![Event::Edited, Event::DirtyChanged,]
|
||||
);
|
||||
assert_eq!(
|
||||
mem::take(&mut *buffer_2_events.borrow_mut()),
|
||||
mem::take(&mut *buffer_2_events.lock()),
|
||||
vec![Event::Edited, Event::DirtyChanged]
|
||||
);
|
||||
}
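A condensed sketch of the shared-state change running through these tests: event buffers move from Rc<RefCell<Vec<_>>> to Arc<Mutex<Vec<_>>> (parking_lot), so borrow_mut() becomes lock(); the lines below are illustrative, not taken from the commit:

// gpui 1 style:
//     let events = Rc::new(RefCell::new(Vec::new()));
//     events.borrow_mut().push(Event::Edited);
// After this commit:
let events: Arc<Mutex<Vec<Event>>> = Arc::new(Mutex::new(Vec::new()));
events.lock().push(Event::Edited);
assert_eq!(mem::take(&mut *events.lock()), vec![Event::Edited]);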
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
||||
async fn test_apply_diff(cx: &mut TestAppContext) {
|
||||
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
|
||||
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
|
||||
let anchor = buffer.update(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
|
||||
|
||||
let text = "a\nccc\ndddd\nffffff\n";
|
||||
let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||
let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.apply_diff(diff, cx).unwrap();
|
||||
assert_eq!(buffer.text(), text);
|
||||
|
@ -234,7 +230,7 @@ async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
|
|||
});
|
||||
|
||||
let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
|
||||
let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||
let diff = buffer.update(cx, |b, cx| b.diff(text.into(), cx)).await;
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.apply_diff(diff, cx).unwrap();
|
||||
assert_eq!(buffer.text(), text);
|
||||
|
@ -254,15 +250,15 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||
]
|
||||
.join("\n");
|
||||
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
|
||||
let buffer = cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
|
||||
|
||||
// Spawn a task to format the buffer's whitespace.
|
||||
// Pause so that the foratting task starts running.
|
||||
let format = buffer.read_with(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
|
||||
let format = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
|
||||
smol::future::yield_now().await;
|
||||
|
||||
// Edit the buffer while the normalization task is running.
|
||||
let version_before_edit = buffer.read_with(cx, |buffer, _| buffer.version());
|
||||
let version_before_edit = buffer.update(cx, |buffer, _| buffer.version());
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
|
@ -318,12 +314,13 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
|
|||
#[gpui::test]
|
||||
async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
||||
let text = "fn a() {}";
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
|
||||
// Wait for the initial text to parse
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
|
@ -354,7 +351,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d; }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
|
@ -386,7 +384,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
|
@ -408,7 +406,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a() {}");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
|
@ -426,7 +425,7 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
|
||||
assert!(buf.is_parsing());
|
||||
});
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
concat!(
|
||||
|
@ -443,15 +442,15 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
|
|||
|
||||
#[gpui::test]
|
||||
async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), "{}").with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.set_sync_parse_timeout(Duration::ZERO);
|
||||
buffer
|
||||
});
|
||||
|
||||
// Wait for the initial text to parse
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert_eq!(
|
||||
get_tree_sexp(&buffer, cx),
|
||||
"(source_file (expression_statement (block)))"
|
||||
|
@ -460,7 +459,7 @@ async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
|
|||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_language(Some(Arc::new(json_lang())), cx)
|
||||
});
|
||||
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
|
||||
cx.executor().run_until_parked();
|
||||
assert_eq!(get_tree_sexp(&buffer, cx), "(document (object))");
|
||||
}
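Putting the recurring test-side changes together (cx.new_model instead of cx.add_model, cx.entity_id().as_u64() instead of cx.model_id(), and cx.executor().run_until_parked() instead of awaiting buffer.condition), a minimal hypothetical test in the new style looks roughly like this:

#[gpui::test]
async fn test_waits_for_reparse(cx: &mut TestAppContext) {
    let buffer = cx.new_model(|cx| {
        Buffer::new(0, cx.entity_id().as_u64(), "fn a() {}")
            .with_language(Arc::new(rust_lang()), cx)
    });

    // Drain all pending foreground and background work, then assert on the
    // settled state instead of awaiting a condition.
    cx.executor().run_until_parked();
    assert!(!buffer.update(cx, |buffer, _| buffer.is_parsing()));
}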
|
||||
|
||||
|
@ -493,11 +492,11 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.update(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
|
@ -560,7 +559,7 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
|
|||
cx: &'a gpui::TestAppContext,
|
||||
) -> Vec<(&'a str, Vec<usize>)> {
|
||||
let matches = cx
|
||||
.read(|cx| outline.search(query, cx.background().clone()))
|
||||
.update(|cx| outline.search(query, cx.background_executor().clone()))
|
||||
.await;
|
||||
matches
|
||||
.into_iter()
|
||||
|
@ -579,11 +578,11 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let outline = buffer
|
||||
.read_with(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.update(cx, |buffer, _| buffer.snapshot().outline(None))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
|
@ -617,10 +616,10 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(language), cx)
|
||||
});
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
// extra context nodes are included in the outline.
|
||||
let outline = snapshot.outline(None).unwrap();
|
||||
|
@ -661,10 +660,10 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx)
|
||||
});
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
|
||||
|
||||
// point is at the start of an item
|
||||
assert_eq!(
|
||||
|
@ -882,10 +881,10 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
|
|||
|
||||
#[gpui::test]
|
||||
fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "fn a() { b(|c| {}) }";
|
||||
let buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
assert_eq!(
|
||||
|
@ -923,10 +922,10 @@ fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
|
|||
fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "fn a() {}";
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||
assert_eq!(buffer.text(), "fn a() {\n \n}");
|
||||
|
@ -966,10 +965,10 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
|
|||
settings.defaults.hard_tabs = Some(true);
|
||||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "fn a() {}";
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
|
||||
assert_eq!(buffer.text(), "fn a() {\n\t\n}");
|
||||
|
@ -1007,10 +1006,11 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
|
|||
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let entity_id = cx.entity_id();
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
entity_id.as_u64(),
|
||||
"
|
||||
fn a() {
|
||||
c;
|
||||
|
@ -1080,10 +1080,12 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
buffer
|
||||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
eprintln!("second buffer: {:?}", cx.entity_id());
|
||||
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
cx.entity_id().as_u64(),
|
||||
"
|
||||
fn a() {
|
||||
b();
|
||||
|
@ -1137,16 +1139,18 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
|
|||
);
|
||||
buffer
|
||||
});
|
||||
|
||||
eprintln!("DONE");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
cx.entity_id().as_u64(),
|
||||
"
|
||||
fn a() {
|
||||
i
|
||||
|
@ -1205,10 +1209,10 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
|
|||
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::new(
|
||||
0,
|
||||
cx.model_id() as u64,
|
||||
cx.entity_id().as_u64(),
|
||||
"
|
||||
fn a() {}
|
||||
"
|
||||
|
@ -1262,10 +1266,10 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
|
|||
fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "a\nb";
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.edit(
|
||||
[(0..1, "\n"), (2..3, "\n")],
|
||||
Some(AutoindentMode::EachLine),
|
||||
|
@ -1280,7 +1284,7 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
|
|||
fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "
|
||||
const a: usize = 1;
|
||||
fn b() {
|
||||
|
@ -1292,7 +1296,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
|||
.unindent();
|
||||
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
buffer.edit(
|
||||
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
|
||||
Some(AutoindentMode::EachLine),
|
||||
|
@ -1322,7 +1326,7 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
|
|||
fn test_autoindent_block_mode(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = r#"
|
||||
fn a() {
|
||||
b();
|
||||
|
@ -1330,7 +1334,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// When this text was copied, both of the quotation marks were at the same
|
||||
// indent level, but the indentation of the first line was not included in
|
||||
|
@ -1406,7 +1410,7 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
|
|||
fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = r#"
|
||||
fn a() {
|
||||
if b() {
|
||||
|
@ -1416,7 +1420,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
|
|||
"#
|
||||
.unindent();
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), text).with_language(Arc::new(rust_lang()), cx);
|
||||
|
||||
// The original indent columns are not known, so this text is
|
||||
// auto-indented in a block as if the first line was copied in
|
||||
|
@ -1486,7 +1490,7 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
|
|||
fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = "
|
||||
* one
|
||||
- a
|
||||
|
@ -1495,7 +1499,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
|
|||
"
|
||||
.unindent();
|
||||
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
|
||||
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text).with_language(
|
||||
Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
name: "Markdown".into(),
|
||||
|
@ -1555,7 +1559,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
|
|||
language_registry.add(html_language.clone());
|
||||
language_registry.add(javascript_language.clone());
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let (text, ranges) = marked_text_ranges(
|
||||
&"
|
||||
<div>ˇ
|
||||
|
@ -1571,7 +1575,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
|
|||
false,
|
||||
);
|
||||
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
||||
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
|
||||
buffer.set_language_registry(language_registry);
|
||||
buffer.set_language(Some(html_language), cx);
|
||||
buffer.edit(
|
||||
|
@ -1606,9 +1610,9 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
|
|||
settings.defaults.tab_size = Some(2.try_into().unwrap());
|
||||
});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let mut buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), "").with_language(Arc::new(ruby_lang()), cx);
|
||||
|
||||
let text = r#"
|
||||
class C
|
||||
|
@ -1649,7 +1653,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
|
|||
fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "JavaScript".into(),
|
||||
|
@ -1710,7 +1714,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
|||
.unindent();
|
||||
|
||||
let buffer =
|
||||
Buffer::new(0, cx.model_id() as u64, &text).with_language(Arc::new(language), cx);
|
||||
Buffer::new(0, cx.entity_id().as_u64(), &text).with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
let config = snapshot.language_scope_at(0).unwrap();
|
||||
|
@ -1782,7 +1786,7 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
|
|||
fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
|
@ -1822,7 +1826,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
|||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
|
||||
let buffer = Buffer::new(0, cx.entity_id().as_u64(), text.clone())
|
||||
.with_language(Arc::new(language), cx);
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
|
@ -1850,7 +1854,7 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
|
|||
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
||||
init_settings(cx, |_| {});
|
||||
|
||||
cx.add_model(|cx| {
|
||||
cx.new_model(|cx| {
|
||||
let text = r#"
|
||||
<ol>
|
||||
<% people.each do |person| %>
|
||||
|
@ -1867,7 +1871,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
|||
language_registry.add(Arc::new(html_lang()));
|
||||
language_registry.add(Arc::new(erb_lang()));
|
||||
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
|
||||
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), text);
|
||||
buffer.set_language_registry(language_registry.clone());
|
||||
buffer.set_language(
|
||||
language_registry
|
||||
|
@ -1898,8 +1902,8 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
|
|||
fn test_serialization(cx: &mut gpui::AppContext) {
|
||||
let mut now = Instant::now();
|
||||
|
||||
let buffer1 = cx.add_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
|
||||
let buffer1 = cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::new(0, cx.entity_id().as_u64(), "abc");
|
||||
buffer.edit([(3..3, "D")], None, cx);
|
||||
|
||||
now += Duration::from_secs(1);
|
||||
|
@ -1919,9 +1923,9 @@ fn test_serialization(cx: &mut gpui::AppContext) {
|
|||
|
||||
let state = buffer1.read(cx).to_proto();
|
||||
let ops = cx
|
||||
.background()
|
||||
.background_executor()
|
||||
.block(buffer1.read(cx).serialize_ops(None, cx));
|
||||
let buffer2 = cx.add_model(|cx| {
|
||||
let buffer2 = cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::from_proto(1, state, None).unwrap();
|
||||
buffer
|
||||
.apply_ops(
|
||||
|
@ -1953,14 +1957,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
.collect::<String>();
|
||||
let mut replica_ids = Vec::new();
|
||||
let mut buffers = Vec::new();
|
||||
let network = Rc::new(RefCell::new(Network::new(rng.clone())));
|
||||
let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
|
||||
let network = Arc::new(Mutex::new(Network::new(rng.clone())));
|
||||
let base_buffer =
|
||||
cx.new_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text.as_str()));
|
||||
|
||||
for i in 0..rng.gen_range(min_peers..=max_peers) {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let state = base_buffer.read(cx).to_proto();
|
||||
let ops = cx
|
||||
.background()
|
||||
.background_executor()
|
||||
.block(base_buffer.read(cx).serialize_ops(None, cx));
|
||||
let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
|
||||
buffer
|
||||
|
@ -1975,16 +1980,17 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
||||
if let Event::Operation(op) = event {
|
||||
network
|
||||
.borrow_mut()
|
||||
.lock()
|
||||
.broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
buffer
|
||||
});
|
||||
|
||||
buffers.push(buffer);
|
||||
replica_ids.push(i as ReplicaId);
|
||||
network.borrow_mut().add_peer(i as ReplicaId);
|
||||
network.lock().add_peer(i as ReplicaId);
|
||||
log::info!("Adding initial peer with replica id {}", i);
|
||||
}
|
||||
|
||||
|
@ -2065,7 +2071,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
50..=59 if replica_ids.len() < max_peers => {
|
||||
let old_buffer_state = buffer.read(cx).to_proto();
|
||||
let old_buffer_ops = cx
|
||||
.background()
|
||||
.background_executor()
|
||||
.block(buffer.read(cx).serialize_ops(None, cx));
|
||||
let new_replica_id = (0..=replica_ids.len() as ReplicaId)
|
||||
.filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
|
||||
|
@ -2076,7 +2082,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
new_replica_id,
|
||||
replica_id
|
||||
);
|
||||
new_buffer = Some(cx.add_model(|cx| {
|
||||
new_buffer = Some(cx.new_model(|cx| {
|
||||
let mut new_buffer =
|
||||
Buffer::from_proto(new_replica_id, old_buffer_state, None).unwrap();
|
||||
new_buffer
|
||||
|
@ -2096,7 +2102,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
let network = network.clone();
|
||||
cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
|
||||
if let Event::Operation(op) = event {
|
||||
network.borrow_mut().broadcast(
|
||||
network.lock().broadcast(
|
||||
buffer.replica_id(),
|
||||
vec![proto::serialize_operation(op)],
|
||||
);
|
||||
|
@ -2105,15 +2111,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
.detach();
|
||||
new_buffer
|
||||
}));
|
||||
network.borrow_mut().replicate(replica_id, new_replica_id);
|
||||
network.lock().replicate(replica_id, new_replica_id);
|
||||
|
||||
if new_replica_id as usize == replica_ids.len() {
|
||||
replica_ids.push(new_replica_id);
|
||||
} else {
|
||||
let new_buffer = new_buffer.take().unwrap();
|
||||
while network.borrow().has_unreceived(new_replica_id) {
|
||||
while network.lock().has_unreceived(new_replica_id) {
|
||||
let ops = network
|
||||
.borrow_mut()
|
||||
.lock()
|
||||
.receive(new_replica_id)
|
||||
.into_iter()
|
||||
.map(|op| proto::deserialize_operation(op).unwrap());
|
||||
|
@ -2140,9 +2146,9 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
});
|
||||
mutation_count -= 1;
|
||||
}
|
||||
_ if network.borrow().has_unreceived(replica_id) => {
|
||||
_ if network.lock().has_unreceived(replica_id) => {
|
||||
let ops = network
|
||||
.borrow_mut()
|
||||
.lock()
|
||||
.receive(replica_id)
|
||||
.into_iter()
|
||||
.map(|op| proto::deserialize_operation(op).unwrap());
|
||||
|
@ -2167,7 +2173,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
|||
buffer.read(cx).check_invariants();
|
||||
}
|
||||
|
||||
if mutation_count == 0 && network.borrow().is_idle() {
|
||||
if mutation_count == 0 && network.lock().is_idle() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -2438,8 +2444,8 @@ fn javascript_lang() -> Language {
|
|||
.unwrap()
|
||||
}
|
||||
|
||||
fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
|
||||
buffer.read_with(cx, |buffer, _| {
|
||||
fn get_tree_sexp(buffer: &Model<Buffer>, cx: &mut gpui::TestAppContext) -> String {
|
||||
buffer.update(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
|
||||
layers[0].node().to_sexp()
|
||||
|
@ -2454,8 +2460,8 @@ fn assert_bracket_pairs(
|
|||
cx: &mut AppContext,
|
||||
) {
|
||||
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
|
||||
let buffer = cx.add_model(|cx| {
|
||||
Buffer::new(0, cx.model_id() as u64, expected_text.clone())
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, cx.entity_id().as_u64(), expected_text.clone())
|
||||
.with_language(Arc::new(language), cx)
|
||||
});
|
||||
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());
|
||||
|
@ -2478,9 +2484,10 @@ fn assert_bracket_pairs(
|
|||
}
|
||||
|
||||
fn init_settings(cx: &mut AppContext, f: fn(&mut AllLanguageSettingsContent)) {
|
||||
cx.set_global(SettingsStore::test(cx));
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
crate::init(cx);
|
||||
cx.update_global::<SettingsStore, _, _>(|settings, cx| {
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, f);
|
||||
});
|
||||
}
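A hypothetical helper mirroring init_settings above, showing the gpui 2 shape of global-settings setup in tests (the tab_size override is just an example value):

fn init_test_settings(cx: &mut AppContext) {
    let settings_store = SettingsStore::test(cx);
    cx.set_global(settings_store);
    crate::init(cx);
    // update_global takes two type parameters in gpui 2 (it took three before).
    cx.update_global::<SettingsStore, _>(|store, cx| {
        store.update_user_settings::<AllLanguageSettings>(cx, |settings| {
            settings.defaults.tab_size = Some(2.try_into().unwrap());
        });
    });
}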
|
||||
|
|
|
@@ -1,4 +1,4 @@
-use gpui::fonts::HighlightStyle;
+use gpui::HighlightStyle;
 use std::sync::Arc;
 use theme::SyntaxTheme;
 
@ -79,23 +79,23 @@ impl Default for HighlightId {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::color::Color;
|
||||
use gpui::rgba;
|
||||
|
||||
#[test]
|
||||
fn test_highlight_map() {
|
||||
let theme = SyntaxTheme::new(
|
||||
[
|
||||
("function", Color::from_u32(0x100000ff)),
|
||||
("function.method", Color::from_u32(0x200000ff)),
|
||||
("function.async", Color::from_u32(0x300000ff)),
|
||||
("variable.builtin.self.rust", Color::from_u32(0x400000ff)),
|
||||
("variable.builtin", Color::from_u32(0x500000ff)),
|
||||
("variable", Color::from_u32(0x600000ff)),
|
||||
let theme = SyntaxTheme {
|
||||
highlights: [
|
||||
("function", rgba(0x100000ff)),
|
||||
("function.method", rgba(0x200000ff)),
|
||||
("function.async", rgba(0x300000ff)),
|
||||
("variable.builtin.self.rust", rgba(0x400000ff)),
|
||||
("variable.builtin", rgba(0x500000ff)),
|
||||
("variable", rgba(0x600000ff)),
|
||||
]
|
||||
.iter()
|
||||
.map(|(name, color)| (name.to_string(), (*color).into()))
|
||||
.collect(),
|
||||
);
|
||||
};
|
||||
|
||||
let capture_names = &[
|
||||
"function.special",
|
||||
|
|
|
@@ -2,13 +2,13 @@ mod buffer;
 mod diagnostic_set;
 mod highlight_map;
 pub mod language_settings;
-pub mod markdown;
 mod outline;
 pub mod proto;
 mod syntax_map;
 
 #[cfg(test)]
 mod buffer_tests;
+pub mod markdown;
 
 use anyhow::{anyhow, Context, Result};
 use async_trait::async_trait;
@@ -18,7 +18,7 @@ use futures::{
     future::{BoxFuture, Shared},
     FutureExt, TryFutureExt as _,
 };
-use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
+use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
 pub use highlight_map::HighlightMap;
 use lazy_static::lazy_static;
 use lsp::{CodeActionKind, LanguageServerBinary};
@@ -44,7 +44,7 @@ use std::{
 };
 use syntax_map::SyntaxSnapshot;
 use theme::{SyntaxTheme, Theme};
-use tree_sitter::{self, Query};
+use tree_sitter::{self, wasmtime, Query, WasmStore};
 use unicase::UniCase;
 use util::{http::HttpClient, paths::PathExt};
 use util::{post_inc, ResultExt, TryFutureExt as _, UnwrapFuture};
@@ -84,10 +84,15 @@ impl LspBinaryStatusSender {
 }
 
 thread_local! {
-    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
+    static PARSER: RefCell<Parser> = {
+        let mut parser = Parser::new();
+        parser.set_wasm_store(WasmStore::new(WASM_ENGINE.clone()).unwrap()).unwrap();
+        RefCell::new(parser)
+    };
 }
 
 lazy_static! {
+    pub static ref WASM_ENGINE: wasmtime::Engine = wasmtime::Engine::default();
     pub static ref NEXT_GRAMMAR_ID: AtomicUsize = Default::default();
     pub static ref PLAIN_TEXT: Arc<Language> = Arc::new(Language::new(
         LanguageConfig {
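For context, a hypothetical helper (error handling simplified, not part of this commit) showing how the thread-local parser's WasmStore set up above can load a grammar compiled to WebAssembly; it mirrors the loading code added further down in this file:

fn load_wasm_grammar(
    grammar_name: &str,
    wasm_bytes: &[u8],
) -> anyhow::Result<tree_sitter::Language> {
    PARSER.with(|parser| {
        let mut parser = parser.borrow_mut();
        // Temporarily take the store out of the thread-local parser, load the
        // grammar from the wasm bytes, then put the store back.
        let mut store = parser.take_wasm_store().unwrap();
        let grammar = store.load_language(grammar_name, wasm_bytes);
        parser.set_wasm_store(store).unwrap();
        Ok(grammar?)
    })
}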
@ -111,6 +116,7 @@ pub struct LanguageServerName(pub Arc<str>);
|
|||
pub struct CachedLspAdapter {
|
||||
pub name: LanguageServerName,
|
||||
pub short_name: &'static str,
|
||||
pub initialization_options: Option<Value>,
|
||||
pub disk_based_diagnostic_sources: Vec<String>,
|
||||
pub disk_based_diagnostics_progress_token: Option<String>,
|
||||
pub language_ids: HashMap<String, String>,
|
||||
|
@ -122,6 +128,7 @@ impl CachedLspAdapter {
|
|||
pub async fn new(adapter: Arc<dyn LspAdapter>) -> Arc<Self> {
|
||||
let name = adapter.name().await;
|
||||
let short_name = adapter.short_name();
|
||||
let initialization_options = adapter.initialization_options().await;
|
||||
let disk_based_diagnostic_sources = adapter.disk_based_diagnostic_sources().await;
|
||||
let disk_based_diagnostics_progress_token =
|
||||
adapter.disk_based_diagnostics_progress_token().await;
|
||||
|
@ -130,6 +137,7 @@ impl CachedLspAdapter {
|
|||
Arc::new(CachedLspAdapter {
|
||||
name,
|
||||
short_name,
|
||||
initialization_options,
|
||||
disk_based_diagnostic_sources,
|
||||
disk_based_diagnostics_progress_token,
|
||||
language_ids,
|
||||
|
@ -357,6 +365,7 @@ pub struct CodeLabel {
|
|||
#[derive(Clone, Deserialize)]
|
||||
pub struct LanguageConfig {
|
||||
pub name: Arc<str>,
|
||||
pub grammar_name: Option<Arc<str>>,
|
||||
pub path_suffixes: Vec<String>,
|
||||
pub brackets: BracketPairConfig,
|
||||
#[serde(default, deserialize_with = "deserialize_regex")]
|
||||
|
@ -443,6 +452,7 @@ impl Default for LanguageConfig {
|
|||
fn default() -> Self {
|
||||
Self {
|
||||
name: "".into(),
|
||||
grammar_name: None,
|
||||
path_suffixes: Default::default(),
|
||||
brackets: Default::default(),
|
||||
auto_indent_using_last_non_empty_line: auto_indent_using_last_non_empty_line_default(),
|
||||
|
@@ -617,14 +627,25 @@ type AvailableLanguageId = usize;
 #[derive(Clone)]
 struct AvailableLanguage {
     id: AvailableLanguageId,
-    path: &'static str,
     config: LanguageConfig,
-    grammar: tree_sitter::Language,
+    grammar: AvailableGrammar,
     lsp_adapters: Vec<Arc<dyn LspAdapter>>,
-    get_queries: fn(&str) -> LanguageQueries,
     loaded: bool,
 }
 
+#[derive(Clone)]
+enum AvailableGrammar {
+    Native {
+        grammar: tree_sitter::Language,
+        asset_dir: &'static str,
+        get_queries: fn(&str) -> LanguageQueries,
+    },
+    Wasm {
+        grammar_name: Arc<str>,
+        path: Arc<Path>,
+    },
+}
+
 pub struct LanguageRegistry {
     state: RwLock<LanguageRegistryState>,
     language_server_download_dir: Option<Arc<Path>>,
@ -633,7 +654,7 @@ pub struct LanguageRegistry {
|
|||
lsp_binary_paths: Mutex<
|
||||
HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
|
||||
>,
|
||||
executor: Option<Arc<Background>>,
|
||||
executor: Option<BackgroundExecutor>,
|
||||
lsp_binary_status_tx: LspBinaryStatusSender,
|
||||
}
|
||||
|
||||
|
@ -682,7 +703,7 @@ impl LanguageRegistry {
|
|||
Self::new(Task::ready(()))
|
||||
}
|
||||
|
||||
pub fn set_executor(&mut self, executor: Arc<Background>) {
|
||||
pub fn set_executor(&mut self, executor: BackgroundExecutor) {
|
||||
self.executor = Some(executor);
|
||||
}
|
||||
|
||||
|
@ -696,7 +717,7 @@ impl LanguageRegistry {
|
|||
|
||||
pub fn register(
|
||||
&self,
|
||||
path: &'static str,
|
||||
asset_dir: &'static str,
|
||||
config: LanguageConfig,
|
||||
grammar: tree_sitter::Language,
|
||||
lsp_adapters: Vec<Arc<dyn LspAdapter>>,
|
||||
|
@ -705,11 +726,24 @@ impl LanguageRegistry {
|
|||
let state = &mut *self.state.write();
|
||||
state.available_languages.push(AvailableLanguage {
|
||||
id: post_inc(&mut state.next_available_language_id),
|
||||
path,
|
||||
config,
|
||||
grammar,
|
||||
grammar: AvailableGrammar::Native {
|
||||
grammar,
|
||||
get_queries,
|
||||
asset_dir,
|
||||
},
|
||||
lsp_adapters,
|
||||
get_queries,
|
||||
loaded: false,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn register_wasm(&self, path: Arc<Path>, grammar_name: Arc<str>, config: LanguageConfig) {
|
||||
let state = &mut *self.state.write();
|
||||
state.available_languages.push(AvailableLanguage {
|
||||
id: post_inc(&mut state.next_available_language_id),
|
||||
config,
|
||||
grammar: AvailableGrammar::Wasm { grammar_name, path },
|
||||
lsp_adapters: Vec::new(),
|
||||
loaded: false,
|
||||
});
|
||||
}
|
||||
|
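The hunks above split registration into two paths. As a point of reference only (not part of the commit), here is a minimal sketch of the two AvailableGrammar variants being constructed, as if written inside the language crate (the enum is private there); the grammar, directory, and query-loader values are hypothetical:

fn example_grammars() -> (AvailableGrammar, AvailableGrammar) {
    // Hypothetical loader matching the `fn(&str) -> LanguageQueries` field above.
    fn load_queries(_asset_dir: &str) -> LanguageQueries {
        LanguageQueries::default()
    }

    // The path taken by `register`: a tree-sitter grammar compiled into the binary.
    let native = AvailableGrammar::Native {
        grammar: tree_sitter_rust::language(),
        asset_dir: "languages/rust", // hypothetical asset directory
        get_queries: load_queries,
    };

    // The path taken by `register_wasm`: a grammar loaded from a .wasm file at runtime.
    let wasm = AvailableGrammar::Wasm {
        grammar_name: "gleam".into(), // hypothetical grammar name
        path: std::path::Path::new("/plugins/gleam").into(), // expected to hold gleam.wasm
    };

    (native, wasm)
}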

@@ -749,7 +783,7 @@ impl LanguageRegistry {
let mut state = self.state.write();
state.theme = Some(theme.clone());
for language in &state.languages {
language.set_theme(&theme.editor.syntax);
language.set_theme(&theme.syntax());
}
}

@@ -834,13 +868,43 @@ impl LanguageRegistry {
executor
.spawn(async move {
let id = language.id;
let queries = (language.get_queries)(&language.path);
let language =
Language::new(language.config, Some(language.grammar))
let name = language.config.name.clone();
let language = async {
let (grammar, queries) = match language.grammar {
AvailableGrammar::Native {
grammar,
asset_dir,
get_queries,
} => (grammar, (get_queries)(asset_dir)),
AvailableGrammar::Wasm { grammar_name, path } => {
let mut wasm_path = path.join(grammar_name.as_ref());
wasm_path.set_extension("wasm");
let wasm_bytes = std::fs::read(&wasm_path)?;
let grammar = PARSER.with(|parser| {
let mut parser = parser.borrow_mut();
let mut store = parser.take_wasm_store().unwrap();
let grammar =
store.load_language(&grammar_name, &wasm_bytes);
parser.set_wasm_store(store).unwrap();
grammar
})?;
let mut queries = LanguageQueries::default();
if let Ok(contents) = std::fs::read_to_string(
&path.join("highlights.scm"),
) {
queries.highlights = Some(contents.into());
}
(grammar, queries)
}
};
Language::new(language.config, Some(grammar))
.with_lsp_adapters(language.lsp_adapters)
.await;
let name = language.name();
match language.with_queries(queries) {
.await
.with_queries(queries)
}
.await;

match language {
Ok(language) => {
let language = Arc::new(language);
let mut state = this.state.write();
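One inference from the Wasm branch above, stated nowhere else in the commit: for register_wasm(path, grammar_name, config), this hunk reads the compiled grammar from {path}/{grammar_name}.wasm and, if present, picks up highlight queries from {path}/highlights.scm; no other query files are loaded here. The implied on-disk layout is:

{path}/{grammar_name}.wasm  (compiled tree-sitter grammar, loaded through the parser's wasm store)
{path}/highlights.scm       (optional highlight queries)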

@@ -918,7 +982,7 @@ impl LanguageRegistry {
}

let servers_tx = servers_tx.clone();
cx.background()
cx.background_executor()
.spawn(async move {
if fake_server
.try_receive_notification::<lsp::notification::Initialized>()

@@ -955,18 +1019,22 @@ impl LanguageRegistry {
let task = {
let container_dir = container_dir.clone();
cx.spawn(|mut cx| async move {
cx.spawn(move |mut cx| async move {
login_shell_env_loaded.await;

let mut lock = this.lsp_binary_paths.lock();
let entry = lock
let entry = this
.lsp_binary_paths
.lock()
.entry(adapter.name.clone())
.or_insert_with(|| {
let adapter = adapter.clone();
let language = language.clone();
let delegate = delegate.clone();
cx.spawn(|cx| {
get_binary(
adapter.clone(),
language.clone(),
delegate.clone(),
adapter,
language,
delegate,
container_dir,
lsp_binary_statuses,
cx,

@@ -976,9 +1044,8 @@ impl LanguageRegistry {
.shared()
})
.clone();
drop(lock);

let binary = match entry.clone().await {
let binary = match entry.await {
Ok(binary) => binary,
Err(err) => anyhow::bail!("{err}"),
};

@@ -1047,7 +1114,7 @@ impl LanguageRegistryState {
fn add(&mut self, language: Arc<Language>) {
if let Some(theme) = self.theme.as_ref() {
language.set_theme(&theme.editor.syntax);
language.set_theme(&theme.syntax());
}
self.languages.push(language);
self.version += 1;

@@ -1387,9 +1454,9 @@ impl Language {
let query = Query::new(&self.grammar_mut().ts_language, source)?;

let mut override_configs_by_id = HashMap::default();
for (ix, name) in query.capture_names().iter().copied().enumerate() {
for (ix, name) in query.capture_names().iter().enumerate() {
if !name.starts_with('_') {
let value = self.config.overrides.remove(name).unwrap_or_default();
let value = self.config.overrides.remove(*name).unwrap_or_default();
for server_name in &value.opt_into_language_servers {
if !self
.config

@@ -1400,7 +1467,7 @@ impl Language {
}
}

override_configs_by_id.insert(ix as u32, (name.into(), value));
override_configs_by_id.insert(ix as u32, (name.to_string(), value));
}
}

@@ -1855,7 +1922,8 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_first_line_pattern(cx: &mut TestAppContext) {
let mut languages = LanguageRegistry::test();
languages.set_executor(cx.background());
languages.set_executor(cx.executor());
let languages = Arc::new(languages);
languages.register(
"/javascript",

@@ -1892,7 +1960,7 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_language_loading(cx: &mut TestAppContext) {
let mut languages = LanguageRegistry::test();
languages.set_executor(cx.background());
languages.set_executor(cx.executor());
let languages = Arc::new(languages);
languages.register(
"/JSON",

@@ -8,10 +8,11 @@ use schemars::{
JsonSchema,
};
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::{num::NonZeroU32, path::Path, sync::Arc};

pub fn init(cx: &mut AppContext) {
settings::register::<AllLanguageSettings>(cx);
AllLanguageSettings::register(cx);
}

pub fn language_settings<'a>(

@@ -28,7 +29,7 @@ pub fn all_language_settings<'a>(
cx: &'a AppContext,
) -> &'a AllLanguageSettings {
let location = file.map(|f| (f.worktree_id(), f.path().as_ref()));
settings::get_local(location, cx)
AllLanguageSettings::get(location, cx)
}

#[derive(Debug, Clone)]

@@ -254,7 +255,7 @@ impl InlayHintKind {
}
}

impl settings::Setting for AllLanguageSettings {
impl settings::Settings for AllLanguageSettings {
const KEY: Option<&'static str> = None;

type FileContent = AllLanguageSettingsContent;

@@ -262,7 +263,7 @@ impl settings::Setting for AllLanguageSettings {
fn load(
default_value: &Self::FileContent,
user_settings: &[&Self::FileContent],
_: &AppContext,
_: &mut AppContext,
) -> Result<Self> {
// A default is provided for all settings.
let mut defaults: LanguageSettings =

@@ -2,7 +2,7 @@ use std::sync::Arc;
use std::{ops::Range, path::PathBuf};

use crate::{HighlightId, Language, LanguageRegistry};
use gpui::fonts::{self, HighlightStyle, Weight};
use gpui::{px, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};

#[derive(Debug, Clone)]

@@ -26,18 +26,18 @@ impl MarkdownHighlight {
let mut highlight = HighlightStyle::default();

if style.italic {
highlight.italic = Some(true);
highlight.font_style = Some(FontStyle::Italic);
}

if style.underline {
highlight.underline = Some(fonts::Underline {
thickness: 1.0.into(),
highlight.underline = Some(UnderlineStyle {
thickness: px(1.),
..Default::default()
});
}

if style.weight != fonts::Weight::default() {
highlight.weight = Some(style.weight);
if style.weight != FontWeight::default() {
highlight.font_weight = Some(style.weight);
}

Some(highlight)

@@ -52,7 +52,7 @@ impl MarkdownHighlight {
pub struct MarkdownHighlightStyle {
pub italic: bool,
pub underline: bool,
pub weight: Weight,
pub weight: FontWeight,
}

#[derive(Debug, Clone)]

@@ -138,7 +138,7 @@ pub async fn parse_markdown_block(
let mut style = MarkdownHighlightStyle::default();

if bold_depth > 0 {
style.weight = Weight::BOLD;
style.weight = FontWeight::BOLD;
}

if italic_depth > 0 {

@@ -1,6 +1,6 @@
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{executor::Background, fonts::HighlightStyle};
use std::{ops::Range, sync::Arc};
use gpui::{BackgroundExecutor, HighlightStyle};
use std::ops::Range;

#[derive(Debug)]
pub struct Outline<T> {

@@ -57,7 +57,7 @@ impl<T> Outline<T> {
}
}

pub async fn search(&self, query: &str, executor: Arc<Background>) -> Vec<StringMatch> {
pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
let query = query.trim_start();
let is_path_query = query.contains(' ');
let smart_case = query.chars().any(|c| c.is_uppercase());

@@ -81,6 +81,7 @@ impl<T> Outline<T> {
let mut prev_item_ix = 0;
for mut string_match in matches {
let outline_match = &self.items[string_match.candidate_id];
string_match.string = outline_match.text.clone();

if is_path_query {
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
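A hedged usage sketch of the new search signature (the variable names are assumed, not from the commit): in a gpui test, cx.executor() already yields a BackgroundExecutor, so the executor can now be handed over by value rather than wrapped in an Arc.

// Hypothetical call site: `outline` is an Outline<T>, `cx` a gpui TestAppContext.
let matches = outline.search("buffer diff", cx.executor()).await;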

@@ -7,7 +7,6 @@ use futures::FutureExt;
use parking_lot::Mutex;
use std::{
borrow::Cow,
cell::RefCell,
cmp::{self, Ordering, Reverse},
collections::BinaryHeap,
fmt, iter,

@@ -16,13 +15,9 @@ use std::{
};
use sum_tree::{Bias, SeekTarget, SumTree};
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
use tree_sitter::{
Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
};
use tree_sitter::{Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree};

thread_local! {
static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
use super::PARSER;

static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);