Remove language servers from buffers

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Antonio Scandurra <antonio@zed.dev>
Co-Authored-By: Keith Simmons <keith@zed.dev>

parent 6662ba62a3
commit 317a1bb07b
14 changed files with 1584 additions and 1235 deletions
Cargo.lock (generated)

@@ -3591,6 +3591,7 @@ dependencies = [
  "serde",
  "serde_json",
  "sha2 0.10.2",
+ "similar",
  "smol",
  "sum_tree",
  "tempdir",
@@ -32,8 +32,8 @@ use items::{BufferItemHandle, MultiBufferItemHandle};
 use itertools::Itertools as _;
 pub use language::{char_kind, CharKind};
 use language::{
-    AnchorRangeExt as _, BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
-    DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId,
+    BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticSeverity,
+    Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
 };
 use multi_buffer::MultiBufferChunks;
 pub use multi_buffer::{

@@ -8235,9 +8235,6 @@ mod tests {
         .update(cx, |project, cx| project.open_buffer(project_path, cx))
         .await
         .unwrap();
-    buffer.update(cx, |buffer, cx| {
-        buffer.set_language_server(Some(language_server), cx);
-    });
 
     let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
     buffer.next_notification(&cx).await;
@@ -8,8 +8,8 @@ use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
 pub use language::Completion;
 use language::{
     char_kind, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, DiagnosticEntry, Event, File,
-    Language, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _, ToPointUtf16 as _,
-    TransactionId,
+    Language, OffsetRangeExt, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _,
+    ToPointUtf16 as _, TransactionId,
 };
 use std::{
     cell::{Ref, RefCell},

@@ -25,7 +25,7 @@ use text::{
     locator::Locator,
     rope::TextDimension,
     subscription::{Subscription, Topic},
-    AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary,
+    Edit, Point, PointUtf16, TextSummary,
 };
 use theme::SyntaxTheme;
 
@@ -742,7 +742,7 @@ type GlobalActionCallback = dyn FnMut(&dyn AnyAction, &mut MutableAppContext);
 type SubscriptionCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext) -> bool>;
 type DelegationCallback = Box<dyn FnMut(Box<dyn Any>, &mut MutableAppContext) -> bool>;
 type ObservationCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;
-type ReleaseObservationCallback = Box<dyn FnMut(&mut MutableAppContext)>;
+type ReleaseObservationCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext)>;
 
 pub struct MutableAppContext {
     weak_self: Option<rc::Weak<RefCell<Self>>>,

@@ -1186,14 +1186,20 @@ impl MutableAppContext {
         E: Entity,
         E::Event: 'static,
         H: Handle<E>,
-        F: 'static + FnMut(&mut Self),
+        F: 'static + FnMut(&E, &mut Self),
     {
         let id = post_inc(&mut self.next_subscription_id);
         self.release_observations
             .lock()
             .entry(handle.id())
             .or_default()
-            .insert(id, Box::new(move |cx| callback(cx)));
+            .insert(
+                id,
+                Box::new(move |released, cx| {
+                    let released = released.downcast_ref().unwrap();
+                    callback(released, cx)
+                }),
+            );
         Subscription::ReleaseObservation {
             id,
             entity_id: handle.id(),

@@ -1552,9 +1558,8 @@ impl MutableAppContext {
             self.observations.lock().remove(&model_id);
             let mut model = self.cx.models.remove(&model_id).unwrap();
             model.release(self);
-            self.pending_effects.push_back(Effect::Release {
-                entity_id: model_id,
-            });
+            self.pending_effects
+                .push_back(Effect::ModelRelease { model_id, model });
         }
 
         for (window_id, view_id) in dropped_views {

@@ -1580,7 +1585,7 @@ impl MutableAppContext {
             }
 
             self.pending_effects
-                .push_back(Effect::Release { entity_id: view_id });
+                .push_back(Effect::ViewRelease { view_id, view });
         }
 
         for key in dropped_element_states {

@@ -1607,7 +1612,12 @@ impl MutableAppContext {
                        self.notify_view_observers(window_id, view_id)
                    }
                    Effect::Deferred(callback) => callback(self),
-                   Effect::Release { entity_id } => self.notify_release_observers(entity_id),
+                   Effect::ModelRelease { model_id, model } => {
+                       self.notify_release_observers(model_id, model.as_any())
+                   }
+                   Effect::ViewRelease { view_id, view } => {
+                       self.notify_release_observers(view_id, view.as_any())
+                   }
                    Effect::Focus { window_id, view_id } => {
                        self.focus(window_id, view_id);
                    }

@@ -1781,11 +1791,11 @@ impl MutableAppContext {
         }
     }
 
-    fn notify_release_observers(&mut self, entity_id: usize) {
+    fn notify_release_observers(&mut self, entity_id: usize, entity: &dyn Any) {
        let callbacks = self.release_observations.lock().remove(&entity_id);
        if let Some(callbacks) = callbacks {
            for (_, mut callback) in callbacks {
-               callback(self);
+               callback(entity, self);
            }
        }
    }

@@ -2112,8 +2122,13 @@ pub enum Effect {
        view_id: usize,
    },
    Deferred(Box<dyn FnOnce(&mut MutableAppContext)>),
-   Release {
-       entity_id: usize,
+   ModelRelease {
+       model_id: usize,
+       model: Box<dyn AnyModel>,
+   },
+   ViewRelease {
+       view_id: usize,
+       view: Box<dyn AnyView>,
    },
    Focus {
        window_id: usize,

@@ -2142,9 +2157,13 @@ impl Debug for Effect {
                .field("view_id", view_id)
                .finish(),
            Effect::Deferred(_) => f.debug_struct("Effect::Deferred").finish(),
-           Effect::Release { entity_id } => f
-               .debug_struct("Effect::Release")
-               .field("entity_id", entity_id)
+           Effect::ModelRelease { model_id, .. } => f
+               .debug_struct("Effect::ModelRelease")
+               .field("model_id", model_id)
+               .finish(),
+           Effect::ViewRelease { view_id, .. } => f
+               .debug_struct("Effect::ViewRelease")
+               .field("view_id", view_id)
                .finish(),
            Effect::Focus { window_id, view_id } => f
                .debug_struct("Effect::Focus")

@@ -2395,13 +2414,13 @@ impl<'a, T: Entity> ModelContext<'a, T> {
    ) -> Subscription
    where
        S: Entity,
-       F: 'static + FnMut(&mut T, &mut ModelContext<T>),
+       F: 'static + FnMut(&mut T, &S, &mut ModelContext<T>),
    {
        let observer = self.weak_handle();
-       self.app.observe_release(handle, move |cx| {
+       self.app.observe_release(handle, move |released, cx| {
            if let Some(observer) = observer.upgrade(cx) {
                observer.update(cx, |observer, cx| {
-                   callback(observer, cx);
+                   callback(observer, released, cx);
                });
            }
        })

@@ -2677,13 +2696,13 @@ impl<'a, T: View> ViewContext<'a, T> {
    where
        E: Entity,
        H: Handle<E>,
-       F: 'static + FnMut(&mut T, &mut ViewContext<T>),
+       F: 'static + FnMut(&mut T, &E, &mut ViewContext<T>),
    {
        let observer = self.weak_handle();
-       self.app.observe_release(handle, move |cx| {
+       self.app.observe_release(handle, move |released, cx| {
            if let Some(observer) = observer.upgrade(cx) {
                observer.update(cx, |observer, cx| {
-                   callback(observer, cx);
+                   callback(observer, released, cx);
                });
            }
        })

@@ -4403,12 +4422,12 @@ mod tests {
 
        cx.observe_release(&model, {
            let model_release_observed = model_release_observed.clone();
-           move |_| model_release_observed.set(true)
+           move |_, _| model_release_observed.set(true)
        })
        .detach();
        cx.observe_release(&view, {
            let view_release_observed = view_release_observed.clone();
-           move |_| view_release_observed.set(true)
+           move |_, _| view_release_observed.set(true)
        })
        .detach();
 
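Note (illustrative sketch, not part of the commit): with this change a release observer receives a reference to the entity being dropped, so the callback now takes two arguments. A minimal usage sketch against the new signature, using a hypothetical `Counter` model:

    // `Counter` is a made-up model used only to show the new callback shape.
    struct Counter {
        count: usize,
    }

    impl Entity for Counter {
        type Event = ();
    }

    fn watch_counter(cx: &mut MutableAppContext, handle: &ModelHandle<Counter>) -> Subscription {
        // The released entity is now passed to the observer alongside the app context.
        cx.observe_release(handle, |released: &Counter, _cx| {
            println!("counter dropped with final count {}", released.count);
        })
    }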
@@ -7,16 +7,14 @@ pub use crate::{
 use crate::{
     diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
     outline::OutlineItem,
-    range_from_lsp, CodeLabel, Outline, ToLspPosition,
+    CodeLabel, Outline,
 };
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
 use futures::FutureExt as _;
 use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
 use lazy_static::lazy_static;
-use lsp::LanguageServer;
 use parking_lot::Mutex;
-use postage::{prelude::Stream, sink::Sink, watch};
 use similar::{ChangeTag, TextDiff};
 use smol::future::yield_now;
 use std::{

@@ -26,7 +24,7 @@ use std::{
     ffi::OsString,
     future::Future,
     iter::{Iterator, Peekable},
-    ops::{Deref, DerefMut, Range, Sub},
+    ops::{Deref, DerefMut, Range},
     path::{Path, PathBuf},
     str,
     sync::Arc,

@@ -34,11 +32,11 @@ use std::{
     vec,
 };
 use sum_tree::TreeMap;
-use text::{operation_queue::OperationQueue, rope::TextDimension};
-pub use text::{Buffer as TextBuffer, Operation as _, *};
+use text::operation_queue::OperationQueue;
+pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
 use theme::SyntaxTheme;
 use tree_sitter::{InputEdit, QueryCursor, Tree};
-use util::{post_inc, TryFutureExt as _};
+use util::TryFutureExt as _;
 
 #[cfg(any(test, feature = "test-support"))]
 pub use tree_sitter_rust;

@@ -70,7 +68,6 @@ pub struct Buffer {
     diagnostics_update_count: usize,
     diagnostics_timestamp: clock::Lamport,
     file_update_count: usize,
-    language_server: Option<LanguageServerState>,
     completion_triggers: Vec<String>,
     deferred_ops: OperationQueue<Operation>,
 }

@@ -126,21 +123,6 @@ pub struct CodeAction {
     pub lsp_action: lsp::CodeAction,
 }
 
-struct LanguageServerState {
-    server: Arc<LanguageServer>,
-    latest_snapshot: watch::Sender<LanguageServerSnapshot>,
-    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
-    next_version: usize,
-    _maintain_server: Task<Option<()>>,
-}
-
-#[derive(Clone)]
-struct LanguageServerSnapshot {
-    buffer_snapshot: text::BufferSnapshot,
-    version: usize,
-    path: Arc<Path>,
-}
-
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Operation {
     Buffer(text::Operation),
@@ -479,15 +461,6 @@ impl Buffer {
         self
     }
 
-    pub fn with_language_server(
-        mut self,
-        server: Arc<LanguageServer>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
-        self.set_language_server(Some(server), cx);
-        self
-    }
-
     fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
         let saved_mtime;
         if let Some(file) = file.as_ref() {

@@ -514,7 +487,6 @@ impl Buffer {
             diagnostics_update_count: 0,
             diagnostics_timestamp: Default::default(),
             file_update_count: 0,
-            language_server: None,
             completion_triggers: Default::default(),
             deferred_ops: OperationQueue::new(),
         }

@@ -536,6 +508,14 @@ impl Buffer {
         }
     }
 
+    pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
+        &self.text
+    }
+
+    pub fn text_snapshot(&self) -> text::BufferSnapshot {
+        self.text.snapshot()
+    }
+
     pub fn file(&self) -> Option<&dyn File> {
         self.file.as_deref()
     }
@@ -561,123 +541,15 @@ impl Buffer {
         })
     }
 
+    pub fn saved_version(&self) -> &clock::Global {
+        &self.saved_version
+    }
+
     pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
         self.language = language;
         self.reparse(cx);
     }
 
-    pub fn set_language_server(
-        &mut self,
-        language_server: Option<Arc<lsp::LanguageServer>>,
-        cx: &mut ModelContext<Self>,
-    ) {
-        [roughly a hundred further lines deleted: building the initial LanguageServerSnapshot, the watch channel of pending snapshots, the spawned task that read the server's capabilities and completion triggers, and the background loop that sent DidOpenTextDocument followed by incremental DidChangeTextDocument notifications for every edit]
-    }
-
     pub fn did_save(
         &mut self,
         version: clock::Global,
@@ -784,10 +656,6 @@ impl Buffer {
         self.language.as_ref()
     }
 
-    pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
-        self.language_server.as_ref().map(|state| &state.server)
-    }
-
     pub fn parse_count(&self) -> usize {
         self.parse_count
     }
@@ -899,100 +767,14 @@ impl Buffer {
         cx.notify();
     }
 
-    pub fn update_diagnostics<T>(
-        &mut self,
-        mut diagnostics: Vec<DiagnosticEntry<T>>,
-        version: Option<i32>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()>
-    where
-        T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
-    {
-        [about seventy further lines deleted: sorting the incoming entries, resolving the buffer snapshot for the language server's reported version, translating disk-based diagnostic ranges through edits made since the last save, clipping and expanding empty ranges, and collecting the result into a DiagnosticSet]
+    pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
         let lamport_timestamp = self.text.lamport_clock.tick();
-        self.apply_diagnostic_update(set.clone(), lamport_timestamp, cx);
-
         let op = Operation::UpdateDiagnostics {
-            diagnostics: set.iter().cloned().collect(),
+            diagnostics: diagnostics.iter().cloned().collect(),
             lamport_timestamp,
         };
+        self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx);
         self.send_operation(op, cx);
-        Ok(())
     }
 
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
@@ -1305,30 +1087,6 @@ impl Buffer {
         self.set_active_selections(Arc::from([]), cx);
     }
 
-    fn update_language_server(&mut self, cx: &AppContext) {
-        let language_server = if let Some(language_server) = self.language_server.as_mut() {
-            language_server
-        } else {
-            return;
-        };
-        let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
-            file
-        } else {
-            return;
-        };
-
-        let version = post_inc(&mut language_server.next_version);
-        let snapshot = LanguageServerSnapshot {
-            buffer_snapshot: self.text.snapshot(),
-            version,
-            path: Arc::from(file.abs_path(cx)),
-        };
-        language_server
-            .pending_snapshots
-            .insert(version, snapshot.clone());
-        let _ = language_server.latest_snapshot.blocking_send(snapshot);
-    }
-
     pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
     where
         T: Into<String>,
@@ -1455,115 +1213,6 @@ impl Buffer {
         Some(edit_id)
     }
 
-    pub fn edits_from_lsp(
-        &mut self,
-        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
-        version: Option<i32>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
-        [roughly a hundred further lines deleted: resolving the snapshot for the requested language-server version, merging adjacent or newline-separated LSP edits (as rust-analyzer produces for whole-buffer rewrites), validating the resulting ranges, and diffing multi-line edits with similar::TextDiff so anchors in unchanged regions are preserved]
-    }
-
     fn did_edit(
         &mut self,
         old_version: &clock::Global,
@@ -1575,7 +1224,6 @@ impl Buffer {
         }
 
         self.reparse(cx);
-        self.update_language_server(cx);
 
         cx.emit(Event::Edited);
         if !was_dirty {

@@ -1788,7 +1436,7 @@ impl Buffer {
     }
 
     pub fn completion_triggers(&self) -> &[String] {
-        &self.completion_triggers
+        todo!()
     }
 }
 
@@ -1843,23 +1491,6 @@ impl Buffer {
 
 impl Entity for Buffer {
     type Event = Event;
-
-    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
-        if let Some(file) = self.file.as_ref() {
-            if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) {
-                let request = lang_server
-                    .server
-                    .notify::<lsp::notification::DidCloseTextDocument>(
-                        lsp::DidCloseTextDocumentParams {
-                            text_document: lsp::TextDocumentIdentifier::new(
-                                lsp::Url::from_file_path(file.abs_path(cx)).unwrap(),
-                            ),
-                        },
-                    );
-                cx.foreground().spawn(request).detach_and_log_err(cx);
-            }
-        }
-    }
 }
 
 impl Deref for Buffer {
@@ -2592,20 +2223,6 @@ impl operation_queue::Operation for Operation {
     }
 }
 
-impl LanguageServerState {
-    fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> {
-        const OLD_VERSIONS_TO_RETAIN: usize = 10;
-
-        self.pending_snapshots
-            .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version);
-        let snapshot = self
-            .pending_snapshots
-            .get(&version)
-            .ok_or_else(|| anyhow!("missing snapshot"))?;
-        Ok(&snapshot.buffer_snapshot)
-    }
-}
-
 impl Default for Diagnostic {
     fn default() -> Self {
         Self {
@@ -6,7 +6,7 @@ use std::{
     ops::Range,
 };
 use sum_tree::{self, Bias, SumTree};
-use text::{Anchor, FromAnchor, Point, ToOffset};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
 
 #[derive(Clone, Debug)]
 pub struct DiagnosticSet {

@@ -46,7 +46,7 @@ impl DiagnosticSet {
 
     pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
     where
-        I: IntoIterator<Item = DiagnosticEntry<Point>>,
+        I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
     {
         let mut entries = iter.into_iter().collect::<Vec<_>>();
         entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
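Illustration (a sketch of the new calling convention, not part of the commit): diagnostics are now collected into a DiagnosticSet keyed by PointUtf16 ranges and handed to Buffer::update_diagnostics as a whole, as the updated collaboration test further below does. Assuming `buffer: &mut Buffer` and `cx` inside a model update:

    // Build the set from PointUtf16 entries against the current text snapshot,
    // then hand the finished set to the buffer; no LSP document version is passed here anymore.
    let entries = vec![DiagnosticEntry {
        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
        diagnostic: Diagnostic {
            severity: DiagnosticSeverity::ERROR,
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        },
    }];
    let set = DiagnosticSet::new(entries, buffer.as_text_snapshot());
    buffer.update_diagnostics(set, cx);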
@@ -6,7 +6,6 @@ use rand::prelude::*;
 use std::{
     cell::RefCell,
     env,
-    iter::FromIterator,
     ops::Range,
     rc::Rc,
     time::{Duration, Instant},
@@ -558,584 +557,6 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
     });
 }
 
-#[gpui::test]
-async fn test_diagnostics(cx: &mut gpui::TestAppContext) { … }
-
-#[gpui::test]
-async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) { … }
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) { … }
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) { … }
-
-#[gpui::test]
-async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { … }
-
[about 580 deleted lines in total: these five language-server and diagnostics tests — version-tagged diagnostics updates against a fake LanguageServer, a server that exits before startup, LSP edits applied against past buffer versions, adjacent-line LSP edits collapsed via diffing, and empty diagnostic ranges expanded by one character — leave the buffer tests along with the DidOpen/DidChange plumbing they exercised]
 
 #[gpui::test]
 fn test_serialization(cx: &mut gpui::MutableAppContext) {
     let mut now = Instant::now();
@ -1253,9 +674,10 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
|
||||||
40..=49 if mutation_count != 0 && replica_id == 0 => {
|
40..=49 if mutation_count != 0 && replica_id == 0 => {
|
||||||
let entry_count = rng.gen_range(1..=5);
|
let entry_count = rng.gen_range(1..=5);
|
||||||
buffer.update(cx, |buffer, cx| {
|
buffer.update(cx, |buffer, cx| {
|
||||||
let diagnostics = (0..entry_count)
|
let diagnostics = DiagnosticSet::new(
|
||||||
.map(|_| {
|
(0..entry_count).map(|_| {
|
||||||
let range = buffer.random_byte_range(0, &mut rng);
|
let range = buffer.random_byte_range(0, &mut rng);
|
||||||
|
let range = range.to_point_utf16(buffer);
|
||||||
DiagnosticEntry {
|
DiagnosticEntry {
|
||||||
range,
|
range,
|
||||||
diagnostic: Diagnostic {
|
diagnostic: Diagnostic {
|
||||||
|
@ -1263,10 +685,11 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
})
|
}),
|
||||||
.collect();
|
buffer,
|
||||||
|
);
|
||||||
log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
|
log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
|
||||||
buffer.update_diagnostics(diagnostics, None, cx).unwrap();
|
buffer.update_diagnostics(diagnostics, cx);
|
||||||
});
|
});
|
||||||
mutation_count -= 1;
|
mutation_count -= 1;
|
||||||
}
|
}
|
||||||
@@ -1370,24 +793,6 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
     }
 }

-fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
-    buffer: &Buffer,
-    range: Range<T>,
-) -> Vec<(String, Option<DiagnosticSeverity>)> {
-    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
-    for chunk in buffer.snapshot().chunks(range, true) {
-        if chunks
-            .last()
-            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
-        {
-            chunks.last_mut().unwrap().0.push_str(chunk.text);
-        } else {
-            chunks.push((chunk.text.to_string(), chunk.diagnostic));
-        }
-    }
-    chunks
-}
-
 #[test]
 fn test_contiguous_ranges() {
     assert_eq!(
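
The two hunks above split the new diagnostics flow across a hunk boundary, so the same shape is condensed here as a reading aid. It is a sketch only, reusing the names that appear in the diff (DiagnosticSet::new, DiagnosticEntry, Diagnostic, to_point_utf16, update_diagnostics); `buffer`, `cx`, `rng`, and `entry_count` are the bindings already in scope in the test, and the diagnostic fields are elided:

    buffer.update(cx, |buffer, cx| {
        // Entries are built with PointUtf16 ranges and collected into a
        // DiagnosticSet against the buffer up front...
        let diagnostics = DiagnosticSet::new(
            (0..entry_count).map(|_| {
                let range = buffer.random_byte_range(0, &mut rng);
                DiagnosticEntry {
                    range: range.to_point_utf16(buffer),
                    diagnostic: Diagnostic::default(),
                }
            }),
            buffer,
        );
        // ...and handed to the buffer without the old version argument or Result.
        buffer.update_diagnostics(diagnostics, cx);
    });
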
@@ -42,6 +42,7 @@ regex = "1.5"
 serde = { version = "1", features = ["derive"] }
 serde_json = { version = "1.0.64", features = ["preserve_order"] }
 sha2 = "0.10"
+similar = "1.3"
 smol = "1.2.5"
 toml = "0.5"

@@ -223,21 +223,19 @@ impl LspCommand for PerformRename
         mut cx: AsyncAppContext,
     ) -> Result<ProjectTransaction> {
         if let Some(edit) = message {
-            let (language_name, language_server) = buffer.read_with(&cx, |buffer, _| {
+            let language_server = project
+                .read_with(&cx, |project, cx| {
+                    project.language_server_for_buffer(&buffer, cx).cloned()
+                })
+                .ok_or_else(|| anyhow!("no language server found for buffer"))?;
             let language = buffer
-                .language()
-                .ok_or_else(|| anyhow!("buffer's language was removed"))?;
-            let language_server = buffer
-                .language_server()
-                .cloned()
-                .ok_or_else(|| anyhow!("buffer's language server was removed"))?;
-            Ok::<_, anyhow::Error>((language.name().to_string(), language_server))
-            })?;
+                .read_with(&cx, |buffer, _| buffer.language().cloned())
+                .ok_or_else(|| anyhow!("no language for buffer"))?;
             Project::deserialize_workspace_edit(
                 project,
                 edit,
                 self.push_to_history,
-                language_name,
+                language.name(),
                 language_server,
                 &mut cx,
             )
@@ -343,14 +341,14 @@ impl LspCommand for GetDefinition
         mut cx: AsyncAppContext,
     ) -> Result<Vec<Location>> {
         let mut definitions = Vec::new();
-        let (language, language_server) = buffer
-            .read_with(&cx, |buffer, _| {
-                buffer
-                    .language()
-                    .cloned()
-                    .zip(buffer.language_server().cloned())
+        let language_server = project
+            .read_with(&cx, |project, cx| {
+                project.language_server_for_buffer(&buffer, cx).cloned()
             })
-            .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+            .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+        let language = buffer
+            .read_with(&cx, |buffer, _| buffer.language().cloned())
+            .ok_or_else(|| anyhow!("no language for buffer"))?;

         if let Some(message) = message {
             let mut unresolved_locations = Vec::new();
@@ -375,7 +373,7 @@ impl LspCommand for GetDefinition
                 .update(&mut cx, |this, cx| {
                     this.open_local_buffer_via_lsp(
                         target_uri,
-                        language.name().to_string(),
+                        language.name(),
                         language_server.clone(),
                         cx,
                     )
@@ -519,14 +517,14 @@ impl LspCommand for GetReferences
         mut cx: AsyncAppContext,
     ) -> Result<Vec<Location>> {
         let mut references = Vec::new();
-        let (language, language_server) = buffer
-            .read_with(&cx, |buffer, _| {
-                buffer
-                    .language()
-                    .cloned()
-                    .zip(buffer.language_server().cloned())
+        let language_server = project
+            .read_with(&cx, |project, cx| {
+                project.language_server_for_buffer(&buffer, cx).cloned()
             })
-            .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+            .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+        let language = buffer
+            .read_with(&cx, |buffer, _| buffer.language().cloned())
+            .ok_or_else(|| anyhow!("no language for buffer"))?;

         if let Some(locations) = locations {
             for lsp_location in locations {
@@ -534,7 +532,7 @@ impl LspCommand for GetReferences
                 .update(&mut cx, |this, cx| {
                     this.open_local_buffer_via_lsp(
                         lsp_location.uri,
-                        language.name().to_string(),
+                        language.name(),
                         language_server.clone(),
                         cx,
                     )
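
The three hunks above make the same substitution in PerformRename, GetDefinition, and GetReferences, so the shared pattern is easier to read in one place. This is only a condensation of what the diff already shows, with `project` and `buffer` being the handles already in scope in those methods:

    // The language server is now resolved through the project rather than the
    // buffer, while the language itself is still read from the buffer.
    let language_server = project
        .read_with(&cx, |project, cx| {
            project.language_server_for_buffer(&buffer, cx).cloned()
        })
        .ok_or_else(|| anyhow!("no language server found for buffer"))?;
    let language = buffer
        .read_with(&cx, |buffer, _| buffer.language().cloned())
        .ok_or_else(|| anyhow!("no language for buffer"))?;
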
File diff suppressed because it is too large
@@ -556,6 +556,7 @@ impl LocalWorktree {
     }

     pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
+        dbg!(&self.diagnostics);
         self.diagnostics.get(path).cloned()
     }

@@ -5,7 +5,7 @@ use gpui::{
     action, elements::*, keymap::Binding, platform::CursorStyle, Entity, MutableAppContext,
     RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
-use language::AnchorRangeExt;
+use language::OffsetRangeExt;
 use postage::watch;
 use project::search::SearchQuery;
 use std::ops::Range;
@@ -1011,8 +1011,8 @@ mod tests {
     };
     use gpui::{executor, ModelHandle, TestAppContext};
     use language::{
-        tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
-        LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
+        tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry,
+        LanguageServerConfig, OffsetRangeExt, Point, ToLspPosition,
     };
     use lsp;
     use parking_lot::Mutex;
@@ -1,5 +1,5 @@
 use super::{Point, ToOffset};
-use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPointUtf16};
+use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPoint, ToPointUtf16};
 use anyhow::Result;
 use std::{cmp::Ordering, fmt::Debug, ops::Range};
 use sum_tree::Bias;
@@ -74,11 +74,33 @@ impl Anchor {
     }
 }

+pub trait OffsetRangeExt {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+    T: ToOffset,
+{
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+        self.start.to_offset(snapshot)..self.end.to_offset(&snapshot)
+    }
+
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+        self.start.to_offset(snapshot).to_point(snapshot)
+            ..self.end.to_offset(snapshot).to_point(snapshot)
+    }
+
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+        self.start.to_offset(snapshot).to_point_utf16(snapshot)
+            ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+    }
+}
+
 pub trait AnchorRangeExt {
     fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
-    fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
-    fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
-    fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16>;
 }

 impl AnchorRangeExt for Range<Anchor> {
@@ -88,16 +110,4 @@ impl AnchorRangeExt for Range<Anchor> {
             ord @ _ => ord,
         })
     }
-
-    fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
-        self.start.to_offset(&content)..self.end.to_offset(&content)
-    }
-
-    fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
-        self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
-    }
-
-    fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16> {
-        self.start.to_point_utf16(content)..self.end.to_point_utf16(content)
-    }
 }
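
Because the offset, point, and UTF-16 conversions move from AnchorRangeExt onto the new OffsetRangeExt blanket impl, a short usage sketch may help. It assumes a BufferSnapshot named `snapshot`, assumes Anchor implements ToOffset (as the pre-existing anchor-range conversions suggest), and uses only the three methods defined in the hunk above; the `language::OffsetRangeExt` path follows the import hunks earlier in this diff:

    use language::OffsetRangeExt;

    // Any Range<T> where T: ToOffset (offsets, points, or anchors) now converts
    // through the same three methods against a buffer snapshot.
    let range = start_anchor..end_anchor;
    let offsets = range.to_offset(&snapshot);               // Range<usize>
    let points = range.to_point(&snapshot);                 // Range<Point>
    let points_utf16 = range.to_point_utf16(&snapshot);     // Range<PointUtf16>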