Re-enable cargo check for rust-analyzer

Antonio Scandurra 2022-01-04 17:38:45 +01:00
parent 508b9dc024
commit ad1db117e6
7 changed files with 118 additions and 50 deletions


@@ -11,7 +11,7 @@ use gpui::{
 };
 use language::{Bias, Buffer, Diagnostic, DiagnosticEntry, Point};
 use postage::watch;
-use project::Project;
+use project::{Project, ProjectPath};
 use std::{cmp::Ordering, ops::Range, path::Path, sync::Arc};
 use util::TryFutureExt;
 use workspace::Workspace;
@@ -41,9 +41,11 @@ struct ProjectDiagnostics {
 }
 
 struct ProjectDiagnosticsEditor {
+    project: ModelHandle<Project>,
     editor: ViewHandle<Editor>,
     excerpts: ModelHandle<MultiBuffer>,
     path_states: Vec<(Arc<Path>, Vec<DiagnosticGroupState>)>,
+    paths_to_update: HashMap<usize, HashSet<ProjectPath>>,
     build_settings: BuildSettings,
 }
@@ -95,41 +97,19 @@ impl ProjectDiagnosticsEditor {
         settings: watch::Receiver<workspace::Settings>,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        let project_paths = project
-            .read(cx)
-            .diagnostic_summaries(cx)
-            .map(|e| e.0)
-            .collect::<Vec<_>>();
-        cx.spawn(|this, mut cx| {
-            let project = project.clone();
-            async move {
-                for project_path in project_paths {
-                    let buffer = project
-                        .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
-                        .await?;
-                    this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx))
-                }
-                Result::<_, anyhow::Error>::Ok(())
-            }
-        })
-        .detach();
-
-        cx.subscribe(&project, |_, project, event, cx| {
-            if let project::Event::DiagnosticsUpdated(project_path) = event {
-                let project_path = project_path.clone();
-                cx.spawn(|this, mut cx| {
-                    async move {
-                        let buffer = project
-                            .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
-                            .await?;
-                        this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx));
-                        Ok(())
-                    }
-                    .log_err()
-                })
-                .detach();
-            }
+        cx.subscribe(&project, |this, _, event, cx| match event {
+            project::Event::DiskBasedDiagnosticsUpdated { worktree_id } => {
+                if let Some(paths) = this.paths_to_update.remove(&worktree_id) {
+                    this.update_excerpts(paths, cx);
+                }
+            }
+            project::Event::DiagnosticsUpdated(path) => {
+                this.paths_to_update
+                    .entry(path.worktree_id)
+                    .or_default()
+                    .insert(path.clone());
+            }
+            _ => {}
         })
         .detach();
@@ -139,12 +119,22 @@ impl ProjectDiagnosticsEditor {
             cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx));
         cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event))
             .detach();
-        Self {
+
+        let paths_to_update = project
+            .read(cx)
+            .diagnostic_summaries(cx)
+            .map(|e| e.0)
+            .collect();
+        let this = Self {
+            project,
             excerpts,
             editor,
             build_settings,
             path_states: Default::default(),
-        }
+            paths_to_update: Default::default(),
+        };
+        this.update_excerpts(paths_to_update, cx);
+        this
     }
 
     #[cfg(test)]
@@ -189,6 +179,23 @@ impl ProjectDiagnosticsEditor {
             .update(cx, |editor, cx| editor.remove_blocks(blocks_to_delete, cx));
     }
 
+    fn update_excerpts(&self, paths: HashSet<ProjectPath>, cx: &mut ViewContext<Self>) {
+        let project = self.project.clone();
+        cx.spawn(|this, mut cx| {
+            async move {
+                for path in paths {
+                    let buffer = project
+                        .update(&mut cx, |project, cx| project.open_buffer(path, cx))
+                        .await?;
+                    this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx))
+                }
+                Result::<_, anyhow::Error>::Ok(())
+            }
+            .log_err()
+        })
+        .detach();
+    }
+
     fn populate_excerpts(&mut self, buffer: ModelHandle<Buffer>, cx: &mut ViewContext<Self>) {
         let snapshot;
         let path;
@@ -572,7 +579,7 @@ mod tests {
     use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore};
     use gpui::TestAppContext;
     use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry, PointUtf16};
-    use project::FakeFs;
+    use project::{worktree, FakeFs};
     use serde_json::json;
     use std::sync::Arc;
     use unindent::Unindent as _;
@@ -813,6 +820,7 @@ mod tests {
                 cx,
             )
             .unwrap();
+            cx.emit(worktree::Event::DiskBasedDiagnosticsUpdated);
         });
 
         view.condition(&mut cx, |view, cx| view.text(cx).contains("const a"))
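
The hunks above make the diagnostics view batch `DiagnosticsUpdated` paths per worktree and refresh excerpts only once the worktree reports that its disk-based diagnostics pass (cargo check) has finished. A minimal, self-contained sketch of that batching pattern; the `PathBatcher` type, the simplified `ProjectPath` and `Event` definitions, and the `flush` callback are hypothetical stand-ins for the gpui and project types used in the diff:

use std::collections::{HashMap, HashSet};

// Hypothetical stand-ins for the project's ProjectPath and event types.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct ProjectPath {
    worktree_id: usize,
    path: String,
}

enum Event {
    DiagnosticsUpdated(ProjectPath),
    DiskBasedDiagnosticsUpdated { worktree_id: usize },
}

#[derive(Default)]
struct PathBatcher {
    // Paths whose diagnostics changed, keyed by worktree id.
    paths_to_update: HashMap<usize, HashSet<ProjectPath>>,
}

impl PathBatcher {
    // Buffer paths as diagnostics trickle in; hand them to `flush` only when
    // the slow, disk-based pass for that worktree completes.
    fn handle(&mut self, event: Event, mut flush: impl FnMut(HashSet<ProjectPath>)) {
        match event {
            Event::DiagnosticsUpdated(path) => {
                self.paths_to_update
                    .entry(path.worktree_id)
                    .or_default()
                    .insert(path);
            }
            Event::DiskBasedDiagnosticsUpdated { worktree_id } => {
                if let Some(paths) = self.paths_to_update.remove(&worktree_id) {
                    flush(paths);
                }
            }
        }
    }
}

fn main() {
    let mut batcher = PathBatcher::default();
    let path = ProjectPath { worktree_id: 1, path: "src/main.rs".into() };
    batcher.handle(Event::DiagnosticsUpdated(path), |_| unreachable!());
    batcher.handle(
        Event::DiskBasedDiagnosticsUpdated { worktree_id: 1 },
        |paths| println!("refreshing {} path(s)", paths.len()),
    );
}

Deferring the refresh this way avoids re-opening and re-excerpting buffers for every intermediate publishDiagnostics notification while the check is still running.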


@@ -19,6 +19,7 @@ pub struct DiagnosticEntry<T> {
     pub diagnostic: Diagnostic,
 }
 
+#[derive(Debug)]
 pub struct DiagnosticGroup<T> {
     pub entries: Vec<DiagnosticEntry<T>>,
     pub primary_ix: usize,


@@ -46,6 +46,7 @@ pub struct LanguageConfig {
 pub struct LanguageServerConfig {
     pub binary: String,
     pub disk_based_diagnostic_sources: HashSet<String>,
+    pub disk_based_diagnostics_progress_token: Option<String>,
     #[cfg(any(test, feature = "test-support"))]
     #[serde(skip)]
     pub fake_server: Option<(Arc<lsp::LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
@@ -199,6 +200,13 @@ impl Language {
             .map(|config| &config.disk_based_diagnostic_sources)
     }
 
+    pub fn disk_based_diagnostics_progress_token(&self) -> Option<&String> {
+        self.config
+            .language_server
+            .as_ref()
+            .and_then(|config| config.disk_based_diagnostics_progress_token.as_ref())
+    }
+
     pub fn brackets(&self) -> &[BracketPair] {
         &self.config.brackets
     }


@@ -28,7 +28,7 @@ pub use lsp_types::*;
 const JSON_RPC_VERSION: &'static str = "2.0";
 const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
 
-type NotificationHandler = Box<dyn Send + Sync + Fn(&str)>;
+type NotificationHandler = Box<dyn Send + Sync + FnMut(&str)>;
 type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
 
 pub struct LanguageServer {
@@ -139,7 +139,7 @@ impl LanguageServer {
                         if let Ok(AnyNotification { method, params }) =
                             serde_json::from_slice(&buffer)
                         {
-                            if let Some(handler) = notification_handlers.read().get(method) {
+                            if let Some(handler) = notification_handlers.write().get_mut(method) {
                                 handler(params.get());
                             } else {
                                 log::info!(
@@ -226,15 +226,15 @@ impl LanguageServer {
             process_id: Default::default(),
             root_path: Default::default(),
             root_uri: Some(root_uri),
-            initialization_options: Some(json!({
-                "checkOnSave": {
-                    "enable": false
-                },
-            })),
+            initialization_options: Default::default(),
             capabilities: lsp_types::ClientCapabilities {
                 experimental: Some(json!({
                     "serverStatusNotification": true,
                 })),
+                window: Some(lsp_types::WindowClientCapabilities {
+                    work_done_progress: Some(true),
+                    ..Default::default()
+                }),
                 ..Default::default()
             },
             trace: Default::default(),
@@ -283,10 +283,10 @@ impl LanguageServer {
         }
     }
 
-    pub fn on_notification<T, F>(&self, f: F) -> Subscription
+    pub fn on_notification<T, F>(&self, mut f: F) -> Subscription
     where
         T: lsp_types::notification::Notification,
-        F: 'static + Send + Sync + Fn(T::Params),
+        F: 'static + Send + Sync + FnMut(T::Params),
     {
         let prev_handler = self.notification_handlers.write().insert(
             T::METHOD,
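
The capability and handler changes above let the client receive `$/progress` notifications and key on a work-done progress token to detect when rust-analyzer's cargo check pass ends. A small sketch of that check written directly against the lsp-types crate's data model; `is_disk_based_diagnostics_done` is a hypothetical helper for illustration, not part of this codebase, and assumes a recent lsp-types version:

use lsp_types::{
    NumberOrString, ProgressParams, ProgressParamsValue, WorkDoneProgress, WorkDoneProgressEnd,
};

// Returns true when `params` is the WorkDoneProgress End notification for `token`.
fn is_disk_based_diagnostics_done(params: &ProgressParams, token: &str) -> bool {
    matches!(&params.token, NumberOrString::String(t) if t == token)
        && matches!(
            &params.value,
            ProgressParamsValue::WorkDone(WorkDoneProgress::End(_))
        )
}

fn main() {
    // The token string matches the one configured for rust-analyzer at the end of this commit.
    let params = ProgressParams {
        token: NumberOrString::String("rustAnalyzer/cargo check".into()),
        value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
            message: None,
        })),
    };
    assert!(is_disk_based_diagnostics_done(&params, "rustAnalyzer/cargo check"));
}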


@@ -1,6 +1,6 @@
 pub mod fs;
 mod ignore;
-mod worktree;
+pub mod worktree;
 
 use anyhow::{anyhow, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
@@ -60,6 +60,7 @@ pub struct Collaborator {
 pub enum Event {
     ActiveEntryChanged(Option<ProjectEntry>),
     WorktreeRemoved(usize),
+    DiskBasedDiagnosticsUpdated { worktree_id: usize },
     DiagnosticsUpdated(ProjectPath),
 }
@@ -482,6 +483,11 @@ impl Project {
                     path: path.clone(),
                 }));
             }
+            worktree::Event::DiskBasedDiagnosticsUpdated => {
+                cx.emit(Event::DiskBasedDiagnosticsUpdated {
+                    worktree_id: worktree.id(),
+                });
+            }
         })
         .detach();
         self.worktrees.push(worktree);


@@ -66,6 +66,7 @@ pub enum Worktree {
 #[derive(Debug)]
 pub enum Event {
+    DiskBasedDiagnosticsUpdated,
     DiagnosticsUpdated(Arc<Path>),
 }
@@ -1037,18 +1038,61 @@ impl LocalWorktree {
             .disk_based_diagnostic_sources()
             .cloned()
             .unwrap_or_default();
+        let disk_based_diagnostics_progress_token =
+            language.disk_based_diagnostics_progress_token().cloned();
+
         let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
         language_server
             .on_notification::<lsp::notification::PublishDiagnostics, _>(move |params| {
                 smol::block_on(diagnostics_tx.send(params)).ok();
             })
             .detach();
+        cx.spawn_weak(|this, mut cx| {
+            let has_disk_based_diagnostic_progress_token =
+                disk_based_diagnostics_progress_token.is_some();
+            async move {
+                while let Ok(diagnostics) = diagnostics_rx.recv().await {
+                    if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
+                        handle.update(&mut cx, |this, cx| {
+                            this.update_diagnostics(diagnostics, &disk_based_sources, cx)
+                                .log_err();
+                            if !has_disk_based_diagnostic_progress_token {
+                                cx.emit(Event::DiskBasedDiagnosticsUpdated);
+                            }
+                        });
+                    } else {
+                        break;
+                    }
+                }
+            }
+        })
+        .detach();
+
+        let (mut disk_based_diagnostics_done_tx, mut disk_based_diagnostics_done_rx) =
+            watch::channel_with(());
+        language_server
+            .on_notification::<lsp::notification::Progress, _>(move |params| {
+                let token = match params.token {
+                    lsp::NumberOrString::Number(_) => None,
+                    lsp::NumberOrString::String(token) => Some(token),
+                };
+                if token == disk_based_diagnostics_progress_token {
+                    match params.value {
+                        lsp::ProgressParamsValue::WorkDone(progress) => match progress {
+                            lsp::WorkDoneProgress::End(_) => {
+                                smol::block_on(disk_based_diagnostics_done_tx.send(())).ok();
+                            }
+                            _ => {}
+                        },
+                    }
+                }
+            })
+            .detach();
         cx.spawn_weak(|this, mut cx| async move {
-            while let Ok(diagnostics) = diagnostics_rx.recv().await {
+            while let Some(()) = disk_based_diagnostics_done_rx.recv().await {
                 if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
-                    handle.update(&mut cx, |this, cx| {
-                        this.update_diagnostics(diagnostics, &disk_based_sources, cx)
-                            .log_err();
+                    handle.update(&mut cx, |_, cx| {
+                        cx.emit(Event::DiskBasedDiagnosticsUpdated);
                     });
                 } else {
                     break;

@@ -13,3 +13,4 @@ brackets = [
 [language_server]
 binary = "rust-analyzer"
 disk_based_diagnostic_sources = ["rustc"]
+disk_based_diagnostics_progress_token = "rustAnalyzer/cargo check"
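
The new `disk_based_diagnostics_progress_token` key flows from this TOML into the `LanguageServerConfig` struct shown earlier in the commit. A hedged sketch of parsing such a config with serde and the toml crate; the struct below is a simplified stand-in rather than the actual zed type, and it assumes the serde derive feature and the toml crate are available:

use serde::Deserialize;
use std::collections::HashSet;

// Simplified mirror of the LanguageServerConfig fields shown above.
#[derive(Deserialize, Debug)]
struct LanguageServerConfig {
    binary: String,
    disk_based_diagnostic_sources: HashSet<String>,
    disk_based_diagnostics_progress_token: Option<String>,
}

fn main() {
    let toml_src = r#"
        binary = "rust-analyzer"
        disk_based_diagnostic_sources = ["rustc"]
        disk_based_diagnostics_progress_token = "rustAnalyzer/cargo check"
    "#;
    let config: LanguageServerConfig = toml::from_str(toml_src).unwrap();
    assert_eq!(
        config.disk_based_diagnostics_progress_token.as_deref(),
        Some("rustAnalyzer/cargo check")
    );
    println!("{config:?}");
}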