Batch diagnostics updates (#35794)

Diagnostics updates were programmed in Zed based off the r-a LSP push
diagnostics, with all related updates happening per file.

https://github.com/zed-industries/zed/pull/19230 and especially
https://github.com/zed-industries/zed/pull/32269 brought in pull
diagnostics that could produce results for thousands of files
simultaneously.

It was noted and addressed on the local side in
https://github.com/zed-industries/zed/pull/34022 but the remote side was
still not adjusted properly.

This PR 

* removes redundant diagnostics pull updates on remote clients, as
buffer diagnostics are updated via buffer sync operations separately
* batches all diagnostics-related updates and proto messages, so
multiple diagnostic summaries (per file) could be sent at once,
specifically, one (potentially large) diagnostics summary update instead
of thousands of small ones.

Buffer updates are still sent per buffer and were left unchanged, since they
happen separately and do not burden the collab traffic that much.

Release Notes:

- Improved diagnostics performance in collaborative mode
This commit is contained in:
Kirill Bulatov 2025-08-07 17:45:41 +03:00 committed by GitHub
parent a5c25e0366
commit 740686b883
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 500 additions and 350 deletions

View file

@ -1630,15 +1630,15 @@ fn notify_rejoined_projects(
} }
// Stream this worktree's diagnostics. // Stream this worktree's diagnostics.
for summary in worktree.diagnostic_summaries { let mut worktree_diagnostics = worktree.diagnostic_summaries.into_iter();
session.peer.send( if let Some(summary) = worktree_diagnostics.next() {
session.connection_id, let message = proto::UpdateDiagnosticSummary {
proto::UpdateDiagnosticSummary {
project_id: project.id.to_proto(), project_id: project.id.to_proto(),
worktree_id: worktree.id, worktree_id: worktree.id,
summary: Some(summary), summary: Some(summary),
}, more_summaries: worktree_diagnostics.collect(),
)?; };
session.peer.send(session.connection_id, message)?;
} }
for settings_file in worktree.settings_files { for settings_file in worktree.settings_files {
@ -2060,15 +2060,15 @@ async fn join_project(
} }
// Stream this worktree's diagnostics. // Stream this worktree's diagnostics.
for summary in worktree.diagnostic_summaries { let mut worktree_diagnostics = worktree.diagnostic_summaries.into_iter();
session.peer.send( if let Some(summary) = worktree_diagnostics.next() {
session.connection_id, let message = proto::UpdateDiagnosticSummary {
proto::UpdateDiagnosticSummary { project_id: project.id.to_proto(),
project_id: project_id.to_proto(),
worktree_id: worktree.id, worktree_id: worktree.id,
summary: Some(summary), summary: Some(summary),
}, more_summaries: worktree_diagnostics.collect(),
)?; };
session.peer.send(session.connection_id, message)?;
} }
for settings_file in worktree.settings_files { for settings_file in worktree.settings_files {

View file

@ -177,9 +177,9 @@ impl ProjectDiagnosticsEditor {
} }
project::Event::DiagnosticsUpdated { project::Event::DiagnosticsUpdated {
language_server_id, language_server_id,
path, paths,
} => { } => {
this.paths_to_update.insert(path.clone()); this.paths_to_update.extend(paths.clone());
let project = project.clone(); let project = project.clone();
this.diagnostic_summary_update = cx.spawn(async move |this, cx| { this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor() cx.background_executor()
@ -193,9 +193,9 @@ impl ProjectDiagnosticsEditor {
cx.emit(EditorEvent::TitleChanged); cx.emit(EditorEvent::TitleChanged);
if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) { if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) {
log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. recording change"); log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. recording change");
} else { } else {
log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. updating excerpts"); log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. updating excerpts");
this.update_stale_excerpts(window, cx); this.update_stale_excerpts(window, cx);
} }
} }

View file

@ -140,6 +140,20 @@ impl FormatTrigger {
} }
} }
#[derive(Debug)]
pub struct DocumentDiagnosticsUpdate<'a, D> {
pub diagnostics: D,
pub result_id: Option<String>,
pub server_id: LanguageServerId,
pub disk_based_sources: Cow<'a, [String]>,
}
pub struct DocumentDiagnostics {
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
document_abs_path: PathBuf,
version: Option<i32>,
}
pub struct LocalLspStore { pub struct LocalLspStore {
weak: WeakEntity<LspStore>, weak: WeakEntity<LspStore>,
worktree_store: Entity<WorktreeStore>, worktree_store: Entity<WorktreeStore>,
@ -503,12 +517,16 @@ impl LocalLspStore {
adapter.process_diagnostics(&mut params, server_id, buffer); adapter.process_diagnostics(&mut params, server_id, buffer);
} }
this.merge_diagnostics( this.merge_lsp_diagnostics(
server_id,
params,
None,
DiagnosticSourceKind::Pushed, DiagnosticSourceKind::Pushed,
vec![DocumentDiagnosticsUpdate {
server_id,
diagnostics: params,
result_id: None,
disk_based_sources: Cow::Borrowed(
&adapter.disk_based_diagnostic_sources, &adapter.disk_based_diagnostic_sources,
),
}],
|_, diagnostic, cx| match diagnostic.source_kind { |_, diagnostic, cx| match diagnostic.source_kind {
DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => { DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
adapter.retain_old_diagnostic(diagnostic, cx) adapter.retain_old_diagnostic(diagnostic, cx)
@ -3610,8 +3628,8 @@ pub enum LspStoreEvent {
RefreshInlayHints, RefreshInlayHints,
RefreshCodeLens, RefreshCodeLens,
DiagnosticsUpdated { DiagnosticsUpdated {
language_server_id: LanguageServerId, server_id: LanguageServerId,
path: ProjectPath, paths: Vec<ProjectPath>,
}, },
DiskBasedDiagnosticsStarted { DiskBasedDiagnosticsStarted {
language_server_id: LanguageServerId, language_server_id: LanguageServerId,
@ -4440,21 +4458,28 @@ impl LspStore {
pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) {
if let Some((client, downstream_project_id)) = self.downstream_client.clone() { if let Some((client, downstream_project_id)) = self.downstream_client.clone() {
if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { if let Some(diangostic_summaries) = self.diagnostic_summaries.get(&worktree.id()) {
for (path, summaries) in summaries { let mut summaries =
for (&server_id, summary) in summaries { diangostic_summaries
.into_iter()
.flat_map(|(path, summaries)| {
summaries
.into_iter()
.map(|(server_id, summary)| summary.to_proto(*server_id, path))
});
if let Some(summary) = summaries.next() {
client client
.send(proto::UpdateDiagnosticSummary { .send(proto::UpdateDiagnosticSummary {
project_id: downstream_project_id, project_id: downstream_project_id,
worktree_id: worktree.id().to_proto(), worktree_id: worktree.id().to_proto(),
summary: Some(summary.to_proto(server_id, path)), summary: Some(summary),
more_summaries: summaries.collect(),
}) })
.log_err(); .log_err();
} }
} }
} }
} }
}
// TODO: remove MultiLspQuery: instead, the proto handler should pick appropriate server(s) // TODO: remove MultiLspQuery: instead, the proto handler should pick appropriate server(s)
// Then, use `send_lsp_proto_request` or analogue for most of the LSP proto requests and inline this check inside // Then, use `send_lsp_proto_request` or analogue for most of the LSP proto requests and inline this check inside
@ -6564,7 +6589,7 @@ impl LspStore {
&mut self, &mut self,
buffer: Entity<Buffer>, buffer: Entity<Buffer>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<Result<Vec<LspPullDiagnostics>>> { ) -> Task<Result<Option<Vec<LspPullDiagnostics>>>> {
let buffer_id = buffer.read(cx).remote_id(); let buffer_id = buffer.read(cx).remote_id();
if let Some((client, upstream_project_id)) = self.upstream_client() { if let Some((client, upstream_project_id)) = self.upstream_client() {
@ -6575,7 +6600,7 @@ impl LspStore {
}, },
cx, cx,
) { ) {
return Task::ready(Ok(Vec::new())); return Task::ready(Ok(None));
} }
let request_task = client.request(proto::MultiLspQuery { let request_task = client.request(proto::MultiLspQuery {
buffer_id: buffer_id.to_proto(), buffer_id: buffer_id.to_proto(),
@ -6593,7 +6618,7 @@ impl LspStore {
)), )),
}); });
cx.background_spawn(async move { cx.background_spawn(async move {
Ok(request_task let _proto_responses = request_task
.await? .await?
.responses .responses
.into_iter() .into_iter()
@ -6606,8 +6631,11 @@ impl LspStore {
None None
} }
}) })
.flat_map(GetDocumentDiagnostics::diagnostics_from_proto) .collect::<Vec<_>>();
.collect()) // Proto requests cause the diagnostics to be pulled from language server(s) on the local side
// and then, buffer state updated with the diagnostics received, which will be later propagated to the client.
// Do not attempt to further process the dummy responses here.
Ok(None)
}) })
} else { } else {
let server_ids = buffer.update(cx, |buffer, cx| { let server_ids = buffer.update(cx, |buffer, cx| {
@ -6635,7 +6663,7 @@ impl LspStore {
for diagnostics in join_all(pull_diagnostics).await { for diagnostics in join_all(pull_diagnostics).await {
responses.extend(diagnostics?); responses.extend(diagnostics?);
} }
Ok(responses) Ok(Some(responses))
}) })
} }
} }
@ -6701,76 +6729,94 @@ impl LspStore {
buffer: Entity<Buffer>, buffer: Entity<Buffer>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> { ) -> Task<anyhow::Result<()>> {
let buffer_id = buffer.read(cx).remote_id();
let diagnostics = self.pull_diagnostics(buffer, cx); let diagnostics = self.pull_diagnostics(buffer, cx);
cx.spawn(async move |lsp_store, cx| { cx.spawn(async move |lsp_store, cx| {
let diagnostics = diagnostics.await.context("pulling diagnostics")?; let Some(diagnostics) = diagnostics.await.context("pulling diagnostics")? else {
return Ok(());
};
lsp_store.update(cx, |lsp_store, cx| { lsp_store.update(cx, |lsp_store, cx| {
if lsp_store.as_local().is_none() { if lsp_store.as_local().is_none() {
return; return;
} }
for diagnostics_set in diagnostics { let mut unchanged_buffers = HashSet::default();
let LspPullDiagnostics::Response { let mut changed_buffers = HashSet::default();
let server_diagnostics_updates = diagnostics
.into_iter()
.filter_map(|diagnostics_set| match diagnostics_set {
LspPullDiagnostics::Response {
server_id, server_id,
uri, uri,
diagnostics, diagnostics,
} = diagnostics_set } => Some((server_id, uri, diagnostics)),
else { LspPullDiagnostics::Default => None,
continue; })
}; .fold(
HashMap::default(),
let adapter = lsp_store.language_server_adapter_for_id(server_id); |mut acc, (server_id, uri, diagnostics)| {
let disk_based_sources = adapter let (result_id, diagnostics) = match diagnostics {
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[]);
match diagnostics {
PulledDiagnostics::Unchanged { result_id } => { PulledDiagnostics::Unchanged { result_id } => {
lsp_store unchanged_buffers.insert(uri.clone());
.merge_diagnostics( (Some(result_id), Vec::new())
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics: Vec::new(),
version: None,
},
Some(result_id),
DiagnosticSourceKind::Pulled,
disk_based_sources,
|_, _, _| true,
cx,
)
.log_err();
} }
PulledDiagnostics::Changed { PulledDiagnostics::Changed {
diagnostics,
result_id, result_id,
diagnostics,
} => { } => {
changed_buffers.insert(uri.clone());
(result_id, diagnostics)
}
};
let disk_based_sources = Cow::Owned(
lsp_store lsp_store
.merge_diagnostics( .language_server_adapter_for_id(server_id)
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[])
.to_vec(),
);
acc.entry(server_id).or_insert_with(Vec::new).push(
DocumentDiagnosticsUpdate {
server_id, server_id,
lsp::PublishDiagnosticsParams { diagnostics: lsp::PublishDiagnosticsParams {
uri: uri.clone(), uri,
diagnostics, diagnostics,
version: None, version: None,
}, },
result_id, result_id,
DiagnosticSourceKind::Pulled,
disk_based_sources, disk_based_sources,
|buffer, old_diagnostic, _| match old_diagnostic.source_kind { },
);
acc
},
);
for diagnostic_updates in server_diagnostics_updates.into_values() {
lsp_store
.merge_lsp_diagnostics(
DiagnosticSourceKind::Pulled,
diagnostic_updates,
|buffer, old_diagnostic, cx| {
File::from_dyn(buffer.file())
.and_then(|file| {
let abs_path = file.as_local()?.abs_path(cx);
lsp::Url::from_file_path(abs_path).ok()
})
.is_none_or(|buffer_uri| {
unchanged_buffers.contains(&buffer_uri)
|| match old_diagnostic.source_kind {
DiagnosticSourceKind::Pulled => { DiagnosticSourceKind::Pulled => {
buffer.remote_id() != buffer_id !changed_buffers.contains(&buffer_uri)
} }
DiagnosticSourceKind::Other DiagnosticSourceKind::Other
| DiagnosticSourceKind::Pushed => true, | DiagnosticSourceKind::Pushed => true,
}
})
}, },
cx, cx,
) )
.log_err(); .log_err();
} }
}
}
}) })
}) })
} }
@ -7791,36 +7837,43 @@ impl LspStore {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
self.merge_diagnostic_entries( self.merge_diagnostic_entries(
server_id, vec![DocumentDiagnosticsUpdate {
abs_path, diagnostics: DocumentDiagnostics {
result_id,
version,
diagnostics, diagnostics,
document_abs_path: abs_path,
version,
},
result_id,
server_id,
disk_based_sources: Cow::Borrowed(&[]),
}],
|_, _, _| false, |_, _, _| false,
cx, cx,
)?; )?;
Ok(()) Ok(())
} }
pub fn merge_diagnostic_entries( pub fn merge_diagnostic_entries<'a>(
&mut self, &mut self,
server_id: LanguageServerId, diagnostic_updates: Vec<DocumentDiagnosticsUpdate<'a, DocumentDiagnostics>>,
abs_path: PathBuf, merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
result_id: Option<String>,
version: Option<i32>,
mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
filter: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let mut diagnostics_summary = None::<proto::UpdateDiagnosticSummary>;
let mut updated_diagnostics_paths = HashMap::default();
for mut update in diagnostic_updates {
let abs_path = &update.diagnostics.document_abs_path;
let server_id = update.server_id;
let Some((worktree, relative_path)) = let Some((worktree, relative_path)) =
self.worktree_store.read(cx).find_worktree(&abs_path, cx) self.worktree_store.read(cx).find_worktree(abs_path, cx)
else { else {
log::warn!("skipping diagnostics update, no worktree found for path {abs_path:?}"); log::warn!("skipping diagnostics update, no worktree found for path {abs_path:?}");
return Ok(()); return Ok(());
}; };
let worktree_id = worktree.read(cx).id();
let project_path = ProjectPath { let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(), worktree_id,
path: relative_path.into(), path: relative_path.into(),
}; };
@ -7832,7 +7885,7 @@ impl LspStore {
.into_iter() .into_iter()
.flat_map(|diag| { .flat_map(|diag| {
diag.iter() diag.iter()
.filter(|v| filter(buffer, &v.diagnostic, cx)) .filter(|v| merge(buffer, &v.diagnostic, cx))
.map(|v| { .map(|v| {
let start = Unclipped(v.range.start.to_point_utf16(&snapshot)); let start = Unclipped(v.range.start.to_point_utf16(&snapshot));
let end = Unclipped(v.range.end.to_point_utf16(&snapshot)); let end = Unclipped(v.range.end.to_point_utf16(&snapshot));
@ -7849,14 +7902,14 @@ impl LspStore {
.update_buffer_diagnostics( .update_buffer_diagnostics(
&buffer_handle, &buffer_handle,
server_id, server_id,
result_id, update.result_id,
version, update.diagnostics.version,
diagnostics.clone(), update.diagnostics.diagnostics.clone(),
reused_diagnostics.clone(), reused_diagnostics.clone(),
cx, cx,
)?; )?;
diagnostics.extend(reused_diagnostics); update.diagnostics.diagnostics.extend(reused_diagnostics);
} }
let updated = worktree.update(cx, |worktree, cx| { let updated = worktree.update(cx, |worktree, cx| {
@ -7864,16 +7917,56 @@ impl LspStore {
worktree.id(), worktree.id(),
server_id, server_id,
project_path.path.clone(), project_path.path.clone(),
diagnostics, update.diagnostics.diagnostics,
cx, cx,
) )
})?; })?;
if updated { match updated {
cx.emit(LspStoreEvent::DiagnosticsUpdated { ControlFlow::Continue(new_summary) => {
language_server_id: server_id, if let Some((project_id, new_summary)) = new_summary {
path: project_path, match &mut diagnostics_summary {
Some(diagnostics_summary) => {
diagnostics_summary
.more_summaries
.push(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count,
warning_count: new_summary.warning_count,
}) })
} }
None => {
diagnostics_summary = Some(proto::UpdateDiagnosticSummary {
project_id: project_id,
worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count,
warning_count: new_summary.warning_count,
}),
more_summaries: Vec::new(),
})
}
}
}
updated_diagnostics_paths
.entry(server_id)
.or_insert_with(Vec::new)
.push(project_path);
}
ControlFlow::Break(()) => {}
}
}
if let Some((diagnostics_summary, (downstream_client, _))) =
diagnostics_summary.zip(self.downstream_client.as_ref())
{
downstream_client.send(diagnostics_summary).log_err();
}
for (server_id, paths) in updated_diagnostics_paths {
cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths });
}
Ok(()) Ok(())
} }
@ -7881,10 +7974,10 @@ impl LspStore {
&mut self, &mut self,
worktree_id: WorktreeId, worktree_id: WorktreeId,
server_id: LanguageServerId, server_id: LanguageServerId,
worktree_path: Arc<Path>, path_in_worktree: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>, diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut Context<Worktree>, _: &mut Context<Worktree>,
) -> Result<bool> { ) -> Result<ControlFlow<(), Option<(u64, proto::DiagnosticSummary)>>> {
let local = match &mut self.mode { let local = match &mut self.mode {
LspStoreMode::Local(local_lsp_store) => local_lsp_store, LspStoreMode::Local(local_lsp_store) => local_lsp_store,
_ => anyhow::bail!("update_worktree_diagnostics called on remote"), _ => anyhow::bail!("update_worktree_diagnostics called on remote"),
@ -7892,7 +7985,9 @@ impl LspStore {
let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default(); let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default();
let diagnostics_for_tree = local.diagnostics.entry(worktree_id).or_default(); let diagnostics_for_tree = local.diagnostics.entry(worktree_id).or_default();
let summaries_by_server_id = summaries_for_tree.entry(worktree_path.clone()).or_default(); let summaries_by_server_id = summaries_for_tree
.entry(path_in_worktree.clone())
.or_default();
let old_summary = summaries_by_server_id let old_summary = summaries_by_server_id
.remove(&server_id) .remove(&server_id)
@ -7900,18 +7995,19 @@ impl LspStore {
let new_summary = DiagnosticSummary::new(&diagnostics); let new_summary = DiagnosticSummary::new(&diagnostics);
if new_summary.is_empty() { if new_summary.is_empty() {
if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&worktree_path) { if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&path_in_worktree)
{
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix); diagnostics_by_server_id.remove(ix);
} }
if diagnostics_by_server_id.is_empty() { if diagnostics_by_server_id.is_empty() {
diagnostics_for_tree.remove(&worktree_path); diagnostics_for_tree.remove(&path_in_worktree);
} }
} }
} else { } else {
summaries_by_server_id.insert(server_id, new_summary); summaries_by_server_id.insert(server_id, new_summary);
let diagnostics_by_server_id = diagnostics_for_tree let diagnostics_by_server_id = diagnostics_for_tree
.entry(worktree_path.clone()) .entry(path_in_worktree.clone())
.or_default(); .or_default();
match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
Ok(ix) => { Ok(ix) => {
@ -7924,23 +8020,22 @@ impl LspStore {
} }
if !old_summary.is_empty() || !new_summary.is_empty() { if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some((downstream_client, project_id)) = &self.downstream_client { if let Some((_, project_id)) = &self.downstream_client {
downstream_client Ok(ControlFlow::Continue(Some((
.send(proto::UpdateDiagnosticSummary { *project_id,
project_id: *project_id, proto::DiagnosticSummary {
worktree_id: worktree_id.to_proto(), path: path_in_worktree.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: worktree_path.to_proto(),
language_server_id: server_id.0 as u64, language_server_id: server_id.0 as u64,
error_count: new_summary.error_count as u32, error_count: new_summary.error_count as u32,
warning_count: new_summary.warning_count as u32, warning_count: new_summary.warning_count as u32,
}), },
}) ))))
.log_err(); } else {
Ok(ControlFlow::Continue(None))
} }
} else {
Ok(ControlFlow::Break(()))
} }
Ok(!old_summary.is_empty() || !new_summary.is_empty())
} }
pub fn open_buffer_for_symbol( pub fn open_buffer_for_symbol(
@ -8793,23 +8888,30 @@ impl LspStore {
envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>, envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
mut cx: AsyncApp, mut cx: AsyncApp,
) -> Result<()> { ) -> Result<()> {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |lsp_store, cx| {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
if let Some(message) = envelope.payload.summary { let mut updated_diagnostics_paths = HashMap::default();
let mut diagnostics_summary = None::<proto::UpdateDiagnosticSummary>;
for message_summary in envelope
.payload
.summary
.into_iter()
.chain(envelope.payload.more_summaries)
{
let project_path = ProjectPath { let project_path = ProjectPath {
worktree_id, worktree_id,
path: Arc::<Path>::from_proto(message.path), path: Arc::<Path>::from_proto(message_summary.path),
}; };
let path = project_path.path.clone(); let path = project_path.path.clone();
let server_id = LanguageServerId(message.language_server_id as usize); let server_id = LanguageServerId(message_summary.language_server_id as usize);
let summary = DiagnosticSummary { let summary = DiagnosticSummary {
error_count: message.error_count as usize, error_count: message_summary.error_count as usize,
warning_count: message.warning_count as usize, warning_count: message_summary.warning_count as usize,
}; };
if summary.is_empty() { if summary.is_empty() {
if let Some(worktree_summaries) = if let Some(worktree_summaries) =
this.diagnostic_summaries.get_mut(&worktree_id) lsp_store.diagnostic_summaries.get_mut(&worktree_id)
{ {
if let Some(summaries) = worktree_summaries.get_mut(&path) { if let Some(summaries) = worktree_summaries.get_mut(&path) {
summaries.remove(&server_id); summaries.remove(&server_id);
@ -8819,16 +8921,29 @@ impl LspStore {
} }
} }
} else { } else {
this.diagnostic_summaries lsp_store
.diagnostic_summaries
.entry(worktree_id) .entry(worktree_id)
.or_default() .or_default()
.entry(path) .entry(path)
.or_default() .or_default()
.insert(server_id, summary); .insert(server_id, summary);
} }
if let Some((downstream_client, project_id)) = &this.downstream_client {
downstream_client if let Some((_, project_id)) = &lsp_store.downstream_client {
.send(proto::UpdateDiagnosticSummary { match &mut diagnostics_summary {
Some(diagnostics_summary) => {
diagnostics_summary
.more_summaries
.push(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: summary.error_count as u32,
warning_count: summary.warning_count as u32,
})
}
None => {
diagnostics_summary = Some(proto::UpdateDiagnosticSummary {
project_id: *project_id, project_id: *project_id,
worktree_id: worktree_id.to_proto(), worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary { summary: Some(proto::DiagnosticSummary {
@ -8837,13 +8952,24 @@ impl LspStore {
error_count: summary.error_count as u32, error_count: summary.error_count as u32,
warning_count: summary.warning_count as u32, warning_count: summary.warning_count as u32,
}), }),
more_summaries: Vec::new(),
}) })
.log_err();
} }
cx.emit(LspStoreEvent::DiagnosticsUpdated { }
language_server_id: LanguageServerId(message.language_server_id as usize), }
path: project_path, updated_diagnostics_paths
}); .entry(server_id)
.or_insert_with(Vec::new)
.push(project_path);
}
if let Some((diagnostics_summary, (downstream_client, _))) =
diagnostics_summary.zip(lsp_store.downstream_client.as_ref())
{
downstream_client.send(diagnostics_summary).log_err();
}
for (server_id, paths) in updated_diagnostics_paths {
cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths });
} }
Ok(()) Ok(())
})? })?
@ -10361,6 +10487,7 @@ impl LspStore {
error_count: 0, error_count: 0,
warning_count: 0, warning_count: 0,
}), }),
more_summaries: Vec::new(),
}) })
.log_err(); .log_err();
} }
@ -10649,52 +10776,80 @@ impl LspStore {
) )
} }
#[cfg(any(test, feature = "test-support"))]
pub fn update_diagnostics( pub fn update_diagnostics(
&mut self, &mut self,
language_server_id: LanguageServerId, server_id: LanguageServerId,
params: lsp::PublishDiagnosticsParams, diagnostics: lsp::PublishDiagnosticsParams,
result_id: Option<String>, result_id: Option<String>,
source_kind: DiagnosticSourceKind, source_kind: DiagnosticSourceKind,
disk_based_sources: &[String], disk_based_sources: &[String],
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<()> { ) -> Result<()> {
self.merge_diagnostics( self.merge_lsp_diagnostics(
language_server_id,
params,
result_id,
source_kind, source_kind,
disk_based_sources, vec![DocumentDiagnosticsUpdate {
diagnostics,
result_id,
server_id,
disk_based_sources: Cow::Borrowed(disk_based_sources),
}],
|_, _, _| false, |_, _, _| false,
cx, cx,
) )
} }
pub fn merge_diagnostics( pub fn merge_lsp_diagnostics(
&mut self, &mut self,
language_server_id: LanguageServerId,
mut params: lsp::PublishDiagnosticsParams,
result_id: Option<String>,
source_kind: DiagnosticSourceKind, source_kind: DiagnosticSourceKind,
disk_based_sources: &[String], lsp_diagnostics: Vec<DocumentDiagnosticsUpdate<lsp::PublishDiagnosticsParams>>,
filter: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone, merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<()> { ) -> Result<()> {
anyhow::ensure!(self.mode.is_local(), "called update_diagnostics on remote"); anyhow::ensure!(self.mode.is_local(), "called update_diagnostics on remote");
let abs_path = params let updates = lsp_diagnostics
.uri .into_iter()
.to_file_path() .filter_map(|update| {
.map_err(|()| anyhow!("URI is not a file"))?; let abs_path = update.diagnostics.uri.to_file_path().ok()?;
Some(DocumentDiagnosticsUpdate {
diagnostics: self.lsp_to_document_diagnostics(
abs_path,
source_kind,
update.server_id,
update.diagnostics,
&update.disk_based_sources,
),
result_id: update.result_id,
server_id: update.server_id,
disk_based_sources: update.disk_based_sources,
})
})
.collect();
self.merge_diagnostic_entries(updates, merge, cx)?;
Ok(())
}
fn lsp_to_document_diagnostics(
&mut self,
document_abs_path: PathBuf,
source_kind: DiagnosticSourceKind,
server_id: LanguageServerId,
mut lsp_diagnostics: lsp::PublishDiagnosticsParams,
disk_based_sources: &[String],
) -> DocumentDiagnostics {
let mut diagnostics = Vec::default(); let mut diagnostics = Vec::default();
let mut primary_diagnostic_group_ids = HashMap::default(); let mut primary_diagnostic_group_ids = HashMap::default();
let mut sources_by_group_id = HashMap::default(); let mut sources_by_group_id = HashMap::default();
let mut supporting_diagnostics = HashMap::default(); let mut supporting_diagnostics = HashMap::default();
let adapter = self.language_server_adapter_for_id(language_server_id); let adapter = self.language_server_adapter_for_id(server_id);
// Ensure that primary diagnostics are always the most severe // Ensure that primary diagnostics are always the most severe
params.diagnostics.sort_by_key(|item| item.severity); lsp_diagnostics
.diagnostics
.sort_by_key(|item| item.severity);
for diagnostic in &params.diagnostics { for diagnostic in &lsp_diagnostics.diagnostics {
let source = diagnostic.source.as_ref(); let source = diagnostic.source.as_ref();
let range = range_from_lsp(diagnostic.range); let range = range_from_lsp(diagnostic.range);
let is_supporting = diagnostic let is_supporting = diagnostic
@ -10716,7 +10871,7 @@ impl LspStore {
.map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY)); .map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY));
let underline = self let underline = self
.language_server_adapter_for_id(language_server_id) .language_server_adapter_for_id(server_id)
.map_or(true, |adapter| adapter.underline_diagnostic(diagnostic)); .map_or(true, |adapter| adapter.underline_diagnostic(diagnostic));
if is_supporting { if is_supporting {
@ -10758,7 +10913,7 @@ impl LspStore {
}); });
if let Some(infos) = &diagnostic.related_information { if let Some(infos) = &diagnostic.related_information {
for info in infos { for info in infos {
if info.location.uri == params.uri && !info.message.is_empty() { if info.location.uri == lsp_diagnostics.uri && !info.message.is_empty() {
let range = range_from_lsp(info.location.range); let range = range_from_lsp(info.location.range);
diagnostics.push(DiagnosticEntry { diagnostics.push(DiagnosticEntry {
range, range,
@ -10806,16 +10961,11 @@ impl LspStore {
} }
} }
self.merge_diagnostic_entries( DocumentDiagnostics {
language_server_id,
abs_path,
result_id,
params.version,
diagnostics, diagnostics,
filter, document_abs_path,
cx, version: lsp_diagnostics.version,
)?; }
Ok(())
} }
fn insert_newly_running_language_server( fn insert_newly_running_language_server(
@ -11571,61 +11721,80 @@ impl LspStore {
) { ) {
let workspace_diagnostics = let workspace_diagnostics =
GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id); GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id);
for workspace_diagnostics in workspace_diagnostics { let mut unchanged_buffers = HashSet::default();
let LspPullDiagnostics::Response { let mut changed_buffers = HashSet::default();
let workspace_diagnostics_updates = workspace_diagnostics
.into_iter()
.filter_map(
|workspace_diagnostics| match workspace_diagnostics.diagnostics {
LspPullDiagnostics::Response {
server_id, server_id,
uri, uri,
diagnostics, diagnostics,
} = workspace_diagnostics.diagnostics } => Some((server_id, uri, diagnostics, workspace_diagnostics.version)),
else { LspPullDiagnostics::Default => None,
continue;
};
let adapter = self.language_server_adapter_for_id(server_id);
let disk_based_sources = adapter
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[]);
match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
self.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics: Vec::new(),
version: None,
}, },
Some(result_id),
DiagnosticSourceKind::Pulled,
disk_based_sources,
|_, _, _| true,
cx,
) )
.log_err(); .fold(
HashMap::default(),
|mut acc, (server_id, uri, diagnostics, version)| {
let (result_id, diagnostics) = match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
unchanged_buffers.insert(uri.clone());
(Some(result_id), Vec::new())
} }
PulledDiagnostics::Changed { PulledDiagnostics::Changed {
diagnostics,
result_id, result_id,
} => {
self.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics, diagnostics,
version: workspace_diagnostics.version, } => {
changed_buffers.insert(uri.clone());
(result_id, diagnostics)
}
};
let disk_based_sources = Cow::Owned(
self.language_server_adapter_for_id(server_id)
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[])
.to_vec(),
);
acc.entry(server_id)
.or_insert_with(Vec::new)
.push(DocumentDiagnosticsUpdate {
server_id,
diagnostics: lsp::PublishDiagnosticsParams {
uri,
diagnostics,
version,
}, },
result_id, result_id,
DiagnosticSourceKind::Pulled,
disk_based_sources, disk_based_sources,
|buffer, old_diagnostic, cx| match old_diagnostic.source_kind { });
acc
},
);
for diagnostic_updates in workspace_diagnostics_updates.into_values() {
self.merge_lsp_diagnostics(
DiagnosticSourceKind::Pulled,
diagnostic_updates,
|buffer, old_diagnostic, cx| {
File::from_dyn(buffer.file())
.and_then(|file| {
let abs_path = file.as_local()?.abs_path(cx);
lsp::Url::from_file_path(abs_path).ok()
})
.is_none_or(|buffer_uri| {
unchanged_buffers.contains(&buffer_uri)
|| match old_diagnostic.source_kind {
DiagnosticSourceKind::Pulled => { DiagnosticSourceKind::Pulled => {
let buffer_url = File::from_dyn(buffer.file()) !changed_buffers.contains(&buffer_uri)
.map(|f| f.abs_path(cx))
.and_then(|abs_path| file_path_to_lsp_url(&abs_path).ok());
buffer_url.is_none_or(|buffer_url| buffer_url != uri)
} }
DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => true, DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
true
}
}
})
}, },
cx, cx,
) )
@ -11633,8 +11802,6 @@ impl LspStore {
} }
} }
} }
}
}
fn subscribe_to_binary_statuses( fn subscribe_to_binary_statuses(
languages: &Arc<LanguageRegistry>, languages: &Arc<LanguageRegistry>,

View file

@ -1,4 +1,4 @@
use std::sync::Arc; use std::{borrow::Cow, sync::Arc};
use ::serde::{Deserialize, Serialize}; use ::serde::{Deserialize, Serialize};
use gpui::WeakEntity; use gpui::WeakEntity;
@ -6,7 +6,7 @@ use language::{CachedLspAdapter, Diagnostic, DiagnosticSourceKind};
use lsp::{LanguageServer, LanguageServerName}; use lsp::{LanguageServer, LanguageServerName};
use util::ResultExt as _; use util::ResultExt as _;
use crate::LspStore; use crate::{LspStore, lsp_store::DocumentDiagnosticsUpdate};
pub const CLANGD_SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd"); pub const CLANGD_SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd");
const INACTIVE_REGION_MESSAGE: &str = "inactive region"; const INACTIVE_REGION_MESSAGE: &str = "inactive region";
@ -81,12 +81,16 @@ pub fn register_notifications(
version: params.text_document.version, version: params.text_document.version,
diagnostics, diagnostics,
}; };
this.merge_diagnostics( this.merge_lsp_diagnostics(
server_id,
mapped_diagnostics,
None,
DiagnosticSourceKind::Pushed, DiagnosticSourceKind::Pushed,
vec![DocumentDiagnosticsUpdate {
server_id,
diagnostics: mapped_diagnostics,
result_id: None,
disk_based_sources: Cow::Borrowed(
&adapter.disk_based_diagnostic_sources, &adapter.disk_based_diagnostic_sources,
),
}],
|_, diag, _| !is_inactive_region(diag), |_, diag, _| !is_inactive_region(diag),
cx, cx,
) )

View file

@ -74,9 +74,9 @@ use gpui::{
Task, WeakEntity, Window, Task, WeakEntity, Window,
}; };
use language::{ use language::{
Buffer, BufferEvent, Capability, CodeLabel, CursorShape, DiagnosticSourceKind, Language, Buffer, BufferEvent, Capability, CodeLabel, CursorShape, Language, LanguageName,
LanguageName, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, Transaction,
Transaction, Unclipped, language_settings::InlayHintKind, proto::split_operations, Unclipped, language_settings::InlayHintKind, proto::split_operations,
}; };
use lsp::{ use lsp::{
CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode, CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode,
@ -305,7 +305,7 @@ pub enum Event {
language_server_id: LanguageServerId, language_server_id: LanguageServerId,
}, },
DiagnosticsUpdated { DiagnosticsUpdated {
path: ProjectPath, paths: Vec<ProjectPath>,
language_server_id: LanguageServerId, language_server_id: LanguageServerId,
}, },
RemoteIdChanged(Option<u64>), RemoteIdChanged(Option<u64>),
@ -2895,18 +2895,17 @@ impl Project {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
match event { match event {
LspStoreEvent::DiagnosticsUpdated { LspStoreEvent::DiagnosticsUpdated { server_id, paths } => {
language_server_id, cx.emit(Event::DiagnosticsUpdated {
path, paths: paths.clone(),
} => cx.emit(Event::DiagnosticsUpdated { language_server_id: *server_id,
path: path.clone(), })
language_server_id: *language_server_id, }
}), LspStoreEvent::LanguageServerAdded(server_id, name, worktree_id) => cx.emit(
LspStoreEvent::LanguageServerAdded(language_server_id, name, worktree_id) => cx.emit( Event::LanguageServerAdded(*server_id, name.clone(), *worktree_id),
Event::LanguageServerAdded(*language_server_id, name.clone(), *worktree_id),
), ),
LspStoreEvent::LanguageServerRemoved(language_server_id) => { LspStoreEvent::LanguageServerRemoved(server_id) => {
cx.emit(Event::LanguageServerRemoved(*language_server_id)) cx.emit(Event::LanguageServerRemoved(*server_id))
} }
LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit( LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit(
Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()), Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()),
@ -3829,27 +3828,6 @@ impl Project {
}) })
} }
pub fn update_diagnostics(
&mut self,
language_server_id: LanguageServerId,
source_kind: DiagnosticSourceKind,
result_id: Option<String>,
params: lsp::PublishDiagnosticsParams,
disk_based_sources: &[String],
cx: &mut Context<Self>,
) -> Result<(), anyhow::Error> {
self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.update_diagnostics(
language_server_id,
params,
result_id,
source_kind,
disk_based_sources,
cx,
)
})
}
pub fn search(&mut self, query: SearchQuery, cx: &mut Context<Self>) -> Receiver<SearchResult> { pub fn search(&mut self, query: SearchQuery, cx: &mut Context<Self>) -> Receiver<SearchResult> {
let (result_tx, result_rx) = smol::channel::unbounded(); let (result_tx, result_rx) = smol::channel::unbounded();

View file

@ -20,8 +20,8 @@ use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
use http_client::Url; use http_client::Url;
use itertools::Itertools; use itertools::Itertools;
use language::{ use language::{
Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings}, language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
tree_sitter_rust, tree_sitter_typescript, tree_sitter_rust, tree_sitter_typescript,
}; };
@ -1619,7 +1619,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
events.next().await.unwrap(), events.next().await.unwrap(),
Event::DiagnosticsUpdated { Event::DiagnosticsUpdated {
language_server_id: LanguageServerId(0), language_server_id: LanguageServerId(0),
path: (worktree_id, Path::new("a.rs")).into() paths: vec![(worktree_id, Path::new("a.rs")).into()],
} }
); );
@ -1667,7 +1667,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
events.next().await.unwrap(), events.next().await.unwrap(),
Event::DiagnosticsUpdated { Event::DiagnosticsUpdated {
language_server_id: LanguageServerId(0), language_server_id: LanguageServerId(0),
path: (worktree_id, Path::new("a.rs")).into() paths: vec![(worktree_id, Path::new("a.rs")).into()],
} }
); );

View file

@ -525,6 +525,7 @@ message UpdateDiagnosticSummary {
uint64 project_id = 1; uint64 project_id = 1;
uint64 worktree_id = 2; uint64 worktree_id = 2;
DiagnosticSummary summary = 3; DiagnosticSummary summary = 3;
repeated DiagnosticSummary more_summaries = 4;
} }
message DiagnosticSummary { message DiagnosticSummary {