Batch diagnostics updates (#35794)

Diagnostics updates in Zed were originally modeled on rust-analyzer's LSP push
diagnostics, with all related updates happening per file.

https://github.com/zed-industries/zed/pull/19230 and especially
https://github.com/zed-industries/zed/pull/32269 introduced pull
diagnostics, which can produce results for thousands of files at once.

This was noted and addressed on the local side in
https://github.com/zed-industries/zed/pull/34022, but the remote side was
never adjusted accordingly.

This PR:

* removes redundant pull-diagnostics updates on remote clients, since
buffer diagnostics are already kept up to date through separate buffer
sync operations
* batches all diagnostics-related updates and proto messages, so that
multiple per-file diagnostic summaries can be sent at once: specifically,
one (potentially large) diagnostics summary update instead of thousands
of small ones (see the sketch below)

Buffer updates are still sent per buffer and were left unchanged: they
happen separately and do not strain the collab traffic nearly as much.
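
Conceptually, the batching boils down to draining a worktree's per-file summaries into a single message instead of sending one message per file, using the new repeated `more_summaries` field on `UpdateDiagnosticSummary`. Below is a minimal, self-contained sketch of that idea; the `DiagnosticSummary` and `UpdateDiagnosticSummary` structs here are simplified stand-ins for the generated proto types, not the real definitions.

```rust
// Simplified stand-ins for the proto messages; the real types live in zed.proto.
#[derive(Debug)]
struct DiagnosticSummary {
    path: String,
    error_count: u32,
    warning_count: u32,
}

#[derive(Debug)]
struct UpdateDiagnosticSummary {
    worktree_id: u64,
    summary: Option<DiagnosticSummary>,
    more_summaries: Vec<DiagnosticSummary>,
}

/// Fold all of a worktree's per-file summaries into one batched message.
/// Returns `None` when there is nothing to send.
fn batch_summaries(
    worktree_id: u64,
    summaries: impl IntoIterator<Item = DiagnosticSummary>,
) -> Option<UpdateDiagnosticSummary> {
    let mut summaries = summaries.into_iter();
    let first = summaries.next()?;
    Some(UpdateDiagnosticSummary {
        worktree_id,
        summary: Some(first),
        // Everything after the first summary rides along in the same message.
        more_summaries: summaries.collect(),
    })
}

fn main() {
    let summaries = (0..3).map(|i| DiagnosticSummary {
        path: format!("src/file_{i}.rs"),
        error_count: i,
        warning_count: 1,
    });
    // One message instead of three.
    let message = batch_summaries(42, summaries).unwrap();
    println!("{message:?}");
}
```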

Release Notes:

- Improved diagnostics performance in collaborative mode
Kirill Bulatov 2025-08-07 17:45:41 +03:00 committed by GitHub
parent a5c25e0366
commit 740686b883
7 changed files with 500 additions and 350 deletions


@@ -1630,15 +1630,15 @@ fn notify_rejoined_projects(
             }
             // Stream this worktree's diagnostics.
-            for summary in worktree.diagnostic_summaries {
-                session.peer.send(
-                    session.connection_id,
-                    proto::UpdateDiagnosticSummary {
-                        project_id: project.id.to_proto(),
-                        worktree_id: worktree.id,
-                        summary: Some(summary),
-                    },
-                )?;
+            let mut worktree_diagnostics = worktree.diagnostic_summaries.into_iter();
+            if let Some(summary) = worktree_diagnostics.next() {
+                let message = proto::UpdateDiagnosticSummary {
+                    project_id: project.id.to_proto(),
+                    worktree_id: worktree.id,
+                    summary: Some(summary),
+                    more_summaries: worktree_diagnostics.collect(),
+                };
+                session.peer.send(session.connection_id, message)?;
             }

             for settings_file in worktree.settings_files {
@@ -2060,15 +2060,15 @@ async fn join_project(
             }
             // Stream this worktree's diagnostics.
-            for summary in worktree.diagnostic_summaries {
-                session.peer.send(
-                    session.connection_id,
-                    proto::UpdateDiagnosticSummary {
-                        project_id: project_id.to_proto(),
-                        worktree_id: worktree.id,
-                        summary: Some(summary),
-                    },
-                )?;
+            let mut worktree_diagnostics = worktree.diagnostic_summaries.into_iter();
+            if let Some(summary) = worktree_diagnostics.next() {
+                let message = proto::UpdateDiagnosticSummary {
+                    project_id: project.id.to_proto(),
+                    worktree_id: worktree.id,
+                    summary: Some(summary),
+                    more_summaries: worktree_diagnostics.collect(),
+                };
+                session.peer.send(session.connection_id, message)?;
             }

             for settings_file in worktree.settings_files {


@@ -177,9 +177,9 @@ impl ProjectDiagnosticsEditor {
                 }
                 project::Event::DiagnosticsUpdated {
                     language_server_id,
-                    path,
+                    paths,
                 } => {
-                    this.paths_to_update.insert(path.clone());
+                    this.paths_to_update.extend(paths.clone());
                     let project = project.clone();
                     this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
                         cx.background_executor()
@@ -193,9 +193,9 @@ impl ProjectDiagnosticsEditor {
                             cx.emit(EditorEvent::TitleChanged);
                             if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) {
-                                log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. recording change");
+                                log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. recording change");
                             } else {
-                                log::debug!("diagnostics updated for server {language_server_id}, path {path:?}. updating excerpts");
+                                log::debug!("diagnostics updated for server {language_server_id}, paths {paths:?}. updating excerpts");
                                 this.update_stale_excerpts(window, cx);
                             }
                         }


@@ -140,6 +140,20 @@ impl FormatTrigger {
     }
 }

+#[derive(Debug)]
+pub struct DocumentDiagnosticsUpdate<'a, D> {
+    pub diagnostics: D,
+    pub result_id: Option<String>,
+    pub server_id: LanguageServerId,
+    pub disk_based_sources: Cow<'a, [String]>,
+}
+
+pub struct DocumentDiagnostics {
+    diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+    document_abs_path: PathBuf,
+    version: Option<i32>,
+}
+
 pub struct LocalLspStore {
     weak: WeakEntity<LspStore>,
     worktree_store: Entity<WorktreeStore>,
@@ -503,12 +517,16 @@ impl LocalLspStore {
                     adapter.process_diagnostics(&mut params, server_id, buffer);
                 }

-                this.merge_diagnostics(
-                    server_id,
-                    params,
-                    None,
+                this.merge_lsp_diagnostics(
                     DiagnosticSourceKind::Pushed,
-                    &adapter.disk_based_diagnostic_sources,
+                    vec![DocumentDiagnosticsUpdate {
+                        server_id,
+                        diagnostics: params,
+                        result_id: None,
+                        disk_based_sources: Cow::Borrowed(
+                            &adapter.disk_based_diagnostic_sources,
+                        ),
+                    }],
                     |_, diagnostic, cx| match diagnostic.source_kind {
                         DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
                             adapter.retain_old_diagnostic(diagnostic, cx)
@@ -3610,8 +3628,8 @@ pub enum LspStoreEvent {
     RefreshInlayHints,
     RefreshCodeLens,
     DiagnosticsUpdated {
-        language_server_id: LanguageServerId,
-        path: ProjectPath,
+        server_id: LanguageServerId,
+        paths: Vec<ProjectPath>,
     },
     DiskBasedDiagnosticsStarted {
         language_server_id: LanguageServerId,
@@ -4440,17 +4458,24 @@ impl LspStore {
     pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) {
         if let Some((client, downstream_project_id)) = self.downstream_client.clone() {
-            if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) {
-                for (path, summaries) in summaries {
-                    for (&server_id, summary) in summaries {
-                        client
-                            .send(proto::UpdateDiagnosticSummary {
-                                project_id: downstream_project_id,
-                                worktree_id: worktree.id().to_proto(),
-                                summary: Some(summary.to_proto(server_id, path)),
-                            })
-                            .log_err();
-                    }
-                }
-            }
+            if let Some(diangostic_summaries) = self.diagnostic_summaries.get(&worktree.id()) {
+                let mut summaries =
+                    diangostic_summaries
+                        .into_iter()
+                        .flat_map(|(path, summaries)| {
+                            summaries
+                                .into_iter()
+                                .map(|(server_id, summary)| summary.to_proto(*server_id, path))
+                        });
+                if let Some(summary) = summaries.next() {
+                    client
+                        .send(proto::UpdateDiagnosticSummary {
+                            project_id: downstream_project_id,
+                            worktree_id: worktree.id().to_proto(),
+                            summary: Some(summary),
+                            more_summaries: summaries.collect(),
+                        })
+                        .log_err();
+                }
+            }
         }
     }
@@ -6564,7 +6589,7 @@ impl LspStore {
         &mut self,
         buffer: Entity<Buffer>,
         cx: &mut Context<Self>,
-    ) -> Task<Result<Vec<LspPullDiagnostics>>> {
+    ) -> Task<Result<Option<Vec<LspPullDiagnostics>>>> {
         let buffer_id = buffer.read(cx).remote_id();

         if let Some((client, upstream_project_id)) = self.upstream_client() {
@@ -6575,7 +6600,7 @@
                 },
                 cx,
             ) {
-                return Task::ready(Ok(Vec::new()));
+                return Task::ready(Ok(None));
             }
             let request_task = client.request(proto::MultiLspQuery {
                 buffer_id: buffer_id.to_proto(),
@@ -6593,7 +6618,7 @@
                 )),
             });
             cx.background_spawn(async move {
-                Ok(request_task
+                let _proto_responses = request_task
                     .await?
                     .responses
                     .into_iter()
@@ -6606,8 +6631,11 @@
                             None
                         }
                     })
-                    .flat_map(GetDocumentDiagnostics::diagnostics_from_proto)
-                    .collect())
+                    .collect::<Vec<_>>();
+                // Proto requests cause the diagnostics to be pulled from language server(s) on the local side
+                // and then, buffer state updated with the diagnostics received, which will be later propagated to the client.
+                // Do not attempt to further process the dummy responses here.
+                Ok(None)
             })
         } else {
             let server_ids = buffer.update(cx, |buffer, cx| {
@@ -6635,7 +6663,7 @@
                 for diagnostics in join_all(pull_diagnostics).await {
                     responses.extend(diagnostics?);
                 }
-                Ok(responses)
+                Ok(Some(responses))
             })
         }
     }
@ -6701,75 +6729,93 @@ impl LspStore {
buffer: Entity<Buffer>, buffer: Entity<Buffer>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> { ) -> Task<anyhow::Result<()>> {
let buffer_id = buffer.read(cx).remote_id();
let diagnostics = self.pull_diagnostics(buffer, cx); let diagnostics = self.pull_diagnostics(buffer, cx);
cx.spawn(async move |lsp_store, cx| { cx.spawn(async move |lsp_store, cx| {
let diagnostics = diagnostics.await.context("pulling diagnostics")?; let Some(diagnostics) = diagnostics.await.context("pulling diagnostics")? else {
return Ok(());
};
lsp_store.update(cx, |lsp_store, cx| { lsp_store.update(cx, |lsp_store, cx| {
if lsp_store.as_local().is_none() { if lsp_store.as_local().is_none() {
return; return;
} }
for diagnostics_set in diagnostics { let mut unchanged_buffers = HashSet::default();
let LspPullDiagnostics::Response { let mut changed_buffers = HashSet::default();
server_id, let server_diagnostics_updates = diagnostics
uri, .into_iter()
diagnostics, .filter_map(|diagnostics_set| match diagnostics_set {
} = diagnostics_set LspPullDiagnostics::Response {
else { server_id,
continue; uri,
};
let adapter = lsp_store.language_server_adapter_for_id(server_id);
let disk_based_sources = adapter
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[]);
match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
lsp_store
.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics: Vec::new(),
version: None,
},
Some(result_id),
DiagnosticSourceKind::Pulled,
disk_based_sources,
|_, _, _| true,
cx,
)
.log_err();
}
PulledDiagnostics::Changed {
diagnostics, diagnostics,
result_id, } => Some((server_id, uri, diagnostics)),
} => { LspPullDiagnostics::Default => None,
lsp_store })
.merge_diagnostics( .fold(
HashMap::default(),
|mut acc, (server_id, uri, diagnostics)| {
let (result_id, diagnostics) = match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
unchanged_buffers.insert(uri.clone());
(Some(result_id), Vec::new())
}
PulledDiagnostics::Changed {
result_id,
diagnostics,
} => {
changed_buffers.insert(uri.clone());
(result_id, diagnostics)
}
};
let disk_based_sources = Cow::Owned(
lsp_store
.language_server_adapter_for_id(server_id)
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[])
.to_vec(),
);
acc.entry(server_id).or_insert_with(Vec::new).push(
DocumentDiagnosticsUpdate {
server_id, server_id,
lsp::PublishDiagnosticsParams { diagnostics: lsp::PublishDiagnosticsParams {
uri: uri.clone(), uri,
diagnostics, diagnostics,
version: None, version: None,
}, },
result_id, result_id,
DiagnosticSourceKind::Pulled,
disk_based_sources, disk_based_sources,
|buffer, old_diagnostic, _| match old_diagnostic.source_kind { },
DiagnosticSourceKind::Pulled => { );
buffer.remote_id() != buffer_id acc
} },
DiagnosticSourceKind::Other );
| DiagnosticSourceKind::Pushed => true,
}, for diagnostic_updates in server_diagnostics_updates.into_values() {
cx, lsp_store
) .merge_lsp_diagnostics(
.log_err(); DiagnosticSourceKind::Pulled,
} diagnostic_updates,
} |buffer, old_diagnostic, cx| {
File::from_dyn(buffer.file())
.and_then(|file| {
let abs_path = file.as_local()?.abs_path(cx);
lsp::Url::from_file_path(abs_path).ok()
})
.is_none_or(|buffer_uri| {
unchanged_buffers.contains(&buffer_uri)
|| match old_diagnostic.source_kind {
DiagnosticSourceKind::Pulled => {
!changed_buffers.contains(&buffer_uri)
}
DiagnosticSourceKind::Other
| DiagnosticSourceKind::Pushed => true,
}
})
},
cx,
)
.log_err();
} }
}) })
}) })
@ -7791,88 +7837,135 @@ impl LspStore {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
self.merge_diagnostic_entries( self.merge_diagnostic_entries(
server_id, vec![DocumentDiagnosticsUpdate {
abs_path, diagnostics: DocumentDiagnostics {
result_id, diagnostics,
version, document_abs_path: abs_path,
diagnostics, version,
},
result_id,
server_id,
disk_based_sources: Cow::Borrowed(&[]),
}],
|_, _, _| false, |_, _, _| false,
cx, cx,
)?; )?;
Ok(()) Ok(())
} }
pub fn merge_diagnostic_entries( pub fn merge_diagnostic_entries<'a>(
&mut self, &mut self,
server_id: LanguageServerId, diagnostic_updates: Vec<DocumentDiagnosticsUpdate<'a, DocumentDiagnostics>>,
abs_path: PathBuf, merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
result_id: Option<String>,
version: Option<i32>,
mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
filter: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let Some((worktree, relative_path)) = let mut diagnostics_summary = None::<proto::UpdateDiagnosticSummary>;
self.worktree_store.read(cx).find_worktree(&abs_path, cx) let mut updated_diagnostics_paths = HashMap::default();
else { for mut update in diagnostic_updates {
log::warn!("skipping diagnostics update, no worktree found for path {abs_path:?}"); let abs_path = &update.diagnostics.document_abs_path;
return Ok(()); let server_id = update.server_id;
}; let Some((worktree, relative_path)) =
self.worktree_store.read(cx).find_worktree(abs_path, cx)
else {
log::warn!("skipping diagnostics update, no worktree found for path {abs_path:?}");
return Ok(());
};
let project_path = ProjectPath { let worktree_id = worktree.read(cx).id();
worktree_id: worktree.read(cx).id(), let project_path = ProjectPath {
path: relative_path.into(), worktree_id,
}; path: relative_path.into(),
};
if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) { if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) {
let snapshot = buffer_handle.read(cx).snapshot(); let snapshot = buffer_handle.read(cx).snapshot();
let buffer = buffer_handle.read(cx); let buffer = buffer_handle.read(cx);
let reused_diagnostics = buffer let reused_diagnostics = buffer
.get_diagnostics(server_id) .get_diagnostics(server_id)
.into_iter() .into_iter()
.flat_map(|diag| { .flat_map(|diag| {
diag.iter() diag.iter()
.filter(|v| filter(buffer, &v.diagnostic, cx)) .filter(|v| merge(buffer, &v.diagnostic, cx))
.map(|v| { .map(|v| {
let start = Unclipped(v.range.start.to_point_utf16(&snapshot)); let start = Unclipped(v.range.start.to_point_utf16(&snapshot));
let end = Unclipped(v.range.end.to_point_utf16(&snapshot)); let end = Unclipped(v.range.end.to_point_utf16(&snapshot));
DiagnosticEntry { DiagnosticEntry {
range: start..end, range: start..end,
diagnostic: v.diagnostic.clone(), diagnostic: v.diagnostic.clone(),
} }
}) })
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.as_local_mut() self.as_local_mut()
.context("cannot merge diagnostics on a remote LspStore")? .context("cannot merge diagnostics on a remote LspStore")?
.update_buffer_diagnostics( .update_buffer_diagnostics(
&buffer_handle, &buffer_handle,
server_id,
update.result_id,
update.diagnostics.version,
update.diagnostics.diagnostics.clone(),
reused_diagnostics.clone(),
cx,
)?;
update.diagnostics.diagnostics.extend(reused_diagnostics);
}
let updated = worktree.update(cx, |worktree, cx| {
self.update_worktree_diagnostics(
worktree.id(),
server_id, server_id,
result_id, project_path.path.clone(),
version, update.diagnostics.diagnostics,
diagnostics.clone(),
reused_diagnostics.clone(),
cx, cx,
)?; )
})?;
diagnostics.extend(reused_diagnostics); match updated {
ControlFlow::Continue(new_summary) => {
if let Some((project_id, new_summary)) = new_summary {
match &mut diagnostics_summary {
Some(diagnostics_summary) => {
diagnostics_summary
.more_summaries
.push(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count,
warning_count: new_summary.warning_count,
})
}
None => {
diagnostics_summary = Some(proto::UpdateDiagnosticSummary {
project_id: project_id,
worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: new_summary.error_count,
warning_count: new_summary.warning_count,
}),
more_summaries: Vec::new(),
})
}
}
}
updated_diagnostics_paths
.entry(server_id)
.or_insert_with(Vec::new)
.push(project_path);
}
ControlFlow::Break(()) => {}
}
} }
let updated = worktree.update(cx, |worktree, cx| { if let Some((diagnostics_summary, (downstream_client, _))) =
self.update_worktree_diagnostics( diagnostics_summary.zip(self.downstream_client.as_ref())
worktree.id(), {
server_id, downstream_client.send(diagnostics_summary).log_err();
project_path.path.clone(), }
diagnostics, for (server_id, paths) in updated_diagnostics_paths {
cx, cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths });
)
})?;
if updated {
cx.emit(LspStoreEvent::DiagnosticsUpdated {
language_server_id: server_id,
path: project_path,
})
} }
Ok(()) Ok(())
} }
@ -7881,10 +7974,10 @@ impl LspStore {
&mut self, &mut self,
worktree_id: WorktreeId, worktree_id: WorktreeId,
server_id: LanguageServerId, server_id: LanguageServerId,
worktree_path: Arc<Path>, path_in_worktree: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>, diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut Context<Worktree>, _: &mut Context<Worktree>,
) -> Result<bool> { ) -> Result<ControlFlow<(), Option<(u64, proto::DiagnosticSummary)>>> {
let local = match &mut self.mode { let local = match &mut self.mode {
LspStoreMode::Local(local_lsp_store) => local_lsp_store, LspStoreMode::Local(local_lsp_store) => local_lsp_store,
_ => anyhow::bail!("update_worktree_diagnostics called on remote"), _ => anyhow::bail!("update_worktree_diagnostics called on remote"),
@ -7892,7 +7985,9 @@ impl LspStore {
let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default(); let summaries_for_tree = self.diagnostic_summaries.entry(worktree_id).or_default();
let diagnostics_for_tree = local.diagnostics.entry(worktree_id).or_default(); let diagnostics_for_tree = local.diagnostics.entry(worktree_id).or_default();
let summaries_by_server_id = summaries_for_tree.entry(worktree_path.clone()).or_default(); let summaries_by_server_id = summaries_for_tree
.entry(path_in_worktree.clone())
.or_default();
let old_summary = summaries_by_server_id let old_summary = summaries_by_server_id
.remove(&server_id) .remove(&server_id)
@ -7900,18 +7995,19 @@ impl LspStore {
let new_summary = DiagnosticSummary::new(&diagnostics); let new_summary = DiagnosticSummary::new(&diagnostics);
if new_summary.is_empty() { if new_summary.is_empty() {
if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&worktree_path) { if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&path_in_worktree)
{
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
diagnostics_by_server_id.remove(ix); diagnostics_by_server_id.remove(ix);
} }
if diagnostics_by_server_id.is_empty() { if diagnostics_by_server_id.is_empty() {
diagnostics_for_tree.remove(&worktree_path); diagnostics_for_tree.remove(&path_in_worktree);
} }
} }
} else { } else {
summaries_by_server_id.insert(server_id, new_summary); summaries_by_server_id.insert(server_id, new_summary);
let diagnostics_by_server_id = diagnostics_for_tree let diagnostics_by_server_id = diagnostics_for_tree
.entry(worktree_path.clone()) .entry(path_in_worktree.clone())
.or_default(); .or_default();
match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
Ok(ix) => { Ok(ix) => {
@ -7924,23 +8020,22 @@ impl LspStore {
} }
if !old_summary.is_empty() || !new_summary.is_empty() { if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some((downstream_client, project_id)) = &self.downstream_client { if let Some((_, project_id)) = &self.downstream_client {
downstream_client Ok(ControlFlow::Continue(Some((
.send(proto::UpdateDiagnosticSummary { *project_id,
project_id: *project_id, proto::DiagnosticSummary {
worktree_id: worktree_id.to_proto(), path: path_in_worktree.to_proto(),
summary: Some(proto::DiagnosticSummary { language_server_id: server_id.0 as u64,
path: worktree_path.to_proto(), error_count: new_summary.error_count as u32,
language_server_id: server_id.0 as u64, warning_count: new_summary.warning_count as u32,
error_count: new_summary.error_count as u32, },
warning_count: new_summary.warning_count as u32, ))))
}), } else {
}) Ok(ControlFlow::Continue(None))
.log_err();
} }
} else {
Ok(ControlFlow::Break(()))
} }
Ok(!old_summary.is_empty() || !new_summary.is_empty())
} }
pub fn open_buffer_for_symbol( pub fn open_buffer_for_symbol(
@ -8793,23 +8888,30 @@ impl LspStore {
envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>, envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
mut cx: AsyncApp, mut cx: AsyncApp,
) -> Result<()> { ) -> Result<()> {
this.update(&mut cx, |this, cx| { this.update(&mut cx, |lsp_store, cx| {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
if let Some(message) = envelope.payload.summary { let mut updated_diagnostics_paths = HashMap::default();
let mut diagnostics_summary = None::<proto::UpdateDiagnosticSummary>;
for message_summary in envelope
.payload
.summary
.into_iter()
.chain(envelope.payload.more_summaries)
{
let project_path = ProjectPath { let project_path = ProjectPath {
worktree_id, worktree_id,
path: Arc::<Path>::from_proto(message.path), path: Arc::<Path>::from_proto(message_summary.path),
}; };
let path = project_path.path.clone(); let path = project_path.path.clone();
let server_id = LanguageServerId(message.language_server_id as usize); let server_id = LanguageServerId(message_summary.language_server_id as usize);
let summary = DiagnosticSummary { let summary = DiagnosticSummary {
error_count: message.error_count as usize, error_count: message_summary.error_count as usize,
warning_count: message.warning_count as usize, warning_count: message_summary.warning_count as usize,
}; };
if summary.is_empty() { if summary.is_empty() {
if let Some(worktree_summaries) = if let Some(worktree_summaries) =
this.diagnostic_summaries.get_mut(&worktree_id) lsp_store.diagnostic_summaries.get_mut(&worktree_id)
{ {
if let Some(summaries) = worktree_summaries.get_mut(&path) { if let Some(summaries) = worktree_summaries.get_mut(&path) {
summaries.remove(&server_id); summaries.remove(&server_id);
@ -8819,31 +8921,55 @@ impl LspStore {
} }
} }
} else { } else {
this.diagnostic_summaries lsp_store
.diagnostic_summaries
.entry(worktree_id) .entry(worktree_id)
.or_default() .or_default()
.entry(path) .entry(path)
.or_default() .or_default()
.insert(server_id, summary); .insert(server_id, summary);
} }
if let Some((downstream_client, project_id)) = &this.downstream_client {
downstream_client if let Some((_, project_id)) = &lsp_store.downstream_client {
.send(proto::UpdateDiagnosticSummary { match &mut diagnostics_summary {
project_id: *project_id, Some(diagnostics_summary) => {
worktree_id: worktree_id.to_proto(), diagnostics_summary
summary: Some(proto::DiagnosticSummary { .more_summaries
path: project_path.path.as_ref().to_proto(), .push(proto::DiagnosticSummary {
language_server_id: server_id.0 as u64, path: project_path.path.as_ref().to_proto(),
error_count: summary.error_count as u32, language_server_id: server_id.0 as u64,
warning_count: summary.warning_count as u32, error_count: summary.error_count as u32,
}), warning_count: summary.warning_count as u32,
}) })
.log_err(); }
None => {
diagnostics_summary = Some(proto::UpdateDiagnosticSummary {
project_id: *project_id,
worktree_id: worktree_id.to_proto(),
summary: Some(proto::DiagnosticSummary {
path: project_path.path.as_ref().to_proto(),
language_server_id: server_id.0 as u64,
error_count: summary.error_count as u32,
warning_count: summary.warning_count as u32,
}),
more_summaries: Vec::new(),
})
}
}
} }
cx.emit(LspStoreEvent::DiagnosticsUpdated { updated_diagnostics_paths
language_server_id: LanguageServerId(message.language_server_id as usize), .entry(server_id)
path: project_path, .or_insert_with(Vec::new)
}); .push(project_path);
}
if let Some((diagnostics_summary, (downstream_client, _))) =
diagnostics_summary.zip(lsp_store.downstream_client.as_ref())
{
downstream_client.send(diagnostics_summary).log_err();
}
for (server_id, paths) in updated_diagnostics_paths {
cx.emit(LspStoreEvent::DiagnosticsUpdated { server_id, paths });
} }
Ok(()) Ok(())
})? })?
@@ -10361,6 +10487,7 @@ impl LspStore {
                         error_count: 0,
                         warning_count: 0,
                     }),
+                    more_summaries: Vec::new(),
                 })
                 .log_err();
         }
@ -10649,52 +10776,80 @@ impl LspStore {
) )
} }
#[cfg(any(test, feature = "test-support"))]
pub fn update_diagnostics( pub fn update_diagnostics(
&mut self, &mut self,
language_server_id: LanguageServerId, server_id: LanguageServerId,
params: lsp::PublishDiagnosticsParams, diagnostics: lsp::PublishDiagnosticsParams,
result_id: Option<String>, result_id: Option<String>,
source_kind: DiagnosticSourceKind, source_kind: DiagnosticSourceKind,
disk_based_sources: &[String], disk_based_sources: &[String],
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<()> { ) -> Result<()> {
self.merge_diagnostics( self.merge_lsp_diagnostics(
language_server_id,
params,
result_id,
source_kind, source_kind,
disk_based_sources, vec![DocumentDiagnosticsUpdate {
diagnostics,
result_id,
server_id,
disk_based_sources: Cow::Borrowed(disk_based_sources),
}],
|_, _, _| false, |_, _, _| false,
cx, cx,
) )
} }
pub fn merge_diagnostics( pub fn merge_lsp_diagnostics(
&mut self, &mut self,
language_server_id: LanguageServerId,
mut params: lsp::PublishDiagnosticsParams,
result_id: Option<String>,
source_kind: DiagnosticSourceKind, source_kind: DiagnosticSourceKind,
disk_based_sources: &[String], lsp_diagnostics: Vec<DocumentDiagnosticsUpdate<lsp::PublishDiagnosticsParams>>,
filter: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone, merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<()> { ) -> Result<()> {
anyhow::ensure!(self.mode.is_local(), "called update_diagnostics on remote"); anyhow::ensure!(self.mode.is_local(), "called update_diagnostics on remote");
let abs_path = params let updates = lsp_diagnostics
.uri .into_iter()
.to_file_path() .filter_map(|update| {
.map_err(|()| anyhow!("URI is not a file"))?; let abs_path = update.diagnostics.uri.to_file_path().ok()?;
Some(DocumentDiagnosticsUpdate {
diagnostics: self.lsp_to_document_diagnostics(
abs_path,
source_kind,
update.server_id,
update.diagnostics,
&update.disk_based_sources,
),
result_id: update.result_id,
server_id: update.server_id,
disk_based_sources: update.disk_based_sources,
})
})
.collect();
self.merge_diagnostic_entries(updates, merge, cx)?;
Ok(())
}
fn lsp_to_document_diagnostics(
&mut self,
document_abs_path: PathBuf,
source_kind: DiagnosticSourceKind,
server_id: LanguageServerId,
mut lsp_diagnostics: lsp::PublishDiagnosticsParams,
disk_based_sources: &[String],
) -> DocumentDiagnostics {
let mut diagnostics = Vec::default(); let mut diagnostics = Vec::default();
let mut primary_diagnostic_group_ids = HashMap::default(); let mut primary_diagnostic_group_ids = HashMap::default();
let mut sources_by_group_id = HashMap::default(); let mut sources_by_group_id = HashMap::default();
let mut supporting_diagnostics = HashMap::default(); let mut supporting_diagnostics = HashMap::default();
let adapter = self.language_server_adapter_for_id(language_server_id); let adapter = self.language_server_adapter_for_id(server_id);
// Ensure that primary diagnostics are always the most severe // Ensure that primary diagnostics are always the most severe
params.diagnostics.sort_by_key(|item| item.severity); lsp_diagnostics
.diagnostics
.sort_by_key(|item| item.severity);
for diagnostic in &params.diagnostics { for diagnostic in &lsp_diagnostics.diagnostics {
let source = diagnostic.source.as_ref(); let source = diagnostic.source.as_ref();
let range = range_from_lsp(diagnostic.range); let range = range_from_lsp(diagnostic.range);
let is_supporting = diagnostic let is_supporting = diagnostic
@ -10716,7 +10871,7 @@ impl LspStore {
.map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY)); .map_or(false, |tags| tags.contains(&DiagnosticTag::UNNECESSARY));
let underline = self let underline = self
.language_server_adapter_for_id(language_server_id) .language_server_adapter_for_id(server_id)
.map_or(true, |adapter| adapter.underline_diagnostic(diagnostic)); .map_or(true, |adapter| adapter.underline_diagnostic(diagnostic));
if is_supporting { if is_supporting {
@ -10758,7 +10913,7 @@ impl LspStore {
}); });
if let Some(infos) = &diagnostic.related_information { if let Some(infos) = &diagnostic.related_information {
for info in infos { for info in infos {
if info.location.uri == params.uri && !info.message.is_empty() { if info.location.uri == lsp_diagnostics.uri && !info.message.is_empty() {
let range = range_from_lsp(info.location.range); let range = range_from_lsp(info.location.range);
diagnostics.push(DiagnosticEntry { diagnostics.push(DiagnosticEntry {
range, range,
@ -10806,16 +10961,11 @@ impl LspStore {
} }
} }
self.merge_diagnostic_entries( DocumentDiagnostics {
language_server_id,
abs_path,
result_id,
params.version,
diagnostics, diagnostics,
filter, document_abs_path,
cx, version: lsp_diagnostics.version,
)?; }
Ok(())
} }
fn insert_newly_running_language_server( fn insert_newly_running_language_server(
@ -11571,67 +11721,84 @@ impl LspStore {
) { ) {
let workspace_diagnostics = let workspace_diagnostics =
GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id); GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id);
for workspace_diagnostics in workspace_diagnostics { let mut unchanged_buffers = HashSet::default();
let LspPullDiagnostics::Response { let mut changed_buffers = HashSet::default();
server_id, let workspace_diagnostics_updates = workspace_diagnostics
uri, .into_iter()
diagnostics, .filter_map(
} = workspace_diagnostics.diagnostics |workspace_diagnostics| match workspace_diagnostics.diagnostics {
else { LspPullDiagnostics::Response {
continue;
};
let adapter = self.language_server_adapter_for_id(server_id);
let disk_based_sources = adapter
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[]);
match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
self.merge_diagnostics(
server_id, server_id,
lsp::PublishDiagnosticsParams { uri,
uri: uri.clone(), diagnostics,
diagnostics: Vec::new(), } => Some((server_id, uri, diagnostics, workspace_diagnostics.version)),
version: None, LspPullDiagnostics::Default => None,
}, },
Some(result_id), )
DiagnosticSourceKind::Pulled, .fold(
disk_based_sources, HashMap::default(),
|_, _, _| true, |mut acc, (server_id, uri, diagnostics, version)| {
cx, let (result_id, diagnostics) = match diagnostics {
) PulledDiagnostics::Unchanged { result_id } => {
.log_err(); unchanged_buffers.insert(uri.clone());
} (Some(result_id), Vec::new())
PulledDiagnostics::Changed { }
diagnostics, PulledDiagnostics::Changed {
result_id, result_id,
} => {
self.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics, diagnostics,
version: workspace_diagnostics.version, } => {
}, changed_buffers.insert(uri.clone());
result_id, (result_id, diagnostics)
DiagnosticSourceKind::Pulled, }
disk_based_sources, };
|buffer, old_diagnostic, cx| match old_diagnostic.source_kind { let disk_based_sources = Cow::Owned(
DiagnosticSourceKind::Pulled => { self.language_server_adapter_for_id(server_id)
let buffer_url = File::from_dyn(buffer.file()) .as_ref()
.map(|f| f.abs_path(cx)) .map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.and_then(|abs_path| file_path_to_lsp_url(&abs_path).ok()); .unwrap_or(&[])
buffer_url.is_none_or(|buffer_url| buffer_url != uri) .to_vec(),
} );
DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => true, acc.entry(server_id)
}, .or_insert_with(Vec::new)
cx, .push(DocumentDiagnosticsUpdate {
) server_id,
.log_err(); diagnostics: lsp::PublishDiagnosticsParams {
} uri,
} diagnostics,
version,
},
result_id,
disk_based_sources,
});
acc
},
);
for diagnostic_updates in workspace_diagnostics_updates.into_values() {
self.merge_lsp_diagnostics(
DiagnosticSourceKind::Pulled,
diagnostic_updates,
|buffer, old_diagnostic, cx| {
File::from_dyn(buffer.file())
.and_then(|file| {
let abs_path = file.as_local()?.abs_path(cx);
lsp::Url::from_file_path(abs_path).ok()
})
.is_none_or(|buffer_uri| {
unchanged_buffers.contains(&buffer_uri)
|| match old_diagnostic.source_kind {
DiagnosticSourceKind::Pulled => {
!changed_buffers.contains(&buffer_uri)
}
DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
true
}
}
})
},
cx,
)
.log_err();
} }
} }
} }


@@ -1,4 +1,4 @@
-use std::sync::Arc;
+use std::{borrow::Cow, sync::Arc};

 use ::serde::{Deserialize, Serialize};
 use gpui::WeakEntity;
@@ -6,7 +6,7 @@ use language::{CachedLspAdapter, Diagnostic, DiagnosticSourceKind};
 use lsp::{LanguageServer, LanguageServerName};
 use util::ResultExt as _;

-use crate::LspStore;
+use crate::{LspStore, lsp_store::DocumentDiagnosticsUpdate};

 pub const CLANGD_SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd");
 const INACTIVE_REGION_MESSAGE: &str = "inactive region";
@@ -81,12 +81,16 @@ pub fn register_notifications(
                     version: params.text_document.version,
                     diagnostics,
                 };
-                this.merge_diagnostics(
-                    server_id,
-                    mapped_diagnostics,
-                    None,
+                this.merge_lsp_diagnostics(
                     DiagnosticSourceKind::Pushed,
-                    &adapter.disk_based_diagnostic_sources,
+                    vec![DocumentDiagnosticsUpdate {
+                        server_id,
+                        diagnostics: mapped_diagnostics,
+                        result_id: None,
+                        disk_based_sources: Cow::Borrowed(
+                            &adapter.disk_based_diagnostic_sources,
+                        ),
+                    }],
                     |_, diag, _| !is_inactive_region(diag),
                     cx,
                 )


@@ -74,9 +74,9 @@ use gpui::{
     Task, WeakEntity, Window,
 };
 use language::{
-    Buffer, BufferEvent, Capability, CodeLabel, CursorShape, DiagnosticSourceKind, Language,
-    LanguageName, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList,
-    Transaction, Unclipped, language_settings::InlayHintKind, proto::split_operations,
+    Buffer, BufferEvent, Capability, CodeLabel, CursorShape, Language, LanguageName,
+    LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, Transaction,
+    Unclipped, language_settings::InlayHintKind, proto::split_operations,
 };
 use lsp::{
     CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode,
@@ -305,7 +305,7 @@ pub enum Event {
         language_server_id: LanguageServerId,
     },
     DiagnosticsUpdated {
-        path: ProjectPath,
+        paths: Vec<ProjectPath>,
         language_server_id: LanguageServerId,
     },
     RemoteIdChanged(Option<u64>),
@@ -2895,18 +2895,17 @@ impl Project {
         cx: &mut Context<Self>,
     ) {
         match event {
-            LspStoreEvent::DiagnosticsUpdated {
-                language_server_id,
-                path,
-            } => cx.emit(Event::DiagnosticsUpdated {
-                path: path.clone(),
-                language_server_id: *language_server_id,
-            }),
-            LspStoreEvent::LanguageServerAdded(language_server_id, name, worktree_id) => cx.emit(
-                Event::LanguageServerAdded(*language_server_id, name.clone(), *worktree_id),
+            LspStoreEvent::DiagnosticsUpdated { server_id, paths } => {
+                cx.emit(Event::DiagnosticsUpdated {
+                    paths: paths.clone(),
+                    language_server_id: *server_id,
+                })
+            }
+            LspStoreEvent::LanguageServerAdded(server_id, name, worktree_id) => cx.emit(
+                Event::LanguageServerAdded(*server_id, name.clone(), *worktree_id),
             ),
-            LspStoreEvent::LanguageServerRemoved(language_server_id) => {
-                cx.emit(Event::LanguageServerRemoved(*language_server_id))
+            LspStoreEvent::LanguageServerRemoved(server_id) => {
+                cx.emit(Event::LanguageServerRemoved(*server_id))
             }
             LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit(
                 Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()),
@@ -3829,27 +3828,6 @@ impl Project {
         })
     }

-    pub fn update_diagnostics(
-        &mut self,
-        language_server_id: LanguageServerId,
-        source_kind: DiagnosticSourceKind,
-        result_id: Option<String>,
-        params: lsp::PublishDiagnosticsParams,
-        disk_based_sources: &[String],
-        cx: &mut Context<Self>,
-    ) -> Result<(), anyhow::Error> {
-        self.lsp_store.update(cx, |lsp_store, cx| {
-            lsp_store.update_diagnostics(
-                language_server_id,
-                params,
-                result_id,
-                source_kind,
-                disk_based_sources,
-                cx,
-            )
-        })
-    }
-
     pub fn search(&mut self, query: SearchQuery, cx: &mut Context<Self>) -> Receiver<SearchResult> {
         let (result_tx, result_rx) = smol::channel::unbounded();


@@ -20,8 +20,8 @@ use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
 use http_client::Url;
 use itertools::Itertools;
 use language::{
-    Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
-    LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
+    Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
+    LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
     language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
     tree_sitter_rust, tree_sitter_typescript,
 };
@@ -1619,7 +1619,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
         events.next().await.unwrap(),
         Event::DiagnosticsUpdated {
             language_server_id: LanguageServerId(0),
-            path: (worktree_id, Path::new("a.rs")).into()
+            paths: vec![(worktree_id, Path::new("a.rs")).into()],
         }
     );
@@ -1667,7 +1667,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
         events.next().await.unwrap(),
         Event::DiagnosticsUpdated {
             language_server_id: LanguageServerId(0),
-            path: (worktree_id, Path::new("a.rs")).into()
+            paths: vec![(worktree_id, Path::new("a.rs")).into()],
         }
     );


@@ -525,6 +525,7 @@ message UpdateDiagnosticSummary {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
     DiagnosticSummary summary = 3;
+    repeated DiagnosticSummary more_summaries = 4;
 }

 message DiagnosticSummary {
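
On the receiving side, the handler in lsp_store.rs simply walks the optional `summary` field chained with `more_summaries`, so the batched message is processed as one flat sequence of per-file summaries. A tiny sketch of that iteration order, with a generic `S` standing in for the proto `DiagnosticSummary` type (this is an illustration of the ordering, not the actual handler code):

```rust
/// Walk one batched update: the leading summary, if any, followed by the rest.
/// `S` stands in for proto::DiagnosticSummary.
fn summaries_in_update<S>(summary: Option<S>, more_summaries: Vec<S>) -> impl Iterator<Item = S> {
    summary.into_iter().chain(more_summaries)
}

fn main() {
    // Three per-file summaries, represented here just by their paths.
    let all: Vec<&str> = summaries_in_update(Some("a.rs"), vec!["b.rs", "c.rs"]).collect();
    assert_eq!(all, ["a.rs", "b.rs", "c.rs"]);
}
```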