lsp: Fix workspace diagnostics lag & add streaming support (#34022)

Closes https://github.com/zed-industries/zed/issues/33980
Closes https://github.com/zed-industries/zed/discussions/33979

- Switches to the debounce task pattern for diagnostic summary
computations, which most importantly lets us do them only once when a
large number of DiagnosticUpdated events are received at once.
- Makes workspace diagnostic requests not time out if a partial result
is received.
- Makes diagnostics from workspace diagnostic partial results get
merged.

There may be related areas where we are not yet fully compliant with the
LSP spec, but addressing them is outside the scope of this PR.

Release Notes:

- Added support for streaming LSP workspace diagnostics.
- Fixed editor freeze from large LSP workspace diagnostic responses.
This commit is contained in:
teapo 2025-07-15 17:41:45 +02:00 committed by GitHub
parent 5f3e7a5f91
commit d7bb1c1d0e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 460 additions and 114 deletions

1
Cargo.lock generated
View file

@ -3168,6 +3168,7 @@ dependencies = [
"session",
"settings",
"sha2",
"smol",
"sqlx",
"strum 0.27.1",
"subtle",

View file

@ -127,6 +127,7 @@ sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] }
serde_json.workspace = true
session = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
smol.workspace = true
sqlx = { version = "0.8", features = ["sqlite"] }
task.workspace = true
theme.workspace = true

View file

@ -2246,8 +2246,11 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
});
}
#[gpui::test(iterations = 10)]
async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
async fn test_lsp_pull_diagnostics(
should_stream_workspace_diagnostic: bool,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
let mut server = TestServer::start(cx_a.executor()).await;
let executor = cx_a.executor();
let client_a = server.create_client(cx_a, "user_a").await;
@ -2396,12 +2399,25 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
let closure_workspace_diagnostics_pulls_made = workspace_diagnostics_pulls_made.clone();
let closure_workspace_diagnostics_pulls_result_ids =
workspace_diagnostics_pulls_result_ids.clone();
let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) =
smol::channel::bounded::<()>(1);
let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) =
smol::channel::bounded::<()>(1);
let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!(
"workspace/diagnostic-{}-1",
fake_language_server.server.server_id()
));
let closure_expected_workspace_diagnostic_token = expected_workspace_diagnostic_token.clone();
let mut workspace_diagnostics_pulls_handle = fake_language_server
.set_request_handler::<lsp::request::WorkspaceDiagnosticRequest, _, _>(
move |params, _| {
let workspace_requests_made = closure_workspace_diagnostics_pulls_made.clone();
let workspace_diagnostics_pulls_result_ids =
closure_workspace_diagnostics_pulls_result_ids.clone();
let workspace_diagnostic_cancel_rx = closure_workspace_diagnostic_cancel_rx.clone();
let workspace_diagnostic_received_tx = closure_workspace_diagnostic_received_tx.clone();
let expected_workspace_diagnostic_token =
closure_expected_workspace_diagnostic_token.clone();
async move {
let workspace_request_count =
workspace_requests_made.fetch_add(1, atomic::Ordering::Release) + 1;
@ -2411,6 +2427,21 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
.await
.extend(params.previous_result_ids.into_iter().map(|id| id.value));
}
if should_stream_workspace_diagnostic && !workspace_diagnostic_cancel_rx.is_closed()
{
assert_eq!(
params.partial_result_params.partial_result_token,
Some(expected_workspace_diagnostic_token)
);
workspace_diagnostic_received_tx.send(()).await.unwrap();
workspace_diagnostic_cancel_rx.recv().await.unwrap();
workspace_diagnostic_cancel_rx.close();
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#partialResults
// > The final response has to be empty in terms of result values.
return Ok(lsp::WorkspaceDiagnosticReportResult::Report(
lsp::WorkspaceDiagnosticReport { items: Vec::new() },
));
}
Ok(lsp::WorkspaceDiagnosticReportResult::Report(
lsp::WorkspaceDiagnosticReport {
items: vec![
@ -2479,7 +2510,11 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
},
);
workspace_diagnostics_pulls_handle.next().await.unwrap();
if should_stream_workspace_diagnostic {
workspace_diagnostic_received_rx.recv().await.unwrap();
} else {
workspace_diagnostics_pulls_handle.next().await.unwrap();
}
assert_eq!(
1,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
@ -2503,10 +2538,10 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
"Expected single diagnostic, but got: {all_diagnostics:?}"
);
let diagnostic = &all_diagnostics[0];
let expected_messages = [
expected_workspace_pull_diagnostics_main_message,
expected_pull_diagnostic_main_message,
];
let mut expected_messages = vec![expected_pull_diagnostic_main_message];
if !should_stream_workspace_diagnostic {
expected_messages.push(expected_workspace_pull_diagnostics_main_message);
}
assert!(
expected_messages.contains(&diagnostic.diagnostic.message.as_str()),
"Expected {expected_messages:?} on the host, but got: {}",
@ -2556,6 +2591,70 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
version: None,
},
);
if should_stream_workspace_diagnostic {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: expected_workspace_diagnostic_token.clone(),
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
items: vec![
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
result_id: Some(format!(
"workspace_{}",
workspace_diagnostics_pulls_made
.fetch_add(1, atomic::Ordering::Release)
+ 1
)),
items: vec![lsp::Diagnostic {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 1,
},
end: lsp::Position {
line: 0,
character: 2,
},
},
severity: Some(lsp::DiagnosticSeverity::ERROR),
message:
expected_workspace_pull_diagnostics_main_message
.to_string(),
..lsp::Diagnostic::default()
}],
},
},
),
lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
version: None,
full_document_diagnostic_report:
lsp::FullDocumentDiagnosticReport {
result_id: Some(format!(
"workspace_{}",
workspace_diagnostics_pulls_made
.fetch_add(1, atomic::Ordering::Release)
+ 1
)),
items: Vec::new(),
},
},
),
],
}),
),
});
};
let mut workspace_diagnostic_start_count =
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire);
executor.run_until_parked();
editor_a_main.update(cx_a, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
@ -2599,7 +2698,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
executor.run_until_parked();
assert_eq!(
1,
workspace_diagnostic_start_count,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"Workspace diagnostics should not be changed as the remote client does not initialize the workspace diagnostics pull"
);
@ -2646,7 +2745,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
executor.run_until_parked();
assert_eq!(
1,
workspace_diagnostic_start_count,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"The remote client still did not anything to trigger the workspace diagnostics pull"
);
@ -2673,6 +2772,75 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
}
});
if should_stream_workspace_diagnostic {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: expected_workspace_diagnostic_token.clone(),
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
items: vec![lsp::WorkspaceDocumentDiagnosticReport::Full(
lsp::WorkspaceFullDocumentDiagnosticReport {
uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
version: None,
full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
result_id: Some(format!(
"workspace_{}",
workspace_diagnostics_pulls_made
.fetch_add(1, atomic::Ordering::Release)
+ 1
)),
items: vec![lsp::Diagnostic {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 1,
},
end: lsp::Position {
line: 0,
character: 2,
},
},
severity: Some(lsp::DiagnosticSeverity::ERROR),
message: expected_workspace_pull_diagnostics_lib_message
.to_string(),
..lsp::Diagnostic::default()
}],
},
},
)],
}),
),
});
workspace_diagnostic_start_count =
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire);
workspace_diagnostic_cancel_tx.send(()).await.unwrap();
workspace_diagnostics_pulls_handle.next().await.unwrap();
executor.run_until_parked();
editor_b_lib.update(cx_b, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let all_diagnostics = snapshot
.diagnostics_in_range(0..snapshot.len())
.collect::<Vec<_>>();
let expected_messages = [
expected_workspace_pull_diagnostics_lib_message,
// TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer.
// expected_push_diagnostic_lib_message,
];
assert_eq!(
all_diagnostics.len(),
1,
"Expected pull diagnostics, but got: {all_diagnostics:?}"
);
for diagnostic in all_diagnostics {
assert!(
expected_messages.contains(&diagnostic.diagnostic.message.as_str()),
"The client should get both push and pull messages: {expected_messages:?}, but got: {}",
diagnostic.diagnostic.message
);
}
});
};
{
assert!(
diagnostics_pulls_result_ids.lock().await.len() > 0,
@ -2701,7 +2869,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
workspace_diagnostics_pulls_handle.next().await.unwrap();
assert_eq!(
2,
workspace_diagnostic_start_count + 1,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"After client lib.rs edits, the workspace diagnostics request should follow"
);
@ -2720,7 +2888,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
workspace_diagnostics_pulls_handle.next().await.unwrap();
assert_eq!(
3,
workspace_diagnostic_start_count + 2,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"After client main.rs edits, the workspace diagnostics pull should follow"
);
@ -2739,7 +2907,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
workspace_diagnostics_pulls_handle.next().await.unwrap();
assert_eq!(
4,
workspace_diagnostic_start_count + 3,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"After host main.rs edits, the workspace diagnostics pull should follow"
);
@ -2769,7 +2937,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
);
workspace_diagnostics_pulls_handle.next().await.unwrap();
assert_eq!(
5,
workspace_diagnostic_start_count + 4,
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
"Another workspace diagnostics pull should happen after the diagnostics refresh server request"
);
@ -2840,6 +3008,19 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
});
}
// Non-streaming variant: runs the shared pull-diagnostics scenario with the
// fake language server answering workspace diagnostic requests via a single
// final report (no `$/progress` partial results).
#[gpui::test(iterations = 10)]
async fn test_non_streamed_lsp_pull_diagnostics(
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
) {
    test_lsp_pull_diagnostics(false, cx_a, cx_b).await;
}
// Streaming variant: runs the shared pull-diagnostics scenario with the fake
// language server delivering workspace diagnostics as `$/progress` partial
// results before an empty final response.
#[gpui::test(iterations = 10)]
async fn test_streamed_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
    test_lsp_pull_diagnostics(true, cx_a, cx_b).await;
}
#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;

View file

@ -80,6 +80,7 @@ pub(crate) struct ProjectDiagnosticsEditor {
include_warnings: bool,
update_excerpts_task: Option<Task<Result<()>>>,
cargo_diagnostics_fetch: CargoDiagnosticsFetchState,
diagnostic_summary_update: Task<()>,
_subscription: Subscription,
}
@ -179,7 +180,16 @@ impl ProjectDiagnosticsEditor {
path,
} => {
this.paths_to_update.insert(path.clone());
this.summary = project.read(cx).diagnostic_summary(false, cx);
let project = project.clone();
this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor()
.timer(Duration::from_millis(30))
.await;
this.update(cx, |this, cx| {
this.summary = project.read(cx).diagnostic_summary(false, cx);
})
.log_err();
});
cx.emit(EditorEvent::TitleChanged);
if this.editor.focus_handle(cx).contains_focused(window, cx) || this.focus_handle.contains_focused(window, cx) {
@ -276,6 +286,7 @@ impl ProjectDiagnosticsEditor {
cancel_task: None,
diagnostic_sources: Arc::new(Vec::new()),
},
diagnostic_summary_update: Task::ready(()),
_subscription: project_event_subscription,
};
this.update_all_diagnostics(true, window, cx);

View file

@ -9,6 +9,7 @@ use language::Diagnostic;
use project::project_settings::{GoToDiagnosticSeverityFilter, ProjectSettings};
use settings::Settings;
use ui::{Button, ButtonLike, Color, Icon, IconName, Label, Tooltip, h_flex, prelude::*};
use util::ResultExt;
use workspace::{StatusItemView, ToolbarItemEvent, Workspace, item::ItemHandle};
use crate::{Deploy, IncludeWarnings, ProjectDiagnosticsEditor};
@ -20,6 +21,7 @@ pub struct DiagnosticIndicator {
current_diagnostic: Option<Diagnostic>,
_observe_active_editor: Option<Subscription>,
diagnostics_update: Task<()>,
diagnostic_summary_update: Task<()>,
}
impl Render for DiagnosticIndicator {
@ -135,8 +137,16 @@ impl DiagnosticIndicator {
}
project::Event::DiagnosticsUpdated { .. } => {
this.summary = project.read(cx).diagnostic_summary(false, cx);
cx.notify();
this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor()
.timer(Duration::from_millis(30))
.await;
this.update(cx, |this, cx| {
this.summary = project.read(cx).diagnostic_summary(false, cx);
cx.notify();
})
.log_err();
});
}
_ => {}
@ -150,6 +160,7 @@ impl DiagnosticIndicator {
current_diagnostic: None,
_observe_active_editor: None,
diagnostics_update: Task::ready(()),
diagnostic_summary_update: Task::ready(()),
}
}

View file

@ -1106,6 +1106,7 @@ impl LanguageServer {
/// Returns a reference to this language server's binary.
pub fn binary(&self) -> &LanguageServerBinary {
    &self.binary
}
/// Sends a RPC request to the language server.
///
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
@ -1125,16 +1126,40 @@ impl LanguageServer {
)
}
fn request_internal<T>(
/// Sends an RPC request to the language server with a custom timer: a future
/// which, when it becomes ready, causes the request to be timed out, using the
/// future's output as the timeout log message.
///
/// Callers can use this to suppress or extend the default timeout, e.g. while
/// streamed partial results are still arriving.
///
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
pub fn request_with_timer<T: request::Request, U: Future<Output = String>>(
    &self,
    params: T::Params,
    timer: U,
) -> impl LspRequestFuture<T::Result> + use<T, U>
where
    T::Result: 'static + Send,
{
    Self::request_internal_with_timer::<T, U>(
        &self.next_id,
        &self.response_handlers,
        &self.outbound_tx,
        &self.executor,
        timer,
        params,
    )
}
fn request_internal_with_timer<T, U>(
next_id: &AtomicI32,
response_handlers: &Mutex<Option<HashMap<RequestId, ResponseHandler>>>,
outbound_tx: &channel::Sender<String>,
executor: &BackgroundExecutor,
timer: U,
params: T::Params,
) -> impl LspRequestFuture<T::Result> + use<T>
) -> impl LspRequestFuture<T::Result> + use<T, U>
where
T::Result: 'static + Send,
T: request::Request,
U: Future<Output = String>,
{
let id = next_id.fetch_add(1, SeqCst);
let message = serde_json::to_string(&Request {
@ -1179,7 +1204,6 @@ impl LanguageServer {
.context("failed to write to language server's stdin");
let outbound_tx = outbound_tx.downgrade();
let mut timeout = executor.timer(LSP_REQUEST_TIMEOUT).fuse();
let started = Instant::now();
LspRequest::new(id, async move {
if let Err(e) = handle_response {
@ -1216,14 +1240,41 @@ impl LanguageServer {
}
}
_ = timeout => {
log::error!("Cancelled LSP request task for {method:?} id {id} which took over {LSP_REQUEST_TIMEOUT:?}");
message = timer.fuse() => {
log::error!("Cancelled LSP request task for {method:?} id {id} {message}");
ConnectionResult::Timeout
}
}
})
}
/// Sends an RPC request using the default timeout timer
/// (see [`Self::default_request_timer`]).
///
/// Thin wrapper over `request_internal_with_timer` that supplies the standard
/// `LSP_REQUEST_TIMEOUT`-based timer.
fn request_internal<T>(
    next_id: &AtomicI32,
    response_handlers: &Mutex<Option<HashMap<RequestId, ResponseHandler>>>,
    outbound_tx: &channel::Sender<String>,
    executor: &BackgroundExecutor,
    params: T::Params,
) -> impl LspRequestFuture<T::Result> + use<T>
where
    T::Result: 'static + Send,
    T: request::Request,
{
    Self::request_internal_with_timer::<T, _>(
        next_id,
        response_handlers,
        outbound_tx,
        executor,
        Self::default_request_timer(executor.clone()),
        params,
    )
}
/// The default request timeout: resolves after `LSP_REQUEST_TIMEOUT` with a
/// message describing how long the request was allowed to run.
pub fn default_request_timer(executor: BackgroundExecutor) -> impl Future<Output = String> {
    // Start the timer eagerly (matching the original `.map`-based form) so the
    // countdown begins when the timer is created, not when it is first polled.
    let timeout = executor.timer(LSP_REQUEST_TIMEOUT);
    async move {
        timeout.await;
        format!("which took over {LSP_REQUEST_TIMEOUT:?}")
    }
}
/// Sends a RPC notification to the language server.
///
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#notificationMessage)

View file

@ -29,7 +29,7 @@ use clock::Global;
use collections::{BTreeMap, BTreeSet, HashMap, HashSet, btree_map};
use futures::{
AsyncWriteExt, Future, FutureExt, StreamExt,
future::{Shared, join_all},
future::{Either, Shared, join_all, pending, select},
select, select_biased,
stream::FuturesUnordered,
};
@ -85,9 +85,11 @@ use std::{
cmp::{Ordering, Reverse},
convert::TryInto,
ffi::OsStr,
future::ready,
iter, mem,
ops::{ControlFlow, Range},
path::{self, Path, PathBuf},
pin::pin,
rc::Rc,
sync::Arc,
time::{Duration, Instant},
@ -7585,7 +7587,8 @@ impl LspStore {
diagnostics,
|_, _, _| false,
cx,
)
)?;
Ok(())
}
pub fn merge_diagnostic_entries(
@ -9130,13 +9133,39 @@ impl LspStore {
}
};
let progress = match progress.value {
lsp::ProgressParamsValue::WorkDone(progress) => progress,
lsp::ProgressParamsValue::WorkspaceDiagnostic(_) => {
return;
match progress.value {
lsp::ProgressParamsValue::WorkDone(progress) => {
self.handle_work_done_progress(
progress,
language_server_id,
disk_based_diagnostics_progress_token,
token,
cx,
);
}
};
lsp::ProgressParamsValue::WorkspaceDiagnostic(report) => {
if let Some(LanguageServerState::Running {
workspace_refresh_task: Some(workspace_refresh_task),
..
}) = self
.as_local_mut()
.and_then(|local| local.language_servers.get_mut(&language_server_id))
{
workspace_refresh_task.progress_tx.try_send(()).ok();
self.apply_workspace_diagnostic_report(language_server_id, report, cx)
}
}
}
}
fn handle_work_done_progress(
&mut self,
progress: lsp::WorkDoneProgress,
language_server_id: LanguageServerId,
disk_based_diagnostics_progress_token: Option<String>,
token: String,
cx: &mut Context<Self>,
) {
let language_server_status =
if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
status
@ -11297,13 +11326,13 @@ impl LspStore {
pub fn pull_workspace_diagnostics(&mut self, server_id: LanguageServerId) {
if let Some(LanguageServerState::Running {
workspace_refresh_task: Some((tx, _)),
workspace_refresh_task: Some(workspace_refresh_task),
..
}) = self
.as_local_mut()
.and_then(|local| local.language_servers.get_mut(&server_id))
{
tx.try_send(()).ok();
workspace_refresh_task.refresh_tx.try_send(()).ok();
}
}
@ -11319,11 +11348,83 @@ impl LspStore {
local.language_server_ids_for_buffer(buffer, cx)
}) {
if let Some(LanguageServerState::Running {
workspace_refresh_task: Some((tx, _)),
workspace_refresh_task: Some(workspace_refresh_task),
..
}) = local.language_servers.get_mut(&server_id)
{
tx.try_send(()).ok();
workspace_refresh_task.refresh_tx.try_send(()).ok();
}
}
}
/// Merges a workspace diagnostic report from the given server into the
/// project's diagnostics. Used both for final workspace diagnostic responses
/// and for streamed `$/progress` partial results, so repeated calls must
/// merge rather than replace.
fn apply_workspace_diagnostic_report(
    &mut self,
    server_id: LanguageServerId,
    report: lsp::WorkspaceDiagnosticReportResult,
    cx: &mut Context<Self>,
) {
    let workspace_diagnostics =
        GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id);
    for workspace_diagnostics in workspace_diagnostics {
        // Only `Response` entries carry diagnostics to merge; skip the rest.
        let LspPullDiagnostics::Response {
            server_id,
            uri,
            diagnostics,
        } = workspace_diagnostics.diagnostics
        else {
            continue;
        };
        let adapter = self.language_server_adapter_for_id(server_id);
        let disk_based_sources = adapter
            .as_ref()
            .map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
            .unwrap_or(&[]);
        match diagnostics {
            PulledDiagnostics::Unchanged { result_id } => {
                // Server reported no changes for this document: merge an empty
                // diagnostic set, recording the new `result_id` for the next
                // pull. NOTE(review): the `|_, _, _| true` filter appears to
                // retain all existing diagnostics — confirm against
                // `merge_diagnostics`.
                self.merge_diagnostics(
                    server_id,
                    lsp::PublishDiagnosticsParams {
                        uri: uri.clone(),
                        diagnostics: Vec::new(),
                        version: None,
                    },
                    Some(result_id),
                    DiagnosticSourceKind::Pulled,
                    disk_based_sources,
                    |_, _, _| true,
                    cx,
                )
                .log_err();
            }
            PulledDiagnostics::Changed {
                diagnostics,
                result_id,
            } => {
                self.merge_diagnostics(
                    server_id,
                    lsp::PublishDiagnosticsParams {
                        uri: uri.clone(),
                        diagnostics,
                        version: workspace_diagnostics.version,
                    },
                    result_id,
                    DiagnosticSourceKind::Pulled,
                    disk_based_sources,
                    // Filter for previously-merged diagnostics: keep pulled
                    // diagnostics only when they belong to a document other
                    // than `uri` (those for `uri` are superseded by this
                    // report); pushed/other diagnostics are always kept.
                    |buffer, old_diagnostic, cx| match old_diagnostic.source_kind {
                        DiagnosticSourceKind::Pulled => {
                            let buffer_url = File::from_dyn(buffer.file())
                                .map(|f| f.abs_path(cx))
                                .and_then(|abs_path| file_path_to_lsp_url(&abs_path).ok());
                            buffer_url.is_none_or(|buffer_url| buffer_url != uri)
                        }
                        DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => true,
                    },
                    cx,
                )
                .log_err();
            }
        }
    }
}
@ -11379,7 +11480,7 @@ fn subscribe_to_binary_statuses(
fn lsp_workspace_diagnostics_refresh(
server: Arc<LanguageServer>,
cx: &mut Context<'_, LspStore>,
) -> Option<(mpsc::Sender<()>, Task<()>)> {
) -> Option<WorkspaceRefreshTask> {
let identifier = match server.capabilities().diagnostic_provider? {
lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => {
if !diagnostic_options.workspace_diagnostics {
@ -11396,19 +11497,22 @@ fn lsp_workspace_diagnostics_refresh(
}
};
let (mut tx, mut rx) = mpsc::channel(1);
tx.try_send(()).ok();
let (progress_tx, mut progress_rx) = mpsc::channel(1);
let (mut refresh_tx, mut refresh_rx) = mpsc::channel(1);
refresh_tx.try_send(()).ok();
let workspace_query_language_server = cx.spawn(async move |lsp_store, cx| {
let mut attempts = 0;
let max_attempts = 50;
let mut requests = 0;
loop {
let Some(()) = rx.recv().await else {
let Some(()) = refresh_rx.recv().await else {
return;
};
'request: loop {
requests += 1;
if attempts > max_attempts {
log::error!(
"Failed to pull workspace diagnostics {max_attempts} times, aborting"
@ -11437,14 +11541,29 @@ fn lsp_workspace_diagnostics_refresh(
return;
};
let token = format!("workspace/diagnostic-{}-{}", server.server_id(), requests);
progress_rx.try_recv().ok();
let timer =
LanguageServer::default_request_timer(cx.background_executor().clone()).fuse();
let progress = pin!(progress_rx.recv().fuse());
let response_result = server
.request::<lsp::WorkspaceDiagnosticRequest>(lsp::WorkspaceDiagnosticParams {
previous_result_ids,
identifier: identifier.clone(),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
})
.request_with_timer::<lsp::WorkspaceDiagnosticRequest, _>(
lsp::WorkspaceDiagnosticParams {
previous_result_ids,
identifier: identifier.clone(),
work_done_progress_params: Default::default(),
partial_result_params: lsp::PartialResultParams {
partial_result_token: Some(lsp::ProgressToken::String(token)),
},
},
select(timer, progress).then(|either| match either {
Either::Left((message, ..)) => ready(message).left_future(),
Either::Right(..) => pending::<String>().right_future(),
}),
)
.await;
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic_refresh
// > If a server closes a workspace diagnostic pull request the client should re-trigger the request.
match response_result {
@ -11464,72 +11583,11 @@ fn lsp_workspace_diagnostics_refresh(
attempts = 0;
if lsp_store
.update(cx, |lsp_store, cx| {
let workspace_diagnostics =
GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(pulled_diagnostics, server.server_id());
for workspace_diagnostics in workspace_diagnostics {
let LspPullDiagnostics::Response {
server_id,
uri,
diagnostics,
} = workspace_diagnostics.diagnostics
else {
continue;
};
let adapter = lsp_store.language_server_adapter_for_id(server_id);
let disk_based_sources = adapter
.as_ref()
.map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
.unwrap_or(&[]);
match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
lsp_store
.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics: Vec::new(),
version: None,
},
Some(result_id),
DiagnosticSourceKind::Pulled,
disk_based_sources,
|_, _, _| true,
cx,
)
.log_err();
}
PulledDiagnostics::Changed {
diagnostics,
result_id,
} => {
lsp_store
.merge_diagnostics(
server_id,
lsp::PublishDiagnosticsParams {
uri: uri.clone(),
diagnostics,
version: workspace_diagnostics.version,
},
result_id,
DiagnosticSourceKind::Pulled,
disk_based_sources,
|buffer, old_diagnostic, cx| match old_diagnostic.source_kind {
DiagnosticSourceKind::Pulled => {
let buffer_url = File::from_dyn(buffer.file()).map(|f| f.abs_path(cx))
.and_then(|abs_path| file_path_to_lsp_url(&abs_path).ok());
buffer_url.is_none_or(|buffer_url| buffer_url != uri)
},
DiagnosticSourceKind::Other
| DiagnosticSourceKind::Pushed => true,
},
cx,
)
.log_err();
}
}
}
lsp_store.apply_workspace_diagnostic_report(
server.server_id(),
pulled_diagnostics,
cx,
)
})
.is_err()
{
@ -11542,7 +11600,11 @@ fn lsp_workspace_diagnostics_refresh(
}
});
Some((tx, workspace_query_language_server))
Some(WorkspaceRefreshTask {
refresh_tx,
progress_tx,
task: workspace_query_language_server,
})
}
fn resolve_word_completion(snapshot: &BufferSnapshot, completion: &mut Completion) {
@ -11912,6 +11974,13 @@ impl LanguageServerLogType {
}
}
/// Handle to the background loop that pulls workspace diagnostics for a
/// running language server, plus the channels used to drive it.
pub struct WorkspaceRefreshTask {
    /// Signalled (via `try_send`) to request a new workspace diagnostics pull.
    refresh_tx: mpsc::Sender<()>,
    /// Signalled when a streamed `$/progress` partial result arrives, so the
    /// in-flight request is not timed out while results are still flowing.
    progress_tx: mpsc::Sender<()>,
    // Held only to keep the refresh loop alive; never polled directly.
    #[allow(dead_code)]
    task: Task<()>,
}
pub enum LanguageServerState {
Starting {
startup: Task<Option<Arc<LanguageServer>>>,
@ -11923,7 +11992,7 @@ pub enum LanguageServerState {
adapter: Arc<CachedLspAdapter>,
server: Arc<LanguageServer>,
simulate_disk_based_diagnostics_completion: Option<Task<()>>,
workspace_refresh_task: Option<(mpsc::Sender<()>, Task<()>)>,
workspace_refresh_task: Option<WorkspaceRefreshTask>,
},
}

View file

@ -108,6 +108,7 @@ pub struct ProjectPanel {
hide_scrollbar_task: Option<Task<()>>,
diagnostics: HashMap<(WorktreeId, PathBuf), DiagnosticSeverity>,
max_width_item_index: Option<usize>,
diagnostic_summary_update: Task<()>,
// We keep track of the mouse down state on entries so we don't flash the UI
// in case a user clicks to open a file.
mouse_down: bool,
@ -420,8 +421,16 @@ impl ProjectPanel {
| project::Event::DiagnosticsUpdated { .. } => {
if ProjectPanelSettings::get_global(cx).show_diagnostics != ShowDiagnostics::Off
{
this.update_diagnostics(cx);
cx.notify();
this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor()
.timer(Duration::from_millis(30))
.await;
this.update(cx, |this, cx| {
this.update_diagnostics(cx);
cx.notify();
})
.log_err();
});
}
}
project::Event::WorktreeRemoved(id) => {
@ -564,6 +573,7 @@ impl ProjectPanel {
.parent_entity(&cx.entity()),
max_width_item_index: None,
diagnostics: Default::default(),
diagnostic_summary_update: Task::ready(()),
scroll_handle,
mouse_down: false,
hover_expand_task: None,

View file

@ -40,6 +40,7 @@ use std::{
Arc,
atomic::{AtomicUsize, Ordering},
},
time::Duration,
};
use theme::ThemeSettings;
use ui::{
@ -364,6 +365,7 @@ pub struct Pane {
pinned_tab_count: usize,
diagnostics: HashMap<ProjectPath, DiagnosticSeverity>,
zoom_out_on_close: bool,
diagnostic_summary_update: Task<()>,
/// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here.
pub project_item_restoration_data: HashMap<ProjectItemKind, Box<dyn Any + Send>>,
}
@ -505,6 +507,7 @@ impl Pane {
pinned_tab_count: 0,
diagnostics: Default::default(),
zoom_out_on_close: true,
diagnostic_summary_update: Task::ready(()),
project_item_restoration_data: HashMap::default(),
}
}
@ -616,8 +619,16 @@ impl Pane {
project::Event::DiskBasedDiagnosticsFinished { .. }
| project::Event::DiagnosticsUpdated { .. } => {
if ItemSettings::get_global(cx).show_diagnostics != ShowDiagnostics::Off {
self.update_diagnostics(cx);
cx.notify();
self.diagnostic_summary_update = cx.spawn(async move |this, cx| {
cx.background_executor()
.timer(Duration::from_millis(30))
.await;
this.update(cx, |this, cx| {
this.update_diagnostics(cx);
cx.notify();
})
.log_err();
});
}
}
_ => {}