Allow to fetch cargo diagnostics separately (#29706)

Adjusts the way `cargo` and `rust-analyzer` diagnostics are fetched into
Zed.

Nothing changes for the defaults: in this mode, Zed does nothing but
report file updates, which trigger rust-analyzer's
mechanisms:

* generating internal diagnostics, which it is able to produce on the
fly, without taking the cargo lock.
Unfortunately, there are not that many diagnostics in r-a, and some of
them produce false positives compared to the rustc ones

* running `cargo check --workspace --all-targets` on each file save,
taking the cargo lock
For large projects like Zed, this might take a while, reducing the
ability to choose how to work with the project: e.g. it's impossible to
save multiple times without long diagnostics refreshes (may happen
automatically on e.g. focus loss), save the project and run it instantly
without waiting for cargo check to finish, etc.

In addition, it's relatively tricky to reconfigure r-a to run a
different command, with different arguments and maybe different env
vars: that would require a language server restart (and a large project
reindex) and fiddling with multiple JSON fields.

The new mode aims to separate out cargo diagnostics into its own loop so
that all Zed diagnostics features are supported still.


For that, an extra mode was introduced:

```jsonc
"rust": {
  // When enabled, Zed runs `cargo check --message-format=json`-based commands and
  // collects the cargo diagnostics itself instead of relying on rust-analyzer.
  "fetch_cargo_diagnostics": false,
  // A command override for fetching the cargo diagnostics.
  // First argument is the command, followed by the arguments.
  "diagnostics_fetch_command": [
    "cargo",
    "check",
    "--quiet",
    "--workspace",
    "--message-format=json",
    "--all-targets",
    "--keep-going"
  ],
  // Extra environment variables to pass to the diagnostics fetch command.
  "env": {}
}
```

which calls cargo, parses its output, and mixes it in with the existing
diagnostics:




https://github.com/user-attachments/assets/e986f955-b452-4995-8aac-3049683dd22c




Release Notes:

- Added a way to get diagnostics from cargo and rust-analyzer without
mutually locking each other
- Added `ctrl-r` binding to refresh diagnostics in the project
diagnostics editor context
This commit is contained in:
Kirill Bulatov 2025-05-01 11:25:52 +03:00 committed by GitHub
parent 5e4be013af
commit e07ffe7cf1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 1306 additions and 89 deletions

4
Cargo.lock generated
View file

@ -4365,14 +4365,17 @@ name = "diagnostics"
version = "0.1.0"
dependencies = [
"anyhow",
"cargo_metadata",
"client",
"collections",
"component",
"ctor",
"editor",
"env_logger 0.11.8",
"futures 0.3.31",
"gpui",
"indoc",
"itertools 0.14.0",
"language",
"linkme",
"log",
@ -4384,6 +4387,7 @@ dependencies = [
"serde",
"serde_json",
"settings",
"smol",
"text",
"theme",
"ui",

View file

@ -962,5 +962,12 @@
"bindings": {
"escape": "menu::Cancel"
}
},
{
"context": "Diagnostics",
"use_key_equivalents": true,
"bindings": {
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
}
}
]

View file

@ -1068,5 +1068,12 @@
"bindings": {
"escape": "menu::Cancel"
}
},
{
"context": "Diagnostics",
"use_key_equivalents": true,
"bindings": {
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
}
}
]

View file

@ -931,6 +931,24 @@
// The minimum severity of the diagnostics to show inline.
// Shows all diagnostics when not specified.
"max_severity": null
},
"rust": {
// When enabled, Zed runs `cargo check --message-format=json`-based commands and
// collects the cargo diagnostics itself instead of relying on rust-analyzer.
"fetch_cargo_diagnostics": false,
// A command override for fetching the cargo diagnostics.
// First argument is the command, followed by the arguments.
"diagnostics_fetch_command": [
"cargo",
"check",
"--quiet",
"--workspace",
"--message-format=json",
"--all-targets",
"--keep-going"
],
// Extra environment variables to pass to the diagnostics fetch command.
"env": {}
}
},
// Files or globs of files that will be excluded by Zed entirely. They will be skipped during file

View file

@ -14,13 +14,16 @@ doctest = false
[dependencies]
anyhow.workspace = true
cargo_metadata.workspace = true
collections.workspace = true
component.workspace = true
ctor.workspace = true
editor.workspace = true
env_logger.workspace = true
futures.workspace = true
gpui.workspace = true
indoc.workspace = true
itertools.workspace = true
language.workspace = true
linkme.workspace = true
log.workspace = true
@ -29,7 +32,9 @@ markdown.workspace = true
project.workspace = true
rand.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
text.workspace = true
theme.workspace = true
ui.workspace = true

View file

@ -0,0 +1,603 @@
use std::{
path::{Component, Path, Prefix},
process::Stdio,
sync::atomic::{self, AtomicUsize},
};
use cargo_metadata::{
Message,
diagnostic::{Applicability, Diagnostic as CargoDiagnostic, DiagnosticLevel, DiagnosticSpan},
};
use collections::HashMap;
use gpui::{AppContext, Entity, Task};
use itertools::Itertools as _;
use language::Diagnostic;
use project::{
Worktree, lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME,
project_settings::ProjectSettings,
};
use serde::{Deserialize, Serialize};
use settings::Settings;
use smol::{
channel::Receiver,
io::{AsyncBufReadExt, BufReader},
process::Command,
};
use ui::App;
use util::ResultExt;
use crate::ProjectDiagnosticsEditor;
/// A single JSON line of `cargo … --message-format=json` output.
///
/// Cargo wraps most output in its own message envelope, but bare `rustc`
/// diagnostics can also appear in the stream, so both shapes are tried
/// (untagged) during deserialization.
#[derive(Debug, serde::Deserialize)]
#[serde(untagged)]
enum CargoMessage {
    /// A message emitted by cargo itself (compiler messages, artifacts, etc.).
    Cargo(Message),
    /// A bare rustc diagnostic, not wrapped in a cargo envelope.
    Rustc(CargoDiagnostic),
}
/// Appends formatted string to a `String`.
///
/// Relies on autoref so both `String` and `&mut String` receivers work;
/// the `fmt::Result` (infallible for `String`) is deliberately discarded.
macro_rules! format_to {
    ($buf:expr) => ();
    ($buf:expr, $lit:literal $($arg:tt)*) => {
        {
            use ::std::fmt::Write as _;
            // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*))
            // unfortunately, as that loses out on autoref behavior.
            _ = $buf.write_fmt(format_args!($lit $($arg)*))
        }
    };
}
/// Returns the worktrees eligible for a cargo diagnostics fetch.
///
/// Yields an empty list when the `fetch_cargo_diagnostics` setting is off;
/// otherwise, every worktree of the editor's project that contains a root
/// `Cargo.toml` entry.
pub fn cargo_diagnostics_sources(
    editor: &ProjectDiagnosticsEditor,
    cx: &App,
) -> Vec<Entity<Worktree>> {
    // Cargo-based diagnostics are opt-in; without the setting there is nothing to scan.
    let enabled = ProjectSettings::get_global(cx)
        .diagnostics
        .fetch_cargo_diagnostics();
    if !enabled {
        return Vec::new();
    }
    let project = editor.project.read(cx);
    project
        .worktrees(cx)
        .filter(|worktree| worktree.read(cx).entry_for_path("Cargo.toml").is_some())
        .collect()
}
/// An update streamed from the background cargo process while diagnostics
/// are being fetched.
#[derive(Debug)]
pub enum FetchUpdate {
    /// A diagnostic parsed out of the cargo/rustc JSON stream.
    Diagnostic(CargoDiagnostic),
    /// A human-readable progress message (e.g. which crate just compiled).
    Progress(String),
}

/// Lifecycle of a cargo diagnostics fetch, reported to the language server
/// status UI as LSP work-done progress.
#[derive(Debug)]
pub enum FetchStatus {
    Started,
    Progress { message: String },
    Finished,
}
/// Spawns the configured `diagnostics_fetch_command` (default: `cargo check …`)
/// in `worktree_root` and streams parsed diagnostics and progress updates
/// through the returned channel.
///
/// Returns `None` when cargo diagnostics are disabled in settings, the command
/// list is empty, or the process fails to spawn. Dropping the returned task
/// kills the child process (`kill_on_drop`).
pub fn fetch_worktree_diagnostics(
    worktree_root: &Path,
    cx: &App,
) -> Option<(Task<()>, Receiver<FetchUpdate>)> {
    let diagnostics_settings = ProjectSettings::get_global(cx)
        .diagnostics
        .cargo
        .as_ref()
        .filter(|cargo_diagnostics| cargo_diagnostics.fetch_cargo_diagnostics)?;
    // Printable form of the command, kept for error logging below.
    let command_string = diagnostics_settings
        .diagnostics_fetch_command
        .iter()
        .join(" ");
    let mut command_parts = diagnostics_settings.diagnostics_fetch_command.iter();
    let mut command = Command::new(command_parts.next()?)
        .args(command_parts)
        .envs(diagnostics_settings.env.clone())
        .current_dir(worktree_root)
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .kill_on_drop(true)
        .spawn()
        .log_err()?;
    let stdout = command.stdout.take()?;
    let mut reader = BufReader::new(stdout);
    let (tx, rx) = smol::channel::unbounded();
    // Abort after this many *consecutive* read failures; a successful read resets it.
    let error_threshold = 10;
    let cargo_diagnostics_fetch_task = cx.background_spawn(async move {
        // Keep the child process alive for the duration of the task
        // (and kill it when this task is dropped).
        let _command = command;
        let mut errors = 0;
        loop {
            let mut line = String::new();
            match reader.read_line(&mut line).await {
                // EOF: the cargo process closed stdout, we are done.
                Ok(0) => {
                    return;
                },
                Ok(_) => {
                    errors = 0;
                    let mut deserializer = serde_json::Deserializer::from_str(&line);
                    // Diagnostics can nest deeply (macro expansions).
                    deserializer.disable_recursion_limit();
                    let send_result = match CargoMessage::deserialize(&mut deserializer) {
                        Ok(CargoMessage::Cargo(Message::CompilerMessage(message))) =>
                            tx.send(FetchUpdate::Diagnostic(message.message)).await,
                        Ok(CargoMessage::Cargo(Message::CompilerArtifact(artifact))) =>
                            tx.send(FetchUpdate::Progress(format!(
                                "Compiled {:?}",
                                artifact.manifest_path.parent().unwrap_or(&artifact.manifest_path)
                            ))).await,
                        // Other cargo messages (build scripts etc.) are not interesting here.
                        Ok(CargoMessage::Cargo(_)) => Ok(()),
                        Ok(CargoMessage::Rustc(rustc_message)) =>
                            tx.send(FetchUpdate::Diagnostic(rustc_message)).await,
                        // Non-JSON lines are expected — cargo prints plain text too.
                        Err(_) => {
                            log::debug!("Failed to parse cargo diagnostics from line '{line}'");
                            Ok(())
                        },
                    };
                    // A send error means the receiver was dropped: nobody is listening.
                    if send_result.is_err() {
                        return;
                    }
                },
                Err(e) => {
                    log::error!("Failed to read line from {command_string} command output when fetching cargo diagnostics: {e}");
                    errors += 1;
                    if errors >= error_threshold {
                        log::error!("Failed {error_threshold} times, aborting the diagnostics fetch");
                        return;
                    }
                },
            }
        }
    });
    Some((cargo_diagnostics_fetch_task, rx))
}
/// Monotonically increasing generation counter for cargo diagnostics fetches.
/// Diagnostics stamped with an older generation are considered stale.
static CARGO_DIAGNOSTICS_FETCH_GENERATION: AtomicUsize = AtomicUsize::new(0);

/// Payload stored in `lsp::Diagnostic::data` to remember which fetch
/// generation produced the diagnostic.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
struct CargoFetchDiagnosticData {
    generation: usize,
}

/// Starts a new fetch generation, marking all previously fetched cargo
/// diagnostics as outdated.
pub fn next_cargo_fetch_generation() {
    CARGO_DIAGNOSTICS_FETCH_GENERATION.fetch_add(1, atomic::Ordering::Release);
}
pub fn is_outdated_cargo_fetch_diagnostic(diagnostic: &Diagnostic) -> bool {
if let Some(data) = diagnostic
.data
.clone()
.and_then(|data| serde_json::from_value::<CargoFetchDiagnosticData>(data).ok())
{
let current_generation = CARGO_DIAGNOSTICS_FETCH_GENERATION.load(atomic::Ordering::Acquire);
data.generation < current_generation
} else {
false
}
}
/// Converts a Rust root diagnostic to LSP form
///
/// This flattens the Rust diagnostic by:
///
/// 1. Creating a LSP diagnostic with the root message and primary span.
/// 2. Adding any labelled secondary spans to `relatedInformation`
/// 3. Categorising child diagnostics as either `SuggestedFix`es,
///    `relatedInformation` or additional message lines.
///
/// If the diagnostic has no primary span this will return `None`
///
/// Taken from https://github.com/rust-lang/rust-analyzer/blob/fe7b4f2ad96f7c13cc571f45edc2c578b35dddb4/crates/rust-analyzer/src/diagnostics/to_proto.rs#L275-L285
pub(crate) fn map_rust_diagnostic_to_lsp(
    worktree_root: &Path,
    cargo_diagnostic: &CargoDiagnostic,
) -> Vec<(lsp::Url, lsp::Diagnostic)> {
    let primary_spans: Vec<&DiagnosticSpan> = cargo_diagnostic
        .spans
        .iter()
        .filter(|s| s.is_primary)
        .collect();
    // Diagnostics without a primary span cannot be anchored anywhere; skip them.
    if primary_spans.is_empty() {
        return Vec::new();
    }
    let severity = diagnostic_severity(cargo_diagnostic.level);
    let mut source = String::from(CARGO_DIAGNOSTICS_SOURCE_NAME);
    let mut code = cargo_diagnostic.code.as_ref().map(|c| c.code.clone());
    if let Some(code_val) = &code {
        // See if this is an RFC #2103 scoped lint (e.g. from Clippy)
        let scoped_code: Vec<&str> = code_val.split("::").collect();
        if scoped_code.len() == 2 {
            source = String::from(scoped_code[0]);
            code = Some(String::from(scoped_code[1]));
        }
    }
    let mut needs_primary_span_label = true;
    let mut subdiagnostics = Vec::new();
    let mut tags = Vec::new();
    // Labelled secondary spans become `relatedInformation` entries.
    for secondary_span in cargo_diagnostic.spans.iter().filter(|s| !s.is_primary) {
        if let Some(label) = secondary_span.label.clone() {
            subdiagnostics.push(lsp::DiagnosticRelatedInformation {
                location: location(worktree_root, secondary_span),
                message: label,
            });
        }
    }
    let mut message = cargo_diagnostic.message.clone();
    for child in &cargo_diagnostic.children {
        let child = map_rust_child_diagnostic(worktree_root, child);
        match child {
            MappedRustChildDiagnostic::SubDiagnostic(sub) => {
                subdiagnostics.push(sub);
            }
            MappedRustChildDiagnostic::MessageLine(message_line) => {
                format_to!(message, "\n{message_line}");
                // These secondary messages usually duplicate the content of the
                // primary span label.
                needs_primary_span_label = false;
            }
        }
    }
    // Map well-known lint codes onto LSP diagnostic tags so editors can render
    // them specially (fade unused code, strike through deprecated items).
    if let Some(code) = &cargo_diagnostic.code {
        let code = code.code.as_str();
        if matches!(
            code,
            "dead_code"
                | "unknown_lints"
                | "unreachable_code"
                | "unused_attributes"
                | "unused_imports"
                | "unused_macros"
                | "unused_variables"
        ) {
            tags.push(lsp::DiagnosticTag::UNNECESSARY);
        }
        if matches!(code, "deprecated") {
            tags.push(lsp::DiagnosticTag::DEPRECATED);
        }
    }
    let code_description = match source.as_str() {
        "rustc" => rustc_code_description(code.as_deref()),
        "clippy" => clippy_code_description(code.as_deref()),
        _ => None,
    };
    // Stamp every emitted diagnostic with the current fetch generation so stale
    // diagnostics from previous runs can be detected and evicted later.
    let generation = CARGO_DIAGNOSTICS_FETCH_GENERATION.load(atomic::Ordering::Acquire);
    let data = Some(
        serde_json::to_value(CargoFetchDiagnosticData { generation })
            .expect("Serializing a regular Rust struct"),
    );
    primary_spans
        .iter()
        .flat_map(|primary_span| {
            let primary_location = primary_location(worktree_root, primary_span);
            let message = {
                let mut message = message.clone();
                if needs_primary_span_label {
                    if let Some(primary_span_label) = &primary_span.label {
                        format_to!(message, "\n{primary_span_label}");
                    }
                }
                message
            };
            // Each primary diagnostic span may result in multiple LSP diagnostics.
            let mut diagnostics = Vec::new();
            let mut related_info_macro_calls = vec![];
            // If error occurs from macro expansion, add related info pointing to
            // where the error originated
            // Also, we would generate an additional diagnostic, so that exact place of macro
            // will be highlighted in the error origin place.
            let span_stack = std::iter::successors(Some(*primary_span), |span| {
                Some(&span.expansion.as_ref()?.span)
            });
            for (i, span) in span_stack.enumerate() {
                if is_dummy_macro_file(&span.file_name) {
                    continue;
                }
                // First span is the original diagnostic, others are macro call locations that
                // generated that code.
                let is_in_macro_call = i != 0;
                let secondary_location = location(worktree_root, span);
                if secondary_location == primary_location {
                    continue;
                }
                related_info_macro_calls.push(lsp::DiagnosticRelatedInformation {
                    location: secondary_location.clone(),
                    message: if is_in_macro_call {
                        "Error originated from macro call here".to_owned()
                    } else {
                        "Actual error occurred here".to_owned()
                    },
                });
                // For the additional in-macro diagnostic we add the inverse message pointing to the error location in code.
                let information_for_additional_diagnostic =
                    vec![lsp::DiagnosticRelatedInformation {
                        location: primary_location.clone(),
                        message: "Exact error occurred here".to_owned(),
                    }];
                let diagnostic = lsp::Diagnostic {
                    range: secondary_location.range,
                    // downgrade to hint if we're pointing at the macro
                    severity: Some(lsp::DiagnosticSeverity::HINT),
                    code: code.clone().map(lsp::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message: message.clone(),
                    related_information: Some(information_for_additional_diagnostic),
                    tags: if tags.is_empty() {
                        None
                    } else {
                        Some(tags.clone())
                    },
                    data: data.clone(),
                };
                diagnostics.push((secondary_location.uri, diagnostic));
            }
            // Emit the primary diagnostic.
            diagnostics.push((
                primary_location.uri.clone(),
                lsp::Diagnostic {
                    range: primary_location.range,
                    severity,
                    code: code.clone().map(lsp::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message,
                    related_information: {
                        let info = related_info_macro_calls
                            .iter()
                            .cloned()
                            .chain(subdiagnostics.iter().cloned())
                            .collect::<Vec<_>>();
                        if info.is_empty() { None } else { Some(info) }
                    },
                    tags: if tags.is_empty() {
                        None
                    } else {
                        Some(tags.clone())
                    },
                    data: data.clone(),
                },
            ));
            // Emit hint-level diagnostics for all `related_information` entries such as "help"s.
            // This is useful because they will show up in the user's editor, unlike
            // `related_information`, which just produces hard-to-read links, at least in VS Code.
            let back_ref = lsp::DiagnosticRelatedInformation {
                location: primary_location,
                message: "original diagnostic".to_owned(),
            };
            for sub in &subdiagnostics {
                diagnostics.push((
                    sub.location.uri.clone(),
                    lsp::Diagnostic {
                        range: sub.location.range,
                        severity: Some(lsp::DiagnosticSeverity::HINT),
                        code: code.clone().map(lsp::NumberOrString::String),
                        code_description: code_description.clone(),
                        source: Some(source.clone()),
                        message: sub.message.clone(),
                        related_information: Some(vec![back_ref.clone()]),
                        tags: None, // don't apply modifiers again
                        data: data.clone(),
                    },
                ));
            }
            diagnostics
        })
        .collect()
}
/// Builds a link into the rustc error index for codes shaped exactly like
/// `E` followed by up to four digits (e.g. `E0308`); anything else gets no
/// code description.
fn rustc_code_description(code: Option<&str>) -> Option<lsp::CodeDescription> {
    let code = code?;
    let mut chars = code.chars();
    let looks_like_rustc_code = chars.next() == Some('E')
        && chars.by_ref().take(4).all(|c| c.is_ascii_digit())
        && chars.next().is_none();
    if !looks_like_rustc_code {
        return None;
    }
    let href = lsp::Url::parse(&format!(
        "https://doc.rust-lang.org/error-index.html#{code}"
    ))
    .ok()?;
    Some(lsp::CodeDescription { href })
}
/// Builds a link to the clippy lint list anchor for the given lint name.
fn clippy_code_description(code: Option<&str>) -> Option<lsp::CodeDescription> {
    let code = code?;
    let url = format!("https://rust-lang.github.io/rust-clippy/master/index.html#{code}");
    let href = lsp::Url::parse(&url).ok()?;
    Some(lsp::CodeDescription { href })
}
/// Determines the LSP severity from a diagnostic
fn diagnostic_severity(level: DiagnosticLevel) -> Option<lsp::DiagnosticSeverity> {
let res = match level {
DiagnosticLevel::Ice => lsp::DiagnosticSeverity::ERROR,
DiagnosticLevel::Error => lsp::DiagnosticSeverity::ERROR,
DiagnosticLevel::Warning => lsp::DiagnosticSeverity::WARNING,
DiagnosticLevel::Note => lsp::DiagnosticSeverity::INFORMATION,
DiagnosticLevel::Help => lsp::DiagnosticSeverity::HINT,
_ => return None,
};
Some(res)
}
/// The LSP-shaped result of flattening one child diagnostic.
enum MappedRustChildDiagnostic {
    /// A child with a primary span: becomes a `relatedInformation` entry.
    SubDiagnostic(lsp::DiagnosticRelatedInformation),
    /// A spanless child: its message gets appended to the parent's message.
    MessageLine(String),
}
/// Maps a child diagnostic either to a `relatedInformation` entry (when it has
/// a primary span) or to an extra message line appended to its parent.
fn map_rust_child_diagnostic(
    worktree_root: &Path,
    cargo_diagnostic: &CargoDiagnostic,
) -> MappedRustChildDiagnostic {
    let spans: Vec<&DiagnosticSpan> = cargo_diagnostic
        .spans
        .iter()
        .filter(|s| s.is_primary)
        .collect();
    if spans.is_empty() {
        // `rustc` uses these spanless children as a way to print multi-line
        // messages
        return MappedRustChildDiagnostic::MessageLine(cargo_diagnostic.message.clone());
    }
    // NOTE(review): `edit_map` (and the per-span `edit`) is populated below but
    // never read afterwards — in rust-analyzer's original this feeds quickfix
    // code actions, which are absent here. Dead scaffolding; consider removing
    // or wiring up to code actions.
    let mut edit_map: HashMap<lsp::Url, Vec<lsp::TextEdit>> = HashMap::default();
    let mut suggested_replacements = Vec::new();
    for &span in &spans {
        if let Some(suggested_replacement) = &span.suggested_replacement {
            if !suggested_replacement.is_empty() {
                suggested_replacements.push(suggested_replacement);
            }
            let location = location(worktree_root, span);
            let edit = lsp::TextEdit::new(location.range, suggested_replacement.clone());
            // Only actually emit a quickfix if the suggestion is "valid enough".
            // We accept both "MaybeIncorrect" and "MachineApplicable". "MaybeIncorrect" means that
            // the suggestion is *complete* (contains no placeholders where code needs to be
            // inserted), but might not be what the user wants, or might need minor adjustments.
            if matches!(
                span.suggestion_applicability,
                None | Some(Applicability::MaybeIncorrect | Applicability::MachineApplicable)
            ) {
                edit_map.entry(location.uri).or_default().push(edit);
            }
        }
    }
    // rustc renders suggestion diagnostics by appending the suggested replacement, so do the same
    // here, otherwise the diagnostic text is missing useful information.
    let mut message = cargo_diagnostic.message.clone();
    if !suggested_replacements.is_empty() {
        message.push_str(": ");
        let suggestions = suggested_replacements
            .iter()
            .map(|suggestion| format!("`{suggestion}`"))
            .join(", ");
        message.push_str(&suggestions);
    }
    MappedRustChildDiagnostic::SubDiagnostic(lsp::DiagnosticRelatedInformation {
        location: location(worktree_root, spans[0]),
        message,
    })
}
/// Converts a Rust span to a LSP location
fn location(worktree_root: &Path, span: &DiagnosticSpan) -> lsp::Location {
    // Span paths are relative to the cargo invocation directory (the worktree root).
    let abs_path = worktree_root.join(&span.file_name);
    let uri = url_from_abs_path(&abs_path);
    // Cargo spans use 1-based columns; LSP columns are 0-based.
    let start = position(span, span.line_start, span.column_start.saturating_sub(1));
    let end = position(span, span.line_end, span.column_end.saturating_sub(1));
    lsp::Location::new(uri, lsp::Range::new(start, end))
}
/// Returns a `Url` object from a given path, will lowercase drive letters if present.
/// This will only happen when processing windows paths.
///
/// When processing non-windows path, this is essentially the same as `Url::from_file_path`.
pub(crate) fn url_from_abs_path(path: &Path) -> lsp::Url {
    // NOTE(review): panics when `path` is not absolute — callers must guarantee that.
    let url = lsp::Url::from_file_path(path).unwrap();
    match path.components().next() {
        Some(Component::Prefix(prefix))
            if matches!(prefix.kind(), Prefix::Disk(_) | Prefix::VerbatimDisk(_)) =>
        {
            // Need to lowercase drive letter
        }
        _ => return url,
    }
    // Locate the drive letter inside the serialized URL, e.g. the `C` in `file:///C:/...`,
    // by splitting on the first two `:` separators (scheme, drive letter, rest).
    let driver_letter_range = {
        let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
            Some(it) => it,
            None => return url,
        };
        let start = scheme.len() + ':'.len_utf8();
        start..(start + drive_letter.len())
    };
    // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
    // machinery *also* canonicalizes the drive letter. So, just massage the
    // string in place.
    let mut url: String = url.into();
    url[driver_letter_range].make_ascii_lowercase();
    lsp::Url::parse(&url).unwrap()
}
/// Converts a 1-based cargo span line/column into a 0-based `lsp::Position`.
///
/// `column_offset_utf32` is a 0-based column counted in characters; when the
/// span carries the source line's text and it is non-ASCII, the column is
/// converted to a UTF-8 byte offset within that line.
fn position(
    span: &DiagnosticSpan,
    line_number: usize,
    column_offset_utf32: usize,
) -> lsp::Position {
    // Guard against malformed spans where `line_number < line_start`
    // (the original unchecked subtraction would underflow).
    let line_index = line_number.saturating_sub(span.line_start);
    let column_offset_encoded = match span.text.get(line_index) {
        // Fast path: for ASCII lines char offsets equal byte offsets.
        Some(line) if line.text.is_ascii() => column_offset_utf32,
        // Slow path: byte offset of the end of the first `column_offset_utf32`
        // characters. (The original re-sliced the prefix and took `.len()`,
        // which is the same value — the redundant slice is dropped.)
        Some(line) => line
            .text
            .char_indices()
            .take(column_offset_utf32)
            .last()
            .map(|(pos, c)| pos + c.len_utf8())
            .unwrap_or(0),
        // No recorded source text for this line; pass the raw column through.
        None => column_offset_utf32,
    };
    lsp::Position {
        line: (line_number as u32).saturating_sub(1),
        character: column_offset_encoded as u32,
    }
}
/// Checks whether a file name is from macro invocation and does not refer to an actual file.
///
/// rustc labels macro-expansion spans with pseudo paths like `<macro expansion>`,
/// which never correspond to files on disk.
fn is_dummy_macro_file(file_name: &str) -> bool {
    let mut chars = file_name.chars();
    chars.next() == Some('<') && chars.next_back() == Some('>')
}
/// Extracts a suitable "primary" location from a rustc diagnostic.
///
/// Walks the macro-expansion chain outward and returns the first span that is
/// a real file inside the worktree, avoiding locations pointing into the
/// standard library or outside the workspace. Falls back to the outermost
/// macro invocation when no suitable span is found.
fn primary_location(worktree_root: &Path, span: &DiagnosticSpan) -> lsp::Location {
    let mut current = span;
    loop {
        let abs_path = worktree_root.join(&current.file_name);
        if !is_dummy_macro_file(&current.file_name) && abs_path.starts_with(worktree_root) {
            return location(worktree_root, current);
        }
        match current.expansion.as_ref() {
            Some(expansion) => current = &expansion.span,
            None => break,
        }
    }
    // `current` is now the outermost macro invocation.
    location(worktree_root, current)
}

View file

@ -1,3 +1,4 @@
mod cargo;
pub mod items;
mod toolbar_controls;
@ -7,7 +8,12 @@ mod diagnostic_renderer;
mod diagnostics_tests;
use anyhow::Result;
use collections::{BTreeSet, HashMap};
use cargo::{
FetchStatus, FetchUpdate, cargo_diagnostics_sources, fetch_worktree_diagnostics,
is_outdated_cargo_fetch_diagnostic, map_rust_diagnostic_to_lsp, next_cargo_fetch_generation,
url_from_abs_path,
};
use collections::{BTreeSet, HashMap, HashSet};
use diagnostic_renderer::DiagnosticBlock;
use editor::{
DEFAULT_MULTIBUFFER_CONTEXT, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
@ -22,14 +28,16 @@ use gpui::{
use language::{
Bias, Buffer, BufferRow, BufferSnapshot, DiagnosticEntry, Point, ToTreeSitterPoint,
};
use lsp::DiagnosticSeverity;
use project::{DiagnosticSummary, Project, ProjectPath, project_settings::ProjectSettings};
use lsp::{DiagnosticSeverity, LanguageServerId};
use project::{
DiagnosticSummary, Project, ProjectPath, Worktree,
lsp_store::rust_analyzer_ext::{CARGO_DIAGNOSTICS_SOURCE_NAME, RUST_ANALYZER_NAME},
project_settings::ProjectSettings,
};
use settings::Settings;
use std::{
any::{Any, TypeId},
cmp,
cmp::Ordering,
cmp::{self, Ordering},
ops::{Range, RangeInclusive},
sync::Arc,
time::Duration,
@ -45,7 +53,10 @@ use workspace::{
searchable::SearchableItemHandle,
};
actions!(diagnostics, [Deploy, ToggleWarnings]);
actions!(
diagnostics,
[Deploy, ToggleWarnings, ToggleDiagnosticsRefresh]
);
#[derive(Default)]
pub(crate) struct IncludeWarnings(bool);
@ -68,9 +79,15 @@ pub(crate) struct ProjectDiagnosticsEditor {
paths_to_update: BTreeSet<ProjectPath>,
include_warnings: bool,
update_excerpts_task: Option<Task<Result<()>>>,
cargo_diagnostics_fetch: CargoDiagnosticsFetchState,
_subscription: Subscription,
}
/// Bookkeeping for an in-flight cargo diagnostics fetch.
struct CargoDiagnosticsFetchState {
    /// The running fetch; dropping it cancels the spawned work.
    task: Option<Task<()>>,
    /// The rust-analyzer server used for progress reporting and as the
    /// server id the fetched diagnostics are attributed to.
    rust_analyzer: Option<LanguageServerId>,
}
impl EventEmitter<EditorEvent> for ProjectDiagnosticsEditor {}
const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50);
@ -126,6 +143,7 @@ impl Render for ProjectDiagnosticsEditor {
.track_focus(&self.focus_handle(cx))
.size_full()
.on_action(cx.listener(Self::toggle_warnings))
.on_action(cx.listener(Self::toggle_diagnostics_refresh))
.child(child)
}
}
@ -212,7 +230,11 @@ impl ProjectDiagnosticsEditor {
cx.observe_global_in::<IncludeWarnings>(window, |this, window, cx| {
this.include_warnings = cx.global::<IncludeWarnings>().0;
this.diagnostics.clear();
this.update_all_excerpts(window, cx);
this.update_all_diagnostics(window, cx);
})
.detach();
cx.observe_release(&cx.entity(), |editor, _, cx| {
editor.stop_cargo_diagnostics_fetch(cx);
})
.detach();
@ -229,9 +251,13 @@ impl ProjectDiagnosticsEditor {
editor,
paths_to_update: Default::default(),
update_excerpts_task: None,
cargo_diagnostics_fetch: CargoDiagnosticsFetchState {
task: None,
rust_analyzer: None,
},
_subscription: project_event_subscription,
};
this.update_all_excerpts(window, cx);
this.update_all_diagnostics(window, cx);
this
}
@ -239,15 +265,17 @@ impl ProjectDiagnosticsEditor {
if self.update_excerpts_task.is_some() {
return;
}
let project_handle = self.project.clone();
self.update_excerpts_task = Some(cx.spawn_in(window, async move |this, cx| {
cx.background_executor()
.timer(DIAGNOSTICS_UPDATE_DELAY)
.await;
loop {
let Some(path) = this.update(cx, |this, _| {
let Some(path) = this.update(cx, |this, cx| {
let Some(path) = this.paths_to_update.pop_first() else {
this.update_excerpts_task.take();
this.update_excerpts_task = None;
cx.notify();
return None;
};
Some(path)
@ -307,6 +335,32 @@ impl ProjectDiagnosticsEditor {
cx.set_global(IncludeWarnings(!self.include_warnings));
}
/// Toggles the current diagnostics refresh: stops it when one is running,
/// starts a fresh one when idle. In cargo mode this controls the cargo fetch
/// task; otherwise it controls the regular excerpt-update task.
fn toggle_diagnostics_refresh(
    &mut self,
    _: &ToggleDiagnosticsRefresh,
    window: &mut Window,
    cx: &mut Context<Self>,
) {
    let cargo_mode = ProjectSettings::get_global(cx)
        .diagnostics
        .fetch_cargo_diagnostics();
    if cargo_mode {
        if self.cargo_diagnostics_fetch.task.is_some() {
            self.stop_cargo_diagnostics_fetch(cx);
        } else {
            self.update_all_diagnostics(window, cx);
        }
    } else if self.update_excerpts_task.take().is_none() {
        // No update was running — kick off a fresh one.
        self.update_all_diagnostics(window, cx);
    }
    cx.notify();
}
fn focus_in(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if self.focus_handle.is_focused(window) && !self.multibuffer.read(cx).is_empty() {
self.editor.focus_handle(cx).focus(window)
@ -320,6 +374,303 @@ impl ProjectDiagnosticsEditor {
}
}
/// Refreshes all diagnostics: runs the cargo-based fetch when any worktree is
/// configured for it, falling back to the regular excerpt update otherwise.
fn update_all_diagnostics(&mut self, window: &mut Window, cx: &mut Context<Self>) {
    let sources = cargo_diagnostics_sources(self, cx);
    if sources.is_empty() {
        self.update_all_excerpts(window, cx);
        return;
    }
    self.fetch_cargo_diagnostics(Arc::new(sources), window, cx);
}
/// Spawns the project-wide cargo diagnostics fetch: one cargo process per
/// worktree in `diagnostics_sources`, streaming parsed diagnostics into the
/// project's LSP store under the rust-analyzer server id.
///
/// Merged diagnostics carry the current fetch generation; on completion,
/// files that had cargo diagnostics before but received none in this run are
/// cleaned up. Requires a running rust-analyzer server (used for progress
/// reporting and diagnostics attribution) — otherwise the fetch is skipped.
fn fetch_cargo_diagnostics(
    &mut self,
    diagnostics_sources: Arc<Vec<Entity<Worktree>>>,
    window: &mut Window,
    cx: &mut Context<Self>,
) {
    self.cargo_diagnostics_fetch.task = Some(cx.spawn_in(window, async move |editor, cx| {
        // Resolve the rust-analyzer server the diagnostics will be attributed to.
        let rust_analyzer_server = editor
            .update(cx, |editor, cx| {
                editor
                    .project
                    .read(cx)
                    .language_server_with_name(RUST_ANALYZER_NAME, cx)
            })
            .ok();
        let rust_analyzer_server = match rust_analyzer_server {
            Some(rust_analyzer_server) => rust_analyzer_server.await,
            None => None,
        };
        let mut worktree_diagnostics_tasks = Vec::new();
        // URLs that currently carry cargo diagnostics — used for final cleanup.
        let mut paths_with_reported_cargo_diagnostics = HashSet::default();
        if let Some(rust_analyzer_server) = rust_analyzer_server {
            let can_continue = editor
                .update(cx, |editor, cx| {
                    editor.cargo_diagnostics_fetch.rust_analyzer = Some(rust_analyzer_server);
                    // Register the progress token with the server status and snapshot
                    // which files already have cargo-sourced diagnostics.
                    let status_inserted =
                        editor
                            .project
                            .read(cx)
                            .lsp_store()
                            .update(cx, |lsp_store, cx| {
                                if let Some(rust_analyzer_status) = lsp_store
                                    .language_server_statuses
                                    .get_mut(&rust_analyzer_server)
                                {
                                    rust_analyzer_status
                                        .progress_tokens
                                        .insert(fetch_cargo_diagnostics_token());
                                    paths_with_reported_cargo_diagnostics.extend(editor.diagnostics.iter().filter_map(|(buffer_id, diagnostics)| {
                                        if diagnostics.iter().any(|d| d.diagnostic.source.as_deref() == Some(CARGO_DIAGNOSTICS_SOURCE_NAME)) {
                                            Some(*buffer_id)
                                        } else {
                                            None
                                        }
                                    }).filter_map(|buffer_id| {
                                        let buffer = lsp_store.buffer_store().read(cx).get(buffer_id)?;
                                        let path = buffer.read(cx).file()?.as_local()?.abs_path(cx);
                                        Some(url_from_abs_path(&path))
                                    }));
                                    true
                                } else {
                                    false
                                }
                            });
                    if status_inserted {
                        editor.update_cargo_fetch_status(FetchStatus::Started, cx);
                        // New generation: everything fetched earlier becomes stale.
                        next_cargo_fetch_generation();
                        true
                    } else {
                        false
                    }
                })
                .unwrap_or(false);
            if can_continue {
                for worktree in diagnostics_sources.iter() {
                    if let Some(((_task, worktree_diagnostics), worktree_root)) = cx
                        .update(|_, cx| {
                            let worktree_root = worktree.read(cx).abs_path();
                            log::info!("Fetching cargo diagnostics for {worktree_root:?}");
                            fetch_worktree_diagnostics(&worktree_root, cx)
                                .zip(Some(worktree_root))
                        })
                        .ok()
                        .flatten()
                    {
                        let editor = editor.clone();
                        worktree_diagnostics_tasks.push(cx.spawn(async move |cx| {
                            // Keep the fetch task alive so the cargo process keeps running.
                            let _task = _task;
                            let mut file_diagnostics = HashMap::default();
                            let mut diagnostics_total = 0;
                            let mut updated_urls = HashSet::default();
                            while let Ok(fetch_update) = worktree_diagnostics.recv().await {
                                match fetch_update {
                                    FetchUpdate::Diagnostic(diagnostic) => {
                                        // Keep each file's diagnostics sorted by range;
                                        // the trailing `.then(Ordering::Greater)` makes
                                        // equal ranges insert after existing entries.
                                        for (url, diagnostic) in map_rust_diagnostic_to_lsp(
                                            &worktree_root,
                                            &diagnostic,
                                        ) {
                                            let file_diagnostics = file_diagnostics
                                                .entry(url)
                                                .or_insert_with(Vec::<lsp::Diagnostic>::new);
                                            let i = file_diagnostics
                                                .binary_search_by(|probe| {
                                                    probe.range.start.cmp(&diagnostic.range.start)
                                                        .then(probe.range.end.cmp(&diagnostic.range.end))
                                                        .then(Ordering::Greater)
                                                })
                                                .unwrap_or_else(|i| i);
                                            file_diagnostics.insert(i, diagnostic);
                                        }
                                        // NOTE(review): flushes only once *more than one* file
                                        // has pending diagnostics — presumably a batching
                                        // heuristic, but `> 0` looks like the intent; confirm.
                                        let file_changed = file_diagnostics.len() > 1;
                                        if file_changed {
                                            // Merge the buffered diagnostics into the LSP store,
                                            // evicting only ones from older fetch generations.
                                            // The `.ok().transpose().ok().flatten().is_none()`
                                            // chain is true when either the editor is gone or
                                            // the merge itself failed — stop streaming then.
                                            if editor
                                                .update_in(cx, |editor, window, cx| {
                                                    editor
                                                        .project
                                                        .read(cx)
                                                        .lsp_store()
                                                        .update(cx, |lsp_store, cx| {
                                                            for (uri, mut diagnostics) in
                                                                file_diagnostics.drain()
                                                            {
                                                                diagnostics.dedup();
                                                                diagnostics_total += diagnostics.len();
                                                                updated_urls.insert(uri.clone());
                                                                lsp_store.merge_diagnostics(
                                                                    rust_analyzer_server,
                                                                    lsp::PublishDiagnosticsParams {
                                                                        uri,
                                                                        diagnostics,
                                                                        version: None,
                                                                    },
                                                                    &[],
                                                                    |diagnostic, _| {
                                                                        !is_outdated_cargo_fetch_diagnostic(diagnostic)
                                                                    },
                                                                    cx,
                                                                )?;
                                                            }
                                                            anyhow::Ok(())
                                                        })?;
                                                    editor.update_all_excerpts(window, cx);
                                                    anyhow::Ok(())
                                                })
                                                .ok()
                                                .transpose()
                                                .ok()
                                                .flatten()
                                                .is_none()
                                            {
                                                break;
                                            }
                                        }
                                    }
                                    FetchUpdate::Progress(message) => {
                                        if editor
                                            .update(cx, |editor, cx| {
                                                editor.update_cargo_fetch_status(
                                                    FetchStatus::Progress { message },
                                                    cx,
                                                );
                                            })
                                            .is_err()
                                        {
                                            return updated_urls;
                                        }
                                    }
                                }
                            }
                            // Final flush of diagnostics still buffered when the cargo
                            // process finished.
                            editor
                                .update_in(cx, |editor, window, cx| {
                                    editor
                                        .project
                                        .read(cx)
                                        .lsp_store()
                                        .update(cx, |lsp_store, cx| {
                                            for (uri, mut diagnostics) in
                                                file_diagnostics.drain()
                                            {
                                                diagnostics.dedup();
                                                diagnostics_total += diagnostics.len();
                                                updated_urls.insert(uri.clone());
                                                lsp_store.merge_diagnostics(
                                                    rust_analyzer_server,
                                                    lsp::PublishDiagnosticsParams {
                                                        uri,
                                                        diagnostics,
                                                        version: None,
                                                    },
                                                    &[],
                                                    |diagnostic, _| {
                                                        !is_outdated_cargo_fetch_diagnostic(diagnostic)
                                                    },
                                                    cx,
                                                )?;
                                            }
                                            anyhow::Ok(())
                                        })?;
                                    editor.update_all_excerpts(window, cx);
                                    anyhow::Ok(())
                                })
                                .ok();
                            log::info!("Fetched {diagnostics_total} cargo diagnostics for worktree {worktree_root:?}");
                            updated_urls
                        }));
                    }
                }
            } else {
                log::info!(
                    "No rust-analyzer language server found, skipping diagnostics fetch"
                );
            }
        }
        // Wait for every worktree fetch and gather all URLs that got diagnostics.
        let updated_urls = futures::future::join_all(worktree_diagnostics_tasks).await.into_iter().flatten().collect();
        if let Some(rust_analyzer_server) = rust_analyzer_server {
            editor
                .update_in(cx, |editor, window, cx| {
                    editor
                        .project
                        .read(cx)
                        .lsp_store()
                        .update(cx, |lsp_store, cx| {
                            // Clear cargo diagnostics from files that previously had
                            // some but received none during this run.
                            for uri_to_cleanup in paths_with_reported_cargo_diagnostics.difference(&updated_urls).cloned() {
                                lsp_store.merge_diagnostics(
                                    rust_analyzer_server,
                                    lsp::PublishDiagnosticsParams {
                                        uri: uri_to_cleanup,
                                        diagnostics: Vec::new(),
                                        version: None,
                                    },
                                    &[],
                                    |diagnostic, _| {
                                        !is_outdated_cargo_fetch_diagnostic(diagnostic)
                                    },
                                    cx,
                                ).ok();
                            }
                        });
                    editor.update_all_excerpts(window, cx);
                    editor.stop_cargo_diagnostics_fetch(cx);
                    cx.notify();
                })
                .ok();
        }
    }));
}
/// Surfaces the state of the cargo diagnostics fetch in the UI by injecting a
/// synthetic LSP `$/progress` notification for the rust-analyzer server.
/// No-op when no rust-analyzer server id was recorded for the fetch.
fn update_cargo_fetch_status(&self, status: FetchStatus, cx: &mut App) {
    let rust_analyzer = match self.cargo_diagnostics_fetch.rust_analyzer {
        Some(server_id) => server_id,
        None => return,
    };

    // Map our fetch status onto the corresponding LSP work-done payload.
    let work_done = match status {
        FetchStatus::Started => lsp::WorkDoneProgress::Begin(lsp::WorkDoneProgressBegin {
            title: "cargo".to_string(),
            cancellable: None,
            message: Some("Fetching cargo diagnostics".to_string()),
            percentage: None,
        }),
        FetchStatus::Progress { message } => {
            lsp::WorkDoneProgress::Report(lsp::WorkDoneProgressReport {
                message: Some(message),
                cancellable: None,
                percentage: None,
            })
        }
        FetchStatus::Finished => {
            lsp::WorkDoneProgress::End(lsp::WorkDoneProgressEnd { message: None })
        }
    };

    let lsp_store = self.project.read(cx).lsp_store();
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.on_lsp_progress(
            lsp::ProgressParams {
                // All fetch updates share one token so Begin/Report/End are
                // attributed to the same progress stream.
                token: lsp::NumberOrString::String(fetch_cargo_diagnostics_token()),
                value: lsp::ProgressParamsValue::WorkDone(work_done),
            },
            rust_analyzer,
            None,
            cx,
        )
    });
}
/// Stops the cargo diagnostics fetch: ends the LSP-style progress indicator
/// and drops the background task handle.
fn stop_cargo_diagnostics_fetch(&mut self, cx: &mut App) {
    self.update_cargo_fetch_status(FetchStatus::Finished, cx);
    // NOTE(review): clearing the handle presumably cancels a still-running
    // fetch (gpui tasks are typically cancelled on drop) — confirm.
    self.cargo_diagnostics_fetch.task = None;
    log::info!("Finished fetching cargo diagnostics");
}
/// Enqueue an update of all excerpts. Updates all paths that either
/// currently have diagnostics or are currently present in this view.
fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@ -422,20 +773,17 @@ impl ProjectDiagnosticsEditor {
})?;
for item in more {
let insert_pos = blocks
.binary_search_by(|existing| {
match existing.initial_range.start.cmp(&item.initial_range.start) {
Ordering::Equal => item
.initial_range
.end
.cmp(&existing.initial_range.end)
.reverse(),
other => other,
}
let i = blocks
.binary_search_by(|probe| {
probe
.initial_range
.start
.cmp(&item.initial_range.start)
.then(probe.initial_range.end.cmp(&item.initial_range.end))
.then(Ordering::Greater)
})
.unwrap_or_else(|pos| pos);
blocks.insert(insert_pos, item);
.unwrap_or_else(|i| i);
blocks.insert(i, item);
}
}
@ -448,10 +796,25 @@ impl ProjectDiagnosticsEditor {
&mut cx,
)
.await;
excerpt_ranges.push(ExcerptRange {
context: excerpt_range,
primary: b.initial_range.clone(),
})
let i = excerpt_ranges
.binary_search_by(|probe| {
probe
.context
.start
.cmp(&excerpt_range.start)
.then(probe.context.end.cmp(&excerpt_range.end))
.then(probe.primary.start.cmp(&b.initial_range.start))
.then(probe.primary.end.cmp(&b.initial_range.end))
.then(cmp::Ordering::Greater)
})
.unwrap_or_else(|i| i);
excerpt_ranges.insert(
i,
ExcerptRange {
context: excerpt_range,
primary: b.initial_range.clone(),
},
)
}
this.update_in(cx, |this, window, cx| {
@ -923,3 +1286,7 @@ fn is_line_blank_or_indented_less(
let line_indent = snapshot.line_indent_for_row(row);
line_indent.is_line_blank() || line_indent.len(tab_size) < indent_level
}
/// Token that identifies the synthetic LSP progress stream used while
/// fetching cargo diagnostics (see `update_cargo_fetch_status`).
fn fetch_cargo_diagnostics_token() -> String {
    String::from("fetch_cargo_diagnostics")
}

View file

@ -1,4 +1,7 @@
use crate::ProjectDiagnosticsEditor;
use std::sync::Arc;
use crate::cargo::cargo_diagnostics_sources;
use crate::{ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh};
use gpui::{Context, Entity, EventEmitter, ParentElement, Render, WeakEntity, Window};
use ui::prelude::*;
use ui::{IconButton, IconButtonShape, IconName, Tooltip};
@ -13,18 +16,28 @@ impl Render for ToolbarControls {
let mut include_warnings = false;
let mut has_stale_excerpts = false;
let mut is_updating = false;
let cargo_diagnostics_sources = Arc::new(
self.diagnostics()
.map(|editor| cargo_diagnostics_sources(editor.read(cx), cx))
.unwrap_or_default(),
);
let fetch_cargo_diagnostics = !cargo_diagnostics_sources.is_empty();
if let Some(editor) = self.diagnostics() {
let diagnostics = editor.read(cx);
include_warnings = diagnostics.include_warnings;
has_stale_excerpts = !diagnostics.paths_to_update.is_empty();
is_updating = diagnostics.update_excerpts_task.is_some()
|| diagnostics
.project
.read(cx)
.language_servers_running_disk_based_diagnostics(cx)
.next()
.is_some();
is_updating = if fetch_cargo_diagnostics {
diagnostics.cargo_diagnostics_fetch.task.is_some()
} else {
diagnostics.update_excerpts_task.is_some()
|| diagnostics
.project
.read(cx)
.language_servers_running_disk_based_diagnostics(cx)
.next()
.is_some()
};
}
let tooltip = if include_warnings {
@ -41,21 +54,57 @@ impl Render for ToolbarControls {
h_flex()
.gap_1()
.when(has_stale_excerpts, |div| {
div.child(
IconButton::new("update-excerpts", IconName::Update)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.disabled(is_updating)
.tooltip(Tooltip::text("Update excerpts"))
.on_click(cx.listener(|this, _, window, cx| {
if let Some(diagnostics) = this.diagnostics() {
diagnostics.update(cx, |diagnostics, cx| {
diagnostics.update_all_excerpts(window, cx);
});
}
})),
)
.map(|div| {
if is_updating {
div.child(
IconButton::new("stop-updating", IconName::StopFilled)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.tooltip(Tooltip::for_action_title(
"Stop diagnostics update",
&ToggleDiagnosticsRefresh,
))
.on_click(cx.listener(move |toolbar_controls, _, _, cx| {
if let Some(diagnostics) = toolbar_controls.diagnostics() {
diagnostics.update(cx, |diagnostics, cx| {
diagnostics.stop_cargo_diagnostics_fetch(cx);
diagnostics.update_excerpts_task = None;
cx.notify();
});
}
})),
)
} else {
div.child(
IconButton::new("refresh-diagnostics", IconName::Update)
.icon_color(Color::Info)
.shape(IconButtonShape::Square)
.disabled(!has_stale_excerpts && !fetch_cargo_diagnostics)
.tooltip(Tooltip::for_action_title(
"Refresh diagnostics",
&ToggleDiagnosticsRefresh,
))
.on_click(cx.listener({
move |toolbar_controls, _, window, cx| {
if let Some(diagnostics) = toolbar_controls.diagnostics() {
let cargo_diagnostics_sources =
Arc::clone(&cargo_diagnostics_sources);
diagnostics.update(cx, move |diagnostics, cx| {
if fetch_cargo_diagnostics {
diagnostics.fetch_cargo_diagnostics(
cargo_diagnostics_sources,
window,
cx,
);
} else {
diagnostics.update_all_excerpts(window, cx);
}
});
}
}
})),
)
}
})
.child(
IconButton::new("toggle-warnings", IconName::Warning)

View file

@ -2276,6 +2276,9 @@ impl EditorElement {
}
let display_row = multibuffer_point.to_display_point(snapshot).row();
if !range.contains(&display_row) {
return None;
}
if row_infos
.get((display_row - range.start).0 as usize)
.is_some_and(|row_info| row_info.expand_info.is_some())

View file

@ -239,6 +239,10 @@ impl CachedLspAdapter {
.process_diagnostics(params, server_id, existing_diagnostics)
}
/// Delegates to the underlying adapter's `retain_old_diagnostic`, which
/// decides whether a previously published diagnostic should survive a new
/// `lsp::PublishDiagnosticsParams` batch.
pub fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic, cx: &App) -> bool {
    self.adapter.retain_old_diagnostic(previous_diagnostic, cx)
}
pub fn diagnostic_message_to_markdown(&self, message: &str) -> Option<String> {
self.adapter.diagnostic_message_to_markdown(message)
}
@ -461,6 +465,11 @@ pub trait LspAdapter: 'static + Send + Sync {
) {
}
/// When processing new `lsp::PublishDiagnosticsParams` diagnostics, whether to retain previous one(s) or not.
fn retain_old_diagnostic(&self, _previous_diagnostic: &Diagnostic, _cx: &App) -> bool {
false
}
/// Post-processes completions provided by the language server.
async fn process_completions(&self, _: &mut [lsp::CompletionItem]) {}

View file

@ -298,9 +298,9 @@ impl super::LspAdapter for CLspAdapter {
&self,
params: &mut lsp::PublishDiagnosticsParams,
server_id: LanguageServerId,
buffer_access: Option<&'_ Buffer>,
buffer: Option<&'_ Buffer>,
) {
if let Some(buffer) = buffer_access {
if let Some(buffer) = buffer {
let snapshot = buffer.snapshot();
let inactive_regions = buffer
.get_diagnostics(server_id)

View file

@ -8,6 +8,7 @@ use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary};
use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
use project::project_settings::ProjectSettings;
use regex::Regex;
use serde_json::json;
@ -252,13 +253,22 @@ impl LspAdapter for RustLspAdapter {
}
fn disk_based_diagnostic_sources(&self) -> Vec<String> {
vec!["rustc".into()]
vec![CARGO_DIAGNOSTICS_SOURCE_NAME.to_owned()]
}
/// Progress token rust-analyzer reports while running flycheck, its
/// disk-based (cargo-backed) diagnostics pass.
fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
    Some("rust-analyzer/flycheck".into())
}
/// Retains a previously published diagnostic iff Zed is configured to fetch
/// cargo diagnostics itself and the diagnostic originated from cargo
/// (its `source` is `CARGO_DIAGNOSTICS_SOURCE_NAME`, i.e. "rustc").
fn retain_old_diagnostic(&self, previous_diagnostic: &Diagnostic, cx: &App) -> bool {
    let zed_provides_cargo_diagnostics = ProjectSettings::get_global(cx)
        .diagnostics
        .fetch_cargo_diagnostics();
    // Zed manages the lifecycle of cargo diagnostics when configured so.
    zed_provides_cargo_diagnostics
        && previous_diagnostic.source.as_deref() == Some(CARGO_DIAGNOSTICS_SOURCE_NAME)
}
fn process_diagnostics(
&self,
params: &mut lsp::PublishDiagnosticsParams,
@ -499,12 +509,27 @@ impl LspAdapter for RustLspAdapter {
"kinds": [ "cargo", "shell" ],
},
});
if let Some(ref mut original_experimental) = original.capabilities.experimental {
if let Some(original_experimental) = &mut original.capabilities.experimental {
merge_json_value_into(experimental, original_experimental);
} else {
original.capabilities.experimental = Some(experimental);
}
}
let zed_provides_cargo_diagnostics = ProjectSettings::get_global(cx)
.diagnostics
.fetch_cargo_diagnostics();
if zed_provides_cargo_diagnostics {
let disable_check_on_save = json!({
"checkOnSave": false,
});
if let Some(initialization_options) = &mut original.initialization_options {
merge_json_value_into(disable_check_on_save, initialization_options);
} else {
original.initialization_options = Some(disable_check_on_save);
}
}
Ok(original)
}
}

View file

@ -1686,7 +1686,10 @@ impl MultiBuffer {
let mut counts: Vec<usize> = Vec::new();
for range in expanded_ranges {
if let Some(last_range) = merged_ranges.last_mut() {
debug_assert!(last_range.context.start <= range.context.start);
debug_assert!(
last_range.context.start <= range.context.start,
"Last range: {last_range:?} Range: {range:?}"
);
if last_range.context.end >= range.context.start {
last_range.context.end = range.context.end.max(last_range.context.end);
*counts.last_mut().unwrap() += 1;

View file

@ -467,10 +467,11 @@ impl LocalLspStore {
adapter.process_diagnostics(&mut params, server_id, buffer);
}
this.update_diagnostics(
this.merge_diagnostics(
server_id,
params,
&adapter.disk_based_diagnostic_sources,
|diagnostic, cx| adapter.retain_old_diagnostic(diagnostic, cx),
cx,
)
.log_err();
@ -3395,7 +3396,7 @@ pub struct LanguageServerStatus {
pub name: String,
pub pending_work: BTreeMap<String, LanguageServerProgress>,
pub has_pending_diagnostic_updates: bool,
progress_tokens: HashSet<String>,
pub progress_tokens: HashSet<String>,
}
#[derive(Clone, Debug)]
@ -6237,6 +6238,13 @@ impl LspStore {
})
}
/// Looks up the id of a locally running language server by its name.
/// Returns `None` for non-local stores or when no such server is registered.
pub fn language_server_with_name(&self, name: &str, cx: &App) -> Option<LanguageServerId> {
    let local = self.as_local()?;
    let server_name = LanguageServerName::from(name);
    local.lsp_tree.read(cx).server_id_for_name(&server_name)
}
pub fn language_servers_for_local_buffer<'a>(
&'a self,
buffer: &Buffer,
@ -6380,10 +6388,10 @@ impl LspStore {
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
cx: &mut Context<Self>,
) -> anyhow::Result<()> {
self.merge_diagnostic_entries(server_id, abs_path, version, diagnostics, |_| false, cx)
self.merge_diagnostic_entries(server_id, abs_path, version, diagnostics, |_, _| false, cx)
}
pub fn merge_diagnostic_entries<F: Fn(&Diagnostic) -> bool + Clone>(
pub fn merge_diagnostic_entries<F: Fn(&Diagnostic, &App) -> bool + Clone>(
&mut self,
server_id: LanguageServerId,
abs_path: PathBuf,
@ -6416,7 +6424,7 @@ impl LspStore {
.get_diagnostics(server_id)
.into_iter()
.flat_map(|diag| {
diag.iter().filter(|v| filter(&v.diagnostic)).map(|v| {
diag.iter().filter(|v| filter(&v.diagnostic, cx)).map(|v| {
let start = Unclipped(v.range.start.to_point_utf16(&snapshot));
let end = Unclipped(v.range.end.to_point_utf16(&snapshot));
DiagnosticEntry {
@ -7021,27 +7029,38 @@ impl LspStore {
envelope: TypedEnvelope<proto::LanguageServerIdForName>,
mut cx: AsyncApp,
) -> Result<proto::LanguageServerIdForNameResponse> {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let name = &envelope.payload.name;
lsp_store
.update(&mut cx, |lsp_store, cx| {
let buffer = lsp_store.buffer_store.read(cx).get_existing(buffer_id)?;
let server_id = buffer.update(cx, |buffer, cx| {
lsp_store
.language_servers_for_local_buffer(buffer, cx)
.find_map(|(adapter, server)| {
if adapter.name.0.as_ref() == name {
Some(server.server_id())
} else {
None
}
})
});
Ok(server_id)
})?
.map(|server_id| proto::LanguageServerIdForNameResponse {
server_id: server_id.map(|id| id.to_proto()),
})
match envelope.payload.buffer_id {
Some(buffer_id) => {
let buffer_id = BufferId::new(buffer_id)?;
lsp_store
.update(&mut cx, |lsp_store, cx| {
let buffer = lsp_store.buffer_store.read(cx).get_existing(buffer_id)?;
let server_id = buffer.update(cx, |buffer, cx| {
lsp_store
.language_servers_for_local_buffer(buffer, cx)
.find_map(|(adapter, server)| {
if adapter.name.0.as_ref() == name {
Some(server.server_id())
} else {
None
}
})
});
Ok(server_id)
})?
.map(|server_id| proto::LanguageServerIdForNameResponse {
server_id: server_id.map(|id| id.to_proto()),
})
}
None => lsp_store.update(&mut cx, |lsp_store, cx| {
proto::LanguageServerIdForNameResponse {
server_id: lsp_store
.language_server_with_name(name, cx)
.map(|id| id.to_proto()),
}
}),
}
}
async fn handle_rename_project_entry(
@ -7517,7 +7536,7 @@ impl LspStore {
}
}
fn on_lsp_progress(
pub fn on_lsp_progress(
&mut self,
progress: lsp::ProgressParams,
language_server_id: LanguageServerId,
@ -8550,12 +8569,12 @@ impl LspStore {
language_server_id,
params,
disk_based_sources,
|_| false,
|_, _| false,
cx,
)
}
pub fn merge_diagnostics<F: Fn(&Diagnostic) -> bool + Clone>(
pub fn merge_diagnostics<F: Fn(&Diagnostic, &App) -> bool + Clone>(
&mut self,
language_server_id: LanguageServerId,
mut params: lsp::PublishDiagnosticsParams,

View file

@ -75,7 +75,7 @@ pub fn register_notifications(
server_id,
mapped_diagnostics,
&adapter.disk_based_diagnostic_sources,
|diag| !is_inactive_region(diag),
|diag, _| !is_inactive_region(diag),
cx,
)
.log_err();

View file

@ -5,6 +5,7 @@ use lsp::LanguageServer;
use crate::{LanguageServerPromptRequest, LspStore, LspStoreEvent};
pub const RUST_ANALYZER_NAME: &str = "rust-analyzer";
pub const CARGO_DIAGNOSTICS_SOURCE_NAME: &str = "rustc";
/// Experimental: Informs the end user about the state of the server
///

View file

@ -247,6 +247,20 @@ impl LanguageServerTree {
self.languages.adapter_for_name(name)
}
/// Returns the id of a server instance registered under `name`, scanning
/// every tree instance and all of its roots. Entries whose id has not been
/// assigned yet are skipped.
pub fn server_id_for_name(&self, name: &LanguageServerName) -> Option<LanguageServerId> {
    for instance in self.instances.values() {
        for root in instance.roots.values() {
            for (server_name, (data, _)) in root {
                if server_name == name {
                    // `data.id` may still be unset for servers that have not
                    // finished starting; keep looking in that case.
                    if let Some(id) = data.id.get() {
                        return Some(*id);
                    }
                }
            }
        }
    }
    None
}
fn adapters_for_language(
&self,
settings_location: SettingsLocation,

View file

@ -4748,6 +4748,42 @@ impl Project {
})
}
/// Resolves the id of a running language server by name, regardless of where
/// the project lives: synchronously via the LSP store for local projects, or
/// via a `LanguageServerIdForName` request (with no buffer attached) for
/// collab and SSH projects.
pub fn language_server_with_name(
    &self,
    name: &str,
    cx: &App,
) -> Task<Option<LanguageServerId>> {
    // Local projects can answer immediately from the LSP store.
    if self.is_local() {
        return Task::ready(self.lsp_store.read(cx).language_server_with_name(name, cx));
    }

    if let Some(project_id) = self.remote_id() {
        let request = self.client.request(proto::LanguageServerIdForName {
            project_id,
            buffer_id: None,
            name: name.to_string(),
        });
        return cx.background_spawn(async move {
            request.await.log_err()?.server_id.map(LanguageServerId::from_proto)
        });
    }

    if let Some(ssh_client) = &self.ssh_client {
        let request = ssh_client
            .read(cx)
            .proto_client()
            .request(proto::LanguageServerIdForName {
                project_id: SSH_PROJECT_ID,
                buffer_id: None,
                name: name.to_string(),
            });
        return cx.background_spawn(async move {
            request.await.log_err()?.server_id.map(LanguageServerId::from_proto)
        });
    }

    // Neither local nor connected to any remote: nothing to resolve.
    Task::ready(None)
}
pub fn language_server_id_for_name(
&self,
buffer: &Buffer,
@ -4769,7 +4805,7 @@ impl Project {
} else if let Some(project_id) = self.remote_id() {
let request = self.client.request(proto::LanguageServerIdForName {
project_id,
buffer_id: buffer.remote_id().to_proto(),
buffer_id: Some(buffer.remote_id().to_proto()),
name: name.to_string(),
});
cx.background_spawn(async move {
@ -4783,7 +4819,7 @@ impl Project {
.proto_client()
.request(proto::LanguageServerIdForName {
project_id: SSH_PROJECT_ID,
buffer_id: buffer.remote_id().to_proto(),
buffer_id: Some(buffer.remote_id().to_proto()),
name: name.to_string(),
});
cx.background_spawn(async move {

View file

@ -99,7 +99,7 @@ pub enum DirenvSettings {
Direct,
}
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct DiagnosticsSettings {
/// Whether or not to include warning diagnostics
#[serde(default = "true_value")]
@ -108,6 +108,18 @@ pub struct DiagnosticsSettings {
/// Settings for showing inline diagnostics
#[serde(default)]
pub inline: InlineDiagnosticsSettings,
/// Configuration, related to Rust language diagnostics.
#[serde(default)]
pub cargo: Option<CargoDiagnosticsSettings>,
}
impl DiagnosticsSettings {
    /// Whether Zed should run the cargo diagnostics fetch loop itself.
    ///
    /// Returns `false` when no `cargo` diagnostics section is configured.
    pub fn fetch_cargo_diagnostics(&self) -> bool {
        // `is_some_and` is the idiomatic (clippy-recommended) replacement for
        // `map_or(false, ...)`.
        self.cargo
            .as_ref()
            .is_some_and(|cargo_diagnostics| cargo_diagnostics.fetch_cargo_diagnostics)
    }
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)]
@ -141,6 +153,41 @@ pub struct InlineDiagnosticsSettings {
pub max_severity: Option<DiagnosticSeverity>,
}
/// Settings for the Zed-managed cargo diagnostics fetch loop, held in
/// `DiagnosticsSettings::cargo`.
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct CargoDiagnosticsSettings {
    /// When enabled, Zed runs `cargo check --message-format=json`-based commands and
    /// collect cargo diagnostics instead of rust-analyzer.
    ///
    /// Default: false
    #[serde(default)]
    pub fetch_cargo_diagnostics: bool,
    /// A command override for fetching the cargo diagnostics.
    /// First argument is the command, followed by the arguments.
    ///
    /// Default: ["cargo", "check", "--quiet", "--workspace", "--message-format=json", "--all-targets", "--keep-going"]
    #[serde(default = "default_diagnostics_fetch_command")]
    pub diagnostics_fetch_command: Vec<String>,
    /// Extra environment variables to pass to the diagnostics fetch command.
    ///
    /// Default: {}
    #[serde(default)]
    pub env: HashMap<String, String>,
}
/// Default command line for fetching cargo diagnostics:
/// `cargo check --quiet --workspace --message-format=json --all-targets --keep-going`.
fn default_diagnostics_fetch_command() -> Vec<String> {
    [
        "cargo",
        "check",
        "--quiet",
        "--workspace",
        "--message-format=json",
        "--all-targets",
        "--keep-going",
    ]
    .into_iter()
    .map(str::to_string)
    .collect()
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum DiagnosticSeverity {

View file

@ -696,7 +696,7 @@ message LspResponse {
message LanguageServerIdForName {
uint64 project_id = 1;
uint64 buffer_id = 2;
optional uint64 buffer_id = 2;
string name = 3;
}