Support tasks from rust-analyzer (#28359)

(and, in theory, any other LSP server that exposes a similar LSP extension endpoint)

Closes https://github.com/zed-industries/zed/issues/16160

* adds a way to disable tree-sitter tasks (the ones from the plugins, enabled by default) with the following language settings:
```json5
"languages": {
  "Rust": "tasks": {
      "enabled": false
    }
  }
}
```

* adds a way to disable LSP tasks (the ones from the rust-analyzer language server, enabled by default) with the following `lsp` settings:
```json5
"lsp": {
  "rust-analyzer": {
    "enable_lsp_tasks": false,
  }
}
```

* adds rust-analyzer tasks to the tasks modal and the gutter:

<img width="1728" alt="modal"
src="https://github.com/user-attachments/assets/22b9cee1-4ffb-4c9e-b1f1-d01e80e72508"
/>

<img width="396" alt="gutter"
src="https://github.com/user-attachments/assets/bd818079-e247-4332-bdb5-1b7cb1cce768"
/>


Release Notes:

- Added tasks from rust-analyzer
Kirill Bulatov 2025-04-08 15:07:56 -06:00 committed by GitHub
parent 763cc6dba3
commit 39c98ce882
24 changed files with 882 additions and 201 deletions


@@ -977,62 +977,69 @@ async fn location_links_from_proto(
let mut links = Vec::new();
for link in proto_links {
let origin = match link.origin {
Some(origin) => {
let buffer_id = BufferId::new(origin.buffer_id)?;
let buffer = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = origin
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin start"))?;
let end = origin
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin end"))?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
Some(Location {
buffer,
range: start..end,
})
}
None => None,
};
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = target
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
let end = target
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
let target = Location {
buffer,
range: start..end,
};
links.push(LocationLink { origin, target })
links.push(location_link_from_proto(link, &lsp_store, &mut cx).await?)
}
Ok(links)
}
pub async fn location_link_from_proto(
link: proto::LocationLink,
lsp_store: &Entity<LspStore>,
cx: &mut AsyncApp,
) -> Result<LocationLink> {
let origin = match link.origin {
Some(origin) => {
let buffer_id = BufferId::new(origin.buffer_id)?;
let buffer = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = origin
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin start"))?;
let end = origin
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin end"))?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
Some(Location {
buffer,
range: start..end,
})
}
None => None,
};
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = target
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
let end = target
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
let target = Location {
buffer,
range: start..end,
};
Ok(LocationLink { origin, target })
}
async fn location_links_from_lsp(
message: Option<lsp::GotoDefinitionResponse>,
lsp_store: Entity<LspStore>,
@@ -1115,6 +1122,65 @@ async fn location_links_from_lsp(
Ok(definitions)
}
pub async fn location_link_from_lsp(
link: lsp::LocationLink,
lsp_store: &Entity<LspStore>,
buffer: &Entity<Buffer>,
server_id: LanguageServerId,
cx: &mut AsyncApp,
) -> Result<LocationLink> {
let (lsp_adapter, language_server) =
language_server_for_buffer(&lsp_store, &buffer, server_id, cx)?;
let (origin_range, target_uri, target_range) = (
link.origin_selection_range,
link.target_uri,
link.target_selection_range,
);
let target_buffer_handle = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.open_local_buffer_via_lsp(
target_uri,
language_server.server_id(),
lsp_adapter.name.clone(),
cx,
)
})?
.await?;
cx.update(|cx| {
let origin_location = origin_range.map(|origin_range| {
let origin_buffer = buffer.read(cx);
let origin_start =
origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
let origin_end =
origin_buffer.clip_point_utf16(point_from_lsp(origin_range.end), Bias::Left);
Location {
buffer: buffer.clone(),
range: origin_buffer.anchor_after(origin_start)
..origin_buffer.anchor_before(origin_end),
}
});
let target_buffer = target_buffer_handle.read(cx);
let target_start =
target_buffer.clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
let target_end =
target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
let target_location = Location {
buffer: target_buffer_handle,
range: target_buffer.anchor_after(target_start)
..target_buffer.anchor_before(target_end),
};
LocationLink {
origin: origin_location,
target: target_location,
}
})
}
fn location_links_to_proto(
links: Vec<LocationLink>,
lsp_store: &mut LspStore,
@@ -1123,45 +1189,52 @@ fn location_links_to_proto(
) -> Vec<proto::LocationLink> {
links
.into_iter()
.map(|definition| {
let origin = definition.origin.map(|origin| {
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = origin.buffer.read(cx).remote_id().into();
proto::Location {
start: Some(serialize_anchor(&origin.range.start)),
end: Some(serialize_anchor(&origin.range.end)),
buffer_id,
}
});
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = definition.target.buffer.read(cx).remote_id().into();
let target = proto::Location {
start: Some(serialize_anchor(&definition.target.range.start)),
end: Some(serialize_anchor(&definition.target.range.end)),
buffer_id,
};
proto::LocationLink {
origin,
target: Some(target),
}
})
.map(|definition| location_link_to_proto(definition, lsp_store, peer_id, cx))
.collect()
}
pub fn location_link_to_proto(
location: LocationLink,
lsp_store: &mut LspStore,
peer_id: PeerId,
cx: &mut App,
) -> proto::LocationLink {
let origin = location.origin.map(|origin| {
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = origin.buffer.read(cx).remote_id().into();
proto::Location {
start: Some(serialize_anchor(&origin.range.start)),
end: Some(serialize_anchor(&origin.range.end)),
buffer_id,
}
});
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&location.target.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = location.target.buffer.read(cx).remote_id().into();
let target = proto::Location {
start: Some(serialize_anchor(&location.target.range.start)),
end: Some(serialize_anchor(&location.target.range.end)),
buffer_id,
};
proto::LocationLink {
origin,
target: Some(target),
}
}
#[async_trait(?Send)]
impl LspCommand for GetReferences {
type Response = Vec<Location>;


@@ -280,7 +280,7 @@ impl LocalLspStore {
let initialization_params = cx.update(|cx| {
let mut params = language_server.default_initialize_params(cx);
params.initialization_options = initialization_options;
adapter.adapter.prepare_initialize_params(params)
adapter.adapter.prepare_initialize_params(params, cx)
})??;
Self::setup_lsp_messages(
@@ -3428,6 +3428,9 @@ impl LspStore {
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::OpenDocs>);
client.add_entity_request_handler(
Self::handle_lsp_command::<lsp_ext_command::GetLspRunnables>,
);
client.add_entity_request_handler(
Self::handle_lsp_command::<lsp_ext_command::SwitchSourceHeader>,
);
@@ -8368,7 +8371,6 @@ impl LspStore {
self.buffer_store.update(cx, |buffer_store, cx| {
for buffer in buffer_store.buffers() {
buffer.update(cx, |buffer, cx| {
// TODO kb clean inlays
buffer.update_diagnostics(server_id, DiagnosticSet::new([], buffer), cx);
buffer.set_completion_triggers(server_id, Default::default(), cx);
});


@@ -1,12 +1,27 @@
use crate::{lsp_command::LspCommand, lsp_store::LspStore, make_text_document_identifier};
use crate::{
LocationLink,
lsp_command::{
LspCommand, location_link_from_lsp, location_link_from_proto, location_link_to_proto,
},
lsp_store::LspStore,
make_text_document_identifier,
};
use anyhow::{Context as _, Result};
use async_trait::async_trait;
use collections::HashMap;
use gpui::{App, AsyncApp, Entity};
use language::{Buffer, point_to_lsp, proto::deserialize_anchor};
use language::{
Buffer, point_to_lsp,
proto::{deserialize_anchor, serialize_anchor},
};
use lsp::{LanguageServer, LanguageServerId};
use rpc::proto::{self, PeerId};
use serde::{Deserialize, Serialize};
use std::{path::Path, sync::Arc};
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use task::TaskTemplate;
use text::{BufferId, PointUtf16, ToPointUtf16};
pub enum LspExpandMacro {}
@@ -363,3 +378,245 @@ impl LspCommand for SwitchSourceHeader {
BufferId::new(message.buffer_id)
}
}
// https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#runnables
// Taken from https://github.com/rust-lang/rust-analyzer/blob/a73a37a757a58b43a796d3eb86a1f7dfd0036659/crates/rust-analyzer/src/lsp/ext.rs#L425-L489
pub enum Runnables {}
impl lsp::request::Request for Runnables {
type Params = RunnablesParams;
type Result = Vec<Runnable>;
const METHOD: &'static str = "experimental/runnables";
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RunnablesParams {
pub text_document: lsp::TextDocumentIdentifier,
pub position: Option<lsp::Position>,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Runnable {
pub label: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub location: Option<lsp::LocationLink>,
pub kind: RunnableKind,
pub args: RunnableArgs,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum RunnableArgs {
Cargo(CargoRunnableArgs),
Shell(ShellRunnableArgs),
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum RunnableKind {
Cargo,
Shell,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CargoRunnableArgs {
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub environment: HashMap<String, String>,
pub cwd: PathBuf,
/// Command to be executed instead of cargo
pub override_cargo: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub workspace_root: Option<PathBuf>,
// command, --package and --lib stuff
pub cargo_args: Vec<String>,
// stuff after --
pub executable_args: Vec<String>,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ShellRunnableArgs {
#[serde(skip_serializing_if = "HashMap::is_empty")]
pub environment: HashMap<String, String>,
pub cwd: PathBuf,
pub program: String,
pub args: Vec<String>,
}
#[derive(Debug)]
pub struct GetLspRunnables {
pub buffer_id: BufferId,
pub position: Option<text::Anchor>,
}
#[derive(Debug, Default)]
pub struct LspRunnables {
pub runnables: Vec<(Option<LocationLink>, TaskTemplate)>,
}
#[async_trait(?Send)]
impl LspCommand for GetLspRunnables {
type Response = LspRunnables;
type LspRequest = Runnables;
type ProtoRequest = proto::LspExtRunnables;
fn display_name(&self) -> &str {
"LSP Runnables"
}
fn to_lsp(
&self,
path: &Path,
buffer: &Buffer,
_: &Arc<LanguageServer>,
_: &App,
) -> Result<RunnablesParams> {
let url = match lsp::Url::from_file_path(path) {
Ok(url) => url,
Err(()) => anyhow::bail!("Failed to parse path {path:?} as lsp::Url"),
};
Ok(RunnablesParams {
text_document: lsp::TextDocumentIdentifier::new(url),
position: self
.position
.map(|anchor| point_to_lsp(anchor.to_point_utf16(&buffer.snapshot()))),
})
}
async fn response_from_lsp(
self,
lsp_runnables: Vec<Runnable>,
lsp_store: Entity<LspStore>,
buffer: Entity<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncApp,
) -> Result<LspRunnables> {
let mut runnables = Vec::with_capacity(lsp_runnables.len());
for runnable in lsp_runnables {
let location = match runnable.location {
Some(location) => Some(
location_link_from_lsp(location, &lsp_store, &buffer, server_id, &mut cx)
.await?,
),
None => None,
};
let mut task_template = TaskTemplate::default();
task_template.label = runnable.label;
match runnable.args {
RunnableArgs::Cargo(cargo) => {
match cargo.override_cargo {
Some(override_cargo) => {
let mut override_parts =
override_cargo.split(" ").map(|s| s.to_string());
task_template.command = override_parts
.next()
.unwrap_or_else(|| override_cargo.clone());
task_template.args.extend(override_parts);
}
None => task_template.command = "cargo".to_string(),
};
task_template.env = cargo.environment;
task_template.cwd = Some(
cargo
.workspace_root
.unwrap_or(cargo.cwd)
.to_string_lossy()
.to_string(),
);
task_template.args.extend(cargo.cargo_args);
if !cargo.executable_args.is_empty() {
task_template.args.push("--".to_string());
task_template.args.extend(cargo.executable_args);
}
}
RunnableArgs::Shell(shell) => {
task_template.command = shell.program;
task_template.args = shell.args;
task_template.env = shell.environment;
task_template.cwd = Some(shell.cwd.to_string_lossy().to_string());
}
}
runnables.push((location, task_template));
}
Ok(LspRunnables { runnables })
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtRunnables {
proto::LspExtRunnables {
project_id,
buffer_id: buffer.remote_id().to_proto(),
position: self.position.as_ref().map(serialize_anchor),
}
}
async fn from_proto(
message: proto::LspExtRunnables,
_: Entity<LspStore>,
_: Entity<Buffer>,
_: AsyncApp,
) -> Result<Self> {
let buffer_id = Self::buffer_id_from_proto(&message)?;
let position = message.position.and_then(deserialize_anchor);
Ok(Self {
buffer_id,
position,
})
}
fn response_to_proto(
response: LspRunnables,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut App,
) -> proto::LspExtRunnablesResponse {
proto::LspExtRunnablesResponse {
runnables: response
.runnables
.into_iter()
.map(|(location, task_template)| proto::LspRunnable {
location: location
.map(|location| location_link_to_proto(location, lsp_store, peer_id, cx)),
task_template: serde_json::to_vec(&task_template).unwrap(),
})
.collect(),
}
}
async fn response_from_proto(
self,
message: proto::LspExtRunnablesResponse,
lsp_store: Entity<LspStore>,
_: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<LspRunnables> {
let mut runnables = LspRunnables {
runnables: Vec::new(),
};
for lsp_runnable in message.runnables {
let location = match lsp_runnable.location {
Some(location) => {
Some(location_link_from_proto(location, &lsp_store, &mut cx).await?)
}
None => None,
};
let task_template = serde_json::from_slice(&lsp_runnable.task_template)
.context("deserializing task template from proto")?;
runnables.runnables.push((location, task_template));
}
Ok(runnables)
}
fn buffer_id_from_proto(message: &proto::LspExtRunnables) -> Result<BufferId> {
BufferId::new(message.buffer_id)
}
}
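
Not part of the diff, but to make the `response_from_lsp` conversion above concrete, here is a minimal, self-contained sketch of how a Cargo runnable's pieces are joined into one command line, following the same ordering rules (an `override_cargo` string replaces `cargo`, `cargo_args` come next, and `executable_args` go after `--`); the package and test names are invented for the example:

```rust
// Illustrative sketch only; follows the same ordering rules that
// `GetLspRunnables::response_from_lsp` uses when it builds a `TaskTemplate`.
fn cargo_command_line(
    override_cargo: Option<&str>,
    cargo_args: &[&str],
    executable_args: &[&str],
) -> Vec<String> {
    let mut command_line = Vec::new();
    match override_cargo {
        // e.g. "cross" or "cargo +nightly"
        Some(override_cargo) => {
            command_line.extend(override_cargo.split(' ').map(str::to_string))
        }
        None => command_line.push("cargo".to_string()),
    }
    command_line.extend(cargo_args.iter().map(|arg| arg.to_string()));
    if !executable_args.is_empty() {
        // Everything after `--` is forwarded to the produced test/bin executable.
        command_line.push("--".to_string());
        command_line.extend(executable_args.iter().map(|arg| arg.to_string()));
    }
    command_line
}

fn main() {
    // Roughly what a "run this test" runnable from rust-analyzer resolves to.
    let line = cargo_command_line(
        None,
        &["test", "--package", "foo", "--lib"],
        &["tests::it_works", "--exact"],
    );
    assert_eq!(
        line.join(" "),
        "cargo test --package foo --lib -- tests::it_works --exact"
    );
}
```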


@@ -8,7 +8,7 @@ pub const RUST_ANALYZER_NAME: &str = "rust-analyzer";
/// Experimental: Informs the end user about the state of the server
///
/// [Rust Analyzer Specification](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/lsp-extensions.md#server-status)
/// [Rust Analyzer Specification](https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#server-status)
#[derive(Debug)]
enum ServerStatus {}
@@ -38,13 +38,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
let name = language_server.name();
let server_id = language_server.server_id();
let this = lsp_store;
language_server
.on_notification::<ServerStatus, _>({
let name = name.to_string();
move |params, cx| {
let this = this.clone();
let name = name.to_string();
if let Some(ref message) = params.message {
let message = message.trim();
@@ -53,10 +50,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
"Language server {name} (id {server_id}) status update: {message}"
);
match params.health {
ServerHealthStatus::Ok => log::info!("{}", formatted_message),
ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
ServerHealthStatus::Ok => log::info!("{formatted_message}"),
ServerHealthStatus::Warning => log::warn!("{formatted_message}"),
ServerHealthStatus::Error => {
log::error!("{}", formatted_message);
log::error!("{formatted_message}");
let (tx, _rx) = smol::channel::bounded(1);
let request = LanguageServerPromptRequest {
level: PromptLevel::Critical,
@@ -65,7 +62,7 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
response_channel: tx,
lsp_name: name.clone(),
};
let _ = this
lsp_store
.update(cx, |_, cx| {
cx.emit(LspStoreEvent::LanguageServerPrompt(request));
})


@@ -25,7 +25,7 @@ use std::{
time::Duration,
};
use task::{TaskTemplates, VsCodeTaskFile};
use util::ResultExt;
use util::{ResultExt, serde::default_true};
use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId};
use crate::{
@@ -278,12 +278,28 @@ pub struct BinarySettings {
pub ignore_system_version: Option<bool>,
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
pub binary: Option<BinarySettings>,
pub initialization_options: Option<serde_json::Value>,
pub settings: Option<serde_json::Value>,
/// If the server supports sending tasks over LSP extensions,
/// this setting can be used to enable or disable them in Zed.
/// Default: true
#[serde(default = "default_true")]
pub enable_lsp_tasks: bool,
}
impl Default for LspSettings {
fn default() -> Self {
Self {
binary: None,
initialization_options: None,
settings: None,
enable_lsp_tasks: true,
}
}
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema)]
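
As a side note on the `enable_lsp_tasks` change above, here is a small self-contained sketch (assuming `serde` with the derive feature plus `serde_json`, and that `util::serde::default_true` is the usual `fn() -> bool { true }` helper) of why omitting the field keeps LSP tasks enabled while an explicit `false` disables them:

```rust
use serde::Deserialize;

// Stand-in for the `default_true` helper referenced by the setting above.
fn default_true() -> bool {
    true
}

// Stripped-down stand-in for `LspSettings`, keeping only the new field.
#[derive(Debug, Deserialize)]
struct ServerSettings {
    #[serde(default = "default_true")]
    enable_lsp_tasks: bool,
}

fn main() -> serde_json::Result<()> {
    // Field omitted: LSP tasks stay enabled (the documented default).
    let defaults: ServerSettings = serde_json::from_str("{}")?;
    assert!(defaults.enable_lsp_tasks);

    // Explicit opt-out, matching the `"lsp": { "rust-analyzer": { ... } }`
    // example in the description at the top.
    let disabled: ServerSettings = serde_json::from_str(r#"{ "enable_lsp_tasks": false }"#)?;
    assert!(!disabled.enable_lsp_tasks);
    Ok(())
}
```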


@@ -459,6 +459,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
active_worktree_context: None,
other_worktree_contexts: Vec::new(),
lsp_task_sources: HashMap::default(),
latest_selection: None,
},
cx,
)
@@ -481,6 +483,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
worktree_context
})),
other_worktree_contexts: Vec::new(),
lsp_task_sources: HashMap::default(),
latest_selection: None,
},
cx,
)
@@ -797,7 +801,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
);
assert_eq!(
fake_json_server


@@ -12,13 +12,17 @@ use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use gpui::{App, AppContext as _, Entity, SharedString, Task};
use itertools::Itertools;
use language::{ContextProvider, File, Language, LanguageToolchainStore, Location};
use language::{
ContextProvider, File, Language, LanguageToolchainStore, Location,
language_settings::language_settings,
};
use lsp::{LanguageServerId, LanguageServerName};
use settings::{InvalidSettingsError, TaskKind, parse_json_with_comments};
use task::{
DebugTaskDefinition, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
TaskVariables, VariableName,
};
use text::{Point, ToPoint};
use text::{BufferId, Point, ToPoint};
use util::{NumericPrefixWithSuffix, ResultExt as _, paths::PathExt as _, post_inc};
use worktree::WorktreeId;
@@ -55,6 +59,8 @@ pub enum TaskSourceKind {
},
/// Languages-specific tasks coming from extensions.
Language { name: SharedString },
/// Language-specific tasks coming from LSP servers.
Lsp(LanguageServerId),
}
/// A collection of task contexts, derived from the current state of the workspace.
@@ -68,6 +74,8 @@ pub struct TaskContexts {
pub active_worktree_context: Option<(WorktreeId, TaskContext)>,
/// If there are multiple worktrees in the workspace, all non-active ones are included here.
pub other_worktree_contexts: Vec<(WorktreeId, TaskContext)>,
pub lsp_task_sources: HashMap<LanguageServerName, Vec<BufferId>>,
pub latest_selection: Option<text::Anchor>,
}
impl TaskContexts {
@@ -104,18 +112,19 @@ impl TaskContexts {
impl TaskSourceKind {
pub fn to_id_base(&self) -> String {
match self {
TaskSourceKind::UserInput => "oneshot".to_string(),
TaskSourceKind::AbsPath { id_base, abs_path } => {
Self::UserInput => "oneshot".to_string(),
Self::AbsPath { id_base, abs_path } => {
format!("{id_base}_{}", abs_path.display())
}
TaskSourceKind::Worktree {
Self::Worktree {
id,
id_base,
directory_in_worktree,
} => {
format!("{id_base}_{id}_{}", directory_in_worktree.display())
}
TaskSourceKind::Language { name } => format!("language_{name}"),
Self::Language { name } => format!("language_{name}"),
Self::Lsp(server_id) => format!("lsp_{server_id}"),
}
}
}
@@ -156,6 +165,11 @@ impl Inventory {
});
let global_tasks = self.global_templates_from_settings();
let language_tasks = language
.filter(|language| {
language_settings(Some(language.name()), file.as_ref(), cx)
.tasks
.enabled
})
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
.into_iter()
.flat_map(|tasks| tasks.0.into_iter())
@@ -171,10 +185,10 @@ impl Inventory {
/// Joins the new resolutions with the resolved tasks that were used (spawned) before,
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
/// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
pub fn used_and_current_resolved_tasks(
&self,
task_contexts: &TaskContexts,
cx: &App,
pub fn used_and_current_resolved_tasks<'a>(
&'a self,
task_contexts: &'a TaskContexts,
cx: &'a App,
) -> (
Vec<(TaskSourceKind, ResolvedTask)>,
Vec<(TaskSourceKind, ResolvedTask)>,
@@ -227,7 +241,13 @@ impl Inventory {
let not_used_score = post_inc(&mut lru_score);
let global_tasks = self.global_templates_from_settings();
let language_tasks = language
.filter(|language| {
language_settings(Some(language.name()), file.as_ref(), cx)
.tasks
.enabled
})
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
.into_iter()
.flat_map(|tasks| tasks.0.into_iter())
@@ -475,6 +495,7 @@ fn task_lru_comparator(
fn task_source_kind_preference(kind: &TaskSourceKind) -> u32 {
match kind {
TaskSourceKind::Lsp(..) => 0,
TaskSourceKind::Language { .. } => 1,
TaskSourceKind::UserInput => 2,
TaskSourceKind::Worktree { .. } => 3,
@@ -698,7 +719,7 @@ mod tests {
async fn test_task_list_sorting(cx: &mut TestAppContext) {
init_test(cx);
let inventory = cx.update(Inventory::new);
let initial_tasks = resolved_task_names(&inventory, None, cx).await;
let initial_tasks = resolved_task_names(&inventory, None, cx);
assert!(
initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}"
@@ -732,7 +753,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
resolved_task_names(&inventory, None, cx).await,
resolved_task_names(&inventory, None, cx),
&expected_initial_state,
"Tasks with equal amount of usages should be sorted alphanumerically"
);
@@ -743,7 +764,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
resolved_task_names(&inventory, None, cx).await,
resolved_task_names(&inventory, None, cx),
vec![
"2_task".to_string(),
"1_a_task".to_string(),
@@ -761,7 +782,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
resolved_task_names(&inventory, None, cx).await,
resolved_task_names(&inventory, None, cx),
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -797,7 +818,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
resolved_task_names(&inventory, None, cx).await,
resolved_task_names(&inventory, None, cx),
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -814,7 +835,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
resolved_task_names(&inventory, None, cx).await,
resolved_task_names(&inventory, None, cx),
vec![
"11_hello".to_string(),
"3_task".to_string(),
@@ -987,21 +1008,21 @@ mod tests {
TaskStore::init(None);
}
async fn resolved_task_names(
fn resolved_task_names(
inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<String> {
let (used, current) = inventory.update(cx, |inventory, cx| {
inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default()));
inventory.used_and_current_resolved_tasks(&task_contexts, cx)
});
used.into_iter()
.chain(current)
.map(|(_, task)| task.original_task().label.clone())
.collect()
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
used.into_iter()
.chain(current)
.map(|(_, task)| task.original_task().label.clone())
.collect()
})
}
fn mock_tasks_from_names<'a>(task_names: impl Iterator<Item = &'a str> + 'a) -> String {
@@ -1024,17 +1045,17 @@ mod tests {
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> {
let (used, current) = inventory.update(cx, |inventory, cx| {
inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default()));
inventory.used_and_current_resolved_tasks(&task_contexts, cx)
});
let mut all = used;
all.extend(current);
all.into_iter()
.map(|(source_kind, task)| (source_kind, task.resolved_label))
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
.collect()
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
let mut all = used;
all.extend(current);
all.into_iter()
.map(|(source_kind, task)| (source_kind, task.resolved_label))
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
.collect()
})
}
}