Add initial package.json scripts task autodetection (#32497)

Now, every JS/TS-related file gets its `package.json` script
contents added as tasks:

<img width="1020" alt="image"
src="https://github.com/user-attachments/assets/5bf80f80-fd72-4ba8-8ccf-418872895a25"
/>

To achieve that, `fn associated_tasks` on the `ContextProvider` trait was
made asynchronous, and the related code was adjusted accordingly.

Release Notes:

- Added initial `package.json` scripts task autodetection

---------

Co-authored-by: Piotr Osiewicz <piotr@zed.dev>
This commit is contained in:
Kirill Bulatov 2025-06-11 01:16:27 +03:00 committed by GitHub
parent 0c0933d1c0
commit 9c513223c4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 782 additions and 661 deletions

View file

@ -194,11 +194,12 @@ impl NewProcessModal {
return Ok(()); return Ok(());
}; };
let (used_tasks, current_resolved_tasks) = let (used_tasks, current_resolved_tasks) = task_inventory
task_inventory.update(cx, |task_inventory, cx| { .update(cx, |task_inventory, cx| {
task_inventory task_inventory
.used_and_current_resolved_tasks(&task_contexts, cx) .used_and_current_resolved_tasks(task_contexts.clone(), cx)
})?; })?
.await;
debug_picker debug_picker
.update_in(cx, |picker, window, cx| { .update_in(cx, |picker, window, cx| {

View file

@ -824,14 +824,14 @@ impl RunningState {
let config_is_valid = request_type.is_ok(); let config_is_valid = request_type.is_ok();
let build_output = if let Some(build) = build { let build_output = if let Some(build) = build {
let (task, locator_name) = match build { let (task_template, locator_name) = match build {
BuildTaskDefinition::Template { BuildTaskDefinition::Template {
task_template, task_template,
locator_name, locator_name,
} => (task_template, locator_name), } => (task_template, locator_name),
BuildTaskDefinition::ByName(ref label) => { BuildTaskDefinition::ByName(ref label) => {
let Some(task) = task_store.update(cx, |this, cx| { let task = task_store.update(cx, |this, cx| {
this.task_inventory().and_then(|inventory| { this.task_inventory().map(|inventory| {
inventory.read(cx).task_template_by_label( inventory.read(cx).task_template_by_label(
buffer, buffer,
worktree_id, worktree_id,
@ -839,14 +839,15 @@ impl RunningState {
cx, cx,
) )
}) })
})? })?;
else { let task = match task {
anyhow::bail!("Couldn't find task template for {:?}", build) Some(task) => task.await,
}; None => None,
}.with_context(|| format!("Couldn't find task template for {build:?}"))?;
(task, None) (task, None)
} }
}; };
let Some(task) = task.resolve_task("debug-build-task", &task_context) else { let Some(task) = task_template.resolve_task("debug-build-task", &task_context) else {
anyhow::bail!("Could not resolve task variables within a debug scenario"); anyhow::bail!("Could not resolve task variables within a debug scenario");
}; };

View file

@ -14038,7 +14038,8 @@ impl Editor {
prefer_lsp && !lsp_tasks_by_rows.is_empty(), prefer_lsp && !lsp_tasks_by_rows.is_empty(),
new_rows, new_rows,
cx.clone(), cx.clone(),
); )
.await;
editor editor
.update(cx, |editor, _| { .update(cx, |editor, _| {
editor.clear_tasks(); editor.clear_tasks();
@ -14068,35 +14069,40 @@ impl Editor {
snapshot: DisplaySnapshot, snapshot: DisplaySnapshot,
prefer_lsp: bool, prefer_lsp: bool,
runnable_ranges: Vec<RunnableRange>, runnable_ranges: Vec<RunnableRange>,
mut cx: AsyncWindowContext, cx: AsyncWindowContext,
) -> Vec<((BufferId, BufferRow), RunnableTasks)> { ) -> Task<Vec<((BufferId, BufferRow), RunnableTasks)>> {
runnable_ranges cx.spawn(async move |cx| {
.into_iter() let mut runnable_rows = Vec::with_capacity(runnable_ranges.len());
.filter_map(|mut runnable| { for mut runnable in runnable_ranges {
let mut tasks = cx let Some(tasks) = cx
.update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx))
.ok()?; .ok()
else {
continue;
};
let mut tasks = tasks.await;
if prefer_lsp { if prefer_lsp {
tasks.retain(|(task_kind, _)| { tasks.retain(|(task_kind, _)| {
!matches!(task_kind, TaskSourceKind::Language { .. }) !matches!(task_kind, TaskSourceKind::Language { .. })
}); });
} }
if tasks.is_empty() { if tasks.is_empty() {
return None; continue;
} }
let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot); let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot);
let Some(row) = snapshot
let row = snapshot
.buffer_snapshot .buffer_snapshot
.buffer_line_for_row(MultiBufferRow(point.row))? .buffer_line_for_row(MultiBufferRow(point.row))
.1 .map(|(_, range)| range.start.row)
.start else {
.row; continue;
};
let context_range = let context_range =
BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end); BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end);
Some(( runnable_rows.push((
(runnable.buffer_id, row), (runnable.buffer_id, row),
RunnableTasks { RunnableTasks {
templates: tasks, templates: tasks,
@ -14107,16 +14113,17 @@ impl Editor {
column: point.column, column: point.column,
extra_variables: runnable.extra_captures, extra_variables: runnable.extra_captures,
}, },
)) ));
}) }
.collect() runnable_rows
})
} }
fn templates_with_tags( fn templates_with_tags(
project: &Entity<Project>, project: &Entity<Project>,
runnable: &mut Runnable, runnable: &mut Runnable,
cx: &mut App, cx: &mut App,
) -> Vec<(TaskSourceKind, TaskTemplate)> { ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| { let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| {
let (worktree_id, file) = project let (worktree_id, file) = project
.buffer_for_id(runnable.buffer, cx) .buffer_for_id(runnable.buffer, cx)
@ -14131,39 +14138,40 @@ impl Editor {
) )
}); });
let mut templates_with_tags = mem::take(&mut runnable.tags) let tags = mem::take(&mut runnable.tags);
.into_iter() let language = runnable.language.clone();
.flat_map(|RunnableTag(tag)| { cx.spawn(async move |cx| {
inventory let mut templates_with_tags = Vec::new();
.as_ref() if let Some(inventory) = inventory {
.into_iter() for RunnableTag(tag) in tags {
.flat_map(|inventory| { let Ok(new_tasks) = inventory.update(cx, |inventory, cx| {
inventory.read(cx).list_tasks( inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx)
file.clone(), }) else {
Some(runnable.language.clone()), return templates_with_tags;
worktree_id, };
cx, templates_with_tags.extend(new_tasks.await.into_iter().filter(
) move |(_, template)| {
}) template.tags.iter().any(|source_tag| source_tag == &tag)
.filter(move |(_, template)| { },
template.tags.iter().any(|source_tag| source_tag == &tag) ));
}) }
})
.sorted_by_key(|(kind, _)| kind.to_owned())
.collect::<Vec<_>>();
if let Some((leading_tag_source, _)) = templates_with_tags.first() {
// Strongest source wins; if we have worktree tag binding, prefer that to
// global and language bindings;
// if we have a global binding, prefer that to language binding.
let first_mismatch = templates_with_tags
.iter()
.position(|(tag_source, _)| tag_source != leading_tag_source);
if let Some(index) = first_mismatch {
templates_with_tags.truncate(index);
} }
} templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned());
templates_with_tags if let Some((leading_tag_source, _)) = templates_with_tags.first() {
// Strongest source wins; if we have worktree tag binding, prefer that to
// global and language bindings;
// if we have a global binding, prefer that to language binding.
let first_mismatch = templates_with_tags
.iter()
.position(|(tag_source, _)| tag_source != leading_tag_source);
if let Some(index) = first_mismatch {
templates_with_tags.truncate(index);
}
}
templates_with_tags
})
} }
pub fn move_to_enclosing_bracket( pub fn move_to_enclosing_bracket(

View file

@ -1,6 +1,6 @@
use std::{ops::Range, path::PathBuf, sync::Arc}; use std::{ops::Range, path::PathBuf, sync::Arc};
use crate::{LanguageToolchainStore, Location, Runnable}; use crate::{File, LanguageToolchainStore, Location, Runnable};
use anyhow::Result; use anyhow::Result;
use collections::HashMap; use collections::HashMap;
@ -39,10 +39,11 @@ pub trait ContextProvider: Send + Sync {
/// Provides all tasks, associated with the current language. /// Provides all tasks, associated with the current language.
fn associated_tasks( fn associated_tasks(
&self, &self,
_: Option<Arc<dyn crate::File>>, _: Arc<dyn Fs>,
_cx: &App, _: Option<Arc<dyn File>>,
) -> Option<TaskTemplates> { _: &App,
None ) -> Task<Option<TaskTemplates>> {
Task::ready(None)
} }
/// A language server name, that can return tasks using LSP (ext) for this language. /// A language server name, that can return tasks using LSP (ext) for this language.

View file

@ -510,9 +510,10 @@ impl ContextProvider for GoContextProvider {
fn associated_tasks( fn associated_tasks(
&self, &self,
_: Option<Arc<dyn language::File>>, _: Arc<dyn Fs>,
_: Option<Arc<dyn File>>,
_: &App, _: &App,
) -> Option<TaskTemplates> { ) -> Task<Option<TaskTemplates>> {
let package_cwd = if GO_PACKAGE_TASK_VARIABLE.template_value() == "." { let package_cwd = if GO_PACKAGE_TASK_VARIABLE.template_value() == "." {
None None
} else { } else {
@ -520,7 +521,7 @@ impl ContextProvider for GoContextProvider {
}; };
let module_cwd = Some(GO_MODULE_ROOT_TASK_VARIABLE.template_value()); let module_cwd = Some(GO_MODULE_ROOT_TASK_VARIABLE.template_value());
Some(TaskTemplates(vec![ Task::ready(Some(TaskTemplates(vec![
TaskTemplate { TaskTemplate {
label: format!( label: format!(
"go test {} -run {}", "go test {} -run {}",
@ -631,7 +632,7 @@ impl ContextProvider for GoContextProvider {
cwd: module_cwd.clone(), cwd: module_cwd.clone(),
..TaskTemplate::default() ..TaskTemplate::default()
}, },
])) ])))
} }
} }

View file

@ -481,9 +481,10 @@ impl ContextProvider for PythonContextProvider {
fn associated_tasks( fn associated_tasks(
&self, &self,
_: Arc<dyn Fs>,
file: Option<Arc<dyn language::File>>, file: Option<Arc<dyn language::File>>,
cx: &App, cx: &App,
) -> Option<TaskTemplates> { ) -> Task<Option<TaskTemplates>> {
let test_runner = selected_test_runner(file.as_ref(), cx); let test_runner = selected_test_runner(file.as_ref(), cx);
let mut tasks = vec![ let mut tasks = vec![
@ -587,7 +588,7 @@ impl ContextProvider for PythonContextProvider {
} }
}); });
Some(TaskTemplates(tasks)) Task::ready(Some(TaskTemplates(tasks)))
} }
} }

View file

@ -8,6 +8,7 @@ use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release}; use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
pub use language::*; pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary}; use lsp::{InitializeParams, LanguageServerBinary};
use project::Fs;
use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME; use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
use project::project_settings::ProjectSettings; use project::project_settings::ProjectSettings;
use regex::Regex; use regex::Regex;
@ -628,9 +629,10 @@ impl ContextProvider for RustContextProvider {
fn associated_tasks( fn associated_tasks(
&self, &self,
_: Arc<dyn Fs>,
file: Option<Arc<dyn language::File>>, file: Option<Arc<dyn language::File>>,
cx: &App, cx: &App,
) -> Option<TaskTemplates> { ) -> Task<Option<TaskTemplates>> {
const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN"; const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN";
const CUSTOM_TARGET_DIR: &str = "RUST_TARGET_DIR"; const CUSTOM_TARGET_DIR: &str = "RUST_TARGET_DIR";
@ -798,7 +800,7 @@ impl ContextProvider for RustContextProvider {
.collect(); .collect();
} }
Some(TaskTemplates(task_templates)) Task::ready(Some(TaskTemplates(task_templates)))
} }
fn lsp_task_source(&self) -> Option<LanguageServerName> { fn lsp_task_source(&self) -> Option<LanguageServerName> {

View file

@ -4,10 +4,12 @@ use async_tar::Archive;
use async_trait::async_trait; use async_trait::async_trait;
use chrono::{DateTime, Local}; use chrono::{DateTime, Local};
use collections::HashMap; use collections::HashMap;
use futures::future::join_all;
use gpui::{App, AppContext, AsyncApp, Task}; use gpui::{App, AppContext, AsyncApp, Task};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url}; use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url};
use language::{ use language::{
ContextLocation, ContextProvider, File, LanguageToolchainStore, LspAdapter, LspAdapterDelegate, ContextLocation, ContextProvider, File, LanguageToolchainStore, LocalFile, LspAdapter,
LspAdapterDelegate,
}; };
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
@ -17,11 +19,12 @@ use smol::{fs, io::BufReader, lock::RwLock, stream::StreamExt};
use std::{ use std::{
any::Any, any::Any,
borrow::Cow, borrow::Cow,
collections::BTreeSet,
ffi::OsString, ffi::OsString,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; use task::{TaskTemplate, TaskTemplates, VariableName};
use util::archive::extract_zip; use util::archive::extract_zip;
use util::merge_json_value_into; use util::merge_json_value_into;
use util::{ResultExt, fs::remove_matching, maybe}; use util::{ResultExt, fs::remove_matching, maybe};
@ -32,23 +35,12 @@ pub(crate) struct TypeScriptContextProvider {
const TYPESCRIPT_RUNNER_VARIABLE: VariableName = const TYPESCRIPT_RUNNER_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_RUNNER")); VariableName::Custom(Cow::Borrowed("TYPESCRIPT_RUNNER"));
const TYPESCRIPT_JEST_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JEST"));
const TYPESCRIPT_JEST_TEST_NAME_VARIABLE: VariableName = const TYPESCRIPT_JEST_TEST_NAME_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JEST_TEST_NAME")); VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JEST_TEST_NAME"));
const TYPESCRIPT_MOCHA_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_MOCHA"));
const TYPESCRIPT_VITEST_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_VITEST"));
const TYPESCRIPT_VITEST_TEST_NAME_VARIABLE: VariableName = const TYPESCRIPT_VITEST_TEST_NAME_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_VITEST_TEST_NAME")); VariableName::Custom(Cow::Borrowed("TYPESCRIPT_VITEST_TEST_NAME"));
const TYPESCRIPT_JASMINE_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JASMINE"));
const TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_BUILD_SCRIPT"));
const TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_TEST_SCRIPT"));
#[derive(Clone, Default)] #[derive(Clone, Default)]
struct PackageJsonContents(Arc<RwLock<HashMap<PathBuf, PackageJson>>>); struct PackageJsonContents(Arc<RwLock<HashMap<PathBuf, PackageJson>>>);
@ -58,36 +50,21 @@ struct PackageJson {
data: PackageJsonData, data: PackageJsonData,
} }
#[derive(Clone, Copy, Default)] #[derive(Clone, Default)]
struct PackageJsonData { struct PackageJsonData {
jest: bool, jest: bool,
mocha: bool, mocha: bool,
vitest: bool, vitest: bool,
jasmine: bool, jasmine: bool,
build_script: bool, scripts: BTreeSet<String>,
test_script: bool, package_manager: Option<&'static str>,
runner: Runner,
}
#[derive(Clone, Copy, Default)]
enum Runner {
#[default]
Npm,
Yarn,
Pnpm,
} }
impl PackageJsonData { impl PackageJsonData {
async fn new( fn new(package_json: HashMap<String, Value>) -> Self {
package_json: HashMap<String, Value>, let mut scripts = BTreeSet::new();
worktree_root: PathBuf, if let Some(serde_json::Value::Object(package_json_scripts)) = package_json.get("scripts") {
fs: Arc<dyn Fs>, scripts.extend(package_json_scripts.keys().cloned());
) -> Self {
let mut build_script = false;
let mut test_script = false;
if let Some(serde_json::Value::Object(scripts)) = package_json.get("scripts") {
build_script |= scripts.contains_key("build");
test_script |= scripts.contains_key("test");
} }
let mut jest = false; let mut jest = false;
@ -108,262 +85,351 @@ impl PackageJsonData {
jasmine |= dev_dependencies.contains_key("jasmine"); jasmine |= dev_dependencies.contains_key("jasmine");
} }
let mut runner = package_json let package_manager = package_json
.get("packageManager") .get("packageManager")
.and_then(|value| value.as_str()) .and_then(|value| value.as_str())
.and_then(|value| { .and_then(|value| {
if value.starts_with("pnpm") { if value.starts_with("pnpm") {
Some(Runner::Pnpm) Some("pnpm")
} else if value.starts_with("yarn") { } else if value.starts_with("yarn") {
Some(Runner::Yarn) Some("yarn")
} else if value.starts_with("npm") { } else if value.starts_with("npm") {
Some(Runner::Npm) Some("npm")
} else { } else {
None None
} }
}); });
if runner.is_none() {
let detected_runner = detect_package_manager(&fs, &worktree_root).await;
runner = Some(detected_runner);
}
Self { Self {
jest, jest,
mocha, mocha,
vitest, vitest,
jasmine, jasmine,
build_script, scripts,
test_script, package_manager,
runner: runner.unwrap(),
} }
} }
fn fill_variables(&self, variables: &mut TaskVariables) { fn merge(&mut self, other: Self) {
let runner = match self.runner { self.jest |= other.jest;
Runner::Npm => "npm", self.mocha |= other.mocha;
Runner::Pnpm => "pnpm", self.vitest |= other.vitest;
Runner::Yarn => "yarn", self.jasmine |= other.jasmine;
}; self.scripts.extend(other.scripts);
variables.insert(TYPESCRIPT_RUNNER_VARIABLE, runner.to_owned()); }
fn fill_task_templates(&self, task_templates: &mut TaskTemplates) {
if self.jest { if self.jest {
variables.insert(TYPESCRIPT_JEST_TASK_VARIABLE, "jest".to_owned()); task_templates.0.push(TaskTemplate {
} label: "jest file test".to_owned(),
if self.mocha { command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
variables.insert(TYPESCRIPT_MOCHA_TASK_VARIABLE, "mocha".to_owned()); args: vec![
"jest".to_owned(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!("jest test {}", VariableName::Symbol.template_value()),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"jest".to_owned(),
"--testNamePattern".to_owned(),
format!(
"\"{}\"",
TYPESCRIPT_JEST_TEST_NAME_VARIABLE.template_value()
),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
} }
if self.vitest { if self.vitest {
variables.insert(TYPESCRIPT_VITEST_TASK_VARIABLE, "vitest".to_owned()); task_templates.0.push(TaskTemplate {
} label: format!("{} file test", "vitest".to_owned()),
if self.jasmine { command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
variables.insert(TYPESCRIPT_JASMINE_TASK_VARIABLE, "jasmine".to_owned()); args: vec![
} "vitest".to_owned(),
if self.build_script { "run".to_owned(),
variables.insert(TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE, "build".to_owned()); VariableName::RelativeFile.template_value(),
} ],
if self.test_script { cwd: Some(VariableName::WorktreeRoot.template_value()),
variables.insert(TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE, "test".to_owned()); ..TaskTemplate::default()
} });
}
}
impl TypeScriptContextProvider {
pub fn new() -> Self {
TypeScriptContextProvider {
last_package_json: PackageJsonContents::default(),
}
}
}
impl ContextProvider for TypeScriptContextProvider {
fn associated_tasks(&self, _: Option<Arc<dyn File>>, _: &App) -> Option<TaskTemplates> {
let mut task_templates = TaskTemplates(Vec::new());
// Jest tasks
task_templates.0.push(TaskTemplate {
label: format!(
"{} file test",
TYPESCRIPT_JEST_TASK_VARIABLE.template_value()
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
"--testNamePattern".to_owned(),
format!(
"\"{}\"",
TYPESCRIPT_JEST_TEST_NAME_VARIABLE.template_value()
),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
// Vitest tasks
task_templates.0.push(TaskTemplate {
label: format!(
"{} file test",
TYPESCRIPT_VITEST_TASK_VARIABLE.template_value()
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
"run".to_owned(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
"run".to_owned(),
"--testNamePattern".to_owned(),
format!("\"{}\"", TYPESCRIPT_VITEST_TASK_VARIABLE.template_value()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
// Mocha tasks
task_templates.0.push(TaskTemplate {
label: format!(
"{} file test",
TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value()
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
"--grep".to_owned(),
format!("\"{}\"", VariableName::Symbol.template_value()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
// Jasmine tasks
task_templates.0.push(TaskTemplate {
label: format!(
"{} file test",
TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value()
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
format!("--filter={}", VariableName::Symbol.template_value()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
for package_json_script in [
TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE,
TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE,
] {
task_templates.0.push(TaskTemplate { task_templates.0.push(TaskTemplate {
label: format!( label: format!(
"package.json script {}", "{} test {}",
package_json_script.template_value() "vitest".to_owned(),
VariableName::Symbol.template_value(),
), ),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(), command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"vitest".to_owned(),
"run".to_owned(),
"--testNamePattern".to_owned(),
format!("\"{}\"", "vitest".to_owned()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
}
if self.mocha {
task_templates.0.push(TaskTemplate {
label: format!("{} file test", "mocha".to_owned()),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"mocha".to_owned(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
"mocha".to_owned(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"mocha".to_owned(),
"--grep".to_owned(),
format!("\"{}\"", VariableName::Symbol.template_value()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
}
if self.jasmine {
task_templates.0.push(TaskTemplate {
label: format!("{} file test", "jasmine".to_owned()),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"jasmine".to_owned(),
VariableName::RelativeFile.template_value(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
task_templates.0.push(TaskTemplate {
label: format!(
"{} test {}",
"jasmine".to_owned(),
VariableName::Symbol.template_value(),
),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![
"jasmine".to_owned(),
format!("--filter={}", VariableName::Symbol.template_value()),
VariableName::RelativeFile.template_value(),
],
tags: vec![
"ts-test".to_owned(),
"js-test".to_owned(),
"tsx-test".to_owned(),
],
cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
}
for script in &self.scripts {
task_templates.0.push(TaskTemplate {
label: format!("package.json > {script}",),
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec![ args: vec![
"--prefix".to_owned(), "--prefix".to_owned(),
VariableName::WorktreeRoot.template_value(), VariableName::WorktreeRoot.template_value(),
"run".to_owned(), "run".to_owned(),
package_json_script.template_value(), script.to_owned(),
], ],
tags: vec!["package-script".into()], tags: vec!["package-script".into()],
cwd: Some(VariableName::WorktreeRoot.template_value()), cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default() ..TaskTemplate::default()
}); });
} }
}
}
task_templates.0.push(TaskTemplate { impl TypeScriptContextProvider {
label: format!( pub fn new() -> Self {
"execute selection {}", Self {
VariableName::SelectedText.template_value() last_package_json: PackageJsonContents::default(),
), }
command: "node".to_owned(), }
args: vec![
"-e".to_owned(),
format!("\"{}\"", VariableName::SelectedText.template_value()),
],
..TaskTemplate::default()
});
Some(task_templates) fn combined_package_json_data(
&self,
fs: Arc<dyn Fs>,
worktree_root: &Path,
file_abs_path: &Path,
cx: &App,
) -> Task<anyhow::Result<PackageJsonData>> {
let Some(file_relative_path) = file_abs_path.strip_prefix(&worktree_root).ok() else {
log::debug!("No package json data for off-worktree files");
return Task::ready(Ok(PackageJsonData::default()));
};
let new_json_data = file_relative_path
.ancestors()
.map(|path| worktree_root.join(path))
.map(|parent_path| {
self.package_json_data(&parent_path, self.last_package_json.clone(), fs.clone(), cx)
})
.collect::<Vec<_>>();
cx.background_spawn(async move {
let mut package_json_data = PackageJsonData::default();
for new_data in join_all(new_json_data).await.into_iter().flatten() {
package_json_data.merge(new_data);
}
Ok(package_json_data)
})
}
fn package_json_data(
&self,
directory_path: &Path,
existing_package_json: PackageJsonContents,
fs: Arc<dyn Fs>,
cx: &App,
) -> Task<anyhow::Result<PackageJsonData>> {
let package_json_path = directory_path.join("package.json");
let metadata_check_fs = fs.clone();
cx.background_spawn(async move {
let metadata = metadata_check_fs
.metadata(&package_json_path)
.await
.with_context(|| format!("getting metadata for {package_json_path:?}"))?
.with_context(|| format!("missing FS metadata for {package_json_path:?}"))?;
let mtime = DateTime::<Local>::from(metadata.mtime.timestamp_for_user());
let existing_data = {
let contents = existing_package_json.0.read().await;
contents
.get(&package_json_path)
.filter(|package_json| package_json.mtime == mtime)
.map(|package_json| package_json.data.clone())
};
match existing_data {
Some(existing_data) => Ok(existing_data),
None => {
let package_json_string =
fs.load(&package_json_path).await.with_context(|| {
format!("loading package.json from {package_json_path:?}")
})?;
let package_json: HashMap<String, serde_json::Value> =
serde_json::from_str(&package_json_string).with_context(|| {
format!("parsing package.json from {package_json_path:?}")
})?;
let new_data = PackageJsonData::new(package_json);
{
let mut contents = existing_package_json.0.write().await;
contents.insert(
package_json_path,
PackageJson {
mtime,
data: new_data.clone(),
},
);
}
Ok(new_data)
}
}
})
}
fn detect_package_manager(
&self,
worktree_root: PathBuf,
fs: Arc<dyn Fs>,
cx: &App,
) -> Task<&'static str> {
let last_package_json = self.last_package_json.clone();
let package_json_data =
self.package_json_data(&worktree_root, last_package_json, fs.clone(), cx);
cx.background_spawn(async move {
if let Ok(package_json_data) = package_json_data.await {
if let Some(package_manager) = package_json_data.package_manager {
return package_manager;
}
}
if fs.is_file(&worktree_root.join("pnpm-lock.yaml")).await {
return "pnpm";
}
if fs.is_file(&worktree_root.join("yarn.lock")).await {
return "yarn";
}
"npm"
})
}
}
impl ContextProvider for TypeScriptContextProvider {
fn associated_tasks(
&self,
fs: Arc<dyn Fs>,
file: Option<Arc<dyn File>>,
cx: &App,
) -> Task<Option<TaskTemplates>> {
let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else {
return Task::ready(None);
};
let Some(worktree_root) = file.worktree.read(cx).root_dir() else {
return Task::ready(None);
};
let file_abs_path = file.abs_path(cx);
let package_json_data =
self.combined_package_json_data(fs.clone(), &worktree_root, &file_abs_path, cx);
cx.background_spawn(async move {
let mut task_templates = TaskTemplates(Vec::new());
task_templates.0.push(TaskTemplate {
label: format!(
"execute selection {}",
VariableName::SelectedText.template_value()
),
command: "node".to_owned(),
args: vec![
"-e".to_owned(),
format!("\"{}\"", VariableName::SelectedText.template_value()),
],
..TaskTemplate::default()
});
match package_json_data.await {
Ok(package_json) => {
package_json.fill_task_templates(&mut task_templates);
}
Err(e) => {
log::error!(
"Failed to read package.json for worktree {file_abs_path:?}: {e:#}"
);
}
}
Some(task_templates)
})
} }
fn build_context( fn build_context(
@ -387,91 +453,19 @@ impl ContextProvider for TypeScriptContextProvider {
); );
} }
let Some((fs, worktree_root)) = location.fs.zip(location.worktree_root) else { let task = location
return Task::ready(Ok(vars)); .worktree_root
}; .zip(location.fs)
.map(|(worktree_root, fs)| self.detect_package_manager(worktree_root, fs, cx));
let package_json_contents = self.last_package_json.clone();
cx.background_spawn(async move { cx.background_spawn(async move {
let variables = package_json_variables(fs, worktree_root, package_json_contents) if let Some(task) = task {
.await vars.insert(TYPESCRIPT_RUNNER_VARIABLE, task.await.to_owned());
.context("package.json context retrieval") }
.log_err()
.unwrap_or_else(task::TaskVariables::default);
vars.extend(variables);
Ok(vars) Ok(vars)
}) })
} }
} }
/// Produces task variables from the worktree's `package.json`.
///
/// Parsed results are cached in `package_json_contents`, keyed by the file's
/// path and invalidated by its mtime: a cached entry is reused only when the
/// on-disk timestamp matches the one recorded when the entry was parsed.
///
/// # Errors
/// Returns an error when the file's metadata cannot be read, the metadata is
/// missing, or the contents fail to load or parse as a JSON object.
async fn package_json_variables(
    fs: Arc<dyn Fs>,
    worktree_root: PathBuf,
    package_json_contents: PackageJsonContents,
) -> anyhow::Result<task::TaskVariables> {
    let package_json_path = worktree_root.join("package.json");
    let metadata = fs
        .metadata(&package_json_path)
        .await
        .with_context(|| format!("getting metadata for {package_json_path:?}"))?
        .with_context(|| format!("missing FS metadata for {package_json_path:?}"))?;
    let mtime = DateTime::<Local>::from(metadata.mtime.timestamp_for_user());
    // Check the cache under the read lock; the scope drops the guard before
    // any write below.
    let existing_data = {
        let contents = package_json_contents.0.read().await;
        contents
            .get(&package_json_path)
            .filter(|package_json| package_json.mtime == mtime)
            .map(|package_json| package_json.data)
    };
    let mut variables = TaskVariables::default();
    if let Some(existing_data) = existing_data {
        existing_data.fill_variables(&mut variables);
    } else {
        // Cache miss or stale mtime: load and parse the file fresh.
        let package_json_string = fs
            .load(&package_json_path)
            .await
            .with_context(|| format!("loading package.json from {package_json_path:?}"))?;
        let package_json: HashMap<String, serde_json::Value> =
            serde_json::from_str(&package_json_string)
                .with_context(|| format!("parsing package.json from {package_json_path:?}"))?;
        let new_data = PackageJsonData::new(package_json, worktree_root, fs).await;
        new_data.fill_variables(&mut variables);
        {
            // Take the write lock only after parsing, to store the fresh entry.
            let mut contents = package_json_contents.0.write().await;
            contents.insert(
                package_json_path,
                PackageJson {
                    mtime,
                    data: new_data,
                },
            );
        }
    }
    Ok(variables)
}
async fn detect_package_manager(fs: &Arc<dyn Fs>, worktree_root: &PathBuf) -> Runner {
// Check for pnpm-lock.yaml first (pnpm)
if fs
.metadata(&worktree_root.join("pnpm-lock.yaml"))
.await
.is_ok()
{
return Runner::Pnpm;
}
if fs.metadata(&worktree_root.join("yarn.lock")).await.is_ok() {
return Runner::Yarn;
}
Runner::Npm
}
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> { fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()] vec![server_path.into(), "--stdio".into()]
} }

View file

@ -997,6 +997,7 @@ impl Project {
let task_store = cx.new(|cx| { let task_store = cx.new(|cx| {
TaskStore::local( TaskStore::local(
fs.clone(),
buffer_store.downgrade(), buffer_store.downgrade(),
worktree_store.clone(), worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(), toolchain_store.read(cx).as_language_toolchain_store(),
@ -1136,6 +1137,7 @@ impl Project {
.new(|cx| ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx)); .new(|cx| ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx));
let task_store = cx.new(|cx| { let task_store = cx.new(|cx| {
TaskStore::remote( TaskStore::remote(
fs.clone(),
buffer_store.downgrade(), buffer_store.downgrade(),
worktree_store.clone(), worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(), toolchain_store.read(cx).as_language_toolchain_store(),
@ -1396,6 +1398,7 @@ impl Project {
let task_store = cx.new(|cx| { let task_store = cx.new(|cx| {
if run_tasks { if run_tasks {
TaskStore::remote( TaskStore::remote(
fs.clone(),
buffer_store.downgrade(), buffer_store.downgrade(),
worktree_store.clone(), worktree_store.clone(),
Arc::new(EmptyToolchainStore), Arc::new(EmptyToolchainStore),

View file

@ -329,6 +329,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
let mut task_contexts = TaskContexts::default(); let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default())); task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
let task_contexts = Arc::new(task_contexts);
let topmost_local_task_source_kind = TaskSourceKind::Worktree { let topmost_local_task_source_kind = TaskSourceKind::Worktree {
id: worktree_id, id: worktree_id,
@ -354,8 +355,9 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
assert_eq!(settings_a.tab_size.get(), 8); assert_eq!(settings_a.tab_size.get(), 8);
assert_eq!(settings_b.tab_size.get(), 2); assert_eq!(settings_b.tab_size.get(), 2);
get_all_tasks(&project, &task_contexts, cx) get_all_tasks(&project, task_contexts.clone(), cx)
}) })
.await
.into_iter() .into_iter()
.map(|(source_kind, task)| { .map(|(source_kind, task)| {
let resolved = task.resolved; let resolved = task.resolved;
@ -394,7 +396,8 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
); );
let (_, resolved_task) = cx let (_, resolved_task) = cx
.update(|cx| get_all_tasks(&project, &task_contexts, cx)) .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
.await
.into_iter() .into_iter()
.find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind) .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
.expect("should have one global task"); .expect("should have one global task");
@ -432,7 +435,8 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
cx.run_until_parked(); cx.run_until_parked();
let all_tasks = cx let all_tasks = cx
.update(|cx| get_all_tasks(&project, &task_contexts, cx)) .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
.await
.into_iter() .into_iter()
.map(|(source_kind, task)| { .map(|(source_kind, task)| {
let resolved = task.resolved; let resolved = task.resolved;
@ -519,43 +523,47 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
}) })
}); });
let active_non_worktree_item_tasks = cx.update(|cx| { let active_non_worktree_item_tasks = cx
get_all_tasks( .update(|cx| {
&project, get_all_tasks(
&TaskContexts { &project,
active_item_context: Some((Some(worktree_id), None, TaskContext::default())), Arc::new(TaskContexts {
active_worktree_context: None, active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
other_worktree_contexts: Vec::new(), active_worktree_context: None,
lsp_task_sources: HashMap::default(), other_worktree_contexts: Vec::new(),
latest_selection: None, lsp_task_sources: HashMap::default(),
}, latest_selection: None,
cx, }),
) cx,
}); )
})
.await;
assert!( assert!(
active_non_worktree_item_tasks.is_empty(), active_non_worktree_item_tasks.is_empty(),
"A task can not be resolved with context with no ZED_WORKTREE_ROOT data" "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
); );
let active_worktree_tasks = cx.update(|cx| { let active_worktree_tasks = cx
get_all_tasks( .update(|cx| {
&project, get_all_tasks(
&TaskContexts { &project,
active_item_context: Some((Some(worktree_id), None, TaskContext::default())), Arc::new(TaskContexts {
active_worktree_context: Some((worktree_id, { active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
let mut worktree_context = TaskContext::default(); active_worktree_context: Some((worktree_id, {
worktree_context let mut worktree_context = TaskContext::default();
.task_variables worktree_context
.insert(task::VariableName::WorktreeRoot, "/dir".to_string()); .task_variables
worktree_context .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
})), worktree_context
other_worktree_contexts: Vec::new(), })),
lsp_task_sources: HashMap::default(), other_worktree_contexts: Vec::new(),
latest_selection: None, lsp_task_sources: HashMap::default(),
}, latest_selection: None,
cx, }),
) cx,
}); )
})
.await;
assert_eq!( assert_eq!(
active_worktree_tasks active_worktree_tasks
.into_iter() .into_iter()
@ -8851,20 +8859,22 @@ fn tsx_lang() -> Arc<Language> {
fn get_all_tasks( fn get_all_tasks(
project: &Entity<Project>, project: &Entity<Project>,
task_contexts: &TaskContexts, task_contexts: Arc<TaskContexts>,
cx: &mut App, cx: &mut App,
) -> Vec<(TaskSourceKind, ResolvedTask)> { ) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
let (mut old, new) = project.update(cx, |project, cx| { let new_tasks = project.update(cx, |project, cx| {
project project.task_store.update(cx, |task_store, cx| {
.task_store task_store.task_inventory().unwrap().update(cx, |this, cx| {
.read(cx) this.used_and_current_resolved_tasks(task_contexts, cx)
.task_inventory() })
.unwrap() })
.read(cx)
.used_and_current_resolved_tasks(task_contexts, cx)
}); });
old.extend(new);
old cx.background_spawn(async move {
let (mut old, new) = new_tasks.await;
old.extend(new);
old
})
} }
#[track_caller] #[track_caller]

View file

@ -11,7 +11,8 @@ use std::{
use anyhow::Result; use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque}; use collections::{HashMap, HashSet, VecDeque};
use dap::DapRegistry; use dap::DapRegistry;
use gpui::{App, AppContext as _, Entity, SharedString, Task}; use fs::Fs;
use gpui::{App, AppContext as _, Context, Entity, SharedString, Task};
use itertools::Itertools; use itertools::Itertools;
use language::{ use language::{
Buffer, ContextLocation, ContextProvider, File, Language, LanguageToolchainStore, Location, Buffer, ContextLocation, ContextProvider, File, Language, LanguageToolchainStore, Location,
@ -31,14 +32,25 @@ use worktree::WorktreeId;
use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore}; use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
/// Inventory tracks available tasks for a given project. /// Inventory tracks available tasks for a given project.
#[derive(Debug, Default)]
pub struct Inventory { pub struct Inventory {
fs: Arc<dyn Fs>,
last_scheduled_tasks: VecDeque<(TaskSourceKind, ResolvedTask)>, last_scheduled_tasks: VecDeque<(TaskSourceKind, ResolvedTask)>,
last_scheduled_scenarios: VecDeque<DebugScenario>, last_scheduled_scenarios: VecDeque<DebugScenario>,
templates_from_settings: InventoryFor<TaskTemplate>, templates_from_settings: InventoryFor<TaskTemplate>,
scenarios_from_settings: InventoryFor<DebugScenario>, scenarios_from_settings: InventoryFor<DebugScenario>,
} }
impl std::fmt::Debug for Inventory {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Inventory")
.field("last_scheduled_tasks", &self.last_scheduled_tasks)
.field("last_scheduled_scenarios", &self.last_scheduled_scenarios)
.field("templates_from_settings", &self.templates_from_settings)
.field("scenarios_from_settings", &self.scenarios_from_settings)
.finish()
}
}
// Helper trait for better error messages in [InventoryFor] // Helper trait for better error messages in [InventoryFor]
trait InventoryContents: Clone { trait InventoryContents: Clone {
const GLOBAL_SOURCE_FILE: &'static str; const GLOBAL_SOURCE_FILE: &'static str;
@ -223,8 +235,14 @@ impl TaskSourceKind {
} }
impl Inventory { impl Inventory {
pub fn new(cx: &mut App) -> Entity<Self> { pub fn new(fs: Arc<dyn Fs>, cx: &mut App) -> Entity<Self> {
cx.new(|_| Self::default()) cx.new(|_| Self {
fs,
last_scheduled_tasks: VecDeque::default(),
last_scheduled_scenarios: VecDeque::default(),
templates_from_settings: InventoryFor::default(),
scenarios_from_settings: InventoryFor::default(),
})
} }
pub fn scenario_scheduled(&mut self, scenario: DebugScenario) { pub fn scenario_scheduled(&mut self, scenario: DebugScenario) {
@ -311,7 +329,7 @@ impl Inventory {
worktree_id: Option<WorktreeId>, worktree_id: Option<WorktreeId>,
label: &str, label: &str,
cx: &App, cx: &App,
) -> Option<TaskTemplate> { ) -> Task<Option<TaskTemplate>> {
let (buffer_worktree_id, file, language) = buffer let (buffer_worktree_id, file, language) = buffer
.map(|buffer| { .map(|buffer| {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
@ -324,10 +342,15 @@ impl Inventory {
}) })
.unwrap_or((None, None, None)); .unwrap_or((None, None, None));
self.list_tasks(file, language, worktree_id.or(buffer_worktree_id), cx) let tasks = self.list_tasks(file, language, worktree_id.or(buffer_worktree_id), cx);
.into_iter() let label = label.to_owned();
.find(|(_, template)| template.label == label) cx.background_spawn(async move {
.map(|val| val.1) tasks
.await
.into_iter()
.find(|(_, template)| template.label == label)
.map(|val| val.1)
})
} }
/// Pulls its task sources relevant to the worktree and the language given, /// Pulls its task sources relevant to the worktree and the language given,
@ -339,11 +362,13 @@ impl Inventory {
language: Option<Arc<Language>>, language: Option<Arc<Language>>,
worktree: Option<WorktreeId>, worktree: Option<WorktreeId>,
cx: &App, cx: &App,
) -> Vec<(TaskSourceKind, TaskTemplate)> { ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
let global_tasks = self.global_templates_from_settings(); let global_tasks = self.global_templates_from_settings().collect::<Vec<_>>();
let worktree_tasks = worktree let fs = self.fs.clone();
let mut worktree_tasks = worktree
.into_iter() .into_iter()
.flat_map(|worktree| self.worktree_templates_from_settings(worktree)); .flat_map(|worktree| self.worktree_templates_from_settings(worktree))
.collect::<Vec<_>>();
let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language { let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language {
name: language.name().into(), name: language.name().into(),
}); });
@ -353,29 +378,38 @@ impl Inventory {
.tasks .tasks
.enabled .enabled
}) })
.and_then(|language| language.context_provider()?.associated_tasks(file, cx)) .and_then(|language| {
.into_iter() language
.flat_map(|tasks| tasks.0.into_iter()) .context_provider()
.flat_map(|task| Some((task_source_kind.clone()?, task))); .map(|provider| provider.associated_tasks(fs, file, cx))
});
worktree_tasks cx.background_spawn(async move {
.chain(language_tasks) if let Some(t) = language_tasks {
.chain(global_tasks) worktree_tasks.extend(t.await.into_iter().flat_map(|tasks| {
.collect() tasks
.0
.into_iter()
.filter_map(|task| Some((task_source_kind.clone()?, task)))
}));
}
worktree_tasks.extend(global_tasks);
worktree_tasks
})
} }
/// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContexts`] given. /// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContexts`] given.
/// Joins the new resolutions with the resolved tasks that were used (spawned) before, /// Joins the new resolutions with the resolved tasks that were used (spawned) before,
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first. /// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
/// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks. /// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
pub fn used_and_current_resolved_tasks<'a>( pub fn used_and_current_resolved_tasks(
&'a self, &self,
task_contexts: &'a TaskContexts, task_contexts: Arc<TaskContexts>,
cx: &'a App, cx: &mut Context<Self>,
) -> ( ) -> Task<(
Vec<(TaskSourceKind, ResolvedTask)>, Vec<(TaskSourceKind, ResolvedTask)>,
Vec<(TaskSourceKind, ResolvedTask)>, Vec<(TaskSourceKind, ResolvedTask)>,
) { )> {
let fs = self.fs.clone();
let worktree = task_contexts.worktree(); let worktree = task_contexts.worktree();
let location = task_contexts.location(); let location = task_contexts.location();
let language = location let language = location
@ -423,85 +457,103 @@ impl Inventory {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let not_used_score = post_inc(&mut lru_score); let not_used_score = post_inc(&mut lru_score);
let global_tasks = self.global_templates_from_settings(); let global_tasks = self.global_templates_from_settings().collect::<Vec<_>>();
let associated_tasks = language
let language_tasks = language
.filter(|language| { .filter(|language| {
language_settings(Some(language.name()), file.as_ref(), cx) language_settings(Some(language.name()), file.as_ref(), cx)
.tasks .tasks
.enabled .enabled
}) })
.and_then(|language| language.context_provider()?.associated_tasks(file, cx)) .and_then(|language| {
.into_iter() language
.flat_map(|tasks| tasks.0.into_iter()) .context_provider()
.flat_map(|task| Some((task_source_kind.clone()?, task))); .map(|provider| provider.associated_tasks(fs, file, cx))
});
let worktree_tasks = worktree let worktree_tasks = worktree
.into_iter() .into_iter()
.flat_map(|worktree| self.worktree_templates_from_settings(worktree)) .flat_map(|worktree| self.worktree_templates_from_settings(worktree))
.chain(language_tasks)
.chain(global_tasks);
let new_resolved_tasks = worktree_tasks
.flat_map(|(kind, task)| {
let id_base = kind.to_id_base();
if let TaskSourceKind::Worktree { id, .. } = &kind {
None.or_else(|| {
let (_, _, item_context) = task_contexts
.active_item_context
.as_ref()
.filter(|(worktree_id, _, _)| Some(id) == worktree_id.as_ref())?;
task.resolve_task(&id_base, item_context)
})
.or_else(|| {
let (_, worktree_context) = task_contexts
.active_worktree_context
.as_ref()
.filter(|(worktree_id, _)| id == worktree_id)?;
task.resolve_task(&id_base, worktree_context)
})
.or_else(|| {
if let TaskSourceKind::Worktree { id, .. } = &kind {
let worktree_context = task_contexts
.other_worktree_contexts
.iter()
.find(|(worktree_id, _)| worktree_id == id)
.map(|(_, context)| context)?;
task.resolve_task(&id_base, worktree_context)
} else {
None
}
})
} else {
None.or_else(|| {
let (_, _, item_context) = task_contexts.active_item_context.as_ref()?;
task.resolve_task(&id_base, item_context)
})
.or_else(|| {
let (_, worktree_context) =
task_contexts.active_worktree_context.as_ref()?;
task.resolve_task(&id_base, worktree_context)
})
}
.or_else(|| task.resolve_task(&id_base, &TaskContext::default()))
.map(move |resolved_task| (kind.clone(), resolved_task, not_used_score))
})
.filter(|(_, resolved_task, _)| {
match task_labels_to_ids.entry(resolved_task.resolved_label.clone()) {
hash_map::Entry::Occupied(mut o) => {
// Allow new tasks with the same label, if their context is different
o.get_mut().insert(resolved_task.id.clone())
}
hash_map::Entry::Vacant(v) => {
v.insert(HashSet::from_iter(Some(resolved_task.id.clone())));
true
}
}
})
.sorted_unstable_by(task_lru_comparator)
.map(|(kind, task, _)| (kind, task))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let task_contexts = task_contexts.clone();
cx.background_spawn(async move {
let language_tasks = if let Some(task) = associated_tasks {
task.await.map(|templates| {
templates
.0
.into_iter()
.flat_map(|task| Some((task_source_kind.clone()?, task)))
})
} else {
None
};
(previously_spawned_tasks, new_resolved_tasks) let worktree_tasks = worktree_tasks
.into_iter()
.chain(language_tasks.into_iter().flatten())
.chain(global_tasks);
let new_resolved_tasks = worktree_tasks
.flat_map(|(kind, task)| {
let id_base = kind.to_id_base();
if let TaskSourceKind::Worktree { id, .. } = &kind {
None.or_else(|| {
let (_, _, item_context) =
task_contexts.active_item_context.as_ref().filter(
|(worktree_id, _, _)| Some(id) == worktree_id.as_ref(),
)?;
task.resolve_task(&id_base, item_context)
})
.or_else(|| {
let (_, worktree_context) = task_contexts
.active_worktree_context
.as_ref()
.filter(|(worktree_id, _)| id == worktree_id)?;
task.resolve_task(&id_base, worktree_context)
})
.or_else(|| {
if let TaskSourceKind::Worktree { id, .. } = &kind {
let worktree_context = task_contexts
.other_worktree_contexts
.iter()
.find(|(worktree_id, _)| worktree_id == id)
.map(|(_, context)| context)?;
task.resolve_task(&id_base, worktree_context)
} else {
None
}
})
} else {
None.or_else(|| {
let (_, _, item_context) =
task_contexts.active_item_context.as_ref()?;
task.resolve_task(&id_base, item_context)
})
.or_else(|| {
let (_, worktree_context) =
task_contexts.active_worktree_context.as_ref()?;
task.resolve_task(&id_base, worktree_context)
})
}
.or_else(|| task.resolve_task(&id_base, &TaskContext::default()))
.map(move |resolved_task| (kind.clone(), resolved_task, not_used_score))
})
.filter(|(_, resolved_task, _)| {
match task_labels_to_ids.entry(resolved_task.resolved_label.clone()) {
hash_map::Entry::Occupied(mut o) => {
// Allow new tasks with the same label, if their context is different
o.get_mut().insert(resolved_task.id.clone())
}
hash_map::Entry::Vacant(v) => {
v.insert(HashSet::from_iter(Some(resolved_task.id.clone())));
true
}
}
})
.sorted_unstable_by(task_lru_comparator)
.map(|(kind, task, _)| (kind, task))
.collect::<Vec<_>>();
(previously_spawned_tasks, new_resolved_tasks)
})
} }
/// Returns the last scheduled task by task_id if provided. /// Returns the last scheduled task by task_id if provided.
@ -746,7 +798,7 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse<usize> {
#[cfg(test)] #[cfg(test)]
mod test_inventory { mod test_inventory {
use gpui::{Entity, TestAppContext}; use gpui::{AppContext as _, Entity, Task, TestAppContext};
use itertools::Itertools; use itertools::Itertools;
use task::TaskContext; use task::TaskContext;
use worktree::WorktreeId; use worktree::WorktreeId;
@ -759,10 +811,13 @@ mod test_inventory {
inventory: &Entity<Inventory>, inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>, worktree: Option<WorktreeId>,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Vec<String> { ) -> Task<Vec<String>> {
inventory.update(cx, |inventory, cx| { let new_tasks = inventory.update(cx, |inventory, cx| {
inventory inventory.list_tasks(None, None, worktree, cx)
.list_tasks(None, None, worktree, cx) });
cx.background_spawn(async move {
new_tasks
.await
.into_iter() .into_iter()
.map(|(_, task)| task.label) .map(|(_, task)| task.label)
.sorted() .sorted()
@ -774,20 +829,33 @@ mod test_inventory {
inventory: &Entity<Inventory>, inventory: &Entity<Inventory>,
task_name: &str, task_name: &str,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) { ) -> Task<()> {
inventory.update(cx, |inventory, cx| { let tasks = inventory.update(cx, |inventory, cx| {
let (task_source_kind, task) = inventory inventory.list_tasks(None, None, None, cx)
.list_tasks(None, None, None, cx) });
let task_name = task_name.to_owned();
let inventory = inventory.clone();
cx.spawn(|mut cx| async move {
let (task_source_kind, task) = tasks
.await
.into_iter() .into_iter()
.find(|(_, task)| task.label == task_name) .find(|(_, task)| task.label == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}")); .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
let id_base = task_source_kind.to_id_base(); let id_base = task_source_kind.to_id_base();
inventory.task_scheduled( inventory
task_source_kind.clone(), .update(&mut cx, |inventory, _| {
task.resolve_task(&id_base, &TaskContext::default()) inventory.task_scheduled(
.unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")), task_source_kind.clone(),
); task.resolve_task(&id_base, &TaskContext::default())
}); .unwrap_or_else(|| {
panic!("Failed to resolve task with name {task_name}")
}),
)
})
.unwrap();
})
} }
pub(super) fn register_worktree_task_used( pub(super) fn register_worktree_task_used(
@ -795,20 +863,32 @@ mod test_inventory {
worktree_id: WorktreeId, worktree_id: WorktreeId,
task_name: &str, task_name: &str,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) { ) -> Task<()> {
inventory.update(cx, |inventory, cx| { let tasks = inventory.update(cx, |inventory, cx| {
let (task_source_kind, task) = inventory inventory.list_tasks(None, None, Some(worktree_id), cx)
.list_tasks(None, None, Some(worktree_id), cx) });
let inventory = inventory.clone();
let task_name = task_name.to_owned();
cx.spawn(|mut cx| async move {
let (task_source_kind, task) = tasks
.await
.into_iter() .into_iter()
.find(|(_, task)| task.label == task_name) .find(|(_, task)| task.label == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}")); .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
let id_base = task_source_kind.to_id_base(); let id_base = task_source_kind.to_id_base();
inventory.task_scheduled( inventory
task_source_kind.clone(), .update(&mut cx, |inventory, _| {
task.resolve_task(&id_base, &TaskContext::default()) inventory.task_scheduled(
.unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")), task_source_kind.clone(),
); task.resolve_task(&id_base, &TaskContext::default())
}); .unwrap_or_else(|| {
panic!("Failed to resolve task with name {task_name}")
}),
);
})
.unwrap();
})
} }
pub(super) async fn list_tasks( pub(super) async fn list_tasks(
@ -816,18 +896,19 @@ mod test_inventory {
worktree: Option<WorktreeId>, worktree: Option<WorktreeId>,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> { ) -> Vec<(TaskSourceKind, String)> {
inventory.update(cx, |inventory, cx| { let task_context = &TaskContext::default();
let task_context = &TaskContext::default(); inventory
inventory .update(cx, |inventory, cx| {
.list_tasks(None, None, worktree, cx) inventory.list_tasks(None, None, worktree, cx)
.into_iter() })
.filter_map(|(source_kind, task)| { .await
let id_base = source_kind.to_id_base(); .into_iter()
Some((source_kind, task.resolve_task(&id_base, task_context)?)) .filter_map(|(source_kind, task)| {
}) let id_base = source_kind.to_id_base();
.map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label)) Some((source_kind, task.resolve_task(&id_base, task_context)?))
.collect() })
}) .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label))
.collect()
} }
} }
@ -959,15 +1040,17 @@ impl ContextProviderWithTasks {
impl ContextProvider for ContextProviderWithTasks { impl ContextProvider for ContextProviderWithTasks {
fn associated_tasks( fn associated_tasks(
&self, &self,
_: Option<Arc<dyn language::File>>, _: Arc<dyn Fs>,
_: Option<Arc<dyn File>>,
_: &App, _: &App,
) -> Option<TaskTemplates> { ) -> Task<Option<TaskTemplates>> {
Some(self.templates.clone()) Task::ready(Some(self.templates.clone()))
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use fs::FakeFs;
use gpui::TestAppContext; use gpui::TestAppContext;
use paths::tasks_file; use paths::tasks_file;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
@ -982,13 +1065,14 @@ mod tests {
#[gpui::test] #[gpui::test]
async fn test_task_list_sorting(cx: &mut TestAppContext) { async fn test_task_list_sorting(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);
let inventory = cx.update(Inventory::new); let fs = FakeFs::new(cx.executor());
let initial_tasks = resolved_task_names(&inventory, None, cx); let inventory = cx.update(|cx| Inventory::new(fs, cx));
let initial_tasks = resolved_task_names(&inventory, None, cx).await;
assert!( assert!(
initial_tasks.is_empty(), initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}" "No tasks expected for empty inventory, but got {initial_tasks:?}"
); );
let initial_tasks = task_template_names(&inventory, None, cx); let initial_tasks = task_template_names(&inventory, None, cx).await;
assert!( assert!(
initial_tasks.is_empty(), initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}" "No tasks expected for empty inventory, but got {initial_tasks:?}"
@ -1012,22 +1096,22 @@ mod tests {
.unwrap(); .unwrap();
}); });
assert_eq!( assert_eq!(
task_template_names(&inventory, None, cx), task_template_names(&inventory, None, cx).await,
&expected_initial_state, &expected_initial_state,
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
&expected_initial_state, &expected_initial_state,
"Tasks with equal amount of usages should be sorted alphanumerically" "Tasks with equal amount of usages should be sorted alphanumerically"
); );
register_task_used(&inventory, "2_task", cx); register_task_used(&inventory, "2_task", cx).await;
assert_eq!( assert_eq!(
task_template_names(&inventory, None, cx), task_template_names(&inventory, None, cx).await,
&expected_initial_state, &expected_initial_state,
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
vec![ vec![
"2_task".to_string(), "2_task".to_string(),
"1_a_task".to_string(), "1_a_task".to_string(),
@ -1036,16 +1120,16 @@ mod tests {
], ],
); );
register_task_used(&inventory, "1_task", cx); register_task_used(&inventory, "1_task", cx).await;
register_task_used(&inventory, "1_task", cx); register_task_used(&inventory, "1_task", cx).await;
register_task_used(&inventory, "1_task", cx); register_task_used(&inventory, "1_task", cx).await;
register_task_used(&inventory, "3_task", cx); register_task_used(&inventory, "3_task", cx).await;
assert_eq!( assert_eq!(
task_template_names(&inventory, None, cx), task_template_names(&inventory, None, cx).await,
&expected_initial_state, &expected_initial_state,
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
vec![ vec![
"3_task".to_string(), "3_task".to_string(),
"1_task".to_string(), "1_task".to_string(),
@ -1069,7 +1153,7 @@ mod tests {
.unwrap(); .unwrap();
}); });
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
vec![ vec![
"3_task".to_string(), "3_task".to_string(),
"1_task".to_string(), "1_task".to_string(),
@ -1079,7 +1163,7 @@ mod tests {
"Most recently used task should be at the top" "Most recently used task should be at the top"
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, Some(worktree_id), cx), resolved_task_names(&inventory, Some(worktree_id), cx).await,
vec![ vec![
"3_task".to_string(), "3_task".to_string(),
"1_task".to_string(), "1_task".to_string(),
@ -1088,9 +1172,9 @@ mod tests {
"1_a_task".to_string(), "1_a_task".to_string(),
], ],
); );
register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx); register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx).await;
assert_eq!( assert_eq!(
resolved_task_names(&inventory, Some(worktree_id), cx), resolved_task_names(&inventory, Some(worktree_id), cx).await,
vec![ vec![
"worktree_task_1".to_string(), "worktree_task_1".to_string(),
"3_task".to_string(), "3_task".to_string(),
@ -1123,11 +1207,11 @@ mod tests {
"3_task".to_string(), "3_task".to_string(),
]; ];
assert_eq!( assert_eq!(
task_template_names(&inventory, None, cx), task_template_names(&inventory, None, cx).await,
&expected_updated_state, &expected_updated_state,
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
vec![ vec![
"worktree_task_1".to_string(), "worktree_task_1".to_string(),
"1_a_task".to_string(), "1_a_task".to_string(),
@ -1140,13 +1224,13 @@ mod tests {
"After global tasks update, worktree task usage is not erased and it's the first still; global task is back to regular order as its file was updated" "After global tasks update, worktree task usage is not erased and it's the first still; global task is back to regular order as its file was updated"
); );
register_task_used(&inventory, "11_hello", cx); register_task_used(&inventory, "11_hello", cx).await;
assert_eq!( assert_eq!(
task_template_names(&inventory, None, cx), task_template_names(&inventory, None, cx).await,
&expected_updated_state, &expected_updated_state,
); );
assert_eq!( assert_eq!(
resolved_task_names(&inventory, None, cx), resolved_task_names(&inventory, None, cx).await,
vec![ vec![
"11_hello".to_string(), "11_hello".to_string(),
"worktree_task_1".to_string(), "worktree_task_1".to_string(),
@ -1162,7 +1246,8 @@ mod tests {
#[gpui::test] #[gpui::test]
async fn test_inventory_static_task_filters(cx: &mut TestAppContext) { async fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
init_test(cx); init_test(cx);
let inventory = cx.update(Inventory::new); let fs = FakeFs::new(cx.executor());
let inventory = cx.update(|cx| Inventory::new(fs, cx));
let common_name = "common_task_name"; let common_name = "common_task_name";
let worktree_1 = WorktreeId::from_usize(1); let worktree_1 = WorktreeId::from_usize(1);
let worktree_2 = WorktreeId::from_usize(2); let worktree_2 = WorktreeId::from_usize(2);
@ -1319,12 +1404,17 @@ mod tests {
inventory: &Entity<Inventory>, inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>, worktree: Option<WorktreeId>,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Vec<String> { ) -> Task<Vec<String>> {
inventory.update(cx, |inventory, cx| { let tasks = inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default(); let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context = task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default())); worktree.map(|worktree| (worktree, TaskContext::default()));
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
});
cx.background_spawn(async move {
let (used, current) = tasks.await;
used.into_iter() used.into_iter()
.chain(current) .chain(current)
.map(|(_, task)| task.original_task().label.clone()) .map(|(_, task)| task.original_task().label.clone())
@ -1353,17 +1443,20 @@ mod tests {
worktree: Option<WorktreeId>, worktree: Option<WorktreeId>,
cx: &mut TestAppContext, cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> { ) -> Vec<(TaskSourceKind, String)> {
inventory.update(cx, |inventory, cx| { let (used, current) = inventory
let mut task_contexts = TaskContexts::default(); .update(cx, |inventory, cx| {
task_contexts.active_worktree_context = let mut task_contexts = TaskContexts::default();
worktree.map(|worktree| (worktree, TaskContext::default())); task_contexts.active_worktree_context =
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx); worktree.map(|worktree| (worktree, TaskContext::default()));
let mut all = used;
all.extend(current); inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
all.into_iter() })
.map(|(source_kind, task)| (source_kind, task.resolved_label)) .await;
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone())) let mut all = used;
.collect() all.extend(current);
}) all.into_iter()
.map(|(source_kind, task)| (source_kind, task.resolved_label))
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
.collect()
} }
} }

View file

@ -159,6 +159,7 @@ impl TaskStore {
} }
pub fn local( pub fn local(
fs: Arc<dyn Fs>,
buffer_store: WeakEntity<BufferStore>, buffer_store: WeakEntity<BufferStore>,
worktree_store: Entity<WorktreeStore>, worktree_store: Entity<WorktreeStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>, toolchain_store: Arc<dyn LanguageToolchainStore>,
@ -170,7 +171,7 @@ impl TaskStore {
downstream_client: None, downstream_client: None,
environment, environment,
}, },
task_inventory: Inventory::new(cx), task_inventory: Inventory::new(fs, cx),
buffer_store, buffer_store,
toolchain_store, toolchain_store,
worktree_store, worktree_store,
@ -178,6 +179,7 @@ impl TaskStore {
} }
pub fn remote( pub fn remote(
fs: Arc<dyn Fs>,
buffer_store: WeakEntity<BufferStore>, buffer_store: WeakEntity<BufferStore>,
worktree_store: Entity<WorktreeStore>, worktree_store: Entity<WorktreeStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>, toolchain_store: Arc<dyn LanguageToolchainStore>,
@ -190,7 +192,7 @@ impl TaskStore {
upstream_client, upstream_client,
project_id, project_id,
}, },
task_inventory: Inventory::new(cx), task_inventory: Inventory::new(fs, cx),
buffer_store, buffer_store,
toolchain_store, toolchain_store,
worktree_store, worktree_store,

View file

@ -146,6 +146,7 @@ impl HeadlessProject {
let task_store = cx.new(|cx| { let task_store = cx.new(|cx| {
let mut task_store = TaskStore::local( let mut task_store = TaskStore::local(
fs.clone(),
buffer_store.downgrade(), buffer_store.downgrade(),
worktree_store.clone(), worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(), toolchain_store.read(cx).as_language_toolchain_store(),

View file

@ -260,13 +260,14 @@ impl PickerDelegate for TasksModalDelegate {
Some(candidates) => Task::ready(string_match_candidates(candidates)), Some(candidates) => Task::ready(string_match_candidates(candidates)),
None => { None => {
if let Some(task_inventory) = self.task_store.read(cx).task_inventory().cloned() { if let Some(task_inventory) = self.task_store.read(cx).task_inventory().cloned() {
let (used, current) = task_inventory let task_list = task_inventory.update(cx, |this, cx| {
.read(cx) this.used_and_current_resolved_tasks(self.task_contexts.clone(), cx)
.used_and_current_resolved_tasks(&self.task_contexts, cx); });
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
let lsp_task_sources = self.task_contexts.lsp_task_sources.clone(); let lsp_task_sources = self.task_contexts.lsp_task_sources.clone();
let task_position = self.task_contexts.latest_selection; let task_position = self.task_contexts.latest_selection;
cx.spawn(async move |picker, cx| { cx.spawn(async move |picker, cx| {
let (used, current) = task_list.await;
let Ok((lsp_tasks, prefer_lsp)) = workspace.update(cx, |workspace, cx| { let Ok((lsp_tasks, prefer_lsp)) = workspace.update(cx, |workspace, cx| {
let lsp_tasks = editor::lsp_tasks( let lsp_tasks = editor::lsp_tasks(
workspace.project().clone(), workspace.project().clone(),

View file

@ -192,31 +192,33 @@ where
task_contexts(workspace, window, cx) task_contexts(workspace, window, cx)
})?; })?;
let task_contexts = task_contexts.await; let task_contexts = task_contexts.await;
let mut tasks = workspace.update(cx, |workspace, cx| { let mut tasks = workspace
let Some(task_inventory) = workspace .update(cx, |workspace, cx| {
.project() let Some(task_inventory) = workspace
.read(cx) .project()
.task_store() .read(cx)
.read(cx) .task_store()
.task_inventory() .read(cx)
.cloned() .task_inventory()
else { .cloned()
return Vec::new(); else {
}; return Task::ready(Vec::new());
let (file, language) = task_contexts };
.location() let (file, language) = task_contexts
.map(|location| { .location()
let buffer = location.buffer.read(cx); .map(|location| {
( let buffer = location.buffer.read(cx);
buffer.file().cloned(), (
buffer.language_at(location.range.start), buffer.file().cloned(),
) buffer.language_at(location.range.start),
}) )
.unwrap_or_default(); })
task_inventory .unwrap_or_default();
.read(cx) task_inventory
.list_tasks(file, language, task_contexts.worktree(), cx) .read(cx)
})?; .list_tasks(file, language, task_contexts.worktree(), cx)
})?
.await;
let did_spawn = workspace let did_spawn = workspace
.update_in(cx, |workspace, window, cx| { .update_in(cx, |workspace, window, cx| {