debugger/tasks: Remove TaskType enum (#29208)
Closes #ISSUE

Release Notes:

- N/A

---------

Co-authored-by: Cole Miller <m@cole-miller.net>
Co-authored-by: Anthony Eid <hello@anthonyeid.me>
Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Anthony <anthony@zed.dev>
Co-authored-by: Conrad <conrad@zed.dev>

This commit is contained in: parent 053fafa90e, commit 67615b968b
53 changed files with 1272 additions and 1114 deletions

@@ -1,6 +1,6 @@
use super::{
breakpoint_store::BreakpointStore,
locators::DapLocator,
locators,
session::{self, Session, SessionStateEvent},
};
use crate::{

@@ -13,10 +13,12 @@ use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use dap::{
Capabilities, CompletionItem, CompletionsArguments, DapRegistry, EvaluateArguments,
EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments, Source,
StackFrameId, StartDebuggingRequestArguments,
adapters::{DapStatus, DebugAdapterBinary, DebugAdapterName, TcpArguments},
Capabilities, CompletionItem, CompletionsArguments, DapRegistry, DebugRequest,
EvaluateArguments, EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
Source, StackFrameId, StartDebuggingRequestArguments,
adapters::{
DapStatus, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition, TcpArguments,
},
client::SessionId,
messages::Message,
requests::{Completions, Evaluate, Request as _, RunInTerminal, StartDebugging},

@@ -49,9 +51,9 @@ use std::{
ffi::OsStr,
net::Ipv4Addr,
path::{Path, PathBuf},
sync::Arc,
sync::{Arc, Once},
};
use task::{DebugTaskDefinition, DebugTaskTemplate};
use task::{DebugScenario, SpawnInTerminal};
use util::ResultExt as _;
use worktree::Worktree;

@@ -95,7 +97,6 @@ pub struct LocalDapStore {
environment: Entity<ProjectEnvironment>,
language_registry: Arc<LanguageRegistry>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
locators: HashMap<String, Arc<dyn DapLocator>>,
}

pub struct SshDapStore {

@@ -118,9 +119,14 @@ pub struct DapStore {
impl EventEmitter<DapStoreEvent> for DapStore {}

impl DapStore {
pub fn init(client: &AnyProtoClient) {
pub fn init(client: &AnyProtoClient, cx: &mut App) {
static ADD_LOCATORS: Once = Once::new();
client.add_entity_request_handler(Self::handle_run_debug_locator);
client.add_entity_request_handler(Self::handle_get_debug_adapter_binary);
ADD_LOCATORS.call_once(|| {
DapRegistry::global(cx)
.add_locator("cargo".into(), Arc::new(locators::cargo::CargoLocator {}))
});
}

#[expect(clippy::too_many_arguments)]

@@ -135,11 +141,6 @@ impl DapStore {
breakpoint_store: Entity<BreakpointStore>,
cx: &mut Context<Self>,
) -> Self {
let locators = HashMap::from_iter([(
"cargo".to_string(),
Arc::new(super::locators::cargo::CargoLocator {}) as _,
)]);

let mode = DapStoreMode::Local(LocalDapStore {
fs,
environment,

@@ -147,7 +148,6 @@ impl DapStore {
node_runtime,
toolchain_store,
language_registry,
locators,
});

Self::new(mode, breakpoint_store, worktree_store, cx)

@@ -273,7 +273,7 @@ impl DapStore {
DapStoreMode::Ssh(ssh) => {
let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
project_id: ssh.upstream_project_id,
task: Some(definition.to_proto()),
definition: Some(definition.to_proto()),
});
let ssh_client = ssh.ssh_client.clone();

@@ -326,34 +326,100 @@ impl DapStore {
}
}

pub fn debug_scenario_for_build_task(
&self,
mut build: SpawnInTerminal,
unresoved_label: SharedString,
adapter: SharedString,
cx: &mut App,
) -> Option<DebugScenario> {
build.args = build
.args
.into_iter()
.map(|arg| {
if arg.starts_with("$") {
arg.strip_prefix("$")
.and_then(|arg| build.env.get(arg).map(ToOwned::to_owned))
.unwrap_or_else(|| arg)
} else {
arg
}
})
.collect();

DapRegistry::global(cx)
.locators()
.values()
.find(|locator| locator.accepts(&build))
.map(|_| DebugScenario {
adapter,
label: format!("Debug `{}`", build.label).into(),
build: Some(unresoved_label),
request: None,
initialize_args: None,
tcp_connection: None,
stop_on_entry: None,
})
}

pub fn run_debug_locator(
&mut self,
template: DebugTaskTemplate,
mut build_command: SpawnInTerminal,
cx: &mut Context<Self>,
) -> Task<Result<DebugTaskDefinition>> {
let Some(locator_name) = template.locator else {
return Task::ready(Ok(template.definition));
};

) -> Task<Result<DebugRequest>> {
match &self.mode {
DapStoreMode::Local(local) => {
if let Some(locator) = local.locators.get(&locator_name).cloned() {
cx.background_spawn(
async move { locator.run_locator(template.definition).await },
)
DapStoreMode::Local(_) => {
// Pre-resolve args with existing environment.
build_command.args = build_command
.args
.into_iter()
.map(|arg| {
if arg.starts_with("$") {
arg.strip_prefix("$")
.and_then(|arg| build_command.env.get(arg).map(ToOwned::to_owned))
.unwrap_or_else(|| arg)
} else {
arg
}
})
.collect();
let locators = DapRegistry::global(cx)
.locators()
.values()
.filter(|locator| locator.accepts(&build_command))
.cloned()
.collect::<Vec<_>>();
if !locators.is_empty() {
cx.background_spawn(async move {
for locator in locators {
let result = locator
.run(build_command.clone())
.await
.log_with_level(log::Level::Error);
if let Some(result) = result {
return Ok(result);
}
}
Err(anyhow!(
"None of the locators for task `{}` completed successfully",
build_command.label
))
})
} else {
Task::ready(Err(anyhow!("Couldn't find locator {}", locator_name)))
Task::ready(Err(anyhow!(
"Couldn't find any locator for task `{}`. Specify the `attach` or `launch` arguments in your debug scenario definition",
build_command.label
)))
}
}
DapStoreMode::Ssh(ssh) => {
let request = ssh.upstream_client.request(proto::RunDebugLocator {
let request = ssh.upstream_client.request(proto::RunDebugLocators {
project_id: ssh.upstream_project_id,
locator: locator_name,
task: Some(template.definition.to_proto()),
build_command: Some(build_command.to_proto()),
});
cx.background_spawn(async move {
let response = request.await?;
DebugTaskDefinition::from_proto(response)
DebugRequest::from_proto(response)
})
}
DapStoreMode::Collab => {

@@ -943,22 +1009,19 @@ impl DapStore {

async fn handle_run_debug_locator(
this: Entity<Self>,
envelope: TypedEnvelope<proto::RunDebugLocator>,
envelope: TypedEnvelope<proto::RunDebugLocators>,
mut cx: AsyncApp,
) -> Result<proto::DebugTaskDefinition> {
let template = DebugTaskTemplate {
locator: Some(envelope.payload.locator),
definition: DebugTaskDefinition::from_proto(
envelope
.payload
.task
.ok_or_else(|| anyhow!("missing definition"))?,
)?,
};
let definition = this
.update(&mut cx, |this, cx| this.run_debug_locator(template, cx))?
) -> Result<proto::DebugRequest> {
let task = envelope
.payload
.build_command
.ok_or_else(|| anyhow!("missing definition"))?;
let build_task = SpawnInTerminal::from_proto(task);
let request = this
.update(&mut cx, |this, cx| this.run_debug_locator(build_task, cx))?
.await?;
Ok(definition.to_proto())

Ok(request.to_proto())
}

async fn handle_get_debug_adapter_binary(

@@ -969,7 +1032,7 @@ impl DapStore {
let definition = DebugTaskDefinition::from_proto(
envelope
.payload
.task
.definition
.ok_or_else(|| anyhow!("missing definition"))?,
)?;
let binary = this
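
The `run_debug_locator` rewrite above replaces the old name-keyed lookup (`template.locator`) with registry-wide matching: every locator whose `accepts` returns true for the build command is tried in turn until one produces a `DebugRequest`. A minimal self-contained sketch of that selection loop follows; `BuildCommand`, `DebugRequest`, `Locator`, and `CargoLocatorSketch` are placeholder names invented for illustration, standing in for `SpawnInTerminal`, `dap::DebugRequest`, and the async `DapLocator`s registered on `DapRegistry`, with the `Task`/`background_spawn` plumbing omitted.

// Stand-in for the fields of task::SpawnInTerminal that the flow reads.
struct BuildCommand {
    label: String,
    command: String,
    args: Vec<String>,
}

// Stand-in for dap::DebugRequest.
struct DebugRequest {
    program: String,
}

// Placeholder for the locator interface: a cheap `accepts` filter plus a `run` step.
trait Locator {
    fn accepts(&self, build: &BuildCommand) -> bool;
    fn run(&self, build: &BuildCommand) -> Result<DebugRequest, String>;
}

struct CargoLocatorSketch;

impl Locator for CargoLocatorSketch {
    fn accepts(&self, build: &BuildCommand) -> bool {
        build.command == "cargo" && !build.args.is_empty()
    }
    fn run(&self, build: &BuildCommand) -> Result<DebugRequest, String> {
        // The real locator shells out to `cargo --message-format=json`; this just fakes a path.
        Ok(DebugRequest {
            program: format!("target/debug/{}", build.label),
        })
    }
}

// Mirrors the DapStoreMode::Local branch: collect every accepting locator, try them in
// order, return the first successful request, otherwise an error naming the task.
fn run_debug_locators(
    locators: &[Box<dyn Locator>],
    build: BuildCommand,
) -> Result<DebugRequest, String> {
    let candidates: Vec<&dyn Locator> = locators
        .iter()
        .filter(|locator| locator.accepts(&build))
        .map(|locator| locator.as_ref())
        .collect();
    if candidates.is_empty() {
        return Err(format!("Couldn't find any locator for task `{}`", build.label));
    }
    for locator in candidates {
        if let Ok(request) = locator.run(&build) {
            return Ok(request);
        }
    }
    Err(format!(
        "None of the locators for task `{}` completed successfully",
        build.label
    ))
}

fn main() {
    let locators: Vec<Box<dyn Locator>> = vec![Box::new(CargoLocatorSketch)];
    let build = BuildCommand {
        label: "test locators".into(),
        command: "cargo".into(),
        args: vec!["test".into(), "--no-run".into()],
    };
    match run_debug_locators(&locators, build) {
        Ok(request) => println!("debug program: {}", request.program),
        Err(message) => println!("no locator matched: {message}"),
    }
}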

@@ -1,34 +0,0 @@
use anyhow::{Result, anyhow};
use cargo::CargoLocator;
use collections::HashMap;
use gpui::SharedString;
use locators::DapLocator;
use task::{DebugTaskDefinition, DebugTaskTemplate};

mod cargo;
pub mod locators;

pub(super) struct LocatorStore {
locators: HashMap<SharedString, Box<dyn DapLocator>>,
}

impl LocatorStore {
pub(super) fn new() -> Self {
Self { locators }
}

pub(super) async fn resolve_debug_config(
&self,
template: DebugTaskTemplate,
) -> Result<DebugTaskDefinition> {
let Some(locator_name) = &template.locator else {
return Ok(template.definition);
};

if let Some(locator) = self.locators.get(locator_name as &str) {
locator.run_locator(template.definition).await
} else {
Err(anyhow!("Couldn't find locator {}", locator_name))
}
}
}

@@ -1,10 +1 @@
use anyhow::Result;
use async_trait::async_trait;
use task::DebugTaskDefinition;

pub(crate) mod cargo;

#[async_trait]
pub(super) trait DapLocator: Send + Sync {
async fn run_locator(&self, debug_config: DebugTaskDefinition) -> Result<DebugTaskDefinition>;
}

@@ -1,12 +1,12 @@
use super::DapLocator;
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use dap::{DapLocator, DebugRequest};
use serde_json::Value;
use smol::{
io::AsyncReadExt,
process::{Command, Stdio},
};
use task::DebugTaskDefinition;
use task::SpawnInTerminal;

pub(crate) struct CargoLocator;

@@ -37,26 +37,31 @@ async fn find_best_executable(executables: &[String], test_name: &str) -> Option
}
#[async_trait]
impl DapLocator for CargoLocator {
async fn run_locator(
&self,
mut debug_config: DebugTaskDefinition,
) -> Result<DebugTaskDefinition> {
let Some(launch_config) = (match &mut debug_config.request {
task::DebugRequest::Launch(launch_config) => Some(launch_config),
_ => None,
}) else {
return Err(anyhow!("Couldn't get launch config in locator"));
fn accepts(&self, build_config: &SpawnInTerminal) -> bool {
if build_config.command != "cargo" {
return false;
}
let Some(command) = build_config.args.first().map(|s| s.as_str()) else {
return false;
};
if matches!(command, "check" | "run") {
return false;
}
!matches!(command, "test" | "bench")
|| build_config.args.iter().any(|arg| arg == "--no-run")
}

let Some(cwd) = launch_config.cwd.clone() else {
async fn run(&self, build_config: SpawnInTerminal) -> Result<DebugRequest> {
let Some(cwd) = build_config.cwd.clone() else {
return Err(anyhow!(
"Couldn't get cwd from debug config which is needed for locators"
));
};

let mut child = Command::new("cargo")
.args(&launch_config.args)
.args(&build_config.args)
.arg("--message-format=json")
.envs(build_config.env.iter().map(|(k, v)| (k.clone(), v.clone())))
.current_dir(cwd)
.stdout(Stdio::piped())
.spawn()?;

@@ -85,19 +90,16 @@ impl DapLocator for CargoLocator {
return Err(anyhow!("Couldn't get executable in cargo locator"));
};

let is_test = launch_config
.args
.first()
.map_or(false, |arg| arg == "test");
let is_test = build_config.args.first().map_or(false, |arg| arg == "test");

let mut test_name = None;
if is_test {
if let Some(package_index) = launch_config
if let Some(package_index) = build_config
.args
.iter()
.position(|arg| arg == "-p" || arg == "--package")
{
test_name = launch_config
test_name = build_config
.args
.get(package_index + 2)
.filter(|name| !name.starts_with("--"))

@@ -116,12 +118,17 @@ impl DapLocator for CargoLocator {
return Err(anyhow!("Couldn't get executable in cargo locator"));
};

launch_config.program = executable;
let args = test_name.into_iter().collect();

launch_config.args.clear();
if let Some(test_name) = test_name {
launch_config.args.push(test_name);
}
Ok(debug_config)
Ok(DebugRequest::Launch(task::LaunchRequest {
program: executable,
cwd: build_config.cwd.clone(),
args,
env: build_config
.env
.iter()
.map(|(k, v)| (k.clone(), v.clone()))
.collect(),
}))
}
}
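
The part of the new locator contract that decides when the cargo locator applies is the `accepts` check above. Below is the same decision logic restated as a pure function over a stand-in struct (`BuildTask` is hypothetical and only carries the two `SpawnInTerminal` fields the check actually reads), with a few cases showing which cargo invocations are considered debuggable.

// Stand-in for the fields of task::SpawnInTerminal that the check reads.
struct BuildTask {
    command: String,
    args: Vec<String>,
}

// Same decision logic as CargoLocator::accepts above, extracted as a pure function.
fn cargo_locator_accepts(build: &BuildTask) -> bool {
    if build.command != "cargo" {
        return false;
    }
    let Some(subcommand) = build.args.first().map(|s| s.as_str()) else {
        return false;
    };
    if matches!(subcommand, "check" | "run") {
        return false;
    }
    // `test`/`bench` are only debuggable when the binary is built but not executed.
    !matches!(subcommand, "test" | "bench") || build.args.iter().any(|arg| arg == "--no-run")
}

fn main() {
    let cases = [
        ("cargo", vec!["build"], true),
        ("cargo", vec!["test"], false),
        ("cargo", vec!["test", "--no-run"], true),
        ("cargo", vec!["run"], false),
        ("cargo", vec!["check"], false),
        ("make", vec!["all"], false),
    ];
    for (command, args, expected) in cases {
        let task = BuildTask {
            command: command.to_string(),
            args: args.iter().map(|s| s.to_string()).collect(),
        };
        assert_eq!(cargo_locator_accepts(&task), expected);
    }
    println!("all cases behave as expected");
}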

@@ -12,7 +12,7 @@ use super::dap_command::{
use super::dap_store::DapStore;
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet, IndexMap, IndexSet};
use dap::adapters::DebugAdapterBinary;
use dap::adapters::{DebugAdapterBinary, DebugTaskDefinition};
use dap::messages::Response;
use dap::{
Capabilities, ContinueArguments, EvaluateArgumentsContext, Module, Source, StackFrameId,

@@ -42,7 +42,6 @@ use std::{
path::Path,
sync::Arc,
};
use task::DebugTaskDefinition;
use text::{PointUtf16, ToPointUtf16};
use util::{ResultExt, merge_json_value_into};
use worktree::Worktree;

@@ -125,7 +124,6 @@ enum Mode {
pub struct LocalMode {
client: Arc<DebugAdapterClient>,
binary: DebugAdapterBinary,
root_binary: Option<Arc<DebugAdapterBinary>>,
pub(crate) breakpoint_store: Entity<BreakpointStore>,
tmp_breakpoint: Option<SourceBreakpoint>,
worktree: WeakEntity<Worktree>,

@@ -160,12 +158,6 @@ impl LocalMode {
messages_tx.unbounded_send(message).ok();
});

let root_binary = if let Some(parent_session) = parent_session.as_ref() {
Some(parent_session.read_with(&cx, |session, _| session.root_binary().clone())?)
} else {
None
};

let client = Arc::new(
if let Some(client) = parent_session
.and_then(|session| cx.update(|cx| session.read(cx).adapter_client()).ok())

@@ -186,7 +178,6 @@ impl LocalMode {
breakpoint_store,
worktree,
tmp_breakpoint: None,
root_binary,
binary,
})
}

@@ -834,19 +825,6 @@ impl Session {
&self.capabilities
}

pub(crate) fn root_binary(&self) -> Arc<DebugAdapterBinary> {
match &self.mode {
Mode::Building => {
// todo(debugger): Implement root_binary for building mode
unimplemented!()
}
Mode::Running(running) => running
.root_binary
.clone()
.unwrap_or_else(|| Arc::new(running.binary.clone())),
}
}

pub fn binary(&self) -> &DebugAdapterBinary {
let Mode::Running(local_mode) = &self.mode else {
panic!("Session is not local");

@@ -855,10 +833,10 @@ impl Session {
}

pub fn adapter_name(&self) -> SharedString {
self.definition.adapter.clone().into()
self.definition.adapter.clone()
}

pub fn label(&self) -> String {
pub fn label(&self) -> SharedString {
self.definition.label.clone()
}

@@ -889,7 +867,7 @@ impl Session {
}

pub(super) fn request_initialize(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let adapter_id = self.definition.adapter.clone();
let adapter_id = String::from(self.definition.adapter.clone());
let request = Initialize { adapter_id };
match &self.mode {
Mode::Running(local_mode) => {

@@ -826,7 +826,7 @@ impl Project {
SettingsObserver::init(&client);
TaskStore::init(Some(&client));
ToolchainStore::init(&client);
DapStore::init(&client);
DapStore::init(&client, cx);
BreakpointStore::init(&client);
}

@@ -1159,7 +1159,7 @@ impl Project {
SettingsObserver::init(&ssh_proto);
TaskStore::init(Some(&ssh_proto));
ToolchainStore::init(&ssh_proto);
DapStore::init(&ssh_proto);
DapStore::init(&ssh_proto, cx);
GitStore::init(&ssh_proto);

this

@@ -8,7 +8,7 @@ use lsp::LanguageServerName;
use paths::{
EDITORCONFIG_NAME, local_debug_file_relative_path, local_settings_file_relative_path,
local_tasks_file_relative_path, local_vscode_launch_file_relative_path,
local_vscode_tasks_file_relative_path,
local_vscode_tasks_file_relative_path, task_file_name,
};
use rpc::{
AnyProtoClient, TypedEnvelope,

@@ -18,7 +18,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources,
SettingsStore, TaskKind, parse_json_with_comments, watch_config_file,
SettingsStore, parse_json_with_comments, watch_config_file,
};
use std::{
path::{Path, PathBuf},

@@ -377,7 +377,7 @@ pub struct SettingsObserver {
worktree_store: Entity<WorktreeStore>,
project_id: u64,
task_store: Entity<TaskStore>,
_global_task_config_watchers: (Task<()>, Task<()>),
_global_task_config_watcher: Task<()>,
}

/// SettingsObserver observers changes to .zed/{settings, task}.json files in local worktrees

@@ -405,19 +405,10 @@ impl SettingsObserver {
mode: SettingsObserverMode::Local(fs.clone()),
downstream_client: None,
project_id: 0,
_global_task_config_watchers: (
Self::subscribe_to_global_task_file_changes(
fs.clone(),
TaskKind::Script,
paths::tasks_file().clone(),
cx,
),
Self::subscribe_to_global_task_file_changes(
fs,
TaskKind::Debug,
paths::debug_tasks_file().clone(),
cx,
),
_global_task_config_watcher: Self::subscribe_to_global_task_file_changes(
fs.clone(),
paths::tasks_file().clone(),
cx,
),
}
}

@@ -434,19 +425,10 @@ impl SettingsObserver {
mode: SettingsObserverMode::Remote,
downstream_client: None,
project_id: 0,
_global_task_config_watchers: (
Self::subscribe_to_global_task_file_changes(
fs.clone(),
TaskKind::Script,
paths::tasks_file().clone(),
cx,
),
Self::subscribe_to_global_task_file_changes(
fs.clone(),
TaskKind::Debug,
paths::debug_tasks_file().clone(),
cx,
),
_global_task_config_watcher: Self::subscribe_to_global_task_file_changes(
fs.clone(),
paths::tasks_file().clone(),
cx,
),
}
}

@@ -575,7 +557,7 @@ impl SettingsObserver {
)
.unwrap(),
);
(settings_dir, LocalSettingsKind::Tasks(TaskKind::Script))
(settings_dir, LocalSettingsKind::Tasks)
} else if path.ends_with(local_vscode_tasks_file_relative_path()) {
let settings_dir = Arc::<Path>::from(
path.ancestors()

@@ -587,7 +569,7 @@ impl SettingsObserver {
)
.unwrap(),
);
(settings_dir, LocalSettingsKind::Tasks(TaskKind::Script))
(settings_dir, LocalSettingsKind::Tasks)
} else if path.ends_with(local_debug_file_relative_path()) {
let settings_dir = Arc::<Path>::from(
path.ancestors()

@@ -599,7 +581,7 @@ impl SettingsObserver {
)
.unwrap(),
);
(settings_dir, LocalSettingsKind::Tasks(TaskKind::Debug))
(settings_dir, LocalSettingsKind::Debug)
} else if path.ends_with(local_vscode_launch_file_relative_path()) {
let settings_dir = Arc::<Path>::from(
path.ancestors()

@@ -611,7 +593,7 @@ impl SettingsObserver {
)
.unwrap(),
);
(settings_dir, LocalSettingsKind::Tasks(TaskKind::Debug))
(settings_dir, LocalSettingsKind::Debug)
} else if path.ends_with(EDITORCONFIG_NAME) {
let Some(settings_dir) = path.parent().map(Arc::from) else {
continue;

@@ -747,7 +729,7 @@ impl SettingsObserver {
}
}
}),
LocalSettingsKind::Tasks(task_kind) => {
LocalSettingsKind::Tasks => {
let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {

@@ -755,7 +737,6 @@ impl SettingsObserver {
path: directory.as_ref(),
}),
file_content.as_deref(),
task_kind,
cx,
)
});

@@ -772,7 +753,38 @@ impl SettingsObserver {
}
Ok(()) => {
cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok(
task_kind.config_in_dir(&directory)
directory.join(task_file_name())
)));
}
}
}
LocalSettingsKind::Debug => {
let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_debug_scenarios(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree_id,
path: directory.as_ref(),
}),
file_content.as_deref(),
cx,
)
});

match result {
Err(InvalidSettingsError::Debug { path, message }) => {
log::error!(
"Failed to set local debug scenarios in {path:?}: {message:?}"
);
cx.emit(SettingsObserverEvent::LocalTasksUpdated(Err(
InvalidSettingsError::Debug { path, message },
)));
}
Err(e) => {
log::error!("Failed to set local tasks: {e}");
}
Ok(()) => {
cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok(
directory.join(task_file_name())
)));
}
}

@@ -795,7 +807,6 @@ impl SettingsObserver {

fn subscribe_to_global_task_file_changes(
fs: Arc<dyn Fs>,
task_kind: TaskKind,
file_path: PathBuf,
cx: &mut Context<Self>,
) -> Task<()> {

@@ -815,7 +826,6 @@ impl SettingsObserver {
.update_user_tasks(
TaskSettingsLocation::Global(&file_path),
Some(&user_tasks_content),
task_kind,
cx,
)
.log_err();

@@ -828,7 +838,6 @@ impl SettingsObserver {
task_store.update_user_tasks(
TaskSettingsLocation::Global(&file_path),
Some(&user_tasks_content),
task_kind,
cx,
)
}) else {

@@ -856,15 +865,17 @@ impl SettingsObserver {
pub fn local_settings_kind_from_proto(kind: proto::LocalSettingsKind) -> LocalSettingsKind {
match kind {
proto::LocalSettingsKind::Settings => LocalSettingsKind::Settings,
proto::LocalSettingsKind::Tasks => LocalSettingsKind::Tasks(TaskKind::Script),
proto::LocalSettingsKind::Tasks => LocalSettingsKind::Tasks,
proto::LocalSettingsKind::Editorconfig => LocalSettingsKind::Editorconfig,
proto::LocalSettingsKind::Debug => LocalSettingsKind::Debug,
}
}

pub fn local_settings_kind_to_proto(kind: LocalSettingsKind) -> proto::LocalSettingsKind {
match kind {
LocalSettingsKind::Settings => proto::LocalSettingsKind::Settings,
LocalSettingsKind::Tasks(_) => proto::LocalSettingsKind::Tasks,
LocalSettingsKind::Tasks => proto::LocalSettingsKind::Tasks,
LocalSettingsKind::Editorconfig => proto::LocalSettingsKind::Editorconfig,
LocalSettingsKind::Debug => proto::LocalSettingsKind::Debug,
}
}
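
The `LocalSettingsKind` enum itself lives in the `settings` crate and is not part of this diff; judging from the match arms above, its change is roughly the following. This is an inferred sketch, not the crate's literal definition, and the `migrate` helper is added purely to illustrate how the old variants map onto the new ones.

// Inferred shapes only: the real enums live in the `settings` crate and the proto definitions.

#[allow(dead_code)]
enum TaskKind {
    Script,
    Debug,
}

// Before this commit: the task kind rode along inside the Tasks variant.
#[allow(dead_code)]
enum LocalSettingsKindBefore {
    Settings,
    Tasks(TaskKind),
    Editorconfig,
}

// After: debug scenarios get their own settings kind, so TaskKind is no longer needed here.
#[derive(Debug)]
enum LocalSettingsKindAfter {
    Settings,
    Tasks,
    Debug,
    Editorconfig,
}

// Mirrors how the path-matching and proto-conversion arms above reclassify local files.
fn migrate(kind: LocalSettingsKindBefore) -> LocalSettingsKindAfter {
    match kind {
        LocalSettingsKindBefore::Settings => LocalSettingsKindAfter::Settings,
        LocalSettingsKindBefore::Tasks(TaskKind::Script) => LocalSettingsKindAfter::Tasks,
        LocalSettingsKindBefore::Tasks(TaskKind::Debug) => LocalSettingsKindAfter::Debug,
        LocalSettingsKindBefore::Editorconfig => LocalSettingsKindAfter::Editorconfig,
    }
}

fn main() {
    println!("{:?}", migrate(LocalSettingsKindBefore::Tasks(TaskKind::Debug)));
}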

@@ -292,7 +292,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
})
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved.unwrap();
let resolved = task.resolved;
(
source_kind,
task.resolved_label,

@@ -359,7 +359,6 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
}])
.to_string(),
),
settings::TaskKind::Script,
)
.unwrap();
});

@@ -370,7 +369,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
.update(|cx| get_all_tasks(&project, &task_contexts, cx))
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved.unwrap();
let resolved = task.resolved;
(
source_kind,
task.resolved_label,

@@ -495,7 +494,7 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
active_worktree_tasks
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved.unwrap();
let resolved = task.resolved;
(source_kind, resolved.command)
})
.collect::<Vec<_>>(),

@@ -13,14 +13,15 @@ use collections::{HashMap, HashSet, VecDeque};
use gpui::{App, AppContext as _, Entity, SharedString, Task};
use itertools::Itertools;
use language::{
ContextProvider, File, Language, LanguageToolchainStore, Location,
Buffer, ContextProvider, File, Language, LanguageToolchainStore, Location,
language_settings::language_settings,
};
use lsp::{LanguageServerId, LanguageServerName};
use settings::{InvalidSettingsError, TaskKind, parse_json_with_comments};
use paths::{debug_task_file_name, task_file_name};
use settings::{InvalidSettingsError, parse_json_with_comments};
use task::{
DebugTaskTemplate, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
TaskVariables, VariableName,
DebugScenario, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates, TaskVariables,
VariableName,
};
use text::{BufferId, Point, ToPoint};
use util::{NumericPrefixWithSuffix, ResultExt as _, paths::PathExt as _, post_inc};

@@ -32,13 +33,84 @@ use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
#[derive(Debug, Default)]
pub struct Inventory {
last_scheduled_tasks: VecDeque<(TaskSourceKind, ResolvedTask)>,
templates_from_settings: ParsedTemplates,
templates_from_settings: InventoryFor<TaskTemplate>,
scenarios_from_settings: InventoryFor<DebugScenario>,
}

#[derive(Debug, Default)]
struct ParsedTemplates {
global: HashMap<PathBuf, Vec<TaskTemplate>>,
worktree: HashMap<WorktreeId, HashMap<(Arc<Path>, TaskKind), Vec<TaskTemplate>>>,
// Helper trait for better error messages in [InventoryFor]
trait InventoryContents: Clone {
const GLOBAL_SOURCE_FILE: &'static str;
const LABEL: &'static str;
}

impl InventoryContents for TaskTemplate {
const GLOBAL_SOURCE_FILE: &'static str = "tasks.json";
const LABEL: &'static str = "tasks";
}

impl InventoryContents for DebugScenario {
const GLOBAL_SOURCE_FILE: &'static str = "debug.json";

const LABEL: &'static str = "debug scenarios";
}

#[derive(Debug)]
struct InventoryFor<T> {
global: HashMap<PathBuf, Vec<T>>,
worktree: HashMap<WorktreeId, HashMap<Arc<Path>, Vec<T>>>,
}

impl<T: InventoryContents> InventoryFor<T> {
fn worktree_scenarios(
&self,
worktree: Option<WorktreeId>,
) -> impl '_ + Iterator<Item = (TaskSourceKind, T)> {
worktree.into_iter().flat_map(|worktree| {
self.worktree
.get(&worktree)
.into_iter()
.flatten()
.flat_map(|(directory, templates)| {
templates.iter().map(move |template| (directory, template))
})
.map(move |(directory, template)| {
(
TaskSourceKind::Worktree {
id: worktree,
directory_in_worktree: directory.to_path_buf(),
id_base: Cow::Owned(format!(
"local worktree {} from directory {directory:?}",
T::LABEL
)),
},
template.clone(),
)
})
})
}

fn global_scenarios(&self) -> impl '_ + Iterator<Item = (TaskSourceKind, T)> {
self.global.iter().flat_map(|(file_path, templates)| {
templates.into_iter().map(|template| {
(
TaskSourceKind::AbsPath {
id_base: Cow::Owned(format!("global {}", T::GLOBAL_SOURCE_FILE)),
abs_path: file_path.clone(),
},
template.clone(),
)
})
})
}
}

impl<T> Default for InventoryFor<T> {
fn default() -> Self {
Self {
global: HashMap::default(),
worktree: HashMap::default(),
}
}
}

/// Kind of a source the tasks are fetched from, used to display more source information in the UI.

@@ -134,22 +206,40 @@ impl Inventory {
cx.new(|_| Self::default())
}

pub fn list_debug_tasks(&self) -> Vec<&TaskTemplate> {
self.templates_from_settings
.worktree
.values()
.flat_map(|tasks| {
tasks.iter().filter_map(|(kind, tasks)| {
if matches!(kind.1, TaskKind::Debug) {
Some(tasks)
} else {
None
}
})
})
.flatten()
pub fn list_debug_scenarios(&self, worktree: Option<WorktreeId>) -> Vec<DebugScenario> {
let global_scenarios = self.global_debug_scenarios_from_settings();
let worktree_scenarios = self.worktree_scenarios_from_settings(worktree);

worktree_scenarios
.chain(global_scenarios)
.map(|(_, scenario)| scenario)
.collect()
}

pub fn task_template_by_label(
&self,
buffer: Option<Entity<Buffer>>,
label: &str,
cx: &App,
) -> Option<TaskTemplate> {
let (worktree_id, file, language) = buffer
.map(|buffer| {
let buffer = buffer.read(cx);
let file = buffer.file().cloned();
(
file.as_ref().map(|file| file.worktree_id(cx)),
file,
buffer.language().cloned(),
)
})
.unwrap_or((None, None, None));

self.list_tasks(file, language, worktree_id, cx)
.iter()
.find(|(_, template)| template.label == label)
.map(|val| val.1.clone())
}

/// Pulls its task sources relevant to the worktree and the language given,
/// returns all task templates with their source kinds, worktree tasks first, language tasks second
/// and global tasks last. No specific order inside source kinds groups.

@@ -160,10 +250,11 @@ impl Inventory {
worktree: Option<WorktreeId>,
cx: &App,
) -> Vec<(TaskSourceKind, TaskTemplate)> {
let global_tasks = self.global_templates_from_settings();
let worktree_tasks = self.worktree_templates_from_settings(worktree);
let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language {
name: language.name().into(),
});
let global_tasks = self.global_templates_from_settings();
let language_tasks = language
.filter(|language| {
language_settings(Some(language.name()), file.as_ref(), cx)

@@ -173,11 +264,11 @@ impl Inventory {
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
.into_iter()
.flat_map(|tasks| tasks.0.into_iter())
.flat_map(|task| Some((task_source_kind.clone()?, task)))
.chain(global_tasks);
.flat_map(|task| Some((task_source_kind.clone()?, task)));

self.worktree_templates_from_settings(worktree)
worktree_tasks
.chain(language_tasks)
.chain(global_tasks)
.collect()
}

@@ -358,51 +449,27 @@ impl Inventory {
fn global_templates_from_settings(
&self,
) -> impl '_ + Iterator<Item = (TaskSourceKind, TaskTemplate)> {
self.templates_from_settings
.global
.iter()
.flat_map(|(file_path, templates)| {
templates.into_iter().map(|template| {
(
TaskSourceKind::AbsPath {
id_base: match template.task_type {
task::TaskType::Script => Cow::Borrowed("global tasks.json"),
task::TaskType::Debug(_) => Cow::Borrowed("global debug.json"),
},
abs_path: file_path.clone(),
},
template.clone(),
)
})
})
self.templates_from_settings.global_scenarios()
}

fn global_debug_scenarios_from_settings(
&self,
) -> impl '_ + Iterator<Item = (TaskSourceKind, DebugScenario)> {
self.scenarios_from_settings.global_scenarios()
}

fn worktree_scenarios_from_settings(
&self,
worktree: Option<WorktreeId>,
) -> impl '_ + Iterator<Item = (TaskSourceKind, DebugScenario)> {
self.scenarios_from_settings.worktree_scenarios(worktree)
}

fn worktree_templates_from_settings(
&self,
worktree: Option<WorktreeId>,
) -> impl '_ + Iterator<Item = (TaskSourceKind, TaskTemplate)> {
worktree.into_iter().flat_map(|worktree| {
self.templates_from_settings
.worktree
.get(&worktree)
.into_iter()
.flatten()
.flat_map(|(directory, templates)| {
templates.iter().map(move |template| (directory, template))
})
.map(move |((directory, _task_kind), template)| {
(
TaskSourceKind::Worktree {
id: worktree,
directory_in_worktree: directory.to_path_buf(),
id_base: Cow::Owned(format!(
"local worktree tasks from directory {directory:?}"
)),
},
template.clone(),
)
})
})
self.templates_from_settings.worktree_scenarios(worktree)
}

/// Updates in-memory task metadata from the JSON string given.

@@ -413,7 +480,6 @@ impl Inventory {
&mut self,
location: TaskSettingsLocation<'_>,
raw_tasks_json: Option<&str>,
task_kind: TaskKind,
) -> Result<(), InvalidSettingsError> {
let raw_tasks = match parse_json_with_comments::<Vec<serde_json::Value>>(
raw_tasks_json.unwrap_or("[]"),

@@ -424,21 +490,16 @@ impl Inventory {
path: match location {
TaskSettingsLocation::Global(path) => path.to_owned(),
TaskSettingsLocation::Worktree(settings_location) => {
task_kind.config_in_dir(settings_location.path)
settings_location.path.join(task_file_name())
}
},
message: format!("Failed to parse tasks file content as a JSON array: {e}"),
});
}
};
let new_templates = raw_tasks
.into_iter()
.filter_map(|raw_template| match &task_kind {
TaskKind::Script => serde_json::from_value::<TaskTemplate>(raw_template).log_err(),
TaskKind::Debug => serde_json::from_value::<DebugTaskTemplate>(raw_template)
.log_err()
.map(|content| content.to_zed_format()),
});
let new_templates = raw_tasks.into_iter().filter_map(|raw_template| {
serde_json::from_value::<TaskTemplate>(raw_template).log_err()
});

let parsed_templates = &mut self.templates_from_settings;
match location {

@@ -454,14 +515,72 @@ impl Inventory {
if let Some(worktree_tasks) =
parsed_templates.worktree.get_mut(&location.worktree_id)
{
worktree_tasks.remove(&(Arc::from(location.path), task_kind));
worktree_tasks.remove(location.path);
}
} else {
parsed_templates
.worktree
.entry(location.worktree_id)
.or_default()
.insert((Arc::from(location.path), task_kind), new_templates);
.insert(Arc::from(location.path), new_templates);
}
}
}

Ok(())
}

/// Updates in-memory task metadata from the JSON string given.
/// Will fail if the JSON is not a valid array of objects, but will continue if any object will not parse into a [`TaskTemplate`].
///
/// Global tasks are updated for no worktree provided, otherwise the worktree metadata for a given path will be updated.
pub(crate) fn update_file_based_scenarios(
&mut self,
location: TaskSettingsLocation<'_>,
raw_tasks_json: Option<&str>,
) -> Result<(), InvalidSettingsError> {
let raw_tasks = match parse_json_with_comments::<Vec<serde_json::Value>>(
raw_tasks_json.unwrap_or("[]"),
) {
Ok(tasks) => tasks,
Err(e) => {
return Err(InvalidSettingsError::Debug {
path: match location {
TaskSettingsLocation::Global(path) => path.to_owned(),
TaskSettingsLocation::Worktree(settings_location) => {
settings_location.path.join(debug_task_file_name())
}
},
message: format!("Failed to parse tasks file content as a JSON array: {e}"),
});
}
};
let new_templates = raw_tasks.into_iter().filter_map(|raw_template| {
serde_json::from_value::<DebugScenario>(raw_template).log_err()
});

let parsed_scenarios = &mut self.scenarios_from_settings;
match location {
TaskSettingsLocation::Global(path) => {
parsed_scenarios
.global
.entry(path.to_owned())
.insert_entry(new_templates.collect());
}
TaskSettingsLocation::Worktree(location) => {
let new_templates = new_templates.collect::<Vec<_>>();
if new_templates.is_empty() {
if let Some(worktree_tasks) =
parsed_scenarios.worktree.get_mut(&location.worktree_id)
{
worktree_tasks.remove(location.path);
}
} else {
parsed_scenarios
.worktree
.entry(location.worktree_id)
.or_default()
.insert(Arc::from(location.path), new_templates);
}
}
}

@@ -677,6 +796,10 @@ impl ContextProvider for BasicContextProvider {

Task::ready(Ok(task_variables))
}

fn debug_adapter(&self) -> Option<String> {
None
}
}

/// A ContextProvider that doesn't provide any task variables on it's own, though it has some associated tasks.

@@ -700,6 +823,10 @@ impl ContextProvider for ContextProviderWithTasks {
) -> Option<TaskTemplates> {
Some(self.templates.clone())
}

fn debug_adapter(&self) -> Option<String> {
None
}
}

#[cfg(test)]

@@ -744,7 +871,6 @@ mod tests {
Some(&mock_tasks_from_names(
expected_initial_state.iter().map(|name| name.as_str()),
)),
settings::TaskKind::Script,
)
.unwrap();
});

@@ -800,7 +926,6 @@ mod tests {
.into_iter()
.chain(expected_initial_state.iter().map(|name| name.as_str())),
)),
settings::TaskKind::Script,
)
.unwrap();
});

@@ -925,7 +1050,6 @@ mod tests {
.iter()
.map(|(_, name)| name.as_str()),
)),
settings::TaskKind::Script,
)
.unwrap();
inventory

@@ -937,7 +1061,6 @@ mod tests {
Some(&mock_tasks_from_names(
worktree_1_tasks.iter().map(|(_, name)| name.as_str()),
)),
settings::TaskKind::Script,
)
.unwrap();
inventory

@@ -949,7 +1072,6 @@ mod tests {
Some(&mock_tasks_from_names(
worktree_2_tasks.iter().map(|(_, name)| name.as_str()),
)),
settings::TaskKind::Script,
)
.unwrap();
});
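
The `InventoryFor<T>` / `InventoryContents` pair above replaces the old `(Arc<Path>, TaskKind)` keys with one generic container instantiated for both `TaskTemplate` and `DebugScenario`. Below is a self-contained miniature of that pattern (stub types and string keys stand in for the real templates, scenarios, worktree ids, and paths) showing why a single listing routine can serve both kinds of content.

use std::collections::HashMap;

// Miniature of the InventoryContents helper trait: per-type constants used when
// building source labels.
trait InventoryContents: Clone {
    const GLOBAL_SOURCE_FILE: &'static str;
    const LABEL: &'static str;
}

#[allow(dead_code)]
#[derive(Clone, Debug)]
struct TaskTemplateStub(String);

#[allow(dead_code)]
#[derive(Clone, Debug)]
struct DebugScenarioStub(String);

impl InventoryContents for TaskTemplateStub {
    const GLOBAL_SOURCE_FILE: &'static str = "tasks.json";
    const LABEL: &'static str = "tasks";
}

impl InventoryContents for DebugScenarioStub {
    const GLOBAL_SOURCE_FILE: &'static str = "debug.json";
    const LABEL: &'static str = "debug scenarios";
}

// Miniature of InventoryFor<T>: one map for global config files, one keyed by
// worktree directory (the real type also keys by WorktreeId).
struct InventoryFor<T> {
    global: HashMap<String, Vec<T>>,
    worktree: HashMap<String, Vec<T>>,
}

impl<T> Default for InventoryFor<T> {
    fn default() -> Self {
        Self {
            global: HashMap::default(),
            worktree: HashMap::default(),
        }
    }
}

impl<T: InventoryContents + std::fmt::Debug> InventoryFor<T> {
    // One generic listing routine serves both tasks and debug scenarios,
    // which is what the old (Arc<Path>, TaskKind) keys were emulating.
    fn list(&self) {
        for (directory, items) in &self.worktree {
            println!("local worktree {} from directory {directory:?}: {items:?}", T::LABEL);
        }
        for (path, items) in &self.global {
            println!("global {} at {path}: {items:?}", T::GLOBAL_SOURCE_FILE);
        }
    }
}

fn main() {
    let mut tasks = InventoryFor::<TaskTemplateStub>::default();
    tasks
        .worktree
        .insert(".zed".into(), vec![TaskTemplateStub("cargo build".into())]);

    let mut scenarios = InventoryFor::<DebugScenarioStub>::default();
    scenarios.global.insert(
        "/path/to/global/debug.json".into(),
        vec![DebugScenarioStub("Debug `cargo build`".into())],
    );

    tasks.list();
    scenarios.list();
}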

@@ -11,7 +11,7 @@ use language::{
proto::{deserialize_anchor, serialize_anchor},
};
use rpc::{AnyProtoClient, TypedEnvelope, proto};
use settings::{InvalidSettingsError, SettingsLocation, TaskKind};
use settings::{InvalidSettingsError, SettingsLocation};
use task::{TaskContext, TaskVariables, VariableName};
use text::{BufferId, OffsetRangeExt};
use util::ResultExt;

@@ -264,7 +264,6 @@ impl TaskStore {
&self,
location: TaskSettingsLocation<'_>,
raw_tasks_json: Option<&str>,
task_type: TaskKind,
cx: &mut Context<Self>,
) -> Result<(), InvalidSettingsError> {
let task_inventory = match self {

@@ -276,7 +275,26 @@ impl TaskStore {
.filter(|json| !json.is_empty());

task_inventory.update(cx, |inventory, _| {
inventory.update_file_based_tasks(location, raw_tasks_json, task_type)
inventory.update_file_based_tasks(location, raw_tasks_json)
})
}

pub(super) fn update_user_debug_scenarios(
&self,
location: TaskSettingsLocation<'_>,
raw_tasks_json: Option<&str>,
cx: &mut Context<Self>,
) -> Result<(), InvalidSettingsError> {
let task_inventory = match self {
TaskStore::Functional(state) => &state.task_inventory,
TaskStore::Noop => return Ok(()),
};
let raw_tasks_json = raw_tasks_json
.map(|json| json.trim())
.filter(|json| !json.is_empty());

task_inventory.update(cx, |inventory, _| {
inventory.update_file_based_scenarios(location, raw_tasks_json)
})
}
}