task: Add task contexts (#8675)
This PR supplements tasks with additional environment variables; ideally we'll be able to write a task like `cargo test -p $ZED_CURRENT_PACKAGE -- $ZED_CURRENT_FUNCTION`.

- [x] Flesh out multibuffer interactions
- [x] Add ZED_SYMBOL detection based on tree-sitter queries
- [ ] Add release note and demo
- [x] Figure out a solution for the rerun dilemma - should `task: rerun` reevaluate contexts for tasks?

This PR introduces the following variables:

- ZED_COLUMN - current line column
- ZED_ROW - current line row

and the following, which are available for buffers with associated files:

- ZED_WORKTREE_ROOT - absolute path to the root of the current worktree
- ZED_FILE - absolute path to the file
- ZED_SYMBOL - currently selected symbol; this should match the last symbol shown in a symbol breadcrumb (e.g. `mod tests > fn test_task_contexts` corresponds to a ZED_SYMBOL of `test_task_contexts`). Note that this isn't necessarily a test function, or a function at all.

You can also use these variables in the `cwd` field of task definitions (see the sketch below); the syntax matches a typical Unix shell, but since substitution is done via https://docs.rs/subst/latest/subst/#features, don't expect full shell functionality to work.

Release Notes:

- Added task contexts: additional environment variables set by Zed for task execution. The task context depends on the state of the editor at the time the task is spawned.

---------

Co-authored-by: Anthony <anthonyeid7@protonmail.com>
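For illustration, here is a minimal `.zed/tasks.json` sketch that uses the new variables. The field names (`label`, `command`, `args`, `cwd`) are taken from the `Definition` struct in this diff; the task labels and commands are hypothetical, and only `cwd` goes through `subst` substitution, while `args` rely on the shell expanding the environment variables Zed sets:

```json
[
  {
    "label": "print current file",
    "command": "cat",
    "args": ["$ZED_FILE"],
    "cwd": "$ZED_WORKTREE_ROOT"
  },
  {
    "label": "run test under cursor",
    "command": "cargo",
    "args": ["test", "$ZED_SYMBOL"],
    "cwd": "$ZED_WORKTREE_ROOT"
  }
]
```

Rerunning via `task: rerun` reuses the context captured at spawn time unless the action is invoked with `reevaluate_context: true` (see the `Rerun` action added in this diff).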
Parent: b2f18cfe71
Commit: 2201b9b116
13 changed files with 623 additions and 190 deletions
@@ -555,6 +555,7 @@ impl ExtensionStore {
language_name.clone(),
language.grammar.clone(),
language.matcher.clone(),
None,
move || {
let config = std::fs::read_to_string(language_path.join("config.toml"))?;
let config: LanguageConfig = ::toml::from_str(&config)?;
@@ -120,6 +120,46 @@ pub struct Location {
pub range: Range<Anchor>,
}

pub struct LanguageContext {
pub package: Option<String>,
pub symbol: Option<String>,
}

pub trait LanguageContextProvider: Send + Sync {
fn build_context(&self, location: Location, cx: &mut AppContext) -> Result<LanguageContext>;
}

/// A context provider that fills out LanguageContext without inspecting the contents.
pub struct DefaultContextProvider;

impl LanguageContextProvider for DefaultContextProvider {
fn build_context(
&self,
location: Location,
cx: &mut AppContext,
) -> gpui::Result<LanguageContext> {
let symbols = location
.buffer
.read(cx)
.snapshot()
.symbols_containing(location.range.start, None);
let symbol = symbols.and_then(|symbols| {
symbols.last().map(|symbol| {
let range = symbol
.name_ranges
.last()
.cloned()
.unwrap_or(0..symbol.text.len());
symbol.text[range].to_string()
})
});
Ok(LanguageContext {
package: None,
symbol,
})
}
}

/// Represents a Language Server, with certain cached sync properties.
/// Uses [`LspAdapter`] under the hood, but calls all 'static' methods
/// once at startup, and caches the results.

@@ -727,6 +767,7 @@ pub struct Language {
pub(crate) id: LanguageId,
pub(crate) config: LanguageConfig,
pub(crate) grammar: Option<Arc<Grammar>>,
pub(crate) context_provider: Option<Arc<dyn LanguageContextProvider>>,
}

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]

@@ -841,9 +882,18 @@ impl Language {
highlight_map: Default::default(),
})
}),
context_provider: None,
}
}

pub fn with_context_provider(
mut self,
provider: Option<Arc<dyn LanguageContextProvider>>,
) -> Self {
self.context_provider = provider;
self
}

pub fn with_queries(mut self, queries: LanguageQueries) -> Result<Self> {
if let Some(query) = queries.highlights {
self = self

@@ -1139,6 +1189,10 @@ impl Language {
self.config.name.clone()
}

pub fn context_provider(&self) -> Option<Arc<dyn LanguageContextProvider>> {
self.context_provider.clone()
}

pub fn highlight_text<'a>(
self: &'a Arc<Self>,
text: &'a Rope,
@@ -1,6 +1,6 @@
use crate::{
CachedLspAdapter, Language, LanguageConfig, LanguageId, LanguageMatcher, LanguageServerName,
LspAdapter, LspAdapterDelegate, PARSER, PLAIN_TEXT,
CachedLspAdapter, Language, LanguageConfig, LanguageContextProvider, LanguageId,
LanguageMatcher, LanguageServerName, LspAdapter, LspAdapterDelegate, PARSER, PLAIN_TEXT,
};
use anyhow::{anyhow, Context as _, Result};
use collections::{hash_map, HashMap};

@@ -78,6 +78,7 @@ struct AvailableLanguage {
matcher: LanguageMatcher,
load: Arc<dyn Fn() -> Result<(LanguageConfig, LanguageQueries)> + 'static + Send + Sync>,
loaded: bool,
context_provider: Option<Arc<dyn LanguageContextProvider>>,
}

enum AvailableGrammar {

@@ -188,6 +189,7 @@ impl LanguageRegistry {
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
None,
move || Ok((config.clone(), Default::default())),
)
}

@@ -237,6 +239,7 @@ impl LanguageRegistry {
name: Arc<str>,
grammar_name: Option<Arc<str>>,
matcher: LanguageMatcher,
context_provider: Option<Arc<dyn LanguageContextProvider>>,
load: impl Fn() -> Result<(LanguageConfig, LanguageQueries)> + 'static + Send + Sync,
) {
let load = Arc::new(load);

@@ -257,6 +260,8 @@ impl LanguageRegistry {
grammar: grammar_name,
matcher,
load,
context_provider,
loaded: false,
});
state.version += 1;

@@ -422,6 +427,7 @@ impl LanguageRegistry {
.spawn(async move {
let id = language.id;
let name = language.name.clone();
let provider = language.context_provider.clone();
let language = async {
let (config, queries) = (language.load)()?;

@@ -431,7 +437,9 @@ impl LanguageRegistry {
None
};

Language::new_with_id(id, config, grammar).with_queries(queries)
Language::new_with_id(id, config, grammar)
.with_context_provider(provider)
.with_queries(queries)
}
.await;
@@ -122,212 +122,245 @@ pub fn init(
("dart", tree_sitter_dart::language()),
]);

let language = |asset_dir_name: &'static str, adapters: Vec<Arc<dyn LspAdapter>>| {
let config = load_config(asset_dir_name);
for adapter in adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
move || Ok((config.clone(), load_queries(asset_dir_name))),
);
};

language(
macro_rules! language {
($name:literal) => {
let config = load_config($name);
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
Some(Arc::new(language::DefaultContextProvider)),
move || Ok((config.clone(), load_queries($name))),
);
};
($name:literal, $adapters:expr) => {
let config = load_config($name);
// typeck helper
let adapters: Vec<Arc<dyn LspAdapter>> = $adapters;
for adapter in adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
Some(Arc::new(language::DefaultContextProvider)),
move || Ok((config.clone(), load_queries($name))),
);
};
($name:literal, $adapters:expr, $context_provider:expr) => {
let config = load_config($name);
// typeck helper
let adapters: Vec<Arc<dyn LspAdapter>> = $adapters;
for adapter in $adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
languages.register_language(
config.name.clone(),
config.grammar.clone(),
config.matcher.clone(),
Some(Arc::new($context_provider)),
move || Ok((config.clone(), load_queries($name))),
);
};
}
language!(
"astro",
vec![
Arc::new(astro::AstroLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language("bash", vec![]);
language("c", vec![Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>]);
language("clojure", vec![Arc::new(clojure::ClojureLspAdapter)]);
language("cpp", vec![Arc::new(c::CLspAdapter)]);
language("csharp", vec![Arc::new(csharp::OmniSharpAdapter {})]);
language(
language!("bash");
language!("c", vec![Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>]);
language!("clojure", vec![Arc::new(clojure::ClojureLspAdapter)]);
language!("cpp", vec![Arc::new(c::CLspAdapter)]);
language!("csharp", vec![Arc::new(csharp::OmniSharpAdapter {})]);
language!(
"css",
vec![
Arc::new(css::CssLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);

language(
language!(
"dockerfile",
vec![Arc::new(dockerfile::DockerfileLspAdapter::new(
node_runtime.clone(),
))],
))]
);

match &ElixirSettings::get(None, cx).lsp {
elixir::ElixirLspSetting::ElixirLs => language(
"elixir",
vec![
Arc::new(elixir::ElixirLspAdapter),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
),
elixir::ElixirLspSetting::NextLs => {
language("elixir", vec![Arc::new(elixir::NextLspAdapter)])
elixir::ElixirLspSetting::ElixirLs => {
language!(
"elixir",
vec![
Arc::new(elixir::ElixirLspAdapter),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
]
);
}
elixir::ElixirLspSetting::NextLs => {
language!("elixir", vec![Arc::new(elixir::NextLspAdapter)]);
}
elixir::ElixirLspSetting::Local { path, arguments } => {
language!(
"elixir",
vec![Arc::new(elixir::LocalLspAdapter {
path: path.clone(),
arguments: arguments.clone(),
})]
);
}
elixir::ElixirLspSetting::Local { path, arguments } => language(
"elixir",
vec![Arc::new(elixir::LocalLspAdapter {
path: path.clone(),
arguments: arguments.clone(),
})],
),
}
language("gitcommit", vec![]);
language("erlang", vec![Arc::new(erlang::ErlangLspAdapter)]);
language!("gitcommit");
language!("erlang", vec![Arc::new(erlang::ErlangLspAdapter)]);

language("gleam", vec![Arc::new(gleam::GleamLspAdapter)]);
language("go", vec![Arc::new(go::GoLspAdapter)]);
language("gomod", vec![]);
language("gowork", vec![]);
language("zig", vec![Arc::new(zig::ZlsAdapter)]);
language(
language!("gleam", vec![Arc::new(gleam::GleamLspAdapter)]);
language!("go", vec![Arc::new(go::GoLspAdapter)]);
language!("gomod");
language!("gowork");
language!("zig", vec![Arc::new(zig::ZlsAdapter)]);
language!(
"heex",
vec![
Arc::new(elixir::ElixirLspAdapter),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language(
language!(
"json",
vec![Arc::new(json::JsonLspAdapter::new(
node_runtime.clone(),
languages.clone(),
))],
))]
);
language("markdown", vec![]);
language(
language!("markdown");
language!(
"python",
vec![Arc::new(python::PythonLspAdapter::new(
node_runtime.clone(),
))],
))]
);
language("rust", vec![Arc::new(rust::RustLspAdapter)]);
language("toml", vec![Arc::new(toml::TaploLspAdapter)]);
language!("rust", vec![Arc::new(rust::RustLspAdapter)]);
language!("toml", vec![Arc::new(toml::TaploLspAdapter)]);
match &DenoSettings::get(None, cx).enable {
true => {
language(
language!(
"tsx",
vec![
Arc::new(deno::DenoLspAdapter::new()),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language("typescript", vec![Arc::new(deno::DenoLspAdapter::new())]);
language(
language!("typescript", vec![Arc::new(deno::DenoLspAdapter::new())]);
language!(
"javascript",
vec![
Arc::new(deno::DenoLspAdapter::new()),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
}
false => {
language(
language!(
"tsx",
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language(
language!(
"typescript",
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
],
]
);
language(
language!(
"javascript",
vec![
Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
}
}

language("haskell", vec![Arc::new(haskell::HaskellLanguageServer {})]);
language(
language!("haskell", vec![Arc::new(haskell::HaskellLanguageServer {})]);
language!(
"html",
vec![
Arc::new(html::HtmlLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language("ruby", vec![Arc::new(ruby::RubyLanguageServer)]);
language(
language!("ruby", vec![Arc::new(ruby::RubyLanguageServer)]);
language!(
"erb",
vec![
Arc::new(ruby::RubyLanguageServer),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language("scheme", vec![]);
language("racket", vec![]);
language("lua", vec![Arc::new(lua::LuaLspAdapter)]);
language(
language!("scheme");
language!("racket");
language!("lua", vec![Arc::new(lua::LuaLspAdapter)]);
language!(
"yaml",
vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))],
vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))]
);
language(
language!(
"svelte",
vec![
Arc::new(svelte::SvelteLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language(
language!(
"php",
vec![
Arc::new(php::IntelephenseLspAdapter::new(node_runtime.clone())),
Arc::new(tailwind::TailwindLspAdapter::new(node_runtime.clone())),
],
]
);
language(
language!(
"purescript",
vec![Arc::new(purescript::PurescriptLspAdapter::new(
node_runtime.clone(),
))],
))]
);
language(
language!(
"elm",
vec![Arc::new(elm::ElmLspAdapter::new(node_runtime.clone()))],
vec![Arc::new(elm::ElmLspAdapter::new(node_runtime.clone()))]
);
language("glsl", vec![]);
language("nix", vec![]);
language("nu", vec![Arc::new(nu::NuLanguageServer {})]);
language("ocaml", vec![Arc::new(ocaml::OCamlLspAdapter)]);
language("ocaml-interface", vec![Arc::new(ocaml::OCamlLspAdapter)]);
language(
language!("glsl");
language!("nix");
language!("nu", vec![Arc::new(nu::NuLanguageServer {})]);
language!("ocaml", vec![Arc::new(ocaml::OCamlLspAdapter)]);
language!("ocaml-interface", vec![Arc::new(ocaml::OCamlLspAdapter)]);
language!(
"vue",
vec![Arc::new(vue::VueLspAdapter::new(node_runtime.clone()))],
vec![Arc::new(vue::VueLspAdapter::new(node_runtime.clone()))]
);
language("uiua", vec![Arc::new(uiua::UiuaLanguageServer {})]);
language("proto", vec![]);
language("terraform", vec![Arc::new(terraform::TerraformLspAdapter)]);
language(
language!("uiua", vec![Arc::new(uiua::UiuaLanguageServer {})]);
language!("proto");
language!("terraform", vec![Arc::new(terraform::TerraformLspAdapter)]);
language!(
"terraform-vars",
vec![Arc::new(terraform::TerraformLspAdapter)],
vec![Arc::new(terraform::TerraformLspAdapter)]
);
language("hcl", vec![]);
language(
language!("hcl", vec![]);
language!(
"prisma",
vec![Arc::new(prisma::PrismaLspAdapter::new(
node_runtime.clone(),
))],
))]
);
language("dart", vec![Arc::new(dart::DartLanguageServer {})]);
language!("dart", vec![Arc::new(dart::DartLanguageServer {})]);
}

#[cfg(any(test, feature = "test-support"))]
@@ -10,13 +10,13 @@ use collections::{HashMap, VecDeque};
use gpui::{AppContext, Context, Model, ModelContext, Subscription};
use itertools::Itertools;
use project_core::worktree::WorktreeId;
use task::{Task, TaskId, TaskSource};
use task::{Task, TaskContext, TaskId, TaskSource};
use util::{post_inc, NumericPrefixWithSuffix};

/// Inventory tracks available tasks for a given project.
pub struct Inventory {
sources: Vec<SourceInInventory>,
last_scheduled_tasks: VecDeque<TaskId>,
last_scheduled_tasks: VecDeque<(TaskId, TaskContext)>,
}

struct SourceInInventory {

@@ -133,17 +133,20 @@ impl Inventory {
) -> Vec<(TaskSourceKind, Arc<dyn Task>)> {
let mut lru_score = 0_u32;
let tasks_by_usage = if lru {
self.last_scheduled_tasks
.iter()
.rev()
.fold(HashMap::default(), |mut tasks, id| {
tasks.entry(id).or_insert_with(|| post_inc(&mut lru_score));
self.last_scheduled_tasks.iter().rev().fold(
HashMap::default(),
|mut tasks, (id, context)| {
tasks
})
.entry(id)
.or_insert_with(|| (post_inc(&mut lru_score), Some(context)));
tasks
},
)
} else {
HashMap::default()
};
let not_used_score = post_inc(&mut lru_score);
let not_used_task_context = None;
let not_used_score = (post_inc(&mut lru_score), not_used_task_context);
self.sources
.iter()
.filter(|source| {

@@ -171,7 +174,8 @@ impl Inventory {
.sorted_unstable_by(
|((kind_a, task_a), usages_a), ((kind_b, task_b), usages_b)| {
usages_a
.cmp(usages_b)
.0
.cmp(&usages_b.0)
.then(
kind_a
.worktree()

@@ -200,19 +204,21 @@ impl Inventory {
}

/// Returns the last scheduled task, if any of the sources contains one with the matching id.
pub fn last_scheduled_task(&self, cx: &mut AppContext) -> Option<Arc<dyn Task>> {
self.last_scheduled_tasks.back().and_then(|id| {
// TODO straighten the `Path` story to understand what has to be passed here: or it will break in the future.
self.list_tasks(None, None, false, cx)
.into_iter()
.find(|(_, task)| task.id() == id)
.map(|(_, task)| task)
})
pub fn last_scheduled_task(&self, cx: &mut AppContext) -> Option<(Arc<dyn Task>, TaskContext)> {
self.last_scheduled_tasks
.back()
.and_then(|(id, task_context)| {
// TODO straighten the `Path` story to understand what has to be passed here: or it will break in the future.
self.list_tasks(None, None, false, cx)
.into_iter()
.find(|(_, task)| task.id() == id)
.map(|(_, task)| (task, task_context.clone()))
})
}

/// Registers task "usage" as being scheduled – to be used for LRU sorting when listing all tasks.
pub fn task_scheduled(&mut self, id: TaskId) {
self.last_scheduled_tasks.push_back(id);
pub fn task_scheduled(&mut self, id: TaskId, task_context: TaskContext) {
self.last_scheduled_tasks.push_back((id, task_context));
if self.last_scheduled_tasks.len() > 5_000 {
self.last_scheduled_tasks.pop_front();
}

@@ -221,14 +227,11 @@ impl Inventory {

#[cfg(any(test, feature = "test-support"))]
pub mod test_inventory {
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use std::{path::Path, sync::Arc};

use gpui::{AppContext, Context as _, Model, ModelContext, TestAppContext};
use project_core::worktree::WorktreeId;
use task::{Task, TaskId, TaskSource};
use task::{Task, TaskContext, TaskId, TaskSource};

use crate::Inventory;

@@ -249,11 +252,11 @@ pub mod test_inventory {
&self.name
}

fn cwd(&self) -> Option<&Path> {
fn cwd(&self) -> Option<&str> {
None
}

fn exec(&self, _cwd: Option<PathBuf>) -> Option<task::SpawnInTerminal> {
fn exec(&self, _cwd: TaskContext) -> Option<task::SpawnInTerminal> {
None
}
}

@@ -327,7 +330,7 @@ pub mod test_inventory {
.into_iter()
.find(|(_, task)| task.name() == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
inventory.task_scheduled(task.1.id().clone());
inventory.task_scheduled(task.1.id().clone(), TaskContext::default());
});
}
@@ -13,6 +13,7 @@ gpui.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json_lenient.workspace = true
subst = "0.3.0"
util.workspace = true

[dev-dependencies]
@@ -36,6 +36,15 @@ pub struct SpawnInTerminal {
pub allow_concurrent_runs: bool,
}

/// Keeps track of the file associated with a task and context of tasks execution (i.e. current file or current function)
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct TaskContext {
/// A path to a directory in which the task should be executed.
pub cwd: Option<PathBuf>,
/// Additional environment variables associated with a given task.
pub env: HashMap<String, String>,
}

/// Represents a short lived recipe of a task, whose main purpose
/// is to get spawned.
pub trait Task {

@@ -44,10 +53,10 @@ pub trait Task {
/// Human readable name of the task to display in the UI.
fn name(&self) -> &str;
/// Task's current working directory. If `None`, current project's root will be used.
fn cwd(&self) -> Option<&Path>;
fn cwd(&self) -> Option<&str>;
/// Sets up everything needed to spawn the task in the given directory (`cwd`).
/// If a task is intended to be spawned in the terminal, it should return the corresponding struct filled with the data necessary.
fn exec(&self, cwd: Option<PathBuf>) -> Option<SpawnInTerminal>;
fn exec(&self, cx: TaskContext) -> Option<SpawnInTerminal>;
}

/// [`Source`] produces tasks that can be scheduled.
@@ -2,7 +2,7 @@

use std::sync::Arc;

use crate::{SpawnInTerminal, Task, TaskId, TaskSource};
use crate::{SpawnInTerminal, Task, TaskContext, TaskId, TaskSource};
use gpui::{AppContext, Context, Model};

/// A storage and source of tasks generated out of user command prompt inputs.

@@ -30,21 +30,22 @@ impl Task for OneshotTask {
&self.id.0
}

fn cwd(&self) -> Option<&std::path::Path> {
fn cwd(&self) -> Option<&str> {
None
}

fn exec(&self, cwd: Option<std::path::PathBuf>) -> Option<SpawnInTerminal> {
fn exec(&self, cx: TaskContext) -> Option<SpawnInTerminal> {
if self.id().0.is_empty() {
return None;
}
let TaskContext { cwd, env } = cx;
Some(SpawnInTerminal {
id: self.id().clone(),
label: self.name().to_owned(),
command: self.id().0.clone(),
args: vec![],
cwd,
env: Default::default(),
env,
use_new_terminal: Default::default(),
allow_concurrent_runs: Default::default(),
})
@@ -1,10 +1,6 @@
//! A source of tasks, based on a static configuration, deserialized from the tasks config file, and related infrastructure for tracking changes to the file.

use std::{
borrow::Cow,
path::{Path, PathBuf},
sync::Arc,
};
use std::{borrow::Cow, path::Path, sync::Arc};

use collections::HashMap;
use futures::StreamExt;

@@ -13,7 +9,7 @@ use schemars::{gen::SchemaSettings, JsonSchema};
use serde::{Deserialize, Serialize};
use util::ResultExt;

use crate::{SpawnInTerminal, Task, TaskId, TaskSource};
use crate::{SpawnInTerminal, Task, TaskContext, TaskId, TaskSource};
use futures::channel::mpsc::UnboundedReceiver;

/// A single config file entry with the deserialized task definition.

@@ -24,7 +20,16 @@ struct StaticTask {
}

impl Task for StaticTask {
fn exec(&self, cwd: Option<PathBuf>) -> Option<SpawnInTerminal> {
fn exec(&self, cx: TaskContext) -> Option<SpawnInTerminal> {
let TaskContext { cwd, env } = cx;
let cwd = self
.definition
.cwd
.clone()
.and_then(|path| subst::substitute(&path, &env).map(Into::into).ok())
.or(cwd);
let mut definition_env = self.definition.env.clone();
definition_env.extend(env);
Some(SpawnInTerminal {
id: self.id.clone(),
cwd,

@@ -33,7 +38,7 @@ impl Task for StaticTask {
label: self.definition.label.clone(),
command: self.definition.command.clone(),
args: self.definition.args.clone(),
env: self.definition.env.clone(),
env: definition_env,
})
}

@@ -45,7 +50,7 @@ impl Task for StaticTask {
&self.id
}

fn cwd(&self) -> Option<&Path> {
fn cwd(&self) -> Option<&str> {
self.definition.cwd.as_deref()
}
}

@@ -72,7 +77,7 @@ pub(crate) struct Definition {
pub env: HashMap<String, String>,
/// Current working directory to spawn the command into, defaults to current project root.
#[serde(default)]
pub cwd: Option<PathBuf>,
pub cwd: Option<String>,
/// Whether to use a new terminal tab or reuse the existing one to spawn the process.
#[serde(default)]
pub use_new_terminal: bool,
@@ -7,6 +7,7 @@ license = "GPL-3.0-or-later"

[dependencies]
anyhow.workspace = true
editor.workspace = true
fuzzy.workspace = true
gpui.workspace = true
menu.workspace = true

@@ -17,10 +18,14 @@ serde.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
language.workspace = true

[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
serde_json.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-typescript.workspace = true
workspace = { workspace = true, features = ["test-support"] }
@@ -1,8 +1,11 @@
use std::path::PathBuf;
use std::{collections::HashMap, path::PathBuf};

use editor::Editor;
use gpui::{AppContext, ViewContext, WindowContext};
use language::Point;
use modal::TasksModal;
use task::Task;
use project::{Location, WorktreeId};
use task::{Task, TaskContext};
use util::ResultExt;
use workspace::Workspace;

@@ -15,16 +18,28 @@ pub fn init(cx: &mut AppContext) {
.register_action(|workspace, _: &modal::Spawn, cx| {
let inventory = workspace.project().read(cx).task_inventory().clone();
let workspace_handle = workspace.weak_handle();
workspace
.toggle_modal(cx, |cx| TasksModal::new(inventory, workspace_handle, cx))
let cwd = task_cwd(workspace, cx).log_err().flatten();
let task_context = task_context(workspace, cwd, cx);
workspace.toggle_modal(cx, |cx| {
TasksModal::new(inventory, task_context, workspace_handle, cx)
})
})
.register_action(move |workspace, _: &modal::Rerun, cx| {
if let Some(task) = workspace.project().update(cx, |project, cx| {
project
.task_inventory()
.update(cx, |inventory, cx| inventory.last_scheduled_task(cx))
}) {
schedule_task(workspace, task.as_ref(), cx)
.register_action(move |workspace, action: &modal::Rerun, cx| {
if let Some((task, old_context)) =
workspace.project().update(cx, |project, cx| {
project
.task_inventory()
.update(cx, |inventory, cx| inventory.last_scheduled_task(cx))
})
{
let task_context = if action.reevaluate_context {
let cwd = task_cwd(workspace, cx).log_err().flatten();
task_context(workspace, cwd, cx)
} else {
old_context
};

schedule_task(workspace, task.as_ref(), task_context, cx)
};
});
},

@@ -32,16 +47,117 @@ pub fn init(cx: &mut AppContext) {
.detach();
}

fn schedule_task(workspace: &Workspace, task: &dyn Task, cx: &mut ViewContext<'_, Workspace>) {
let cwd = match task.cwd() {
Some(cwd) => Some(cwd.to_path_buf()),
None => task_cwd(workspace, cx).log_err().flatten(),
};
let spawn_in_terminal = task.exec(cwd);
fn task_context(
workspace: &Workspace,
cwd: Option<PathBuf>,
cx: &mut WindowContext<'_>,
) -> TaskContext {
let current_editor = workspace
.active_item(cx)
.and_then(|item| item.act_as::<Editor>(cx))
.clone();
if let Some(current_editor) = current_editor {
(|| {
let editor = current_editor.read(cx);
let selection = editor.selections.newest::<usize>(cx);
let (buffer, _, _) = editor
.buffer()
.read(cx)
.point_to_buffer_offset(selection.start, cx)?;

current_editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
let selection_range = selection.range();
let start = snapshot
.display_snapshot
.buffer_snapshot
.anchor_after(selection_range.start)
.text_anchor;
let end = snapshot
.display_snapshot
.buffer_snapshot
.anchor_after(selection_range.end)
.text_anchor;
let Point { row, column } = snapshot
.display_snapshot
.buffer_snapshot
.offset_to_point(selection_range.start);
let row = row + 1;
let column = column + 1;
let location = Location {
buffer: buffer.clone(),
range: start..end,
};

let current_file = location
.buffer
.read(cx)
.file()
.map(|file| file.path().to_string_lossy().to_string());
let worktree_id = location
.buffer
.read(cx)
.file()
.map(|file| WorktreeId::from_usize(file.worktree_id()));
let context = buffer
.read(cx)
.language()
.and_then(|language| language.context_provider())
.and_then(|provider| provider.build_context(location, cx).ok());

let worktree_path = worktree_id.and_then(|worktree_id| {
workspace
.project()
.read(cx)
.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string())
});

let mut env = HashMap::from_iter([
("ZED_ROW".into(), row.to_string()),
("ZED_COLUMN".into(), column.to_string()),
]);
if let Some(path) = current_file {
env.insert("ZED_FILE".into(), path);
}
if let Some(worktree_path) = worktree_path {
env.insert("ZED_WORKTREE_ROOT".into(), worktree_path);
}
if let Some(language_context) = context {
if let Some(symbol) = language_context.symbol {
env.insert("ZED_SYMBOL".into(), symbol);
}
}

Some(TaskContext {
cwd: cwd.clone(),
env,
})
})
})()
.unwrap_or_else(|| TaskContext {
cwd,
env: Default::default(),
})
} else {
TaskContext {
cwd,
env: Default::default(),
}
}
}

fn schedule_task(
workspace: &Workspace,
task: &dyn Task,
task_cx: TaskContext,
cx: &mut ViewContext<'_, Workspace>,
) {
let spawn_in_terminal = task.exec(task_cx.clone());
if let Some(spawn_in_terminal) = spawn_in_terminal {
workspace.project().update(cx, |project, cx| {
project.task_inventory().update(cx, |inventory, _| {
inventory.task_scheduled(task.id().clone());
inventory.task_scheduled(task.id().clone(), task_cx);
})
});
cx.emit(workspace::Event::SpawnTask(spawn_in_terminal));

@@ -82,3 +198,176 @@ fn task_cwd(workspace: &Workspace, cx: &mut WindowContext) -> anyhow::Result<Opt
};
Ok(cwd.map(|path| path.to_path_buf()))
}

#[cfg(test)]
mod tests {
use std::{collections::HashMap, sync::Arc};

use editor::Editor;
use gpui::{Entity, TestAppContext};
use language::{DefaultContextProvider, Language, LanguageConfig};
use project::{FakeFs, Project, TaskSourceKind};
use serde_json::json;
use task::{oneshot_source::OneshotSource, TaskContext};
use ui::VisualContext;
use workspace::{AppState, Workspace};

use crate::{task_context, task_cwd};

#[gpui::test]
async fn test_default_language_context(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/dir",
json!({
".zed": {
"tasks.json": r#"[
{
"label": "example task",
"command": "echo",
"args": ["4"]
},
{
"label": "another one",
"command": "echo",
"args": ["55"]
},
]"#,
},
"a.ts": "function this_is_a_test() { }",
"rust": {
"b.rs": "use std; fn this_is_a_rust_file() { }",
}
}),
)
.await;

let rust_language = Arc::new(
Language::new(
LanguageConfig::default(),
Some(tree_sitter_rust::language()),
)
.with_outline_query(
r#"(function_item
"fn" @context
name: (_) @name) @item"#,
)
.unwrap()
.with_context_provider(Some(Arc::new(DefaultContextProvider))),
);

let typescript_language = Arc::new(
Language::new(
LanguageConfig::default(),
Some(tree_sitter_typescript::language_typescript()),
)
.with_outline_query(
r#"(function_declaration
"async"? @context
"function" @context
name: (_) @name
parameters: (formal_parameters
"(" @context
")" @context)) @item"#,
)
.unwrap()
.with_context_provider(Some(Arc::new(DefaultContextProvider))),
);
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
project.update(cx, |project, cx| {
project.task_inventory().update(cx, |inventory, cx| {
inventory.add_source(TaskSourceKind::UserInput, |cx| OneshotSource::new(cx), cx)
})
});
let worktree_id = project.update(cx, |project, cx| {
project.worktrees().next().unwrap().read(cx).id()
});
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));

let buffer1 = workspace
.update(cx, |this, cx| {
this.project()
.update(cx, |this, cx| this.open_buffer((worktree_id, "a.ts"), cx))
})
.await
.unwrap();
buffer1.update(cx, |this, cx| {
this.set_language(Some(typescript_language), cx)
});
let editor1 = cx.new_view(|cx| Editor::for_buffer(buffer1, Some(project.clone()), cx));
let buffer2 = workspace
.update(cx, |this, cx| {
this.project().update(cx, |this, cx| {
this.open_buffer((worktree_id, "rust/b.rs"), cx)
})
})
.await
.unwrap();
buffer2.update(cx, |this, cx| this.set_language(Some(rust_language), cx));
let editor2 = cx.new_view(|cx| Editor::for_buffer(buffer2, Some(project), cx));
workspace.update(cx, |this, cx| {
this.add_item_to_center(Box::new(editor1.clone()), cx);
this.add_item_to_center(Box::new(editor2.clone()), cx);
assert_eq!(this.active_item(cx).unwrap().item_id(), editor2.entity_id());
assert_eq!(
task_context(this, task_cwd(this, cx).unwrap(), cx),
TaskContext {
cwd: Some("/dir".into()),
env: HashMap::from_iter([
("ZED_FILE".into(), "rust/b.rs".into()),
("ZED_WORKTREE_ROOT".into(), "/dir".into()),
("ZED_ROW".into(), "1".into()),
("ZED_COLUMN".into(), "1".into()),
])
}
);
// And now, let's select an identifier.
editor2.update(cx, |this, cx| {
this.change_selections(None, cx, |selections| selections.select_ranges([14..18]))
});
assert_eq!(
task_context(this, task_cwd(this, cx).unwrap(), cx),
TaskContext {
cwd: Some("/dir".into()),
env: HashMap::from_iter([
("ZED_FILE".into(), "rust/b.rs".into()),
("ZED_WORKTREE_ROOT".into(), "/dir".into()),
("ZED_SYMBOL".into(), "this_is_a_rust_file".into()),
("ZED_ROW".into(), "1".into()),
("ZED_COLUMN".into(), "15".into()),
])
}
);

// Now, let's switch the active item to .ts file.
this.activate_item(&editor1, cx);
assert_eq!(
task_context(this, task_cwd(this, cx).unwrap(), cx),
TaskContext {
cwd: Some("/dir".into()),
env: HashMap::from_iter([
("ZED_FILE".into(), "a.ts".into()),
("ZED_WORKTREE_ROOT".into(), "/dir".into()),
("ZED_SYMBOL".into(), "this_is_a_test".into()),
("ZED_ROW".into(), "1".into()),
("ZED_COLUMN".into(), "1".into()),
])
}
);
});
}

pub(crate) fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
language::init(cx);
crate::init(cx);
editor::init(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
state
})
}
}
@@ -2,23 +2,36 @@ use std::{path::PathBuf, sync::Arc};

use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
actions, rems, AppContext, DismissEvent, EventEmitter, FocusableView, InteractiveElement,
Model, ParentElement, Render, SharedString, Styled, Subscription, View, ViewContext,
VisualContext, WeakView,
actions, impl_actions, rems, AppContext, DismissEvent, EventEmitter, FocusableView,
InteractiveElement, Model, ParentElement, Render, SharedString, Styled, Subscription, View,
ViewContext, VisualContext, WeakView,
};
use picker::{
highlighted_match_with_paths::{HighlightedMatchWithPaths, HighlightedText},
Picker, PickerDelegate,
};
use project::{Inventory, ProjectPath, TaskSourceKind};
use task::{oneshot_source::OneshotSource, Task};
use task::{oneshot_source::OneshotSource, Task, TaskContext};
use ui::{v_flex, ListItem, ListItemSpacing, RenderOnce, Selectable, WindowContext};
use util::{paths::PathExt, ResultExt};
use workspace::{ModalView, Workspace};

use crate::schedule_task;
use serde::Deserialize;

actions!(task, [Spawn]);
actions!(task, [Spawn, Rerun]);

/// Rerun last task
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct Rerun {
#[serde(default)]
/// Controls whether the task context is reevaluated prior to execution of a task.
/// If it is not, environment variables such as ZED_COLUMN, ZED_FILE are going to be the same as in the last execution of a task.
/// If it is, these variables will be updated to reflect the current state of the editor at the time task::Rerun is executed.
/// default: false
pub reevaluate_context: bool,
}

impl_actions!(task, [Rerun]);

/// A modal used to spawn new tasks.
pub(crate) struct TasksModalDelegate {

@@ -28,10 +41,15 @@ pub(crate) struct TasksModalDelegate {
selected_index: usize,
workspace: WeakView<Workspace>,
prompt: String,
task_context: TaskContext,
}

impl TasksModalDelegate {
fn new(inventory: Model<Inventory>, workspace: WeakView<Workspace>) -> Self {
fn new(
inventory: Model<Inventory>,
task_context: TaskContext,
workspace: WeakView<Workspace>,
) -> Self {
Self {
inventory,
workspace,

@@ -39,6 +57,7 @@ impl TasksModalDelegate {
matches: Vec::new(),
selected_index: 0,
prompt: String::default(),
task_context,
}
}

@@ -79,11 +98,16 @@ pub(crate) struct TasksModal {
impl TasksModal {
pub(crate) fn new(
inventory: Model<Inventory>,
task_context: TaskContext,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let picker = cx
.new_view(|cx| Picker::uniform_list(TasksModalDelegate::new(inventory, workspace), cx));
let picker = cx.new_view(|cx| {
Picker::uniform_list(
TasksModalDelegate::new(inventory, task_context, workspace),
cx,
)
});
let _subscription = cx.subscribe(&picker, |_, _, _, cx| {
cx.emit(DismissEvent);
});

@@ -223,7 +247,7 @@ impl PickerDelegate for TasksModalDelegate {

self.workspace
.update(cx, |workspace, cx| {
schedule_task(workspace, task.as_ref(), cx);
schedule_task(workspace, task.as_ref(), self.task_context.clone(), cx);
})
.ok();
cx.emit(DismissEvent);

@@ -279,13 +303,12 @@ mod tests {
use gpui::{TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
use workspace::AppState;

use super::*;

#[gpui::test]
async fn test_spawn_tasks_modal_query_reuse(cx: &mut TestAppContext) {
init_test(cx);
crate::tests::init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/dir",

@@ -431,16 +454,4 @@ mod tests {
.collect::<Vec<_>>()
})
}

fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
language::init(cx);
crate::init(cx);
editor::init(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
state
})
}
}