extensions: Yet another PR for debugger touchups (#32822)

We'll now clean up DAP locators when their extensions are unloaded and load debug adapter schemas properly.

I can now load a custom Ruby extension with all the bells and whistles and
use it as my debugger.

Release Notes:

- N/A
Piotr Osiewicz 2025-06-17 09:34:55 +02:00 committed by GitHub
parent d92d52b508
commit 0e794fa0ac
14 changed files with 262 additions and 45 deletions

@@ -50,20 +50,23 @@ impl DapRegistry {
let name = adapter.name();
let _previous_value = self.0.write().adapters.insert(name, adapter);
}
pub fn add_locator(&self, locator: Arc<dyn DapLocator>) {
self.0.write().locators.insert(locator.name(), locator);
}
pub fn remove_adapter(&self, name: &str) {
self.0.write().adapters.remove(name);
}
pub fn remove_locator(&self, locator: &str) {
self.0.write().locators.remove(locator);
}
pub fn adapter_language(&self, adapter_name: &str) -> Option<LanguageName> {
self.adapter(adapter_name)
.and_then(|adapter| adapter.adapter_language_name())
}
pub fn add_locator(&self, locator: Arc<dyn DapLocator>) {
let _previous_value = self.0.write().locators.insert(locator.name(), locator);
debug_assert!(
_previous_value.is_none(),
"Attempted to insert a new debug locator when one is already registered"
);
}
pub async fn adapters_schema(&self) -> task::AdapterSchemas {
let mut schemas = AdapterSchemas(vec![]);
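
A minimal sketch of the add/remove pattern above, assuming a simplified registry guarded by `std::sync::RwLock` and a stand-in `DapLocator` trait (the real registry's types differ); the `debug_assert!` flags accidental double registration in debug builds while staying silent in release:

```rust
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock};

// Hypothetical stand-in for the real `DapLocator` trait.
trait DapLocator: Send + Sync {
    fn name(&self) -> String;
}

#[derive(Default)]
struct RegistryState {
    locators: BTreeMap<String, Arc<dyn DapLocator>>,
}

#[derive(Default)]
struct Registry(RwLock<RegistryState>);

impl Registry {
    fn add_locator(&self, locator: Arc<dyn DapLocator>) {
        let previous = self.0.write().unwrap().locators.insert(locator.name(), locator);
        // Catch accidental double registration during development.
        debug_assert!(
            previous.is_none(),
            "Attempted to insert a new debug locator when one is already registered"
        );
    }

    fn remove_locator(&self, name: &str) {
        // Called when the extension that provided the locator is unloaded.
        self.0.write().unwrap().locators.remove(name);
    }
}

struct CargoLocator;

impl DapLocator for CargoLocator {
    fn name(&self) -> String {
        "cargo".into()
    }
}

fn main() {
    let registry = Registry::default();
    registry.add_locator(Arc::new(CargoLocator));
    // ...the providing extension gets unloaded...
    registry.remove_locator("cargo");
}
```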

@@ -1,7 +1,7 @@
mod extension_dap_adapter;
mod extension_locator_adapter;
use std::sync::Arc;
use std::{path::Path, sync::Arc};
use dap::DapRegistry;
use extension::{ExtensionDebugAdapterProviderProxy, ExtensionHostProxy};
@@ -34,8 +34,11 @@ impl ExtensionDebugAdapterProviderProxy for DebugAdapterRegistryProxy {
&self,
extension: Arc<dyn extension::Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) {
if let Some(adapter) = ExtensionDapAdapter::new(extension, debug_adapter_name).log_err() {
if let Some(adapter) =
ExtensionDapAdapter::new(extension, debug_adapter_name, schema_path).log_err()
{
self.debug_adapter_registry.add_adapter(Arc::new(adapter));
}
}
@@ -51,4 +54,13 @@ impl ExtensionDebugAdapterProviderProxy for DebugAdapterRegistryProxy {
locator_name,
)));
}
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>) {
self.debug_adapter_registry
.remove_adapter(&debug_adapter_name);
}
fn unregister_debug_locator(&self, locator_name: Arc<str>) {
self.debug_adapter_registry.remove_locator(&locator_name);
}
}

@@ -26,11 +26,13 @@ impl ExtensionDapAdapter {
pub(crate) fn new(
extension: Arc<dyn extension::Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) -> Result<Self> {
let schema = std::fs::read_to_string(extension.path_from_extension(
&Path::new("debug_adapter_schemas").join(debug_adapter_name.as_ref()),
))
.with_context(|| format!("Failed to read debug adapter schema for {debug_adapter_name}"))?;
let schema = std::fs::read_to_string(&schema_path).with_context(|| {
format!(
"Failed to read debug adapter schema for {debug_adapter_name} (from path: `{schema_path:?}`)"
)
})?;
let schema = serde_json::Value::from_str(&schema).with_context(|| {
format!("Debug adapter schema for {debug_adapter_name} is not a valid JSON")
})?;
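
For reference, the load-and-validate step now boils down to the sketch below, assuming only `anyhow` and `serde_json`; the helper name is illustrative, but the error messages mirror the diff, so a bad `schema_path` in a manifest points straight at the offending file:

```rust
use std::path::Path;
use std::str::FromStr;

use anyhow::{Context as _, Result};

/// Read a debug adapter schema from disk and check that it is valid JSON.
/// (Illustrative helper; the real logic lives in `ExtensionDapAdapter::new`.)
fn load_schema(debug_adapter_name: &str, schema_path: &Path) -> Result<serde_json::Value> {
    let schema = std::fs::read_to_string(schema_path).with_context(|| {
        format!(
            "Failed to read debug adapter schema for {debug_adapter_name} (from path: `{schema_path:?}`)"
        )
    })?;
    serde_json::Value::from_str(&schema).with_context(|| {
        format!("Debug adapter schema for {debug_adapter_name} is not valid JSON")
    })
}
```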

@@ -1,7 +1,7 @@
use crate::{
ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, parse_wasm_extension_version,
};
use anyhow::{Context as _, Result, bail, ensure};
use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::io::BufReader;
@@ -98,21 +98,20 @@ impl ExtensionBuilder {
log::info!("compiled Rust extension {}", extension_dir.display());
}
let debug_adapters_dir = extension_dir.join("debug_adapter_schemas");
if !extension_manifest.debug_adapters.is_empty() {
ensure!(
debug_adapters_dir.exists(),
"Expected debug adapter schemas directory to exist"
);
}
for debug_adapter_name in &extension_manifest.debug_adapters {
let debug_adapter_schema_path = debug_adapters_dir.join(debug_adapter_name.as_ref());
for (debug_adapter_name, meta) in &mut extension_manifest.debug_adapters {
let debug_adapter_relative_schema_path =
meta.schema_path.clone().unwrap_or_else(|| {
Path::new("debug_adapter_schemas")
.join(Path::new(debug_adapter_name.as_ref()).with_extension("json"))
});
let debug_adapter_schema_path = extension_dir.join(debug_adapter_relative_schema_path);
let debug_adapter_schema = fs::read_to_string(&debug_adapter_schema_path)
.with_context(|| {
format!("failed to read debug adapter schema for `{debug_adapter_name}`")
format!("failed to read debug adapter schema for `{debug_adapter_name}` from `{debug_adapter_schema_path:?}`")
})?;
_ = serde_json::Value::from_str(&debug_adapter_schema).with_context(|| {
format!("Debug adapter schema for `{debug_adapter_name}` is not a valid JSON")
format!("Debug adapter schema for `{debug_adapter_name}` (path: `{debug_adapter_schema_path:?}`) is not a valid JSON")
})?;
}
for (grammar_name, grammar_metadata) in &extension_manifest.grammars {

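Both the builder above and the extension store further below fall back to `debug_adapter_schemas/<adapter_name>.json` when a manifest entry does not set `schema_path`. A small sketch of that fallback, with a mirror of the new manifest entry type (adapter names and paths are illustrative):

```rust
use std::path::{Path, PathBuf};

// Mirror of the manifest entry introduced in this commit.
struct DebugAdapterManifestEntry {
    schema_path: Option<PathBuf>,
}

/// Path of an adapter's schema relative to the extension root:
/// the manifest's `schema_path` if present, otherwise
/// `debug_adapter_schemas/<adapter_name>.json`.
fn relative_schema_path(adapter_name: &str, entry: &DebugAdapterManifestEntry) -> PathBuf {
    entry.schema_path.clone().unwrap_or_else(|| {
        Path::new("debug_adapter_schemas").join(Path::new(adapter_name).with_extension("json"))
    })
}

fn main() {
    let default_entry = DebugAdapterManifestEntry { schema_path: None };
    assert_eq!(
        relative_schema_path("rdbg", &default_entry),
        Path::new("debug_adapter_schemas/rdbg.json")
    );

    let custom_entry = DebugAdapterManifestEntry {
        schema_path: Some(PathBuf::from("schemas/custom.json")),
    };
    assert_eq!(
        relative_schema_path("rdbg", &custom_entry),
        Path::new("schemas/custom.json")
    );
}
```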
@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::Result;
@@ -411,17 +411,29 @@ impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy {
}
pub trait ExtensionDebugAdapterProviderProxy: Send + Sync + 'static {
fn register_debug_adapter(&self, extension: Arc<dyn Extension>, debug_adapter_name: Arc<str>);
fn register_debug_adapter(
&self,
extension: Arc<dyn Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
);
fn register_debug_locator(&self, extension: Arc<dyn Extension>, locator_name: Arc<str>);
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>);
fn unregister_debug_locator(&self, locator_name: Arc<str>);
}
impl ExtensionDebugAdapterProviderProxy for ExtensionHostProxy {
fn register_debug_adapter(&self, extension: Arc<dyn Extension>, debug_adapter_name: Arc<str>) {
fn register_debug_adapter(
&self,
extension: Arc<dyn Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.register_debug_adapter(extension, debug_adapter_name)
proxy.register_debug_adapter(extension, debug_adapter_name, schema_path)
}
fn register_debug_locator(&self, extension: Arc<dyn Extension>, locator_name: Arc<str>) {
@@ -431,4 +443,18 @@ impl ExtensionDebugAdapterProviderProxy for ExtensionHostProxy {
proxy.register_debug_locator(extension, locator_name)
}
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.unregister_debug_adapter(debug_adapter_name)
}
fn unregister_debug_locator(&self, locator_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.unregister_debug_locator(locator_name)
}
}
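
The host proxy holds an optional inner provider and silently drops calls until one is installed; the new `unregister_*` methods reuse the same `let … else` forwarding shape. A stripped-down sketch of that pattern with placeholder types (the real code uses Zed's own trait and lock types):

```rust
use std::sync::{Arc, RwLock};

// Placeholder for the real provider trait.
trait DebugAdapterProviderProxy: Send + Sync {
    fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>);
}

#[derive(Default)]
struct HostProxy {
    debug_adapter_provider_proxy: RwLock<Option<Arc<dyn DebugAdapterProviderProxy>>>,
}

impl HostProxy {
    fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>) {
        // No provider installed yet: there is nothing to forward to.
        let Some(proxy) = self.debug_adapter_provider_proxy.read().unwrap().clone() else {
            return;
        };
        proxy.unregister_debug_adapter(debug_adapter_name)
    }
}
```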

@@ -88,9 +88,9 @@ pub struct ExtensionManifest {
#[serde(default)]
pub capabilities: Vec<ExtensionCapability>,
#[serde(default)]
pub debug_adapters: Vec<Arc<str>>,
pub debug_adapters: BTreeMap<Arc<str>, DebugAdapterManifestEntry>,
#[serde(default)]
pub debug_locators: Vec<Arc<str>>,
pub debug_locators: BTreeMap<Arc<str>, DebugLocatorManifestEntry>,
}
impl ExtensionManifest {
@@ -210,6 +210,14 @@ pub struct SlashCommandManifestEntry {
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct IndexedDocsProviderEntry {}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct DebugAdapterManifestEntry {
pub schema_path: Option<PathBuf>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct DebugLocatorManifestEntry {}
impl ExtensionManifest {
pub async fn load(fs: Arc<dyn Fs>, extension_dir: &Path) -> Result<Self> {
let extension_name = extension_dir
@@ -278,8 +286,8 @@ fn manifest_from_old_manifest(
indexed_docs_providers: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
debug_adapters: vec![],
debug_locators: vec![],
debug_adapters: Default::default(),
debug_locators: Default::default(),
}
}
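
`debug_adapters` and `debug_locators` switch from plain name lists to maps keyed by name, which is what lets an entry carry per-adapter metadata such as `schema_path`. A hedged sketch of what such a manifest fragment deserializes into, assuming a TOML manifest and using mirror structs with the `serde` and `toml` crates (the adapter and locator names are made up):

```rust
use std::collections::BTreeMap;
use std::path::PathBuf;

use serde::Deserialize;

// Mirrors of the new manifest entry types.
#[derive(Debug, Deserialize)]
struct DebugAdapterManifestEntry {
    schema_path: Option<PathBuf>,
}

#[derive(Debug, Deserialize)]
struct DebugLocatorManifestEntry {}

#[derive(Debug, Deserialize)]
struct Manifest {
    #[serde(default)]
    debug_adapters: BTreeMap<String, DebugAdapterManifestEntry>,
    #[serde(default)]
    debug_locators: BTreeMap<String, DebugLocatorManifestEntry>,
}

fn main() -> Result<(), toml::de::Error> {
    let manifest: Manifest = toml::from_str(
        r#"
        # Adapter with an explicit schema path.
        [debug_adapters.rdbg]
        schema_path = "schemas/rdbg.json"

        # Adapter relying on the default `debug_adapter_schemas/<name>.json`.
        [debug_adapters.other]

        # Locator entries currently carry no extra metadata.
        [debug_locators.cargo]
        "#,
    )?;
    println!("{manifest:?}");
    Ok(())
}
```

With no `schema_path` set, the builder and the store fall back to `debug_adapter_schemas/other.json` relative to the extension root, as shown in the other sketches.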

@@ -20,9 +20,10 @@ pub use wit::{
make_file_executable,
zed::extension::context_server::ContextServerConfiguration,
zed::extension::dap::{
AttachRequest, BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, BuildTaskTemplate,
DebugAdapterBinary, DebugConfig, DebugRequest, DebugScenario, DebugTaskDefinition,
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest, TaskTemplate,
TcpArguments, TcpArgumentsTemplate, resolve_tcp_template,
LaunchRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
TaskTemplate, TcpArguments, TcpArgumentsTemplate, resolve_tcp_template,
},
zed::extension::github::{
GithubRelease, GithubReleaseAsset, GithubReleaseOptions, github_release_by_tag_name,
@@ -198,12 +199,15 @@ pub trait Extension: Send + Sync {
&mut self,
_adapter_name: String,
_config: DebugTaskDefinition,
_user_provided_path: Option<String>,
_user_provided_debug_adapter_path: Option<String>,
_worktree: &Worktree,
) -> Result<DebugAdapterBinary, String> {
Err("`get_dap_binary` not implemented".to_string())
}
/// Determines whether the specified adapter configuration should *launch* a new debuggee process
/// or *attach* to an existing one. This function should not perform any further validation (outside of determining the kind of a request).
/// This function should return an error when the kind cannot be determined (rather than fall back to a known default).
fn dap_request_kind(
&mut self,
_adapter_name: String,
@@ -211,12 +215,31 @@
) -> Result<StartDebuggingRequestArgumentsRequest, String> {
Err("`dap_request_kind` not implemented".to_string())
}
fn dap_config_to_scenario(
&mut self,
_adapter_name: DebugConfig,
) -> Result<DebugScenario, String> {
/// Converts a high-level definition of a debug scenario (originating in a new session UI) to a "low-level" configuration suitable for a particular adapter.
///
/// In layman's terms: given a program, list of arguments, current working directory and environment variables,
/// create a configuration that can be used to start a debug session.
fn dap_config_to_scenario(&mut self, _config: DebugConfig) -> Result<DebugScenario, String> {
Err("`dap_config_to_scenario` not implemented".to_string())
}
/// Locators are entities that convert a Zed task into a debug scenario.
///
/// They can be provided even by extensions that don't provide a debug adapter.
/// For all tasks applicable to a given buffer, Zed will query all locators to find one that can turn the task into a debug scenario.
/// A converted debug scenario can include a build task (in which case it shouldn't contain any configuration); the build task's result will later
/// be resolved with [`Extension::run_dap_locator`].
///
/// To work through a real-world example, take a `cargo run` task and a hypothetical `cargo` locator:
/// 1. We may need to modify the task; in this case, it is problematic that `cargo run` spawns a binary. We should turn `cargo run` into a debug scenario with
/// a `cargo build` task. This is the decision we make in `dap_locator_create_scenario`.
/// 2. Then, after the build task finishes, we will run `run_dap_locator` of the locator that produced the build task to find the program to be debugged. This function
/// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user
/// found the artifact path by themselves.
///
/// Note that you're not obliged to use build tasks with locators. Specifically, it is sufficient to provide a debug configuration directly in the return value of
/// `dap_locator_create_scenario` if you're able to do that. Make sure not to fill out the `build` field in that case, as that would prevent Zed from running the second phase of resolution.
/// This might be of particular relevance to interpreted languages.
fn dap_locator_create_scenario(
&mut self,
_locator_name: String,
@@ -226,6 +249,9 @@
) -> Option<DebugScenario> {
None
}
/// Runs the second phase of locator resolution.
/// See [`Extension::dap_locator_create_scenario`] for a hefty comment on locators.
fn run_dap_locator(
&mut self,
_locator_name: String,

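A hedged sketch of the two-phase locator flow described in the doc comments above, for the hypothetical `cargo` locator. The types are simplified stand-ins rather than the real WIT-generated ones; the point is the shape of the two phases: phase one swaps `cargo run` for a `cargo build` task and deliberately leaves the launch configuration empty, phase two runs after the build and names the produced binary (the artifact path is illustrative):

```rust
// Simplified stand-ins for the WIT types used by the real extension API.
#[derive(Debug, Clone)]
struct TaskTemplate {
    command: String,
    args: Vec<String>,
}

#[derive(Debug)]
struct DebugScenario {
    label: String,
    /// Build step to run first; `None` means the scenario is already complete.
    build: Option<TaskTemplate>,
    /// Adapter-agnostic launch configuration, filled in by phase two.
    program: Option<String>,
}

/// Phase one: decide whether this task can be debugged and, if so,
/// replace `cargo run` with a `cargo build` step.
fn dap_locator_create_scenario(task: &TaskTemplate) -> Option<DebugScenario> {
    if task.command != "cargo" || task.args.first().map(String::as_str) != Some("run") {
        return None;
    }
    let mut build = task.clone();
    build.args[0] = "build".into();
    Some(DebugScenario {
        label: "Debug (cargo)".into(),
        build: Some(build),
        // Left empty on purpose: filling it in would skip phase two.
        program: None,
    })
}

/// Phase two: after the build task has finished, point the debugger at the
/// artifact it produced (path is illustrative).
fn run_dap_locator(mut scenario: DebugScenario) -> DebugScenario {
    scenario.program = Some("target/debug/my_crate".into());
    scenario.build = None;
    scenario
}

fn main() {
    let task = TaskTemplate {
        command: "cargo".into(),
        args: vec!["run".into()],
    };
    let scenario = dap_locator_create_scenario(&task).expect("cargo run is debuggable");
    // ...Zed runs the build task here, then resolves the scenario:
    let resolved = run_dap_locator(scenario);
    println!("{resolved:?}");
}
```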
@@ -1149,6 +1149,12 @@ impl ExtensionStore {
for (server_id, _) in extension.manifest.context_servers.iter() {
self.proxy.unregister_context_server(server_id.clone(), cx);
}
for (adapter, _) in extension.manifest.debug_adapters.iter() {
self.proxy.unregister_debug_adapter(adapter.clone());
}
for (locator, _) in extension.manifest.debug_locators.iter() {
self.proxy.unregister_debug_locator(locator.clone());
}
}
self.wasm_extensions
@@ -1344,12 +1350,24 @@ impl ExtensionStore {
.register_indexed_docs_provider(extension.clone(), provider_id.clone());
}
for debug_adapter in &manifest.debug_adapters {
this.proxy
.register_debug_adapter(extension.clone(), debug_adapter.clone());
for (debug_adapter, meta) in &manifest.debug_adapters {
let mut path = root_dir.clone();
path.push(Path::new(manifest.id.as_ref()));
if let Some(schema_path) = &meta.schema_path {
path.push(schema_path);
} else {
path.push("debug_adapter_schemas");
path.push(Path::new(debug_adapter.as_ref()).with_extension("json"));
}
this.proxy.register_debug_adapter(
extension.clone(),
debug_adapter.clone(),
&path,
);
}
for debug_adapter in &manifest.debug_adapters {
for debug_adapter in manifest.debug_locators.keys() {
this.proxy
.register_debug_locator(extension.clone(), debug_adapter.clone());
}
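
The store now mirrors registration with cleanup: everything an extension's manifest declares is unregistered when that extension is unloaded, so a reloaded or updated extension starts from a clean slate. A condensed, self-contained sketch of that pairing with hypothetical stand-in types (the real code goes through `ExtensionHostProxy` and uses `Arc<str>` names):

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

// Hypothetical stand-ins for the proxy and manifest types.
trait Proxy {
    fn register_debug_adapter(&self, name: &str, schema_path: &Path);
    fn unregister_debug_adapter(&self, name: &str);
    fn register_debug_locator(&self, name: &str);
    fn unregister_debug_locator(&self, name: &str);
}

struct DebugAdapterEntry {
    schema_path: Option<PathBuf>,
}

struct Manifest {
    id: String,
    debug_adapters: BTreeMap<String, DebugAdapterEntry>,
    debug_locators: BTreeMap<String, ()>,
}

fn load_extension(proxy: &dyn Proxy, root_dir: &Path, manifest: &Manifest) {
    for (name, entry) in &manifest.debug_adapters {
        // Absolute schema path: <root>/<extension id>/<schema_path or default>.
        let mut path = root_dir.join(&manifest.id);
        match &entry.schema_path {
            Some(schema_path) => path.push(schema_path),
            None => {
                path.push("debug_adapter_schemas");
                path.push(Path::new(name).with_extension("json"));
            }
        }
        proxy.register_debug_adapter(name, &path);
    }
    for name in manifest.debug_locators.keys() {
        proxy.register_debug_locator(name);
    }
}

fn unload_extension(proxy: &dyn Proxy, manifest: &Manifest) {
    // New in this commit: drop everything the extension had registered.
    for name in manifest.debug_adapters.keys() {
        proxy.unregister_debug_adapter(name);
    }
    for name in manifest.debug_locators.keys() {
        proxy.unregister_debug_locator(name);
    }
}
```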