Debugger implementation (#13433)
### DISCLAIMER > As of 6th March 2025, debugger is still in development. We plan to merge it behind a staff-only feature flag for staff use only, followed by non-public release and then finally a public one (akin to how Git panel release was handled). This is done to ensure the best experience when it gets released. ### END OF DISCLAIMER **The current state of the debugger implementation:** https://github.com/user-attachments/assets/c4deff07-80dd-4dc6-ad2e-0c252a478fe9 https://github.com/user-attachments/assets/e1ed2345-b750-4bb6-9c97-50961b76904f ---- All the todo's are in the following channel, so it's easier to work on this together: https://zed.dev/channel/zed-debugger-11370 If you are on Linux, you can use the following command to join the channel: ```cli zed https://zed.dev/channel/zed-debugger-11370 ``` ## Current Features - Collab - Breakpoints - Sync when you (re)join a project - Sync when you add/remove a breakpoint - Sync active debug line - Stack frames - Click on stack frame - View variables that belong to the stack frame - Visit the source file - Restart stack frame (if adapter supports this) - Variables - Loaded sources - Modules - Controls - Continue - Step back - Stepping granularity (configurable) - Step into - Stepping granularity (configurable) - Step over - Stepping granularity (configurable) - Step out - Stepping granularity (configurable) - Debug console - Breakpoints - Log breakpoints - line breakpoints - Persistent between zed sessions (configurable) - Multi buffer support - Toggle disable/enable all breakpoints - Stack frames - Click on stack frame - View variables that belong to the stack frame - Visit the source file - Show collapsed stack frames - Restart stack frame (if adapter supports this) - Loaded sources - View all used loaded sources if supported by adapter. 
- Modules - View all used modules (if adapter supports this) - Variables - Copy value - Copy name - Copy memory reference - Set value (if adapter supports this) - keyboard navigation - Debug Console - See logs - View output that was sent from debug adapter - Output grouping - Evaluate code - Updates the variable list - Auto completion - If not supported by adapter, we will show auto-completion for existing variables - Debug Terminal - Run custom commands and change env values right inside your Zed terminal - Attach to process (if adapter supports this) - Process picker - Controls - Continue - Step back - Stepping granularity (configurable) - Step into - Stepping granularity (configurable) - Step over - Stepping granularity (configurable) - Step out - Stepping granularity (configurable) - Disconnect - Restart - Stop - Warning when a debug session exited without hitting any breakpoint - Debug view to see Adapter/RPC log messages - Testing - Fake debug adapter - Fake requests & events --- Release Notes: - N/A --------- Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Co-authored-by: Anthony Eid <hello@anthonyeid.me> Co-authored-by: Anthony <anthony@zed.dev> Co-authored-by: Piotr Osiewicz <peterosiewicz@gmail.com> Co-authored-by: Piotr <piotr@zed.dev>
This commit is contained in:
parent
ed4e654fdf
commit
41a60ffecf
156 changed files with 25840 additions and 451 deletions
53
crates/dap/Cargo.toml
Normal file
53
crates/dap/Cargo.toml
Normal file
|
@ -0,0 +1,53 @@
|
|||
[package]
|
||||
name = "dap"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"gpui/test-support",
|
||||
"util/test-support",
|
||||
"task/test-support",
|
||||
"async-pipe",
|
||||
"settings/test-support",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-compression.workspace = true
|
||||
async-pipe = { workspace = true, optional = true }
|
||||
async-tar.workspace = true
|
||||
async-trait.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
dap-types.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
node_runtime.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
task.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
async-pipe.workspace = true
|
||||
env_logger.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
task = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
1
crates/dap/LICENSE-GPL
Symbolic link
1
crates/dap/LICENSE-GPL
Symbolic link
|
@ -0,0 +1 @@
|
|||
../../LICENSE-GPL
|
9
crates/dap/docs/breakpoints.md
Normal file
9
crates/dap/docs/breakpoints.md
Normal file
|
@ -0,0 +1,9 @@
|
|||
# Overview
|
||||
|
||||
The active `Project` is responsible for maintaining opened and closed breakpoints
|
||||
as well as serializing breakpoints to save. At a high level project serializes
|
||||
the positions of breakpoints that don't belong to any active buffers and handles
|
||||
converting breakpoints from serialized to active whenever a buffer is opened/closed.
|
||||
|
||||
`Project` also handles sending all relevant breakpoint information to debug adapters
|
||||
during debugging or when starting a debugger.
|
370
crates/dap/src/adapters.rs
Normal file
370
crates/dap/src/adapters.rs
Normal file
|
@ -0,0 +1,370 @@
|
|||
use ::fs::Fs;
|
||||
use anyhow::{anyhow, Context as _, Ok, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
use futures::io::BufReader;
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
pub use http_client::{github::latest_github_release, HttpClient};
|
||||
use language::LanguageToolchainStore;
|
||||
use node_runtime::NodeRuntime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use settings::WorktreeId;
|
||||
use smol::{self, fs::File, lock::Mutex};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
ffi::{OsStr, OsString},
|
||||
fmt::Debug,
|
||||
net::Ipv4Addr,
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use task::DebugAdapterConfig;
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum DapStatus {
|
||||
None,
|
||||
CheckingForUpdate,
|
||||
Downloading,
|
||||
Failed { error: String },
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
pub trait DapDelegate {
|
||||
fn worktree_id(&self) -> WorktreeId;
|
||||
fn http_client(&self) -> Arc<dyn HttpClient>;
|
||||
fn node_runtime(&self) -> NodeRuntime;
|
||||
fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore>;
|
||||
fn fs(&self) -> Arc<dyn Fs>;
|
||||
fn updated_adapters(&self) -> Arc<Mutex<HashSet<DebugAdapterName>>>;
|
||||
fn update_status(&self, dap_name: DebugAdapterName, status: DapStatus);
|
||||
fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn shell_env(&self) -> collections::HashMap<String, String>;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub struct DebugAdapterName(pub Arc<str>);
|
||||
|
||||
impl Deref for DebugAdapterName {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<str> for DebugAdapterName {
|
||||
fn as_ref(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<Path> for DebugAdapterName {
|
||||
fn as_ref(&self) -> &Path {
|
||||
Path::new(&*self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for DebugAdapterName {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
std::fmt::Display::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DebugAdapterName> for SharedString {
|
||||
fn from(name: DebugAdapterName) -> Self {
|
||||
SharedString::from(name.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for DebugAdapterName {
|
||||
fn from(str: &'a str) -> DebugAdapterName {
|
||||
DebugAdapterName(str.to_string().into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TcpArguments {
|
||||
pub host: Ipv4Addr,
|
||||
pub port: u16,
|
||||
pub timeout: Option<u64>,
|
||||
}
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DebugAdapterBinary {
|
||||
pub command: String,
|
||||
pub arguments: Option<Vec<OsString>>,
|
||||
pub envs: Option<HashMap<String, String>>,
|
||||
pub cwd: Option<PathBuf>,
|
||||
pub connection: Option<TcpArguments>,
|
||||
}
|
||||
|
||||
pub struct AdapterVersion {
|
||||
pub tag_name: String,
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
pub enum DownloadedFileType {
|
||||
Vsix,
|
||||
GzipTar,
|
||||
Zip,
|
||||
}
|
||||
|
||||
pub struct GithubRepo {
|
||||
pub repo_name: String,
|
||||
pub repo_owner: String,
|
||||
}
|
||||
|
||||
pub async fn download_adapter_from_github(
|
||||
adapter_name: DebugAdapterName,
|
||||
github_version: AdapterVersion,
|
||||
file_type: DownloadedFileType,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<PathBuf> {
|
||||
let adapter_path = paths::debug_adapters_dir().join(&adapter_name);
|
||||
let version_path = adapter_path.join(format!("{}_{}", adapter_name, github_version.tag_name));
|
||||
let fs = delegate.fs();
|
||||
|
||||
if version_path.exists() {
|
||||
return Ok(version_path);
|
||||
}
|
||||
|
||||
if !adapter_path.exists() {
|
||||
fs.create_dir(&adapter_path.as_path())
|
||||
.await
|
||||
.context("Failed creating adapter path")?;
|
||||
}
|
||||
|
||||
log::debug!(
|
||||
"Downloading adapter {} from {}",
|
||||
adapter_name,
|
||||
&github_version.url,
|
||||
);
|
||||
|
||||
let mut response = delegate
|
||||
.http_client()
|
||||
.get(&github_version.url, Default::default(), true)
|
||||
.await
|
||||
.context("Error downloading release")?;
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
"download failed with status {}",
|
||||
response.status().to_string()
|
||||
))?;
|
||||
}
|
||||
|
||||
match file_type {
|
||||
DownloadedFileType::GzipTar => {
|
||||
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
|
||||
let archive = Archive::new(decompressed_bytes);
|
||||
archive.unpack(&version_path).await?;
|
||||
}
|
||||
DownloadedFileType::Zip | DownloadedFileType::Vsix => {
|
||||
let zip_path = version_path.with_extension("zip");
|
||||
|
||||
let mut file = File::create(&zip_path).await?;
|
||||
futures::io::copy(response.body_mut(), &mut file).await?;
|
||||
|
||||
// we cannot check the status as some adapter include files with names that trigger `Illegal byte sequence`
|
||||
util::command::new_smol_command("unzip")
|
||||
.arg(&zip_path)
|
||||
.arg("-d")
|
||||
.arg(&version_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
util::fs::remove_matching(&adapter_path, |entry| {
|
||||
entry
|
||||
.file_name()
|
||||
.is_some_and(|file| file.to_string_lossy().ends_with(".zip"))
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
// remove older versions
|
||||
util::fs::remove_matching(&adapter_path, |entry| {
|
||||
entry.to_string_lossy() != version_path.to_string_lossy()
|
||||
})
|
||||
.await;
|
||||
|
||||
Ok(version_path)
|
||||
}
|
||||
|
||||
pub async fn fetch_latest_adapter_version_from_github(
|
||||
github_repo: GithubRepo,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release = latest_github_release(
|
||||
&format!("{}/{}", github_repo.repo_owner, github_repo.repo_name),
|
||||
false,
|
||||
false,
|
||||
delegate.http_client(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(AdapterVersion {
|
||||
tag_name: release.tag_name,
|
||||
url: release.zipball_url,
|
||||
})
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
pub trait DebugAdapter: 'static + Send + Sync {
|
||||
fn name(&self) -> DebugAdapterName;
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
if delegate
|
||||
.updated_adapters()
|
||||
.lock()
|
||||
.await
|
||||
.contains(&self.name())
|
||||
{
|
||||
log::info!("Using cached debug adapter binary {}", self.name());
|
||||
|
||||
if let Some(binary) = self
|
||||
.get_installed_binary(delegate, &config, user_installed_path.clone(), cx)
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
return Ok(binary);
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Cached binary {} is corrupt falling back to install",
|
||||
self.name()
|
||||
);
|
||||
}
|
||||
|
||||
log::info!("Getting latest version of debug adapter {}", self.name());
|
||||
delegate.update_status(self.name(), DapStatus::CheckingForUpdate);
|
||||
if let Some(version) = self.fetch_latest_adapter_version(delegate).await.log_err() {
|
||||
log::info!(
|
||||
"Installiing latest version of debug adapter {}",
|
||||
self.name()
|
||||
);
|
||||
delegate.update_status(self.name(), DapStatus::Downloading);
|
||||
self.install_binary(version, delegate).await?;
|
||||
|
||||
delegate
|
||||
.updated_adapters()
|
||||
.lock_arc()
|
||||
.await
|
||||
.insert(self.name());
|
||||
}
|
||||
|
||||
self.get_installed_binary(delegate, &config, user_installed_path, cx)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion>;
|
||||
|
||||
/// Installs the binary for the debug adapter.
|
||||
/// This method is called when the adapter binary is not found or needs to be updated.
|
||||
/// It should download and install the necessary files for the debug adapter to function.
|
||||
async fn install_binary(
|
||||
&self,
|
||||
version: AdapterVersion,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<()>;
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary>;
|
||||
|
||||
/// Should return base configuration to make the debug adapter work
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value;
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct FakeAdapter {}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeAdapter {
|
||||
const ADAPTER_NAME: &'static str = "fake-adapter";
|
||||
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for FakeAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
DebugAdapterName(Self::ADAPTER_NAME.into())
|
||||
}
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
Ok(DebugAdapterBinary {
|
||||
command: "command".into(),
|
||||
arguments: None,
|
||||
connection: None,
|
||||
envs: None,
|
||||
cwd: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
_delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
unimplemented!("fetch latest adapter version");
|
||||
}
|
||||
|
||||
async fn install_binary(
|
||||
&self,
|
||||
_version: AdapterVersion,
|
||||
_delegate: &dyn DapDelegate,
|
||||
) -> Result<()> {
|
||||
unimplemented!("install binary");
|
||||
}
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
unimplemented!("get installed binary");
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
use serde_json::json;
|
||||
use task::DebugRequestType;
|
||||
|
||||
json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
attach_config.process_id
|
||||
} else {
|
||||
None
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
490
crates/dap/src/client.rs
Normal file
490
crates/dap/src/client.rs
Normal file
|
@ -0,0 +1,490 @@
|
|||
use crate::{
|
||||
adapters::{DebugAdapterBinary, DebugAdapterName},
|
||||
transport::{IoKind, LogKind, TransportDelegate},
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use dap_types::{
|
||||
messages::{Message, Response},
|
||||
requests::Request,
|
||||
};
|
||||
use futures::{channel::oneshot, select, FutureExt as _};
|
||||
use gpui::{AppContext, AsyncApp, BackgroundExecutor};
|
||||
use smol::channel::{Receiver, Sender};
|
||||
use std::{
|
||||
hash::Hash,
|
||||
sync::atomic::{AtomicU64, Ordering},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
const DAP_REQUEST_TIMEOUT: Duration = Duration::from_secs(2);
|
||||
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
const DAP_REQUEST_TIMEOUT: Duration = Duration::from_secs(12);
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct SessionId(pub u32);
|
||||
|
||||
impl SessionId {
|
||||
pub fn from_proto(client_id: u64) -> Self {
|
||||
Self(client_id as u32)
|
||||
}
|
||||
|
||||
pub fn to_proto(&self) -> u64 {
|
||||
self.0 as u64
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a connection to the debug adapter process, either via stdout/stdin or a socket.
|
||||
pub struct DebugAdapterClient {
|
||||
id: SessionId,
|
||||
name: DebugAdapterName,
|
||||
sequence_count: AtomicU64,
|
||||
binary: DebugAdapterBinary,
|
||||
executor: BackgroundExecutor,
|
||||
transport_delegate: TransportDelegate,
|
||||
}
|
||||
|
||||
pub type DapMessageHandler = Box<dyn FnMut(Message) + 'static + Send + Sync>;
|
||||
|
||||
impl DebugAdapterClient {
|
||||
pub async fn start(
|
||||
id: SessionId,
|
||||
name: DebugAdapterName,
|
||||
binary: DebugAdapterBinary,
|
||||
message_handler: DapMessageHandler,
|
||||
cx: AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let ((server_rx, server_tx), transport_delegate) =
|
||||
TransportDelegate::start(&binary, cx.clone()).await?;
|
||||
let this = Self {
|
||||
id,
|
||||
name,
|
||||
binary,
|
||||
transport_delegate,
|
||||
sequence_count: AtomicU64::new(1),
|
||||
executor: cx.background_executor().clone(),
|
||||
};
|
||||
log::info!("Successfully connected to debug adapter");
|
||||
|
||||
let client_id = this.id;
|
||||
|
||||
// start handling events/reverse requests
|
||||
|
||||
cx.background_spawn(Self::handle_receive_messages(
|
||||
client_id,
|
||||
server_rx,
|
||||
server_tx.clone(),
|
||||
message_handler,
|
||||
))
|
||||
.detach();
|
||||
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
pub async fn reconnect(
|
||||
&self,
|
||||
session_id: SessionId,
|
||||
binary: DebugAdapterBinary,
|
||||
message_handler: DapMessageHandler,
|
||||
cx: AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let binary = match self.transport_delegate.transport() {
|
||||
crate::transport::Transport::Tcp(tcp_transport) => DebugAdapterBinary {
|
||||
command: binary.command,
|
||||
arguments: binary.arguments,
|
||||
envs: binary.envs,
|
||||
cwd: binary.cwd,
|
||||
connection: Some(crate::adapters::TcpArguments {
|
||||
host: tcp_transport.host,
|
||||
port: tcp_transport.port,
|
||||
timeout: Some(tcp_transport.timeout),
|
||||
}),
|
||||
},
|
||||
_ => self.binary.clone(),
|
||||
};
|
||||
|
||||
Self::start(session_id, self.name(), binary, message_handler, cx).await
|
||||
}
|
||||
|
||||
async fn handle_receive_messages(
|
||||
client_id: SessionId,
|
||||
server_rx: Receiver<Message>,
|
||||
client_tx: Sender<Message>,
|
||||
mut message_handler: DapMessageHandler,
|
||||
) -> Result<()> {
|
||||
let result = loop {
|
||||
let message = match server_rx.recv().await {
|
||||
Ok(message) => message,
|
||||
Err(e) => break Err(e.into()),
|
||||
};
|
||||
|
||||
match message {
|
||||
Message::Event(ev) => {
|
||||
log::debug!("Client {} received event `{}`", client_id.0, &ev);
|
||||
|
||||
message_handler(Message::Event(ev))
|
||||
}
|
||||
Message::Request(req) => {
|
||||
log::debug!(
|
||||
"Client {} received reverse request `{}`",
|
||||
client_id.0,
|
||||
&req.command
|
||||
);
|
||||
|
||||
message_handler(Message::Request(req))
|
||||
}
|
||||
Message::Response(response) => {
|
||||
log::debug!("Received response after request timeout: {:#?}", response);
|
||||
}
|
||||
}
|
||||
|
||||
smol::future::yield_now().await;
|
||||
};
|
||||
|
||||
drop(client_tx);
|
||||
|
||||
log::debug!("Handle receive messages dropped");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Send a request to an adapter and get a response back
|
||||
/// Note: This function will block until a response is sent back from the adapter
|
||||
pub async fn request<R: Request>(&self, arguments: R::Arguments) -> Result<R::Response> {
|
||||
let serialized_arguments = serde_json::to_value(arguments)?;
|
||||
|
||||
let (callback_tx, callback_rx) = oneshot::channel::<Result<Response>>();
|
||||
|
||||
let sequence_id = self.next_sequence_id();
|
||||
|
||||
let request = crate::messages::Request {
|
||||
seq: sequence_id,
|
||||
command: R::COMMAND.to_string(),
|
||||
arguments: Some(serialized_arguments),
|
||||
};
|
||||
|
||||
self.transport_delegate
|
||||
.add_pending_request(sequence_id, callback_tx)
|
||||
.await;
|
||||
|
||||
log::debug!(
|
||||
"Client {} send `{}` request with sequence_id: {}",
|
||||
self.id.0,
|
||||
R::COMMAND.to_string(),
|
||||
sequence_id
|
||||
);
|
||||
|
||||
self.send_message(Message::Request(request)).await?;
|
||||
|
||||
let mut timeout = self.executor.timer(DAP_REQUEST_TIMEOUT).fuse();
|
||||
let command = R::COMMAND.to_string();
|
||||
|
||||
select! {
|
||||
response = callback_rx.fuse() => {
|
||||
log::debug!(
|
||||
"Client {} received response for: `{}` sequence_id: {}",
|
||||
self.id.0,
|
||||
command,
|
||||
sequence_id
|
||||
);
|
||||
|
||||
let response = response??;
|
||||
match response.success {
|
||||
true => Ok(serde_json::from_value(response.body.unwrap_or_default())?),
|
||||
false => Err(anyhow!("Request failed: {}", response.message.unwrap_or_default())),
|
||||
}
|
||||
}
|
||||
|
||||
_ = timeout => {
|
||||
self.transport_delegate.cancel_pending_request(&sequence_id).await;
|
||||
log::error!("Cancelled DAP request for {command:?} id {sequence_id} which took over {DAP_REQUEST_TIMEOUT:?}");
|
||||
anyhow::bail!("DAP request timeout");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_message(&self, message: Message) -> Result<()> {
|
||||
self.transport_delegate.send_message(message).await
|
||||
}
|
||||
|
||||
pub fn id(&self) -> SessionId {
|
||||
self.id
|
||||
}
|
||||
|
||||
pub fn name(&self) -> DebugAdapterName {
|
||||
self.name.clone()
|
||||
}
|
||||
pub fn binary(&self) -> &DebugAdapterBinary {
|
||||
&self.binary
|
||||
}
|
||||
|
||||
/// Get the next sequence id to be used in a request
|
||||
pub fn next_sequence_id(&self) -> u64 {
|
||||
self.sequence_count.fetch_add(1, Ordering::Relaxed)
|
||||
}
|
||||
|
||||
pub async fn shutdown(&self) -> Result<()> {
|
||||
self.transport_delegate.shutdown().await
|
||||
}
|
||||
|
||||
pub fn has_adapter_logs(&self) -> bool {
|
||||
self.transport_delegate.has_adapter_logs()
|
||||
}
|
||||
|
||||
pub fn add_log_handler<F>(&self, f: F, kind: LogKind)
|
||||
where
|
||||
F: 'static + Send + FnMut(IoKind, &str),
|
||||
{
|
||||
self.transport_delegate.add_log_handler(f, kind);
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn on_request<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
where
|
||||
F: 'static
|
||||
+ Send
|
||||
+ FnMut(u64, R::Arguments) -> Result<R::Response, dap_types::ErrorResponse>,
|
||||
{
|
||||
let transport = self.transport_delegate.transport().as_fake();
|
||||
transport.on_request::<R, F>(handler).await;
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn fake_reverse_request<R: dap_types::requests::Request>(&self, args: R::Arguments) {
|
||||
self.send_message(Message::Request(dap_types::messages::Request {
|
||||
seq: self.sequence_count.load(Ordering::Relaxed),
|
||||
command: R::COMMAND.into(),
|
||||
arguments: serde_json::to_value(args).ok(),
|
||||
}))
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn on_response<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
where
|
||||
F: 'static + Send + Fn(Response),
|
||||
{
|
||||
let transport = self.transport_delegate.transport().as_fake();
|
||||
transport.on_response::<R, F>(handler).await;
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub async fn fake_event(&self, event: dap_types::messages::Events) {
|
||||
self.send_message(Message::Event(Box::new(event)))
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{client::DebugAdapterClient, debugger_settings::DebuggerSettings};
|
||||
use dap_types::{
|
||||
messages::Events,
|
||||
requests::{Initialize, Request, RunInTerminal},
|
||||
Capabilities, InitializeRequestArguments, InitializeRequestArgumentsPathFormat,
|
||||
RunInTerminalRequestArguments,
|
||||
};
|
||||
use gpui::TestAppContext;
|
||||
use serde_json::json;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
};
|
||||
|
||||
pub fn init_test(cx: &mut gpui::TestAppContext) {
|
||||
if std::env::var("RUST_LOG").is_ok() {
|
||||
env_logger::try_init().ok();
|
||||
}
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
DebuggerSettings::register(cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
pub async fn test_initialize_client(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName("adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
},
|
||||
Box::new(|_| panic!("Did not expect to hit this code path")),
|
||||
cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
client
|
||||
.on_request::<Initialize, _>(move |_, _| {
|
||||
Ok(dap_types::Capabilities {
|
||||
supports_configuration_done_request: Some(true),
|
||||
..Default::default()
|
||||
})
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
let response = client
|
||||
.request::<Initialize>(InitializeRequestArguments {
|
||||
client_id: Some("zed".to_owned()),
|
||||
client_name: Some("Zed".to_owned()),
|
||||
adapter_id: "fake-adapter".to_owned(),
|
||||
locale: Some("en-US".to_owned()),
|
||||
path_format: Some(InitializeRequestArgumentsPathFormat::Path),
|
||||
supports_variable_type: Some(true),
|
||||
supports_variable_paging: Some(false),
|
||||
supports_run_in_terminal_request: Some(true),
|
||||
supports_memory_references: Some(true),
|
||||
supports_progress_reporting: Some(false),
|
||||
supports_invalidated_event: Some(false),
|
||||
lines_start_at1: Some(true),
|
||||
columns_start_at1: Some(true),
|
||||
supports_memory_event: Some(false),
|
||||
supports_args_can_be_interpreted_by_shell: Some(false),
|
||||
supports_start_debugging_request: Some(true),
|
||||
supports_ansistyling: Some(false),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
dap_types::Capabilities {
|
||||
supports_configuration_done_request: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
response
|
||||
);
|
||||
|
||||
client.shutdown().await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
pub async fn test_calls_event_handler(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let called_event_handler = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName("adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
},
|
||||
Box::new({
|
||||
let called_event_handler = called_event_handler.clone();
|
||||
move |event| {
|
||||
called_event_handler.store(true, Ordering::SeqCst);
|
||||
|
||||
assert_eq!(
|
||||
Message::Event(Box::new(Events::Initialized(
|
||||
Some(Capabilities::default())
|
||||
))),
|
||||
event
|
||||
);
|
||||
}
|
||||
}),
|
||||
cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
client
|
||||
.fake_event(Events::Initialized(Some(Capabilities::default())))
|
||||
.await;
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
assert!(
|
||||
called_event_handler.load(std::sync::atomic::Ordering::SeqCst),
|
||||
"Event handler was not called"
|
||||
);
|
||||
|
||||
client.shutdown().await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
pub async fn test_calls_event_handler_for_reverse_request(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let called_event_handler = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName(Arc::from("test-adapter")),
|
||||
DebugAdapterBinary {
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
envs: Default::default(),
|
||||
connection: None,
|
||||
cwd: None,
|
||||
},
|
||||
Box::new({
|
||||
let called_event_handler = called_event_handler.clone();
|
||||
move |event| {
|
||||
called_event_handler.store(true, Ordering::SeqCst);
|
||||
|
||||
assert_eq!(
|
||||
Message::Request(dap_types::messages::Request {
|
||||
seq: 1,
|
||||
command: RunInTerminal::COMMAND.into(),
|
||||
arguments: Some(json!({
|
||||
"cwd": "/project/path/src",
|
||||
"args": ["node", "test.js"],
|
||||
}))
|
||||
}),
|
||||
event
|
||||
);
|
||||
}
|
||||
}),
|
||||
cx.to_async(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
client
|
||||
.fake_reverse_request::<RunInTerminal>(RunInTerminalRequestArguments {
|
||||
kind: None,
|
||||
title: None,
|
||||
cwd: "/project/path/src".into(),
|
||||
args: vec!["node".into(), "test.js".into()],
|
||||
env: None,
|
||||
args_can_be_interpreted_by_shell: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
assert!(
|
||||
called_event_handler.load(std::sync::atomic::Ordering::SeqCst),
|
||||
"Event handler was not called"
|
||||
);
|
||||
|
||||
client.shutdown().await.unwrap();
|
||||
}
|
||||
}
|
59
crates/dap/src/debugger_settings.rs
Normal file
59
crates/dap/src/debugger_settings.rs
Normal file
|
@ -0,0 +1,59 @@
|
|||
use dap_types::SteppingGranularity;
|
||||
use gpui::{App, Global};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
|
||||
/// User-facing debugger settings, read from the `"debugger"` key of the
/// settings file. `#[serde(default)]` lets each field fall back to the
/// value from `Default` when it is absent.
#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy)]
#[serde(default)]
pub struct DebuggerSettings {
    /// Determines the stepping granularity.
    ///
    /// Default: line
    pub stepping_granularity: SteppingGranularity,
    /// Whether the breakpoints should be reused across Zed sessions.
    ///
    /// Default: true
    pub save_breakpoints: bool,
    /// Whether to show the debug button in the status bar.
    ///
    /// Default: true
    pub button: bool,
    /// Time in milliseconds until timeout error when connecting to a TCP debug adapter
    ///
    /// Default: 2000ms
    pub timeout: u64,
    /// Whether to log messages between active debug adapters and Zed
    ///
    /// Default: true
    pub log_dap_communications: bool,
    /// Whether to format dap messages in when adding them to debug adapter logger
    ///
    /// Default: true
    pub format_dap_log_messages: bool,
}
|
||||
|
||||
impl Default for DebuggerSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
button: true,
|
||||
save_breakpoints: true,
|
||||
stepping_granularity: SteppingGranularity::Line,
|
||||
timeout: 2000,
|
||||
log_dap_communications: true,
|
||||
format_dap_log_messages: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Settings for DebuggerSettings {
    // Key in the settings JSON under which these settings live.
    const KEY: Option<&'static str> = Some("debugger");

    // The on-disk representation is identical to the in-memory one.
    type FileContent = Self;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> anyhow::Result<Self> {
        // Merge all settings sources as JSON; later sources override earlier ones.
        sources.json_merge()
    }
}
|
||||
|
||||
// Marker impl so the settings can be stored/read as a gpui global.
impl Global for DebuggerSettings {}
|
38
crates/dap/src/lib.rs
Normal file
38
crates/dap/src/lib.rs
Normal file
|
@ -0,0 +1,38 @@
|
|||
pub mod adapters;
|
||||
pub mod client;
|
||||
pub mod debugger_settings;
|
||||
pub mod proto_conversions;
|
||||
pub mod transport;
|
||||
|
||||
pub use dap_types::*;
|
||||
pub use task::{DebugAdapterConfig, DebugAdapterKind, DebugRequestType};
|
||||
|
||||
/// Identifier of a DAP scope.
pub type ScopeId = u64;
/// Reference used to fetch child variables of a scope or variable.
pub type VariableReference = u64;
/// Identifier of a single stack frame.
pub type StackFrameId = u64;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub use adapters::FakeAdapter;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
/// Builds a `DebugAdapterConfig` backed by the fake adapter, for tests.
///
/// * `request` - launch/attach request type to use.
/// * `fail` - whether the fake adapter should fail requests; defaults to `false`.
/// * `caps` - capabilities the fake adapter reports; defaults to a set with
///   `supports_step_back: Some(false)`.
pub fn test_config(
    request: DebugRequestType,
    fail: Option<bool>,
    caps: Option<Capabilities>,
) -> DebugAdapterConfig {
    DebugAdapterConfig {
        label: "test config".into(),
        kind: DebugAdapterKind::Fake((
            fail.unwrap_or_default(),
            // `unwrap_or_else` avoids constructing the default `Capabilities`
            // when the caller supplied their own.
            caps.unwrap_or_else(|| Capabilities {
                supports_step_back: Some(false),
                ..Default::default()
            }),
        )),
        request,
        program: None,
        supports_attach: false,
        cwd: None,
        initialize_args: None,
    }
}
|
591
crates/dap/src/proto_conversions.rs
Normal file
591
crates/dap/src/proto_conversions.rs
Normal file
|
@ -0,0 +1,591 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
use client::proto::{
|
||||
self, DapChecksum, DapChecksumAlgorithm, DapEvaluateContext, DapModule, DapScope,
|
||||
DapScopePresentationHint, DapSource, DapSourcePresentationHint, DapStackFrame, DapVariable,
|
||||
};
|
||||
use dap_types::{OutputEventCategory, OutputEventGroup, ScopePresentationHint, Source};
|
||||
|
||||
/// Two-way conversion between a DAP type and its protobuf wire representation.
pub trait ProtoConversion {
    /// The generated protobuf message type this converts to/from.
    type ProtoType;
    /// Result of `from_proto`: `Self` for infallible conversions, or
    /// `Result<Self>` when decoding can fail (e.g. `dap_types::Module`).
    type Output;

    fn to_proto(&self) -> Self::ProtoType;
    fn from_proto(payload: Self::ProtoType) -> Self::Output;
}
|
||||
|
||||
/// Element-wise conversion for vectors of convertible items (only for items
/// whose own conversion is infallible, i.e. `Output = T`).
impl<T> ProtoConversion for Vec<T>
where
    T: ProtoConversion<Output = T>,
{
    type ProtoType = Vec<T::ProtoType>;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        self.iter().map(|item| item.to_proto()).collect()
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        payload
            .into_iter()
            .map(|item| T::from_proto(item))
            .collect()
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Scope {
    type ProtoType = DapScope;
    type Output = Self;

    // Field-for-field mapping; enum-valued fields go through their own
    // `ProtoConversion` impls.
    fn to_proto(&self) -> Self::ProtoType {
        Self::ProtoType {
            name: self.name.clone(),
            presentation_hint: self
                .presentation_hint
                .as_ref()
                .map(|hint| hint.to_proto().into()),
            variables_reference: self.variables_reference,
            named_variables: self.named_variables,
            indexed_variables: self.indexed_variables,
            expensive: self.expensive,
            source: self.source.as_ref().map(Source::to_proto),
            line: self.line,
            end_line: self.end_line,
            column: self.column,
            end_column: self.end_column,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        // Invalid/unknown enum discriminants decode to `None` via `from_i32`.
        let presentation_hint = payload
            .presentation_hint
            .and_then(DapScopePresentationHint::from_i32);
        Self {
            name: payload.name,
            presentation_hint: presentation_hint.map(ScopePresentationHint::from_proto),
            variables_reference: payload.variables_reference,
            named_variables: payload.named_variables,
            indexed_variables: payload.indexed_variables,
            expensive: payload.expensive,
            source: payload.source.map(dap_types::Source::from_proto),
            line: payload.line,
            end_line: payload.end_line,
            column: payload.column,
            end_column: payload.end_column,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Variable {
    type ProtoType = DapVariable;
    type Output = Self;

    // NOTE: `presentation_hint` is not carried over the wire yet, so a proto
    // round-trip is lossy for that field (see `from_proto` below).
    fn to_proto(&self) -> Self::ProtoType {
        Self::ProtoType {
            name: self.name.clone(),
            value: self.value.clone(),
            r#type: self.type_.clone(),
            evaluate_name: self.evaluate_name.clone(),
            variables_reference: self.variables_reference,
            named_variables: self.named_variables,
            indexed_variables: self.indexed_variables,
            memory_reference: self.memory_reference.clone(),
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        Self {
            name: payload.name,
            value: payload.value,
            type_: payload.r#type,
            evaluate_name: payload.evaluate_name,
            presentation_hint: None, // TODO Debugger Collab Add this
            variables_reference: payload.variables_reference,
            named_variables: payload.named_variables,
            indexed_variables: payload.indexed_variables,
            memory_reference: payload.memory_reference,
            declaration_location_reference: None, // TODO
            value_location_reference: None, // TODO
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::ScopePresentationHint {
    type ProtoType = DapScopePresentationHint;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::ScopePresentationHint::Locals => DapScopePresentationHint::Locals,
            dap_types::ScopePresentationHint::Arguments => DapScopePresentationHint::Arguments,
            dap_types::ScopePresentationHint::Registers => DapScopePresentationHint::Registers,
            dap_types::ScopePresentationHint::ReturnValue => DapScopePresentationHint::ReturnValue,
            dap_types::ScopePresentationHint::Unknown => DapScopePresentationHint::ScopeUnknown,
            // NOTE(review): catch-all presumably required because the source
            // enum is non-exhaustive; panics if a new variant ever reaches
            // here — confirm this is the intended failure mode.
            &_ => unreachable!(),
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            DapScopePresentationHint::Locals => dap_types::ScopePresentationHint::Locals,
            DapScopePresentationHint::Arguments => dap_types::ScopePresentationHint::Arguments,
            DapScopePresentationHint::Registers => dap_types::ScopePresentationHint::Registers,
            DapScopePresentationHint::ReturnValue => dap_types::ScopePresentationHint::ReturnValue,
            DapScopePresentationHint::ScopeUnknown => dap_types::ScopePresentationHint::Unknown,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::SourcePresentationHint {
    type ProtoType = DapSourcePresentationHint;
    type Output = Self;

    // Proto variant names carry a `Source` prefix to avoid clashes with
    // other presentation-hint enums in the same proto namespace.
    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::SourcePresentationHint::Normal => DapSourcePresentationHint::SourceNormal,
            dap_types::SourcePresentationHint::Emphasize => DapSourcePresentationHint::Emphasize,
            dap_types::SourcePresentationHint::Deemphasize => {
                DapSourcePresentationHint::Deemphasize
            }
            dap_types::SourcePresentationHint::Unknown => DapSourcePresentationHint::SourceUnknown,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            DapSourcePresentationHint::SourceNormal => dap_types::SourcePresentationHint::Normal,
            DapSourcePresentationHint::Emphasize => dap_types::SourcePresentationHint::Emphasize,
            DapSourcePresentationHint::Deemphasize => {
                dap_types::SourcePresentationHint::Deemphasize
            }
            DapSourcePresentationHint::SourceUnknown => dap_types::SourcePresentationHint::Unknown,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Checksum {
    type ProtoType = DapChecksum;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        DapChecksum {
            algorithm: self.algorithm.to_proto().into(),
            checksum: self.checksum.clone(),
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        Self {
            // `payload.algorithm()` is the generated accessor that decodes
            // the stored i32 into the proto enum.
            algorithm: dap_types::ChecksumAlgorithm::from_proto(payload.algorithm()),
            checksum: payload.checksum,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::ChecksumAlgorithm {
    type ProtoType = DapChecksumAlgorithm;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::ChecksumAlgorithm::Md5 => DapChecksumAlgorithm::Md5,
            dap_types::ChecksumAlgorithm::Sha1 => DapChecksumAlgorithm::Sha1,
            dap_types::ChecksumAlgorithm::Sha256 => DapChecksumAlgorithm::Sha256,
            dap_types::ChecksumAlgorithm::Timestamp => DapChecksumAlgorithm::Timestamp,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            DapChecksumAlgorithm::Md5 => dap_types::ChecksumAlgorithm::Md5,
            DapChecksumAlgorithm::Sha1 => dap_types::ChecksumAlgorithm::Sha1,
            DapChecksumAlgorithm::Sha256 => dap_types::ChecksumAlgorithm::Sha256,
            DapChecksumAlgorithm::Timestamp => dap_types::ChecksumAlgorithm::Timestamp,
            // NOTE(review): panics on the proto default/unset value — this
            // assumes senders never leave the algorithm unspecified; confirm.
            DapChecksumAlgorithm::ChecksumAlgorithmUnspecified => unreachable!(),
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Source {
|
||||
type ProtoType = DapSource;
|
||||
type Output = Self;
|
||||
|
||||
fn to_proto(&self) -> Self::ProtoType {
|
||||
Self::ProtoType {
|
||||
name: self.name.clone(),
|
||||
path: self.path.clone(),
|
||||
source_reference: self.source_reference,
|
||||
presentation_hint: self.presentation_hint.map(|hint| hint.to_proto().into()),
|
||||
origin: self.origin.clone(),
|
||||
sources: self
|
||||
.sources
|
||||
.clone()
|
||||
.map(|src| src.to_proto())
|
||||
.unwrap_or_default(),
|
||||
adapter_data: Default::default(), // TODO Debugger Collab
|
||||
checksums: self
|
||||
.checksums
|
||||
.clone()
|
||||
.map(|c| c.to_proto())
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_proto(payload: Self::ProtoType) -> Self {
|
||||
Self {
|
||||
name: payload.name.clone(),
|
||||
path: payload.path.clone(),
|
||||
source_reference: payload.source_reference,
|
||||
presentation_hint: payload
|
||||
.presentation_hint
|
||||
.and_then(DapSourcePresentationHint::from_i32)
|
||||
.map(dap_types::SourcePresentationHint::from_proto),
|
||||
origin: payload.origin.clone(),
|
||||
sources: Some(Vec::<dap_types::Source>::from_proto(payload.sources)),
|
||||
checksums: Some(Vec::<dap_types::Checksum>::from_proto(payload.checksums)),
|
||||
adapter_data: None, // TODO Debugger Collab
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProtoConversion for dap_types::StackFrame {
    type ProtoType = DapStackFrame;
    type Output = Self;

    // `module_id` and `presentation_hint` are dropped in both directions for
    // now (see TODOs), so a proto round-trip is lossy for those fields.
    fn to_proto(&self) -> Self::ProtoType {
        Self::ProtoType {
            id: self.id,
            name: self.name.clone(),
            source: self.source.as_ref().map(|src| src.to_proto()),
            line: self.line,
            column: self.column,
            end_line: self.end_line,
            end_column: self.end_column,
            can_restart: self.can_restart,
            instruction_pointer_reference: self.instruction_pointer_reference.clone(),
            module_id: None, // TODO Debugger Collab
            presentation_hint: None, // TODO Debugger Collab
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        Self {
            id: payload.id,
            name: payload.name,
            source: payload.source.map(dap_types::Source::from_proto),
            line: payload.line,
            column: payload.column,
            end_line: payload.end_line,
            end_column: payload.end_column,
            can_restart: payload.can_restart,
            instruction_pointer_reference: payload.instruction_pointer_reference,
            module_id: None, // TODO Debugger Collab
            presentation_hint: None, // TODO Debugger Collab
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Module {
|
||||
type ProtoType = DapModule;
|
||||
type Output = Result<Self>;
|
||||
|
||||
fn to_proto(&self) -> Self::ProtoType {
|
||||
let id = match &self.id {
|
||||
dap_types::ModuleId::Number(num) => proto::dap_module_id::Id::Number(*num),
|
||||
dap_types::ModuleId::String(string) => proto::dap_module_id::Id::String(string.clone()),
|
||||
};
|
||||
|
||||
DapModule {
|
||||
id: Some(proto::DapModuleId { id: Some(id) }),
|
||||
name: self.name.clone(),
|
||||
path: self.path.clone(),
|
||||
is_optimized: self.is_optimized,
|
||||
is_user_code: self.is_user_code,
|
||||
version: self.version.clone(),
|
||||
symbol_status: self.symbol_status.clone(),
|
||||
symbol_file_path: self.symbol_file_path.clone(),
|
||||
date_time_stamp: self.date_time_stamp.clone(),
|
||||
address_range: self.address_range.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_proto(payload: Self::ProtoType) -> Result<Self> {
|
||||
let id = match payload
|
||||
.id
|
||||
.ok_or(anyhow!("All DapModule proto messages must have an id"))?
|
||||
.id
|
||||
.ok_or(anyhow!("All DapModuleID proto messages must have an id"))?
|
||||
{
|
||||
proto::dap_module_id::Id::String(string) => dap_types::ModuleId::String(string),
|
||||
proto::dap_module_id::Id::Number(num) => dap_types::ModuleId::Number(num),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
id,
|
||||
name: payload.name,
|
||||
path: payload.path,
|
||||
is_optimized: payload.is_optimized,
|
||||
is_user_code: payload.is_user_code,
|
||||
version: payload.version,
|
||||
symbol_status: payload.symbol_status,
|
||||
symbol_file_path: payload.symbol_file_path,
|
||||
date_time_stamp: payload.date_time_stamp,
|
||||
address_range: payload.address_range,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ProtoConversion for dap_types::SteppingGranularity {
    type ProtoType = proto::SteppingGranularity;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::SteppingGranularity::Statement => proto::SteppingGranularity::Statement,
            dap_types::SteppingGranularity::Line => proto::SteppingGranularity::Line,
            dap_types::SteppingGranularity::Instruction => proto::SteppingGranularity::Instruction,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            proto::SteppingGranularity::Line => dap_types::SteppingGranularity::Line,
            proto::SteppingGranularity::Instruction => dap_types::SteppingGranularity::Instruction,
            proto::SteppingGranularity::Statement => dap_types::SteppingGranularity::Statement,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::OutputEventCategory {
    type ProtoType = proto::DapOutputCategory;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            Self::Console => proto::DapOutputCategory::ConsoleOutput,
            Self::Important => proto::DapOutputCategory::Important,
            Self::Stdout => proto::DapOutputCategory::Stdout,
            Self::Stderr => proto::DapOutputCategory::Stderr,
            // Any category without a dedicated proto variant (e.g. telemetry)
            // is collapsed to `Unknown`, so this direction is lossy.
            _ => proto::DapOutputCategory::Unknown,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            proto::DapOutputCategory::ConsoleOutput => Self::Console,
            proto::DapOutputCategory::Important => Self::Important,
            proto::DapOutputCategory::Stdout => Self::Stdout,
            proto::DapOutputCategory::Stderr => Self::Stderr,
            proto::DapOutputCategory::Unknown => Self::Unknown,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::OutputEvent {
|
||||
type ProtoType = proto::DapOutputEvent;
|
||||
type Output = Self;
|
||||
|
||||
fn to_proto(&self) -> Self::ProtoType {
|
||||
proto::DapOutputEvent {
|
||||
category: self
|
||||
.category
|
||||
.as_ref()
|
||||
.map(|category| category.to_proto().into()),
|
||||
output: self.output.clone(),
|
||||
variables_reference: self.variables_reference,
|
||||
source: self.source.as_ref().map(|source| source.to_proto()),
|
||||
line: self.line.map(|line| line as u32),
|
||||
column: self.column.map(|column| column as u32),
|
||||
group: self.group.map(|group| group.to_proto().into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_proto(payload: Self::ProtoType) -> Self {
|
||||
dap_types::OutputEvent {
|
||||
category: payload
|
||||
.category
|
||||
.and_then(proto::DapOutputCategory::from_i32)
|
||||
.map(OutputEventCategory::from_proto),
|
||||
output: payload.output.clone(),
|
||||
variables_reference: payload.variables_reference,
|
||||
source: payload.source.map(Source::from_proto),
|
||||
line: payload.line.map(|line| line as u64),
|
||||
column: payload.column.map(|column| column as u64),
|
||||
group: payload
|
||||
.group
|
||||
.and_then(proto::DapOutputEventGroup::from_i32)
|
||||
.map(OutputEventGroup::from_proto),
|
||||
data: None,
|
||||
location_reference: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProtoConversion for dap_types::OutputEventGroup {
    type ProtoType = proto::DapOutputEventGroup;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::OutputEventGroup::Start => proto::DapOutputEventGroup::Start,
            dap_types::OutputEventGroup::StartCollapsed => {
                proto::DapOutputEventGroup::StartCollapsed
            }
            dap_types::OutputEventGroup::End => proto::DapOutputEventGroup::End,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            proto::DapOutputEventGroup::Start => Self::Start,
            proto::DapOutputEventGroup::StartCollapsed => Self::StartCollapsed,
            proto::DapOutputEventGroup::End => Self::End,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::CompletionItem {
|
||||
type ProtoType = proto::DapCompletionItem;
|
||||
type Output = Self;
|
||||
|
||||
fn to_proto(&self) -> Self::ProtoType {
|
||||
proto::DapCompletionItem {
|
||||
label: self.label.clone(),
|
||||
text: self.text.clone(),
|
||||
detail: self.detail.clone(),
|
||||
typ: self
|
||||
.type_
|
||||
.as_ref()
|
||||
.map(ProtoConversion::to_proto)
|
||||
.map(|typ| typ.into()),
|
||||
start: self.start,
|
||||
length: self.length,
|
||||
selection_start: self.selection_start,
|
||||
selection_length: self.selection_length,
|
||||
sort_text: self.sort_text.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_proto(payload: Self::ProtoType) -> Self {
|
||||
let typ = payload.typ(); // todo(debugger): This might be a potential issue/bug because it defaults to a type when it's None
|
||||
|
||||
Self {
|
||||
label: payload.label,
|
||||
detail: payload.detail,
|
||||
sort_text: payload.sort_text,
|
||||
text: payload.text.clone(),
|
||||
type_: Some(dap_types::CompletionItemType::from_proto(typ)),
|
||||
start: payload.start,
|
||||
length: payload.length,
|
||||
selection_start: payload.selection_start,
|
||||
selection_length: payload.selection_length,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ProtoConversion for dap_types::EvaluateArgumentsContext {
    type ProtoType = DapEvaluateContext;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::EvaluateArgumentsContext::Variables => {
                proto::DapEvaluateContext::EvaluateVariables
            }
            dap_types::EvaluateArgumentsContext::Watch => proto::DapEvaluateContext::Watch,
            dap_types::EvaluateArgumentsContext::Hover => proto::DapEvaluateContext::Hover,
            dap_types::EvaluateArgumentsContext::Repl => proto::DapEvaluateContext::Repl,
            dap_types::EvaluateArgumentsContext::Clipboard => proto::DapEvaluateContext::Clipboard,
            dap_types::EvaluateArgumentsContext::Unknown => {
                proto::DapEvaluateContext::EvaluateUnknown
            }
            // Catch-all for remaining/unlisted variants; note the explicit
            // `Unknown` arm above maps to the same value, so it is redundant
            // but kept for clarity.
            _ => proto::DapEvaluateContext::EvaluateUnknown,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            proto::DapEvaluateContext::EvaluateVariables => {
                dap_types::EvaluateArgumentsContext::Variables
            }
            proto::DapEvaluateContext::Watch => dap_types::EvaluateArgumentsContext::Watch,
            proto::DapEvaluateContext::Hover => dap_types::EvaluateArgumentsContext::Hover,
            proto::DapEvaluateContext::Repl => dap_types::EvaluateArgumentsContext::Repl,
            proto::DapEvaluateContext::Clipboard => dap_types::EvaluateArgumentsContext::Clipboard,
            proto::DapEvaluateContext::EvaluateUnknown => {
                dap_types::EvaluateArgumentsContext::Unknown
            }
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::CompletionItemType {
    type ProtoType = proto::DapCompletionItemType;
    type Output = Self;

    // Exhaustive 1:1 mapping; `File` gets the prefixed `CompletionItemFile`
    // proto name to avoid a clash in the proto namespace.
    fn to_proto(&self) -> Self::ProtoType {
        match self {
            dap_types::CompletionItemType::Class => proto::DapCompletionItemType::Class,
            dap_types::CompletionItemType::Color => proto::DapCompletionItemType::Color,
            dap_types::CompletionItemType::Constructor => proto::DapCompletionItemType::Constructor,
            dap_types::CompletionItemType::Customcolor => proto::DapCompletionItemType::Customcolor,
            dap_types::CompletionItemType::Enum => proto::DapCompletionItemType::Enum,
            dap_types::CompletionItemType::Field => proto::DapCompletionItemType::Field,
            dap_types::CompletionItemType::File => proto::DapCompletionItemType::CompletionItemFile,
            dap_types::CompletionItemType::Function => proto::DapCompletionItemType::Function,
            dap_types::CompletionItemType::Interface => proto::DapCompletionItemType::Interface,
            dap_types::CompletionItemType::Keyword => proto::DapCompletionItemType::Keyword,
            dap_types::CompletionItemType::Method => proto::DapCompletionItemType::Method,
            dap_types::CompletionItemType::Module => proto::DapCompletionItemType::Module,
            dap_types::CompletionItemType::Property => proto::DapCompletionItemType::Property,
            dap_types::CompletionItemType::Reference => proto::DapCompletionItemType::Reference,
            dap_types::CompletionItemType::Snippet => proto::DapCompletionItemType::Snippet,
            dap_types::CompletionItemType::Text => proto::DapCompletionItemType::Text,
            dap_types::CompletionItemType::Unit => proto::DapCompletionItemType::Unit,
            dap_types::CompletionItemType::Value => proto::DapCompletionItemType::Value,
            dap_types::CompletionItemType::Variable => proto::DapCompletionItemType::Variable,
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        match payload {
            proto::DapCompletionItemType::Class => dap_types::CompletionItemType::Class,
            proto::DapCompletionItemType::Color => dap_types::CompletionItemType::Color,
            proto::DapCompletionItemType::CompletionItemFile => dap_types::CompletionItemType::File,
            proto::DapCompletionItemType::Constructor => dap_types::CompletionItemType::Constructor,
            proto::DapCompletionItemType::Customcolor => dap_types::CompletionItemType::Customcolor,
            proto::DapCompletionItemType::Enum => dap_types::CompletionItemType::Enum,
            proto::DapCompletionItemType::Field => dap_types::CompletionItemType::Field,
            proto::DapCompletionItemType::Function => dap_types::CompletionItemType::Function,
            proto::DapCompletionItemType::Interface => dap_types::CompletionItemType::Interface,
            proto::DapCompletionItemType::Keyword => dap_types::CompletionItemType::Keyword,
            proto::DapCompletionItemType::Method => dap_types::CompletionItemType::Method,
            proto::DapCompletionItemType::Module => dap_types::CompletionItemType::Module,
            proto::DapCompletionItemType::Property => dap_types::CompletionItemType::Property,
            proto::DapCompletionItemType::Reference => dap_types::CompletionItemType::Reference,
            proto::DapCompletionItemType::Snippet => dap_types::CompletionItemType::Snippet,
            proto::DapCompletionItemType::Text => dap_types::CompletionItemType::Text,
            proto::DapCompletionItemType::Unit => dap_types::CompletionItemType::Unit,
            proto::DapCompletionItemType::Value => dap_types::CompletionItemType::Value,
            proto::DapCompletionItemType::Variable => dap_types::CompletionItemType::Variable,
        }
    }
}
|
||||
|
||||
impl ProtoConversion for dap_types::Thread {
    type ProtoType = proto::DapThread;
    type Output = Self;

    fn to_proto(&self) -> Self::ProtoType {
        proto::DapThread {
            id: self.id,
            name: self.name.clone(),
        }
    }

    fn from_proto(payload: Self::ProtoType) -> Self {
        Self {
            id: payload.id,
            name: payload.name,
        }
    }
}
|
891
crates/dap/src/transport.rs
Normal file
891
crates/dap/src/transport.rs
Normal file
|
@ -0,0 +1,891 @@
|
|||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use dap_types::{
|
||||
messages::{Message, Response},
|
||||
ErrorResponse,
|
||||
};
|
||||
use futures::{channel::oneshot, select, AsyncRead, AsyncReadExt as _, AsyncWrite, FutureExt as _};
|
||||
use gpui::AsyncApp;
|
||||
use settings::Settings as _;
|
||||
use smallvec::SmallVec;
|
||||
use smol::{
|
||||
channel::{unbounded, Receiver, Sender},
|
||||
io::{AsyncBufReadExt as _, AsyncWriteExt, BufReader},
|
||||
lock::Mutex,
|
||||
net::{TcpListener, TcpStream},
|
||||
process::Child,
|
||||
};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
net::{Ipv4Addr, SocketAddrV4},
|
||||
process::Stdio,
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use task::TCPHost;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{adapters::DebugAdapterBinary, debugger_settings::DebuggerSettings};
|
||||
|
||||
pub type IoHandler = Box<dyn Send + FnMut(IoKind, &str)>;
|
||||
|
||||
/// Which log stream an `IoHandler` is subscribed to.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum LogKind {
    /// The adapter process's own output (e.g. its stdout).
    Adapter,
    /// The DAP JSON-RPC traffic between Zed and the adapter.
    Rpc,
}
|
||||
|
||||
/// Direction/stream a logged message was observed on.
pub enum IoKind {
    StdIn,
    StdOut,
    StdErr,
}
|
||||
|
||||
/// The raw byte streams connecting Zed to a debug adapter.
pub struct TransportPipe {
    // Stream Zed writes DAP messages to.
    input: Box<dyn AsyncWrite + Unpin + Send + 'static>,
    // Stream Zed reads DAP messages from.
    output: Box<dyn AsyncRead + Unpin + Send + 'static>,
    // Optional adapter log stream (present for TCP transports, where the
    // process's stdout is free for logging).
    stdout: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
    // Optional adapter error stream.
    stderr: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
}
|
||||
|
||||
impl TransportPipe {
    /// Bundles the streams used to talk to (and observe) a debug adapter.
    pub fn new(
        input: Box<dyn AsyncWrite + Unpin + Send + 'static>,
        output: Box<dyn AsyncRead + Unpin + Send + 'static>,
        stdout: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
        stderr: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
    ) -> Self {
        TransportPipe {
            input,
            output,
            stdout,
            stderr,
        }
    }
}
|
||||
|
||||
type Requests = Arc<Mutex<HashMap<u64, oneshot::Sender<Result<Response>>>>>;
|
||||
type LogHandlers = Arc<parking_lot::Mutex<SmallVec<[(LogKind, IoHandler); 2]>>>;
|
||||
|
||||
/// Concrete transport used to communicate with a debug adapter.
pub enum Transport {
    /// Adapter spawned as a child process, speaking DAP over stdin/stdout.
    Stdio(StdioTransport),
    /// Adapter reached over a TCP connection.
    Tcp(TcpTransport),
    /// In-process fake used by tests.
    #[cfg(any(test, feature = "test-support"))]
    Fake(FakeTransport),
}
|
||||
|
||||
impl Transport {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
async fn start(_: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
return FakeTransport::start(cx)
|
||||
.await
|
||||
.map(|(transports, fake)| (transports, Self::Fake(fake)));
|
||||
}
|
||||
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
|
||||
if binary.connection.is_some() {
|
||||
TcpTransport::start(binary, cx)
|
||||
.await
|
||||
.map(|(transports, tcp)| (transports, Self::Tcp(tcp)))
|
||||
} else {
|
||||
StdioTransport::start(binary, cx)
|
||||
.await
|
||||
.map(|(transports, stdio)| (transports, Self::Stdio(stdio)))
|
||||
}
|
||||
}
|
||||
|
||||
fn has_adapter_logs(&self) -> bool {
|
||||
match self {
|
||||
Transport::Stdio(stdio_transport) => stdio_transport.has_adapter_logs(),
|
||||
Transport::Tcp(tcp_transport) => tcp_transport.has_adapter_logs(),
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Transport::Fake(fake_transport) => fake_transport.has_adapter_logs(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn kill(&self) -> Result<()> {
|
||||
match self {
|
||||
Transport::Stdio(stdio_transport) => stdio_transport.kill().await,
|
||||
Transport::Tcp(tcp_transport) => tcp_transport.kill().await,
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
Transport::Fake(fake_transport) => fake_transport.kill().await,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub(crate) fn as_fake(&self) -> &FakeTransport {
|
||||
match self {
|
||||
Transport::Fake(fake_transport) => fake_transport,
|
||||
_ => panic!("Not a fake transport layer"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Owns the message-pump state shared by all transport kinds: log
/// subscribers, in-flight request bookkeeping, and the outgoing channel.
pub(crate) struct TransportDelegate {
    // Subscribers notified of adapter/RPC log lines.
    log_handlers: LogHandlers,
    // Requests queued to be written to the adapter.
    current_requests: Requests,
    // Requests already sent, awaiting a matching `Response` by sequence id.
    pending_requests: Requests,
    transport: Transport,
    // Sender for messages destined for the adapter; populated once the
    // handler tasks are spawned.
    server_tx: Arc<Mutex<Option<Sender<Message>>>>,
}
|
||||
|
||||
impl TransportDelegate {
|
||||
pub(crate) async fn start(
|
||||
binary: &DebugAdapterBinary,
|
||||
cx: AsyncApp,
|
||||
) -> Result<((Receiver<Message>, Sender<Message>), Self)> {
|
||||
let (transport_pipes, transport) = Transport::start(binary, cx.clone()).await?;
|
||||
let mut this = Self {
|
||||
transport,
|
||||
server_tx: Default::default(),
|
||||
log_handlers: Default::default(),
|
||||
current_requests: Default::default(),
|
||||
pending_requests: Default::default(),
|
||||
};
|
||||
let messages = this.start_handlers(transport_pipes, cx).await?;
|
||||
Ok((messages, this))
|
||||
}
|
||||
|
||||
/// Spawns the background tasks that shuttle messages between the client
/// channels and the transport's pipes.
///
/// Returns the `(receiver, sender)` pair the client uses to talk to the
/// adapter; the sender is also stashed in `self.server_tx` so
/// [`Self::send_message`] can reach it.
async fn start_handlers(
    &mut self,
    mut params: TransportPipe,
    cx: AsyncApp,
) -> Result<(Receiver<Message>, Sender<Message>)> {
    let (client_tx, server_rx) = unbounded::<Message>();
    let (server_tx, client_rx) = unbounded::<Message>();

    // RPC logging is opt-in via settings; if the settings read fails we
    // default to no RPC logging.
    let log_dap_communications =
        cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications)
            .with_context(|| "Failed to get Debugger Setting log dap communications error in transport::start_handlers. Defaulting to false")
            .unwrap_or(false);

    let log_handler = if log_dap_communications {
        Some(self.log_handlers.clone())
    } else {
        None
    };

    cx.update(|cx| {
        // A separate adapter log stream is only present for some transports
        // (e.g. TCP captures the child's stdout here).
        if let Some(stdout) = params.stdout.take() {
            cx.background_executor()
                .spawn(Self::handle_adapter_log(stdout, log_handler.clone()))
                .detach_and_log_err(cx);
        }

        // Adapter -> client message pump.
        cx.background_executor()
            .spawn(Self::handle_output(
                params.output,
                client_tx,
                self.pending_requests.clone(),
                log_handler.clone(),
            ))
            .detach_and_log_err(cx);

        // stderr is always forwarded to adapter log handlers (not gated on
        // the RPC-logging setting).
        if let Some(stderr) = params.stderr.take() {
            cx.background_executor()
                .spawn(Self::handle_error(stderr, self.log_handlers.clone()))
                .detach_and_log_err(cx);
        }

        // Client -> adapter message pump.
        cx.background_executor()
            .spawn(Self::handle_input(
                params.input,
                client_rx,
                self.current_requests.clone(),
                self.pending_requests.clone(),
                log_handler.clone(),
            ))
            .detach_and_log_err(cx);
    })?;

    {
        let mut lock = self.server_tx.lock().await;
        *lock = Some(server_tx.clone());
    }

    Ok((server_rx, server_tx))
}
|
||||
|
||||
pub(crate) async fn add_pending_request(
|
||||
&self,
|
||||
sequence_id: u64,
|
||||
request: oneshot::Sender<Result<Response>>,
|
||||
) {
|
||||
let mut pending_requests = self.pending_requests.lock().await;
|
||||
pending_requests.insert(sequence_id, request);
|
||||
}
|
||||
|
||||
pub(crate) async fn cancel_pending_request(&self, sequence_id: &u64) {
|
||||
let mut pending_requests = self.pending_requests.lock().await;
|
||||
pending_requests.remove(sequence_id);
|
||||
}
|
||||
|
||||
pub(crate) async fn send_message(&self, message: Message) -> Result<()> {
|
||||
if let Some(server_tx) = self.server_tx.lock().await.as_ref() {
|
||||
server_tx
|
||||
.send(message)
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to send message: {}", e))
|
||||
} else {
|
||||
Err(anyhow!("Server tx already dropped"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Forwards the adapter's raw log stream to any registered
/// [`LogKind::Adapter`] handlers, line by line.
///
/// Runs until the stream closes or a read fails; always returns an error
/// describing why the loop stopped.
async fn handle_adapter_log<Stdout>(
    stdout: Stdout,
    log_handlers: Option<LogHandlers>,
) -> Result<()>
where
    Stdout: AsyncRead + Unpin + Send + 'static,
{
    let mut reader = BufReader::new(stdout);
    // Reused across iterations to avoid reallocating per line.
    let mut line = String::new();

    let result = loop {
        line.truncate(0);

        let bytes_read = match reader.read_line(&mut line).await {
            Ok(bytes_read) => bytes_read,
            Err(e) => break Err(e.into()),
        };

        // Zero bytes means EOF: the adapter closed its log stream.
        if bytes_read == 0 {
            break Err(anyhow!("Debugger log stream closed"));
        }

        if let Some(log_handlers) = log_handlers.as_ref() {
            for (kind, handler) in log_handlers.lock().iter_mut() {
                if matches!(kind, LogKind::Adapter) {
                    handler(IoKind::StdOut, line.as_str());
                }
            }
        }

        // Cooperatively yield so other tasks on the executor can run.
        smol::future::yield_now().await;
    };

    log::debug!("Handle adapter log dropped");

    result
}
|
||||
|
||||
/// Frames `message` with the DAP base-protocol `Content-Length` header
/// (length is in bytes, per the spec).
fn build_rpc_message(message: String) -> String {
    let content_length = message.len();
    format!("Content-Length: {content_length}\r\n\r\n{message}")
}
|
||||
|
||||
/// Drains `client_rx` and writes each message to the adapter's input,
/// framed with a `Content-Length` header.
///
/// For requests, the response callback is moved from `current_requests`
/// into `pending_requests` just before the bytes hit the wire so
/// `handle_output` can route the adapter's reply. Runs until the channel
/// closes or a write fails.
async fn handle_input<Stdin>(
    mut server_stdin: Stdin,
    client_rx: Receiver<Message>,
    current_requests: Requests,
    pending_requests: Requests,
    log_handlers: Option<LogHandlers>,
) -> Result<()>
where
    Stdin: AsyncWrite + Unpin + Send + 'static,
{
    let result = loop {
        match client_rx.recv().await {
            Ok(message) => {
                if let Message::Request(request) = &message {
                    // Promote the callback: this request is now in flight.
                    if let Some(sender) = current_requests.lock().await.remove(&request.seq) {
                        pending_requests.lock().await.insert(request.seq, sender);
                    }
                }

                let message = match serde_json::to_string(&message) {
                    Ok(message) => message,
                    Err(e) => break Err(e.into()),
                };

                // Mirror outgoing RPC traffic to registered log handlers.
                if let Some(log_handlers) = log_handlers.as_ref() {
                    for (kind, log_handler) in log_handlers.lock().iter_mut() {
                        if matches!(kind, LogKind::Rpc) {
                            log_handler(IoKind::StdIn, &message);
                        }
                    }
                }

                if let Err(e) = server_stdin
                    .write_all(Self::build_rpc_message(message).as_bytes())
                    .await
                {
                    break Err(e.into());
                }

                // Flush per message so the adapter sees the request promptly.
                if let Err(e) = server_stdin.flush().await {
                    break Err(e.into());
                }
            }
            Err(error) => break Err(error.into()),
        }

        // Cooperatively yield between messages.
        smol::future::yield_now().await;
    };

    log::debug!("Handle adapter input dropped");

    result
}
|
||||
|
||||
/// Reads framed DAP messages from the adapter and routes them.
///
/// Responses matching an entry in `pending_requests` are delivered to the
/// waiting oneshot sender; everything else (events, reverse requests,
/// unmatched responses) is forwarded to `client_tx`. Runs until the stream
/// errors out or `client_tx`'s receiver is dropped.
async fn handle_output<Stdout>(
    server_stdout: Stdout,
    client_tx: Sender<Message>,
    pending_requests: Requests,
    log_handlers: Option<LogHandlers>,
) -> Result<()>
where
    Stdout: AsyncRead + Unpin + Send + 'static,
{
    // Header buffer reused by receive_server_message across iterations.
    let mut recv_buffer = String::new();
    let mut reader = BufReader::new(server_stdout);

    let result = loop {
        let message =
            Self::receive_server_message(&mut reader, &mut recv_buffer, log_handlers.as_ref())
                .await;

        match message {
            Ok(Message::Response(res)) => {
                if let Some(tx) = pending_requests.lock().await.remove(&res.request_seq) {
                    // Send only fails if the requester dropped the receiver
                    // (request was cancelled) — not fatal, just trace it.
                    if let Err(e) = tx.send(Self::process_response(res)) {
                        log::trace!("Did not send response `{:?}` for a cancelled", e);
                    }
                } else {
                    // No local waiter registered; let the client handle it.
                    client_tx.send(Message::Response(res)).await?;
                };
            }
            Ok(message) => {
                client_tx.send(message).await?;
            }
            Err(e) => break Err(e),
        }

        // Cooperatively yield between messages.
        smol::future::yield_now().await;
    };

    // Explicitly close the channel so downstream consumers observe EOF.
    drop(client_tx);

    log::debug!("Handle adapter output dropped");

    result
}
|
||||
|
||||
/// Streams the adapter's stderr to all [`LogKind::Adapter`] log handlers.
///
/// Unlike RPC logging this receives the full handler list unconditionally
/// (see `start_handlers`), so stderr output is always observable.
async fn handle_error<Stderr>(stderr: Stderr, log_handlers: LogHandlers) -> Result<()>
where
    Stderr: AsyncRead + Unpin + Send + 'static,
{
    // Reused line buffer; cleared after each dispatched line.
    let mut buffer = String::new();

    let mut reader = BufReader::new(stderr);

    let result = loop {
        match reader.read_line(&mut buffer).await {
            // EOF: the adapter closed stderr.
            Ok(0) => break Err(anyhow!("debugger error stream closed")),
            Ok(_) => {
                for (kind, log_handler) in log_handlers.lock().iter_mut() {
                    if matches!(kind, LogKind::Adapter) {
                        log_handler(IoKind::StdErr, buffer.as_str());
                    }
                }

                buffer.truncate(0);
            }
            Err(error) => break Err(error.into()),
        }

        // Cooperatively yield between lines.
        smol::future::yield_now().await;
    };

    log::debug!("Handle adapter error dropped");

    result
}
|
||||
|
||||
fn process_response(response: Response) -> Result<Response> {
|
||||
if response.success {
|
||||
Ok(response)
|
||||
} else {
|
||||
if let Some(error_message) = response
|
||||
.body
|
||||
.clone()
|
||||
.and_then(|body| serde_json::from_value::<ErrorResponse>(body).ok())
|
||||
.and_then(|response| response.error.map(|msg| msg.format))
|
||||
.or_else(|| response.message.clone())
|
||||
{
|
||||
return Err(anyhow!(error_message));
|
||||
};
|
||||
|
||||
Err(anyhow!(
|
||||
"Received error response from adapter. Response: {:?}",
|
||||
response.clone()
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads one `Content-Length`-framed DAP message from `reader`.
///
/// Header lines are read into `buffer` (reused between calls) until the
/// blank `\r\n` separator; headers other than `Content-Length` are
/// ignored. The body is then read exactly, mirrored to any RPC log
/// handlers, and deserialized.
///
/// # Errors
/// Fails on EOF, a missing or invalid `Content-Length` header, non-UTF-8
/// content, or malformed JSON.
async fn receive_server_message<Stdout>(
    reader: &mut BufReader<Stdout>,
    buffer: &mut String,
    log_handlers: Option<&LogHandlers>,
) -> Result<Message>
where
    Stdout: AsyncRead + Unpin + Send + 'static,
{
    let mut content_length = None;
    loop {
        buffer.truncate(0);

        if reader
            .read_line(buffer)
            .await
            .with_context(|| "reading a message from server")?
            == 0
        {
            // Zero bytes read means the stream hit EOF mid-header.
            return Err(anyhow!("debugger reader stream closed"));
        };

        // A bare CRLF terminates the header section.
        if buffer == "\r\n" {
            break;
        }

        let parts = buffer.trim().split_once(": ");

        match parts {
            Some(("Content-Length", value)) => {
                content_length = Some(value.parse().context("invalid content length")?);
            }
            // Any other header (or a non-header line) is silently ignored.
            _ => {}
        }
    }

    let content_length = content_length.context("missing content length")?;

    // Read exactly the advertised number of body bytes.
    let mut content = vec![0; content_length];
    reader
        .read_exact(&mut content)
        .await
        .with_context(|| "reading after a loop")?;

    let message = std::str::from_utf8(&content).context("invalid utf8 from server")?;

    // Mirror incoming RPC traffic to registered log handlers.
    if let Some(log_handlers) = log_handlers {
        for (kind, log_handler) in log_handlers.lock().iter_mut() {
            if matches!(kind, LogKind::Rpc) {
                log_handler(IoKind::StdOut, &message);
            }
        }
    }

    Ok(serde_json::from_str::<Message>(message)?)
}
|
||||
|
||||
/// Tears the session down: closes the outgoing message channel, discards
/// all in-flight request callbacks, and kills the adapter process.
///
/// Both request maps stay locked across the kill so no new callbacks can
/// be registered while the transport goes away.
pub async fn shutdown(&self) -> Result<()> {
    log::debug!("Start shutdown client");

    // Taking the sender makes subsequent `send_message` calls fail fast.
    if let Some(server_tx) = self.server_tx.lock().await.take().as_ref() {
        server_tx.close();
    }

    let mut current_requests = self.current_requests.lock().await;
    let mut pending_requests = self.pending_requests.lock().await;

    current_requests.clear();
    pending_requests.clear();

    // Kill failures are logged but do not abort the shutdown.
    let _ = self.transport.kill().await.log_err();

    drop(current_requests);
    drop(pending_requests);

    log::debug!("Shutdown client completed");

    anyhow::Ok(())
}
|
||||
|
||||
/// Whether the underlying transport exposes a separate adapter log stream.
pub fn has_adapter_logs(&self) -> bool {
    self.transport.has_adapter_logs()
}
|
||||
|
||||
/// Read-only access to the underlying transport.
pub fn transport(&self) -> &Transport {
    &self.transport
}
|
||||
|
||||
pub fn add_log_handler<F>(&self, f: F, kind: LogKind)
|
||||
where
|
||||
F: 'static + Send + FnMut(IoKind, &str),
|
||||
{
|
||||
let mut log_handlers = self.log_handlers.lock();
|
||||
log_handlers.push((kind, Box::new(f)));
|
||||
}
|
||||
}
|
||||
|
||||
/// A DAP transport that exchanges RPC messages with the adapter over a TCP
/// socket while the adapter itself runs as a child process.
pub struct TcpTransport {
    pub port: u16,
    pub host: Ipv4Addr,
    // Connection timeout in milliseconds (used with `Duration::from_millis`).
    pub timeout: u64,
    // The spawned debug adapter process; killed on shutdown.
    process: Mutex<Child>,
}
|
||||
|
||||
impl TcpTransport {
|
||||
/// Get an open port to use with the tcp client when not supplied by debug config
|
||||
pub async fn port(host: &TCPHost) -> Result<u16> {
|
||||
if let Some(port) = host.port {
|
||||
Ok(port)
|
||||
} else {
|
||||
Ok(TcpListener::bind(SocketAddrV4::new(host.host(), 0))
|
||||
.await?
|
||||
.local_addr()?
|
||||
.port())
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code, reason = "This is used in non test builds of Zed")]
/// Spawns the adapter binary and connects to it over TCP.
///
/// The adapter is expected to listen on `connection_args.host:port`; we
/// retry connecting until the socket accepts or the timeout elapses. The
/// child's stdout/stderr are captured for adapter logging.
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
    let Some(connection_args) = binary.connection.as_ref() else {
        return Err(anyhow!("No connection arguments provided"));
    };

    let host = connection_args.host;
    let port = connection_args.port;

    let mut command = util::command::new_smol_command(&binary.command);

    if let Some(cwd) = &binary.cwd {
        command.current_dir(cwd);
    }

    if let Some(args) = &binary.arguments {
        command.args(args);
    }

    if let Some(envs) = &binary.envs {
        command.envs(envs);
    }

    command
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        // Don't leak the adapter process if we bail out below.
        .kill_on_drop(true);

    let mut process = command
        .spawn()
        .with_context(|| "failed to start debug adapter.")?;

    let address = SocketAddrV4::new(host, port);

    // Timeout (in ms) comes from the connection args, else the global
    // debugger settings, else a 2-second fallback.
    let timeout = connection_args.timeout.unwrap_or_else(|| {
        cx.update(|cx| DebuggerSettings::get_global(cx).timeout)
            .unwrap_or(2000u64)
    });

    // Race a connect-retry loop (100ms between attempts) against the
    // timeout timer.
    let (rx, tx) = select! {
        _ = cx.background_executor().timer(Duration::from_millis(timeout)).fuse() => {
            return Err(anyhow!(format!("Connection to TCP DAP timeout {}:{}", host, port)))
        },
        result = cx.spawn(|cx| async move {
            loop {
                match TcpStream::connect(address).await {
                    Ok(stream) => return stream.split(),
                    Err(_) => {
                        // Adapter not listening yet; retry shortly.
                        cx.background_executor().timer(Duration::from_millis(100)).await;
                    }
                }
            }
        }).fuse() => result
    };
    log::info!(
        "Debug adapter has connected to TCP server {}:{}",
        host,
        port
    );
    let stdout = process.stdout.take();
    let stderr = process.stderr.take();

    let this = Self {
        port,
        host,
        process: Mutex::new(process),
        timeout,
    };

    let pipe = TransportPipe::new(
        // The TCP socket carries the RPC channel; the child's own
        // stdout/stderr feed the adapter log streams.
        Box::new(tx),
        Box::new(BufReader::new(rx)),
        stdout.map(|s| Box::new(s) as Box<dyn AsyncRead + Unpin + Send>),
        stderr.map(|s| Box::new(s) as Box<dyn AsyncRead + Unpin + Send>),
    );

    Ok((pipe, this))
}
|
||||
|
||||
/// TCP transports capture the child's stdout/stderr separately from the
/// RPC socket, so adapter logs are available.
fn has_adapter_logs(&self) -> bool {
    true
}
|
||||
|
||||
/// Kills the spawned adapter process.
async fn kill(&self) -> Result<()> {
    let mut process = self.process.lock().await;
    process.kill()?;
    Ok(())
}
|
||||
}
|
||||
|
||||
/// A DAP transport that exchanges RPC messages with the adapter directly
/// over the child process's stdin/stdout.
pub struct StdioTransport {
    // The spawned debug adapter process; killed on shutdown.
    process: Mutex<Child>,
}
|
||||
|
||||
impl StdioTransport {
|
||||
#[allow(dead_code, reason = "This is used in non test builds of Zed")]
|
||||
async fn start(binary: &DebugAdapterBinary, _: AsyncApp) -> Result<(TransportPipe, Self)> {
|
||||
let mut command = util::command::new_smol_command(&binary.command);
|
||||
|
||||
if let Some(cwd) = &binary.cwd {
|
||||
command.current_dir(cwd);
|
||||
}
|
||||
|
||||
if let Some(args) = &binary.arguments {
|
||||
command.args(args);
|
||||
}
|
||||
|
||||
if let Some(envs) = &binary.envs {
|
||||
command.envs(envs);
|
||||
}
|
||||
|
||||
command
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.kill_on_drop(true);
|
||||
|
||||
let mut process = command
|
||||
.spawn()
|
||||
.with_context(|| "failed to spawn command.")?;
|
||||
|
||||
let stdin = process
|
||||
.stdin
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to open stdin"))?;
|
||||
let stdout = process
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to open stdout"))?;
|
||||
let stderr = process
|
||||
.stderr
|
||||
.take()
|
||||
.map(|io_err| Box::new(io_err) as Box<dyn AsyncRead + Unpin + Send>);
|
||||
|
||||
if stderr.is_none() {
|
||||
bail!(
|
||||
"Failed to connect to stderr for debug adapter command {}",
|
||||
&binary.command
|
||||
);
|
||||
}
|
||||
|
||||
log::info!("Debug adapter has connected to stdio adapter");
|
||||
|
||||
let process = Mutex::new(process);
|
||||
|
||||
Ok((
|
||||
TransportPipe::new(
|
||||
Box::new(stdin),
|
||||
Box::new(BufReader::new(stdout)),
|
||||
None,
|
||||
stderr,
|
||||
),
|
||||
Self { process },
|
||||
))
|
||||
}
|
||||
|
||||
/// Stdio transports use stdout for the RPC channel itself, so there is no
/// separate adapter log stream.
fn has_adapter_logs(&self) -> bool {
    false
}
|
||||
|
||||
/// Kills the spawned adapter process.
async fn kill(&self) -> Result<()> {
    let mut process = self.process.lock().await;
    process.kill()?;
    Ok(())
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
// Handler invoked for each client request the fake adapter receives: gets
// the request's seq, its JSON arguments, and the writer used to push the
// scripted response; returns the future that performs the write.
type RequestHandler = Box<
    dyn Send
        + FnMut(
            u64,
            serde_json::Value,
            Arc<Mutex<async_pipe::PipeWriter>>,
        ) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send>>,
>;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
// Handler invoked when the client answers a reverse request.
type ResponseHandler = Box<dyn Send + Fn(Response)>;
|
||||
|
||||
/// An in-memory transport used by tests to script debug adapter behavior
/// without spawning a real process.
#[cfg(any(test, feature = "test-support"))]
pub struct FakeTransport {
    // for sending fake response back from adapter side
    request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
    // for reverse request responses
    response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeTransport {
|
||||
/// Registers a scripted reply for client requests with command `R::COMMAND`.
///
/// When the fake adapter receives a matching request, `handler` produces
/// either a typed success body or an [`ErrorResponse`]; the result is
/// serialized as a DAP [`Response`] and written back through the fake
/// stdout pipe.
pub async fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
where
    F: 'static + Send + FnMut(u64, R::Arguments) -> Result<R::Response, ErrorResponse>,
{
    self.request_handlers.lock().await.insert(
        R::COMMAND,
        Box::new(
            move |seq, args, writer: Arc<Mutex<async_pipe::PipeWriter>>| {
                let response = handler(seq, serde_json::from_value(args).unwrap());

                let message = serde_json::to_string(&Message::Response(Response {
                    seq: seq + 1,
                    request_seq: seq,
                    success: response.as_ref().is_ok(),
                    command: R::COMMAND.into(),
                    // Error replies carry no body; success bodies are
                    // serialized to JSON (or None if serialization fails).
                    body: util::maybe!({ serde_json::to_value(response.ok()?).ok() }),
                    message: None,
                }))
                .unwrap();

                let writer = writer.clone();

                // The handler runs synchronously; only the write back to the
                // client happens in the returned future.
                Box::pin(async move {
                    let mut writer = writer.lock().await;
                    writer
                        .write_all(TransportDelegate::build_rpc_message(message).as_bytes())
                        .await
                        .unwrap();
                    writer.flush().await.unwrap();
                })
            },
        ),
    );
}
|
||||
|
||||
pub async fn on_response<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
where
|
||||
F: 'static + Send + Fn(Response),
|
||||
{
|
||||
self.response_handlers
|
||||
.lock()
|
||||
.await
|
||||
.insert(R::COMMAND, Box::new(handler));
|
||||
}
|
||||
|
||||
/// Builds the fake transport and spawns the task that emulates an adapter.
///
/// Everything the client writes to its stdin pipe is parsed as DAP
/// messages and dispatched: ordinary requests go to registered request
/// handlers, reverse requests (`RunInTerminal`, `StartDebugging`) and
/// events are echoed back over the fake stdout pipe, and responses go to
/// registered response handlers.
async fn start(cx: AsyncApp) -> Result<(TransportPipe, Self)> {
    let this = Self {
        request_handlers: Arc::new(Mutex::new(HashMap::default())),
        response_handlers: Arc::new(Mutex::new(HashMap::default())),
    };
    use dap_types::requests::{Request, RunInTerminal, StartDebugging};
    use serde_json::json;

    // stdin: client -> fake adapter; stdout: fake adapter -> client.
    let (stdin_writer, stdin_reader) = async_pipe::pipe();
    let (stdout_writer, stdout_reader) = async_pipe::pipe();

    let request_handlers = this.request_handlers.clone();
    let response_handlers = this.response_handlers.clone();
    // Shared so request handlers can write their responses concurrently.
    let stdout_writer = Arc::new(Mutex::new(stdout_writer));

    cx.background_executor()
        .spawn(async move {
            let mut reader = BufReader::new(stdin_reader);
            let mut buffer = String::new();

            loop {
                let message =
                    TransportDelegate::receive_server_message(&mut reader, &mut buffer, None)
                        .await;

                match message {
                    Err(error) => {
                        // Stream closed or malformed input: stop emulating.
                        break anyhow!(error);
                    }
                    Ok(message) => {
                        match message {
                            Message::Request(request) => {
                                // redirect reverse requests to stdout writer/reader
                                if request.command == RunInTerminal::COMMAND
                                    || request.command == StartDebugging::COMMAND
                                {
                                    let message =
                                        serde_json::to_string(&Message::Request(request))
                                            .unwrap();

                                    let mut writer = stdout_writer.lock().await;
                                    writer
                                        .write_all(
                                            TransportDelegate::build_rpc_message(message)
                                                .as_bytes(),
                                        )
                                        .await
                                        .unwrap();
                                    writer.flush().await.unwrap();
                                } else {
                                    // Ordinary request: dispatch to the
                                    // scripted handler, if one was registered.
                                    if let Some(handle) = request_handlers
                                        .lock()
                                        .await
                                        .get_mut(request.command.as_str())
                                    {
                                        handle(
                                            request.seq,
                                            request.arguments.unwrap_or(json!({})),
                                            stdout_writer.clone(),
                                        )
                                        .await;
                                    } else {
                                        log::error!(
                                            "No request handler for {}",
                                            request.command
                                        );
                                    }
                                }
                            }
                            Message::Event(event) => {
                                // Events are echoed verbatim to the client.
                                let message =
                                    serde_json::to_string(&Message::Event(event)).unwrap();

                                let mut writer = stdout_writer.lock().await;
                                writer
                                    .write_all(
                                        TransportDelegate::build_rpc_message(message)
                                            .as_bytes(),
                                    )
                                    .await
                                    .unwrap();
                                writer.flush().await.unwrap();
                            }
                            Message::Response(response) => {
                                if let Some(handle) = response_handlers
                                    .lock()
                                    .await
                                    .get(response.command.as_str())
                                {
                                    handle(response);
                                } else {
                                    log::error!("No response handler for {}", response.command);
                                }
                            }
                        }
                    }
                }
            }
        })
        .detach();

    Ok((
        TransportPipe::new(Box::new(stdin_writer), Box::new(stdout_reader), None, None),
        this,
    ))
}
|
||||
|
||||
/// The fake transport produces no adapter log stream.
fn has_adapter_logs(&self) -> bool {
    false
}
|
||||
|
||||
/// No-op: there is no real process behind the fake transport to kill.
async fn kill(&self) -> Result<()> {
    Ok(())
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue