debugger: More tidy up for SSH (#28993)
Split `locator` out of DebugTaskDefinition to make it clearer when location needs to happen.

Release Notes:

- N/A

---------

Co-authored-by: Anthony Eid <hello@anthonyeid.me>
Co-authored-by: Anthony <anthony@zed.dev>
Co-authored-by: Cole Miller <m@cole-miller.net>
parent d13cd007a2
commit 9d35f0389d
57 changed files with 1146 additions and 884 deletions
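
For context on the headline change: `locator` no longer lives on `DebugTaskDefinition` itself; a definition is wrapped in a `DebugTaskTemplate { locator, definition }` and the locator is dealt with before the definition reaches an adapter (see the `NewSessionModal` and `Editor` hunks below). The following is a minimal, self-contained sketch of that split; field lists are abbreviated and the `locator` type is assumed, so this is not the exact `task` crate source.

```rust
// Toy illustration of the split described in the commit message above.
struct DebugTaskDefinition {
    adapter: String,
    label: String,
    stop_on_entry: Option<bool>,
    // ...request, tcp_connection, initialize_args; no `locator` field anymore.
}

struct DebugTaskTemplate {
    locator: Option<String>, // assumed type; this diff only ever constructs `None`
    definition: DebugTaskDefinition,
}

fn main() {
    let template = DebugTaskTemplate {
        locator: None,
        definition: DebugTaskDefinition {
            adapter: "fake-adapter".into(),
            label: "attach example".into(),
            stop_on_entry: None,
        },
    };
    // Locator resolution (when present) happens on the template; adapters and
    // sessions only ever see the inner definition.
    println!("locator: {:?}", template.locator);
    let definition = template.definition;
    println!(
        "{} ({}), stop_on_entry: {:?}",
        definition.label, definition.adapter, definition.stop_on_entry
    );
}
```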
Cargo.lock (generated; 5 changed lines)

@@ -4013,6 +4013,7 @@ dependencies = [
 "node_runtime",
 "parking_lot",
 "paths",
+"proto",
 "schemars",
 "serde",
 "serde_json",
@@ -4897,7 +4898,6 @@ dependencies = [
 "client",
 "collections",
 "context_server",
-"dap",
 "dirs 5.0.1",
 "env_logger 0.11.8",
 "extension",
@@ -11743,6 +11743,7 @@ dependencies = [
 "client",
 "clock",
 "dap",
+"dap_adapters",
 "env_logger 0.11.8",
 "extension",
 "extension_host",
@@ -14214,11 +14215,11 @@ version = "0.1.0"
 dependencies = [
 "anyhow",
 "collections",
-"dap-types",
 "futures 0.3.31",
 "gpui",
 "hex",
 "parking_lot",
+"proto",
 "schemars",
 "serde",
 "serde_json",

@@ -1,7 +1,7 @@
 use crate::tests::TestServer;
 use call::ActiveCall;
 use collections::{HashMap, HashSet};
-use dap::DapRegistry;
 use extension::ExtensionHostProxy;
 use fs::{FakeFs, Fs as _, RemoveOptions};
 use futures::StreamExt as _;
@@ -86,7 +86,6 @@ async fn test_sharing_an_ssh_remote_project(
 http_client: remote_http_client,
 node_runtime: node,
 languages,
-debug_adapters: Arc::new(DapRegistry::fake()),
 extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
 },
 cx,
@@ -254,7 +253,6 @@ async fn test_ssh_collaboration_git_branches(
 http_client: remote_http_client,
 node_runtime: node,
 languages,
-debug_adapters: Arc::new(DapRegistry::fake()),
 extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
 },
 cx,
@@ -460,7 +458,6 @@ async fn test_ssh_collaboration_formatting_with_prettier(
 http_client: remote_http_client,
 node_runtime: NodeRuntime::unavailable(),
 languages,
-debug_adapters: Arc::new(DapRegistry::fake()),
 extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
 },
 cx,

@@ -14,7 +14,7 @@ use client::{
 use clock::FakeSystemClock;
 use collab_ui::channel_view::ChannelView;
 use collections::{HashMap, HashSet};
-use dap::DapRegistry;
 use fs::FakeFs;
 use futures::{StreamExt as _, channel::oneshot};
 use git::GitHostingProviderRegistry;
@@ -275,14 +275,12 @@ impl TestServer {
 let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
 let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
 let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
-let debug_adapters = Arc::new(DapRegistry::default());
 let session = cx.new(|cx| AppSession::new(Session::test(), cx));
 let app_state = Arc::new(workspace::AppState {
 client: client.clone(),
 user_store: user_store.clone(),
 workspace_store,
 languages: language_registry,
-debug_adapters,
 fs: fs.clone(),
 build_window_options: |_, _| Default::default(),
 node_runtime: NodeRuntime::unavailable(),
@@ -798,7 +796,6 @@ impl TestClient {
 self.app_state.node_runtime.clone(),
 self.app_state.user_store.clone(),
 self.app_state.languages.clone(),
-self.app_state.debug_adapters.clone(),
 self.app_state.fs.clone(),
 None,
 cx,

@@ -39,6 +39,7 @@ log.workspace = true
 node_runtime.workspace = true
 parking_lot.workspace = true
 paths.workspace = true
+proto.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true

@@ -3,7 +3,8 @@ use anyhow::{Context as _, Result, anyhow};
 use async_compression::futures::bufread::GzipDecoder;
 use async_tar::Archive;
 use async_trait::async_trait;
-use dap_types::StartDebuggingRequestArguments;
+use collections::HashMap;
+use dap_types::{StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest};
 use futures::io::BufReader;
 use gpui::{AsyncApp, SharedString};
 pub use http_client::{HttpClient, github::latest_github_release};
@@ -13,16 +14,10 @@ use serde::{Deserialize, Serialize};
 use settings::WorktreeId;
 use smol::{self, fs::File, lock::Mutex};
 use std::{
-borrow::Borrow,
-collections::{HashMap, HashSet},
-ffi::{OsStr, OsString},
-fmt::Debug,
-net::Ipv4Addr,
-ops::Deref,
-path::PathBuf,
-sync::Arc,
+borrow::Borrow, collections::HashSet, ffi::OsStr, fmt::Debug, net::Ipv4Addr, ops::Deref,
+path::PathBuf, sync::Arc,
 };
-use task::DebugTaskDefinition;
+use task::{DebugTaskDefinition, TcpArgumentsTemplate};
 use util::ResultExt;
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -93,17 +88,91 @@ pub struct TcpArguments {
 pub port: u16,
 pub timeout: Option<u64>,
 }
 
+impl TcpArguments {
+pub fn from_proto(proto: proto::TcpHost) -> anyhow::Result<Self> {
+let host = TcpArgumentsTemplate::from_proto(proto)?;
+Ok(TcpArguments {
+host: host.host.ok_or_else(|| anyhow!("missing host"))?,
+port: host.port.ok_or_else(|| anyhow!("missing port"))?,
+timeout: host.timeout,
+})
+}
+
+pub fn to_proto(&self) -> proto::TcpHost {
+TcpArgumentsTemplate {
+host: Some(self.host),
+port: Some(self.port),
+timeout: self.timeout,
+}
+.to_proto()
+}
+}
+
 #[derive(Debug, Clone)]
 pub struct DebugAdapterBinary {
-pub adapter_name: DebugAdapterName,
 pub command: String,
-pub arguments: Option<Vec<OsString>>,
-pub envs: Option<HashMap<String, String>>,
+pub arguments: Vec<String>,
+pub envs: HashMap<String, String>,
 pub cwd: Option<PathBuf>,
 pub connection: Option<TcpArguments>,
 pub request_args: StartDebuggingRequestArguments,
 }
 
+impl DebugAdapterBinary {
+pub fn from_proto(binary: proto::DebugAdapterBinary) -> anyhow::Result<Self> {
+let request = match binary.launch_type() {
+proto::debug_adapter_binary::LaunchType::Launch => {
+StartDebuggingRequestArgumentsRequest::Launch
+}
+proto::debug_adapter_binary::LaunchType::Attach => {
+StartDebuggingRequestArgumentsRequest::Attach
+}
+};
+
+Ok(DebugAdapterBinary {
+command: binary.command,
+arguments: binary.arguments,
+envs: binary.envs.into_iter().collect(),
+connection: binary
+.connection
+.map(TcpArguments::from_proto)
+.transpose()?,
+request_args: StartDebuggingRequestArguments {
+configuration: serde_json::from_str(&binary.configuration)?,
+request,
+},
+cwd: binary.cwd.map(|cwd| cwd.into()),
+})
+}
+
+pub fn to_proto(&self) -> proto::DebugAdapterBinary {
+proto::DebugAdapterBinary {
+command: self.command.clone(),
+arguments: self.arguments.clone(),
+envs: self
+.envs
+.iter()
+.map(|(k, v)| (k.clone(), v.clone()))
+.collect(),
+cwd: self
+.cwd
+.as_ref()
+.map(|cwd| cwd.to_string_lossy().to_string()),
+connection: self.connection.as_ref().map(|c| c.to_proto()),
+launch_type: match self.request_args.request {
+StartDebuggingRequestArgumentsRequest::Launch => {
+proto::debug_adapter_binary::LaunchType::Launch.into()
+}
+StartDebuggingRequestArgumentsRequest::Attach => {
+proto::debug_adapter_binary::LaunchType::Attach.into()
+}
+},
+configuration: self.request_args.configuration.to_string(),
+}
+}
+}
+
 #[derive(Debug)]
 pub struct AdapterVersion {
 pub tag_name: String,
@@ -318,22 +387,22 @@ impl FakeAdapter {
 
 fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
 use serde_json::json;
-use task::DebugRequestType;
+use task::DebugRequest;
 
 let value = json!({
 "request": match config.request {
-DebugRequestType::Launch(_) => "launch",
-DebugRequestType::Attach(_) => "attach",
+DebugRequest::Launch(_) => "launch",
+DebugRequest::Attach(_) => "attach",
 },
-"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
+"process_id": if let DebugRequest::Attach(attach_config) = &config.request {
 attach_config.process_id
 } else {
 None
 },
 });
 let request = match config.request {
-DebugRequestType::Launch(_) => dap_types::StartDebuggingRequestArgumentsRequest::Launch,
-DebugRequestType::Attach(_) => dap_types::StartDebuggingRequestArgumentsRequest::Attach,
+DebugRequest::Launch(_) => dap_types::StartDebuggingRequestArgumentsRequest::Launch,
+DebugRequest::Attach(_) => dap_types::StartDebuggingRequestArgumentsRequest::Attach,
 };
 StartDebuggingRequestArguments {
 configuration: value,
@@ -357,11 +426,10 @@ impl DebugAdapter for FakeAdapter {
 _: &mut AsyncApp,
 ) -> Result<DebugAdapterBinary> {
 Ok(DebugAdapterBinary {
-adapter_name: Self::ADAPTER_NAME.into(),
 command: "command".into(),
-arguments: None,
+arguments: vec![],
 connection: None,
-envs: None,
+envs: HashMap::default(),
 cwd: None,
 request_args: self.request_args(config),
 })

@@ -1,5 +1,5 @@
 use crate::{
-adapters::{DebugAdapterBinary, DebugAdapterName},
+adapters::DebugAdapterBinary,
 transport::{IoKind, LogKind, TransportDelegate},
 };
 use anyhow::{Result, anyhow};
@@ -88,7 +88,6 @@ impl DebugAdapterClient {
 ) -> Result<Self> {
 let binary = match self.transport_delegate.transport() {
 crate::transport::Transport::Tcp(tcp_transport) => DebugAdapterBinary {
-adapter_name: binary.adapter_name,
 command: binary.command,
 arguments: binary.arguments,
 envs: binary.envs,
@@ -219,9 +218,6 @@ impl DebugAdapterClient {
 self.id
 }
 
-pub fn name(&self) -> DebugAdapterName {
-self.binary.adapter_name.clone()
-}
 pub fn binary(&self) -> &DebugAdapterBinary {
 &self.binary
 }
@@ -322,7 +318,6 @@ mod tests {
 let client = DebugAdapterClient::start(
 crate::client::SessionId(1),
 DebugAdapterBinary {
-adapter_name: "adapter".into(),
 command: "command".into(),
 arguments: Default::default(),
 envs: Default::default(),
@@ -393,7 +388,6 @@ mod tests {
 let client = DebugAdapterClient::start(
 crate::client::SessionId(1),
 DebugAdapterBinary {
-adapter_name: "adapter".into(),
 command: "command".into(),
 arguments: Default::default(),
 envs: Default::default(),
@@ -447,7 +441,6 @@ mod tests {
 let client = DebugAdapterClient::start(
 crate::client::SessionId(1),
 DebugAdapterBinary {
-adapter_name: "test-adapter".into(),
 command: "command".into(),
 arguments: Default::default(),
 envs: Default::default(),

@@ -7,7 +7,7 @@ pub mod transport;
 
 pub use dap_types::*;
 pub use registry::DapRegistry;
-pub use task::DebugRequestType;
+pub use task::DebugRequest;
 
 pub type ScopeId = u64;
 pub type VariableReference = u64;

@@ -1,3 +1,4 @@
+use gpui::{App, Global};
 use parking_lot::RwLock;
 
 use crate::adapters::{DebugAdapter, DebugAdapterName};
@@ -11,8 +12,20 @@ struct DapRegistryState {
 #[derive(Clone, Default)]
 /// Stores available debug adapters.
 pub struct DapRegistry(Arc<RwLock<DapRegistryState>>);
+impl Global for DapRegistry {}
 
 impl DapRegistry {
+pub fn global(cx: &mut App) -> &mut Self {
+let ret = cx.default_global::<Self>();
+
+#[cfg(any(test, feature = "test-support"))]
+if ret.adapter(crate::FakeAdapter::ADAPTER_NAME).is_none() {
+ret.add_adapter(Arc::new(crate::FakeAdapter::new()));
+}
+
+ret
+}
+
 pub fn add_adapter(&self, adapter: Arc<dyn DebugAdapter>) {
 let name = adapter.name();
 let _previous_value = self.0.write().adapters.insert(name, adapter);
@@ -21,19 +34,12 @@ impl DapRegistry {
 "Attempted to insert a new debug adapter when one is already registered"
 );
 }
 
 pub fn adapter(&self, name: &str) -> Option<Arc<dyn DebugAdapter>> {
 self.0.read().adapters.get(name).cloned()
 }
 
 pub fn enumerate_adapters(&self) -> Vec<DebugAdapterName> {
 self.0.read().adapters.keys().cloned().collect()
 }
-
-#[cfg(any(test, feature = "test-support"))]
-pub fn fake() -> Self {
-use crate::FakeAdapter;
-
-let register = Self::default();
-register.add_adapter(Arc::new(FakeAdapter::new()));
-register
-}
 }
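
An editorial note on the registry hunks above: `DapRegistry` becomes a gpui global (`impl Global for DapRegistry`), obtained with `DapRegistry::global(cx)` here and with `cx.update_default_global` in the `dap_adapters` hunks further down, and the dedicated `DapRegistry::fake()` constructor is removed because the global lazily registers the fake adapter in test builds. The following is a rough, dependency-free sketch of that shape using only `std` types as a stand-in for gpui's global API; it is not Zed's actual code.

```rust
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

#[derive(Default)]
struct Registry {
    adapters: Mutex<HashMap<String, &'static str>>,
}

impl Registry {
    // Stand-in for `DapRegistry::global(cx)` / `cx.default_global::<Self>()`.
    fn global() -> &'static Registry {
        static GLOBAL: OnceLock<Registry> = OnceLock::new();
        let registry = GLOBAL.get_or_init(Registry::default);
        // In test builds, lazily register the fake adapter instead of
        // exposing a separate `fake()` constructor.
        #[cfg(test)]
        if registry.adapter("fake-adapter").is_none() {
            registry.add_adapter("fake-adapter", "FakeAdapter");
        }
        registry
    }

    fn add_adapter(&self, name: &str, adapter: &'static str) {
        self.adapters.lock().unwrap().insert(name.to_string(), adapter);
    }

    fn adapter(&self, name: &str) -> Option<&'static str> {
        self.adapters.lock().unwrap().get(name).copied()
    }
}

fn main() {
    Registry::global().add_adapter("Delve", "GoDebugAdapter");
    println!("{:?}", Registry::global().adapter("Delve"));
}
```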

@@ -21,7 +21,7 @@ use std::{
 sync::Arc,
 time::Duration,
 };
-use task::TCPHost;
+use task::TcpArgumentsTemplate;
 use util::ResultExt as _;
 
 use crate::{adapters::DebugAdapterBinary, debugger_settings::DebuggerSettings};
@@ -74,16 +74,14 @@ pub enum Transport {
 }
 
 impl Transport {
-#[cfg(any(test, feature = "test-support"))]
-async fn start(_: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
-#[cfg(any(test, feature = "test-support"))]
-return FakeTransport::start(cx)
-.await
-.map(|(transports, fake)| (transports, Self::Fake(fake)));
-}
-
-#[cfg(not(any(test, feature = "test-support")))]
 async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
+#[cfg(any(test, feature = "test-support"))]
+if cfg!(any(test, feature = "test-support")) {
+return FakeTransport::start(cx)
+.await
+.map(|(transports, fake)| (transports, Self::Fake(fake)));
+}
+
 if binary.connection.is_some() {
 TcpTransport::start(binary, cx)
 .await
@@ -520,18 +518,21 @@ pub struct TcpTransport {
 
 impl TcpTransport {
 /// Get an open port to use with the tcp client when not supplied by debug config
-pub async fn port(host: &TCPHost) -> Result<u16> {
+pub async fn port(host: &TcpArgumentsTemplate) -> Result<u16> {
 if let Some(port) = host.port {
 Ok(port)
 } else {
-Ok(TcpListener::bind(SocketAddrV4::new(host.host(), 0))
-.await?
-.local_addr()?
-.port())
+Self::unused_port(host.host()).await
 }
 }
 
-#[allow(dead_code, reason = "This is used in non test builds of Zed")]
+pub async fn unused_port(host: Ipv4Addr) -> Result<u16> {
+Ok(TcpListener::bind(SocketAddrV4::new(host, 0))
+.await?
+.local_addr()?
+.port())
+}
+
 async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
 let Some(connection_args) = binary.connection.as_ref() else {
 return Err(anyhow!("No connection arguments provided"));
@@ -546,13 +547,8 @@ impl TcpTransport {
 command.current_dir(cwd);
 }
 
-if let Some(args) = &binary.arguments {
-command.args(args);
-}
-
-if let Some(envs) = &binary.envs {
-command.envs(envs);
-}
+command.args(&binary.arguments);
+command.envs(&binary.envs);
 
 command
 .stdin(Stdio::null())
@@ -635,13 +631,8 @@ impl StdioTransport {
 command.current_dir(cwd);
 }
 
-if let Some(args) = &binary.arguments {
-command.args(args);
-}
-
-if let Some(envs) = &binary.envs {
-command.envs(envs);
-}
+command.args(&binary.arguments);
+command.envs(&binary.envs);
 
 command
 .stdin(Stdio::piped())
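
One pattern worth calling out from the transport hunks above: the two `#[cfg]`-gated copies of `Transport::start` collapse into a single function with a test-only early return. Gating the `if` with `#[cfg(...)]` strips it from release builds entirely, while the `cfg!(...)` condition keeps the real-transport code below it reachable (and therefore compiled and referenced) in test builds. A self-contained toy version of the same pattern; names are illustrative only, not the Zed source:

```rust
// Toy version of the cfg pattern adopted above.
fn start(use_tcp: bool) -> &'static str {
    // Removed entirely from non-test builds; taken unconditionally in tests.
    #[cfg(test)]
    if cfg!(test) {
        return "fake transport";
    }
    if use_tcp { "tcp transport" } else { "stdio transport" }
}

fn main() {
    println!("{}", start(true));
}
```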

@@ -1,10 +1,10 @@
-use std::{path::PathBuf, sync::OnceLock};
+use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
 
 use anyhow::{Result, bail};
 use async_trait::async_trait;
 use dap::adapters::latest_github_release;
 use gpui::AsyncApp;
-use task::{DebugRequestType, DebugTaskDefinition};
+use task::{DebugRequest, DebugTaskDefinition};
 
 use crate::*;
 
@@ -19,8 +19,8 @@ impl CodeLldbDebugAdapter {
 fn request_args(&self, config: &DebugTaskDefinition) -> dap::StartDebuggingRequestArguments {
 let mut configuration = json!({
 "request": match config.request {
-DebugRequestType::Launch(_) => "launch",
-DebugRequestType::Attach(_) => "attach",
+DebugRequest::Launch(_) => "launch",
+DebugRequest::Attach(_) => "attach",
 },
 });
 let map = configuration.as_object_mut().unwrap();
@@ -28,10 +28,10 @@ impl CodeLldbDebugAdapter {
 map.insert("name".into(), Value::String(config.label.clone()));
 let request = config.request.to_dap();
 match &config.request {
-DebugRequestType::Attach(attach) => {
+DebugRequest::Attach(attach) => {
 map.insert("pid".into(), attach.process_id.into());
 }
-DebugRequestType::Launch(launch) => {
+DebugRequest::Launch(launch) => {
 map.insert("program".into(), launch.program.clone().into());
 
 if !launch.args.is_empty() {
@@ -140,16 +140,13 @@ impl DebugAdapter for CodeLldbDebugAdapter {
 .ok_or_else(|| anyhow!("Adapter path is expected to be valid UTF-8"))?;
 Ok(DebugAdapterBinary {
 command,
-cwd: Some(adapter_dir),
-arguments: Some(vec![
+cwd: None,
+arguments: vec![
 "--settings".into(),
-json!({"sourceLanguages": ["cpp", "rust"]})
-.to_string()
-.into(),
-]),
+json!({"sourceLanguages": ["cpp", "rust"]}).to_string(),
+],
 request_args: self.request_args(config),
-adapter_name: "test".into(),
-envs: None,
+envs: HashMap::default(),
 connection: None,
 })
 }

@@ -11,7 +11,7 @@ use anyhow::{Result, anyhow};
 use async_trait::async_trait;
 use codelldb::CodeLldbDebugAdapter;
 use dap::{
-DapRegistry, DebugRequestType,
+DapRegistry, DebugRequest,
 adapters::{
 self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
 GithubRepo,
@@ -19,23 +19,26 @@ use dap::{
 };
 use gdb::GdbDebugAdapter;
 use go::GoDebugAdapter;
+use gpui::{App, BorrowAppContext};
 use javascript::JsDebugAdapter;
 use php::PhpDebugAdapter;
 use python::PythonDebugAdapter;
 use serde_json::{Value, json};
-use task::TCPHost;
+use task::TcpArgumentsTemplate;
 
-pub fn init(registry: Arc<DapRegistry>) {
-registry.add_adapter(Arc::from(CodeLldbDebugAdapter::default()));
-registry.add_adapter(Arc::from(PythonDebugAdapter));
-registry.add_adapter(Arc::from(PhpDebugAdapter));
-registry.add_adapter(Arc::from(JsDebugAdapter));
-registry.add_adapter(Arc::from(GoDebugAdapter));
-registry.add_adapter(Arc::from(GdbDebugAdapter));
+pub fn init(cx: &mut App) {
+cx.update_default_global(|registry: &mut DapRegistry, _cx| {
+registry.add_adapter(Arc::from(CodeLldbDebugAdapter::default()));
+registry.add_adapter(Arc::from(PythonDebugAdapter));
+registry.add_adapter(Arc::from(PhpDebugAdapter));
+registry.add_adapter(Arc::from(JsDebugAdapter));
+registry.add_adapter(Arc::from(GoDebugAdapter));
+registry.add_adapter(Arc::from(GdbDebugAdapter));
+})
 }
 
 pub(crate) async fn configure_tcp_connection(
-tcp_connection: TCPHost,
+tcp_connection: TcpArgumentsTemplate,
 ) -> Result<(Ipv4Addr, u16, Option<u64>)> {
 let host = tcp_connection.host();
 let timeout = tcp_connection.timeout;
@@ -53,7 +56,7 @@ trait ToDap {
 fn to_dap(&self) -> dap::StartDebuggingRequestArgumentsRequest;
 }
 
-impl ToDap for DebugRequestType {
+impl ToDap for DebugRequest {
 fn to_dap(&self) -> dap::StartDebuggingRequestArgumentsRequest {
 match self {
 Self::Launch(_) => dap::StartDebuggingRequestArgumentsRequest::Launch,

@@ -1,10 +1,10 @@
-use std::ffi::OsStr;
+use std::{collections::HashMap, ffi::OsStr};
 
 use anyhow::{Result, bail};
 use async_trait::async_trait;
 use dap::StartDebuggingRequestArguments;
 use gpui::AsyncApp;
-use task::{DebugRequestType, DebugTaskDefinition};
+use task::{DebugRequest, DebugTaskDefinition};
 
 use crate::*;
 
@@ -17,18 +17,18 @@ impl GdbDebugAdapter {
 fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
 let mut args = json!({
 "request": match config.request {
-DebugRequestType::Launch(_) => "launch",
-DebugRequestType::Attach(_) => "attach",
+DebugRequest::Launch(_) => "launch",
+DebugRequest::Attach(_) => "attach",
 },
 });
 
 let map = args.as_object_mut().unwrap();
 match &config.request {
-DebugRequestType::Attach(attach) => {
+DebugRequest::Attach(attach) => {
 map.insert("pid".into(), attach.process_id.into());
 }
 
-DebugRequestType::Launch(launch) => {
+DebugRequest::Launch(launch) => {
 map.insert("program".into(), launch.program.clone().into());
 
 if !launch.args.is_empty() {
@@ -82,10 +82,9 @@ impl DebugAdapter for GdbDebugAdapter {
 let gdb_path = user_setting_path.unwrap_or(gdb_path?);
 
 Ok(DebugAdapterBinary {
-adapter_name: Self::ADAPTER_NAME.into(),
 command: gdb_path,
-arguments: Some(vec!["-i=dap".into()]),
-envs: None,
+arguments: vec!["-i=dap".into()],
+envs: HashMap::default(),
 cwd: None,
 connection: None,
 request_args: self.request_args(config),

@@ -1,6 +1,6 @@
 use dap::StartDebuggingRequestArguments;
 use gpui::AsyncApp;
-use std::{ffi::OsStr, path::PathBuf};
+use std::{collections::HashMap, ffi::OsStr, path::PathBuf};
 use task::DebugTaskDefinition;
 
 use crate::*;
@@ -12,12 +12,12 @@ impl GoDebugAdapter {
 const ADAPTER_NAME: &'static str = "Delve";
 fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
 let mut args = match &config.request {
-dap::DebugRequestType::Attach(attach_config) => {
+dap::DebugRequest::Attach(attach_config) => {
 json!({
 "processId": attach_config.process_id,
 })
 }
-dap::DebugRequestType::Launch(launch_config) => json!({
+dap::DebugRequest::Launch(launch_config) => json!({
 "program": launch_config.program,
 "cwd": launch_config.cwd,
 "args": launch_config.args
@@ -92,15 +92,14 @@ impl DebugAdapter for GoDebugAdapter {
 let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
 
 Ok(DebugAdapterBinary {
-adapter_name: self.name(),
 command: delve_path,
-arguments: Some(vec![
+arguments: vec![
 "dap".into(),
 "--listen".into(),
-format!("{}:{}", host, port).into(),
-]),
+format!("{}:{}", host, port),
+],
 cwd: None,
-envs: None,
+envs: HashMap::default(),
 connection: Some(adapters::TcpArguments {
 host,
 port,

@@ -1,8 +1,8 @@
 use adapters::latest_github_release;
 use dap::StartDebuggingRequestArguments;
 use gpui::AsyncApp;
-use std::path::PathBuf;
-use task::{DebugRequestType, DebugTaskDefinition};
+use std::{collections::HashMap, path::PathBuf};
+use task::{DebugRequest, DebugTaskDefinition};
 
 use crate::*;
 
@@ -18,16 +18,16 @@ impl JsDebugAdapter {
 let mut args = json!({
 "type": "pwa-node",
 "request": match config.request {
-DebugRequestType::Launch(_) => "launch",
-DebugRequestType::Attach(_) => "attach",
+DebugRequest::Launch(_) => "launch",
+DebugRequest::Attach(_) => "attach",
 },
 });
 let map = args.as_object_mut().unwrap();
 match &config.request {
-DebugRequestType::Attach(attach) => {
+DebugRequest::Attach(attach) => {
 map.insert("processId".into(), attach.process_id.into());
 }
-DebugRequestType::Launch(launch) => {
+DebugRequest::Launch(launch) => {
 map.insert("program".into(), launch.program.clone().into());
 
 if !launch.args.is_empty() {
@@ -106,20 +106,22 @@ impl DebugAdapter for JsDebugAdapter {
 let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
 
 Ok(DebugAdapterBinary {
-adapter_name: self.name(),
 command: delegate
 .node_runtime()
 .binary_path()
 .await?
 .to_string_lossy()
 .into_owned(),
-arguments: Some(vec![
-adapter_path.join(Self::ADAPTER_PATH).into(),
-port.to_string().into(),
-host.to_string().into(),
-]),
+arguments: vec![
+adapter_path
+.join(Self::ADAPTER_PATH)
+.to_string_lossy()
+.to_string(),
+port.to_string(),
+host.to_string(),
+],
 cwd: None,
-envs: None,
+envs: HashMap::default(),
 connection: Some(adapters::TcpArguments {
 host,
 port,

@@ -1,7 +1,7 @@
 use adapters::latest_github_release;
 use dap::adapters::TcpArguments;
 use gpui::AsyncApp;
-use std::path::PathBuf;
+use std::{collections::HashMap, path::PathBuf};
 use task::DebugTaskDefinition;
 
 use crate::*;
@@ -19,20 +19,18 @@ impl PhpDebugAdapter {
 config: &DebugTaskDefinition,
 ) -> Result<dap::StartDebuggingRequestArguments> {
 match &config.request {
-dap::DebugRequestType::Attach(_) => {
+dap::DebugRequest::Attach(_) => {
 anyhow::bail!("php adapter does not support attaching")
 }
-dap::DebugRequestType::Launch(launch_config) => {
-Ok(dap::StartDebuggingRequestArguments {
-configuration: json!({
-"program": launch_config.program,
-"cwd": launch_config.cwd,
-"args": launch_config.args,
-"stopOnEntry": config.stop_on_entry.unwrap_or_default(),
-}),
-request: config.request.to_dap(),
-})
-}
+dap::DebugRequest::Launch(launch_config) => Ok(dap::StartDebuggingRequestArguments {
+configuration: json!({
+"program": launch_config.program,
+"cwd": launch_config.cwd,
+"args": launch_config.args,
+"stopOnEntry": config.stop_on_entry.unwrap_or_default(),
+}),
+request: config.request.to_dap(),
+}),
 }
 }
 }
@@ -94,24 +92,26 @@ impl DebugAdapter for PhpDebugAdapter {
 let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
 
 Ok(DebugAdapterBinary {
-adapter_name: self.name(),
 command: delegate
 .node_runtime()
 .binary_path()
 .await?
 .to_string_lossy()
 .into_owned(),
-arguments: Some(vec![
-adapter_path.join(Self::ADAPTER_PATH).into(),
-format!("--server={}", port).into(),
-]),
+arguments: vec![
+adapter_path
+.join(Self::ADAPTER_PATH)
+.to_string_lossy()
+.to_string(),
+format!("--server={}", port),
+],
 connection: Some(TcpArguments {
 port,
 host,
 timeout,
 }),
 cwd: None,
-envs: None,
+envs: HashMap::default(),
 request_args: self.request_args(config)?,
 })
 }

@@ -1,7 +1,7 @@
 use crate::*;
-use dap::{DebugRequestType, StartDebuggingRequestArguments};
+use dap::{DebugRequest, StartDebuggingRequestArguments};
 use gpui::AsyncApp;
-use std::{ffi::OsStr, path::PathBuf};
+use std::{collections::HashMap, ffi::OsStr, path::PathBuf};
 use task::DebugTaskDefinition;
 
 #[derive(Default)]
@@ -16,18 +16,18 @@ impl PythonDebugAdapter {
 fn request_args(&self, config: &DebugTaskDefinition) -> StartDebuggingRequestArguments {
 let mut args = json!({
 "request": match config.request {
-DebugRequestType::Launch(_) => "launch",
-DebugRequestType::Attach(_) => "attach",
+DebugRequest::Launch(_) => "launch",
+DebugRequest::Attach(_) => "attach",
 },
 "subProcess": true,
 "redirectOutput": true,
 });
 let map = args.as_object_mut().unwrap();
 match &config.request {
-DebugRequestType::Attach(attach) => {
+DebugRequest::Attach(attach) => {
 map.insert("processId".into(), attach.process_id.into());
 }
-DebugRequestType::Launch(launch) => {
+DebugRequest::Launch(launch) => {
 map.insert("program".into(), launch.program.clone().into());
 map.insert("args".into(), launch.args.clone().into());
 
@@ -141,20 +141,22 @@ impl DebugAdapter for PythonDebugAdapter {
 };
 
 Ok(DebugAdapterBinary {
-adapter_name: self.name(),
 command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
-arguments: Some(vec![
-debugpy_dir.join(Self::ADAPTER_PATH).into(),
-format!("--port={}", port).into(),
-format!("--host={}", host).into(),
-]),
+arguments: vec![
+debugpy_dir
+.join(Self::ADAPTER_PATH)
+.to_string_lossy()
+.to_string(),
+format!("--port={}", port),
+format!("--host={}", host),
+],
 connection: Some(adapters::TcpArguments {
 host,
 port,
 timeout,
 }),
 cwd: None,
-envs: None,
+envs: HashMap::default(),
 request_args: self.request_args(config),
 })
 }

@@ -566,11 +566,13 @@ impl DapLogView {
 .dap_store()
 .read(cx)
 .sessions()
-.filter_map(|client| {
-let client = client.read(cx).adapter_client()?;
+.filter_map(|session| {
+let session = session.read(cx);
+session.adapter_name();
+let client = session.adapter_client()?;
 Some(DapMenuItem {
 client_id: client.id(),
-client_name: client.name().0.as_ref().into(),
+client_name: session.adapter_name().to_string(),
 has_adapter_logs: client.has_adapter_logs(),
 selected_entry: self.current_view.map_or(LogKind::Adapter, |(_, kind)| kind),
 })

@@ -1,4 +1,4 @@
-use dap::DebugRequestType;
+use dap::DebugRequest;
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::Subscription;
 use gpui::{DismissEvent, Entity, EventEmitter, Focusable, Render};
@@ -216,10 +216,10 @@ impl PickerDelegate for AttachModalDelegate {
 };
 
 match &mut self.debug_config.request {
-DebugRequestType::Attach(config) => {
+DebugRequest::Attach(config) => {
 config.process_id = Some(candidate.pid);
 }
-DebugRequestType::Launch(_) => {
+DebugRequest::Launch(_) => {
 debug_panic!("Debugger attach modal used on launch debug config");
 return;
 }

@@ -5,7 +5,7 @@ use std::{
 };
 
 use anyhow::{Result, anyhow};
-use dap::DebugRequestType;
+use dap::{DapRegistry, DebugRequest};
 use editor::{Editor, EditorElement, EditorStyle};
 use gpui::{
 App, AppContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, TextStyle,
@@ -13,7 +13,7 @@ use gpui::{
 };
 use project::Project;
 use settings::Settings;
-use task::{DebugTaskDefinition, LaunchConfig};
+use task::{DebugTaskDefinition, DebugTaskTemplate, LaunchRequest};
 use theme::ThemeSettings;
 use ui::{
 ActiveTheme, Button, ButtonCommon, ButtonSize, CheckboxWithLabel, Clickable, Color, Context,
@@ -37,9 +37,9 @@ pub(super) struct NewSessionModal {
 last_selected_profile_name: Option<SharedString>,
 }
 
-fn suggested_label(request: &DebugRequestType, debugger: &str) -> String {
+fn suggested_label(request: &DebugRequest, debugger: &str) -> String {
 match request {
-DebugRequestType::Launch(config) => {
+DebugRequest::Launch(config) => {
 let last_path_component = Path::new(&config.program)
 .file_name()
 .map(|name| name.to_string_lossy())
@@ -47,7 +47,7 @@ fn suggested_label(request: &DebugRequestType, debugger: &str) -> String {
 
 format!("{} ({debugger})", last_path_component)
 }
-DebugRequestType::Attach(config) => format!(
+DebugRequest::Attach(config) => format!(
 "pid: {} ({debugger})",
 config.process_id.unwrap_or(u32::MAX)
 ),
@@ -71,7 +71,7 @@ impl NewSessionModal {
 .and_then(|def| def.stop_on_entry);
 
 let launch_config = match past_debug_definition.map(|def| def.request) {
-Some(DebugRequestType::Launch(launch_config)) => Some(launch_config),
+Some(DebugRequest::Launch(launch_config)) => Some(launch_config),
 _ => None,
 };
 
@@ -96,7 +96,6 @@ impl NewSessionModal {
 request,
 initialize_args: self.initialize_args.clone(),
 tcp_connection: None,
-locator: None,
 stop_on_entry: match self.stop_on_entry {
 ToggleState::Selected => Some(true),
 _ => None,
@@ -131,20 +130,16 @@ impl NewSessionModal {
 let project = workspace.update(cx, |workspace, _| workspace.project().clone())?;
 
 let task = project.update(cx, |this, cx| {
-if let Some(debug_config) =
-config
-.clone()
-.to_zed_format()
-.ok()
-.and_then(|task_template| {
-task_template
-.resolve_task("debug_task", &task_context)
-.and_then(|resolved_task| {
-resolved_task.resolved_debug_adapter_config()
-})
-})
+let template = DebugTaskTemplate {
+locator: None,
+definition: config.clone(),
+};
+if let Some(debug_config) = template
+.to_zed_format()
+.resolve_task("debug_task", &task_context)
+.and_then(|resolved_task| resolved_task.resolved_debug_adapter_config())
 {
-this.start_debug_session(debug_config, cx)
+this.start_debug_session(debug_config.definition, cx)
 } else {
 this.start_debug_session(config, cx)
 }
@@ -214,12 +209,7 @@ impl NewSessionModal {
 };
 
 let available_adapters = workspace
-.update(cx, |this, cx| {
-this.project()
-.read(cx)
-.debug_adapters()
-.enumerate_adapters()
-})
+.update(cx, |_, cx| DapRegistry::global(cx).enumerate_adapters())
 .ok()
 .unwrap_or_default();
 
@@ -251,14 +241,14 @@ impl NewSessionModal {
 this.debugger = Some(task.adapter.clone().into());
 this.initialize_args = task.initialize_args.clone();
 match &task.request {
-DebugRequestType::Launch(launch_config) => {
+DebugRequest::Launch(launch_config) => {
 this.mode = NewSessionMode::launch(
 Some(launch_config.clone()),
 window,
 cx,
 );
 }
-DebugRequestType::Attach(_) => {
+DebugRequest::Attach(_) => {
 let Ok(project) = this
 .workspace
 .read_with(cx, |this, _| this.project().clone())
@@ -285,7 +275,7 @@ impl NewSessionModal {
 }
 };
 
-let available_adapters: Vec<DebugTaskDefinition> = workspace
+let available_adapters: Vec<DebugTaskTemplate> = workspace
 .update(cx, |this, cx| {
 this.project()
 .read(cx)
@@ -303,9 +293,9 @@ impl NewSessionModal {
 
 for debug_definition in available_adapters {
 menu = menu.entry(
-debug_definition.label.clone(),
+debug_definition.definition.label.clone(),
 None,
-setter_for_name(debug_definition),
+setter_for_name(debug_definition.definition),
 );
 }
 menu
@@ -322,7 +312,7 @@ struct LaunchMode {
 
 impl LaunchMode {
 fn new(
-past_launch_config: Option<LaunchConfig>,
+past_launch_config: Option<LaunchRequest>,
 window: &mut Window,
 cx: &mut App,
 ) -> Entity<Self> {
@@ -348,9 +338,9 @@ impl LaunchMode {
 cx.new(|_| Self { program, cwd })
 }
 
-fn debug_task(&self, cx: &App) -> task::LaunchConfig {
+fn debug_task(&self, cx: &App) -> task::LaunchRequest {
 let path = self.cwd.read(cx).text(cx);
-task::LaunchConfig {
+task::LaunchRequest {
 program: self.program.read(cx).text(cx),
 cwd: path.is_empty().not().then(|| PathBuf::from(path)),
 args: Default::default(),
@@ -373,10 +363,9 @@ impl AttachMode {
 ) -> Entity<Self> {
 let debug_definition = DebugTaskDefinition {
 label: "Attach New Session Setup".into(),
-request: dap::DebugRequestType::Attach(task::AttachConfig { process_id: None }),
+request: dap::DebugRequest::Attach(task::AttachRequest { process_id: None }),
 tcp_connection: None,
 adapter: debugger.clone().unwrap_or_default().into(),
-locator: None,
 initialize_args: None,
 stop_on_entry: Some(false),
 };
@@ -391,8 +380,8 @@ impl AttachMode {
 attach_picker,
 })
 }
-fn debug_task(&self) -> task::AttachConfig {
-task::AttachConfig { process_id: None }
+fn debug_task(&self) -> task::AttachRequest {
+task::AttachRequest { process_id: None }
 }
 }
 
@@ -406,7 +395,7 @@ enum NewSessionMode {
 }
 
 impl NewSessionMode {
-fn debug_task(&self, cx: &App) -> DebugRequestType {
+fn debug_task(&self, cx: &App) -> DebugRequest {
 match self {
 NewSessionMode::Launch(entity) => entity.read(cx).debug_task(cx).into(),
 NewSessionMode::Attach(entity) => entity.read(cx).debug_task().into(),
@@ -488,7 +477,7 @@ impl NewSessionMode {
 Self::Attach(AttachMode::new(debugger, project, window, cx))
 }
 fn launch(
-past_launch_config: Option<LaunchConfig>,
+past_launch_config: Option<LaunchRequest>,
 window: &mut Window,
 cx: &mut Context<NewSessionModal>,
 ) -> Self {

@@ -5,7 +5,7 @@ use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
 use menu::Confirm;
 use project::{FakeFs, Project};
 use serde_json::json;
-use task::{AttachConfig, DebugTaskDefinition, TCPHost};
+use task::{AttachRequest, DebugTaskDefinition, TcpArgumentsTemplate};
 use tests::{init_test, init_test_workspace};
 
 #[gpui::test]
@@ -31,13 +31,12 @@ async fn test_direct_attach_to_process(executor: BackgroundExecutor, cx: &mut Te
 cx,
 DebugTaskDefinition {
 adapter: "fake-adapter".to_string(),
-request: dap::DebugRequestType::Attach(AttachConfig {
+request: dap::DebugRequest::Attach(AttachRequest {
 process_id: Some(10),
 }),
 label: "label".to_string(),
 initialize_args: None,
 tcp_connection: None,
-locator: None,
 stop_on_entry: None,
 },
 |client| {
@@ -105,11 +104,10 @@ async fn test_show_attach_modal_and_select_process(
 project.clone(),
 DebugTaskDefinition {
 adapter: FakeAdapter::ADAPTER_NAME.into(),
-request: dap::DebugRequestType::Attach(AttachConfig::default()),
+request: dap::DebugRequest::Attach(AttachRequest::default()),
 label: "attach example".into(),
 initialize_args: None,
-tcp_connection: Some(TCPHost::default()),
-locator: None,
+tcp_connection: Some(TcpArgumentsTemplate::default()),
 stop_on_entry: None,
 },
 vec![
@@ -5111,44 +5111,21 @@ impl Editor {
 CodeActionsItem::Task(task_source_kind, resolved_task) => {
 match resolved_task.task_type() {
 task::TaskType::Script => workspace.update(cx, |workspace, cx| {
-workspace::tasks::schedule_resolved_task(
-workspace,
+workspace.schedule_resolved_task(
 task_source_kind,
 resolved_task,
 false,
+window,
 cx,
 );

 Some(Task::ready(Ok(())))
 }),
-task::TaskType::Debug(debug_args) => {
-if debug_args.locator.is_some() {
-workspace.update(cx, |workspace, cx| {
-workspace::tasks::schedule_resolved_task(
-workspace,
-task_source_kind,
-resolved_task,
-false,
-cx,
-);
-});
-
-return Some(Task::ready(Ok(())));
-}
-
-if let Some(project) = self.project.as_ref() {
-project
-.update(cx, |project, cx| {
-project.start_debug_session(
-resolved_task.resolved_debug_adapter_config().unwrap(),
-cx,
-)
-})
-.detach_and_log_err(cx);
-Some(Task::ready(Ok(())))
-} else {
-Some(Task::ready(Ok(())))
-}
+task::TaskType::Debug(_) => {
+workspace.update(cx, |workspace, cx| {
+workspace.schedule_debug_task(resolved_task, window, cx);
+});
+Some(Task::ready(Ok(())))
 }
 }
 }

@@ -6845,12 +6822,12 @@ impl Editor {
 resolved.reveal = reveal_strategy;

 workspace
-.update(cx, |workspace, cx| {
-workspace::tasks::schedule_resolved_task(
-workspace,
+.update_in(cx, |workspace, window, cx| {
+workspace.schedule_resolved_task(
 task_source_kind,
 resolved_task,
 false,
+window,
 cx,
 );
 })
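For orientation, the editor dispatch above now hands both task flavours to the workspace: script tasks keep going through `schedule_resolved_task`, debug tasks go through the new `schedule_debug_task`, and the editor no longer starts debug sessions itself. A toy, self-contained model of that split -- the real methods take a task source kind, a window, and a gpui context rather than returning strings:

// Toy dispatch only; names mirror the hunk above, everything else is simplified.
enum TaskType {
    Script,
    Debug,
}

fn route(task: TaskType) -> &'static str {
    match task {
        // workspace.schedule_resolved_task(task_source_kind, resolved_task, false, window, cx)
        TaskType::Script => "schedule_resolved_task",
        // workspace.schedule_debug_task(resolved_task, window, cx)
        TaskType::Debug => "schedule_debug_task",
    }
}

fn main() {
    assert_eq!(route(TaskType::Debug), "schedule_debug_task");
}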
@@ -15,7 +15,6 @@ clap.workspace = true
 client.workspace = true
 collections.workspace = true
 context_server.workspace = true
-dap.workspace = true
 dirs = "5.0"
 env_logger.workspace = true
 extension.workspace = true

@@ -3,7 +3,6 @@ use agent::{ThreadEvent, ThreadStore};
 use anyhow::{Context as _, Result, anyhow};
 use assistant_tool::ToolWorkingSet;
 use client::proto::LspWorkProgress;
-use dap::DapRegistry;
 use futures::channel::mpsc;
 use futures::{FutureExt, StreamExt as _, select_biased};
 use gpui::{App, AppContext as _, AsyncApp, Entity, Task};

@@ -243,7 +242,6 @@ impl Example {
 app_state.node_runtime.clone(),
 app_state.user_store.clone(),
 app_state.languages.clone(),
-Arc::new(DapRegistry::default()),
 app_state.fs.clone(),
 None,
 cx,

@@ -14,7 +14,7 @@
 pub mod breakpoint_store;
 pub mod dap_command;
 pub mod dap_store;
-mod locator_store;
+pub mod locators;
 pub mod session;

 #[cfg(any(feature = "test-support", test))]
@@ -1,16 +1,18 @@
 use super::{
 breakpoint_store::BreakpointStore,
-locator_store::LocatorStore,
+locators::DapLocator,
 session::{self, Session, SessionStateEvent},
 };
-use crate::{ProjectEnvironment, debugger};
+use crate::{
+ProjectEnvironment, debugger, project_settings::ProjectSettings, worktree_store::WorktreeStore,
+};
 use anyhow::{Result, anyhow};
 use async_trait::async_trait;
 use collections::HashMap;
 use dap::{
-Capabilities, CompletionItem, CompletionsArguments, ErrorResponse, EvaluateArguments,
-EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments, Source,
-StartDebuggingRequestArguments,
+Capabilities, CompletionItem, CompletionsArguments, DapRegistry, ErrorResponse,
+EvaluateArguments, EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
+Source, StartDebuggingRequestArguments,
 adapters::{DapStatus, DebugAdapterBinary, DebugAdapterName},
 client::SessionId,
 messages::Message,

@@ -22,8 +24,7 @@ use futures::{
 future::{Shared, join_all},
 };
 use gpui::{
-App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, SharedString,
-Task, WeakEntity,
+App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity,
 };
 use http_client::HttpClient;
 use language::{BinaryStatus, LanguageRegistry, LanguageToolchainStore};

@@ -35,17 +36,16 @@ use rpc::{
 proto::{self},
 };
 use serde_json::Value;
-use settings::WorktreeId;
+use settings::{Settings, WorktreeId};
 use smol::{lock::Mutex, stream::StreamExt};
 use std::{
 borrow::Borrow,
 collections::{BTreeMap, HashSet},
 ffi::OsStr,
 path::{Path, PathBuf},
-sync::{Arc, atomic::Ordering::SeqCst},
+sync::Arc,
 };
-use std::{collections::VecDeque, sync::atomic::AtomicU32};
-use task::DebugTaskDefinition;
+use task::{DebugTaskDefinition, DebugTaskTemplate};
 use util::ResultExt as _;
 use worktree::Worktree;

@@ -71,45 +71,26 @@ pub enum DapStoreEvent {
 }

 #[allow(clippy::large_enum_variant)]
-pub enum DapStoreMode {
-Local(LocalDapStore), // ssh host and collab host
-Remote(RemoteDapStore), // collab guest
+enum DapStoreMode {
+Local(LocalDapStore),
+Ssh(SshDapStore),
+Collab,
 }

 pub struct LocalDapStore {
 fs: Arc<dyn Fs>,
 node_runtime: NodeRuntime,
-next_session_id: AtomicU32,
 http_client: Arc<dyn HttpClient>,
 environment: Entity<ProjectEnvironment>,
 language_registry: Arc<LanguageRegistry>,
+worktree_store: Entity<WorktreeStore>,
 toolchain_store: Arc<dyn LanguageToolchainStore>,
-locator_store: Arc<LocatorStore>,
+locators: HashMap<String, Arc<dyn DapLocator>>,
-start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
-_start_debugging_task: Task<()>,
 }

-impl LocalDapStore {
-fn next_session_id(&self) -> SessionId {
-SessionId(self.next_session_id.fetch_add(1, SeqCst))
-}
-pub(crate) fn locate_binary(
-&self,
-mut definition: DebugTaskDefinition,
-executor: BackgroundExecutor,
-) -> Task<DebugTaskDefinition> {
-let locator_store = self.locator_store.clone();
-executor.spawn(async move {
-let _ = locator_store.resolve_debug_config(&mut definition).await;
-definition
-})
-}
-}
-
-pub struct RemoteDapStore {
+pub struct SshDapStore {
 upstream_client: AnyProtoClient,
 upstream_project_id: u64,
-event_queue: Option<VecDeque<DapStoreEvent>>,
 }

 pub struct DapStore {

@@ -117,25 +98,17 @@ pub struct DapStore {
 downstream_client: Option<(AnyProtoClient, u64)>,
 breakpoint_store: Entity<BreakpointStore>,
 sessions: BTreeMap<SessionId, Entity<Session>>,
+next_session_id: u32,
+start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
+_start_debugging_task: Task<()>,
 }

 impl EventEmitter<DapStoreEvent> for DapStore {}

 impl DapStore {
-pub fn init(_client: &AnyProtoClient) {
-// todo(debugger): Reenable these after we finish handle_dap_command refactor
-// client.add_entity_request_handler(Self::handle_dap_command::<NextCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<StepInCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<StepOutCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<StepBackCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<ContinueCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<PauseCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<DisconnectCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<TerminateThreadsCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<TerminateCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<RestartCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<VariablesCommand>);
-// client.add_entity_request_handler(Self::handle_dap_command::<RestartStackFrameCommand>);
+pub fn init(client: &AnyProtoClient) {
+client.add_entity_request_handler(Self::handle_run_debug_locator);
+client.add_entity_request_handler(Self::handle_get_debug_adapter_binary);
 }

 #[expect(clippy::too_many_arguments)]
@@ -146,15 +119,62 @@ impl DapStore {
 language_registry: Arc<LanguageRegistry>,
 environment: Entity<ProjectEnvironment>,
 toolchain_store: Arc<dyn LanguageToolchainStore>,
+worktree_store: Entity<WorktreeStore>,
 breakpoint_store: Entity<BreakpointStore>,
 cx: &mut Context<Self>,
 ) -> Self {
 cx.on_app_quit(Self::shutdown_sessions).detach();

+let locators = HashMap::from_iter([(
+"cargo".to_string(),
+Arc::new(super::locators::cargo::CargoLocator {}) as _,
+)]);
+
+let mode = DapStoreMode::Local(LocalDapStore {
+fs,
+environment,
+http_client,
+node_runtime,
+toolchain_store,
+worktree_store,
+language_registry,
+locators,
+});
+
+Self::new(mode, breakpoint_store, cx)
+}
+
+pub fn new_ssh(
+project_id: u64,
+upstream_client: AnyProtoClient,
+breakpoint_store: Entity<BreakpointStore>,
+cx: &mut Context<Self>,
+) -> Self {
+let mode = DapStoreMode::Ssh(SshDapStore {
+upstream_client,
+upstream_project_id: project_id,
+});
+
+Self::new(mode, breakpoint_store, cx)
+}
+
+pub fn new_collab(
+_project_id: u64,
+_upstream_client: AnyProtoClient,
+breakpoint_store: Entity<BreakpointStore>,
+cx: &mut Context<Self>,
+) -> Self {
+Self::new(DapStoreMode::Collab, breakpoint_store, cx)
+}
+
+fn new(
+mode: DapStoreMode,
+breakpoint_store: Entity<BreakpointStore>,
+cx: &mut Context<Self>,
+) -> Self {
 let (start_debugging_tx, mut message_rx) =
 futures::channel::mpsc::unbounded::<(SessionId, Message)>();
-let _start_debugging_task = cx.spawn(async move |this, cx| {
+let task = cx.spawn(async move |this, cx| {
 while let Some((session_id, message)) = message_rx.next().await {
 match message {
 Message::Request(request) => {

@@ -174,94 +194,135 @@ impl DapStore {
 }
 }
 });

 Self {
-mode: DapStoreMode::Local(LocalDapStore {
-fs,
-environment,
-http_client,
-node_runtime,
-toolchain_store,
-language_registry,
-start_debugging_tx,
-_start_debugging_task,
-locator_store: Arc::from(LocatorStore::new()),
-next_session_id: Default::default(),
-}),
+mode,
+_start_debugging_task: task,
+start_debugging_tx,
+next_session_id: 0,
 downstream_client: None,
 breakpoint_store,
 sessions: Default::default(),
 }
 }

-pub fn new_remote(
-project_id: u64,
-upstream_client: AnyProtoClient,
-breakpoint_store: Entity<BreakpointStore>,
-) -> Self {
-Self {
-mode: DapStoreMode::Remote(RemoteDapStore {
-upstream_client,
-upstream_project_id: project_id,
-event_queue: Some(VecDeque::default()),
-}),
-downstream_client: None,
-breakpoint_store,
-sessions: Default::default(),
-}
-}
-
-pub fn as_remote(&self) -> Option<&RemoteDapStore> {
+pub fn get_debug_adapter_binary(
+&mut self,
+definition: DebugTaskDefinition,
+cx: &mut Context<Self>,
+) -> Task<Result<DebugAdapterBinary>> {
 match &self.mode {
-DapStoreMode::Remote(remote_dap_store) => Some(remote_dap_store),
-_ => None,
+DapStoreMode::Local(local) => {
+let Some(worktree) = local.worktree_store.read(cx).visible_worktrees(cx).next()
+else {
+return Task::ready(Err(anyhow!("Failed to find a worktree")));
+};
+let Some(adapter) = DapRegistry::global(cx).adapter(&definition.adapter) else {
+return Task::ready(Err(anyhow!("Failed to find a debug adapter")));
+};
+
+let user_installed_path = ProjectSettings::get_global(cx)
+.dap
+.get(&adapter.name())
+.and_then(|s| s.binary.as_ref().map(PathBuf::from));
+
+let delegate = self.delegate(&worktree, cx);
+let cwd: Arc<Path> = definition
+.cwd()
+.unwrap_or(worktree.read(cx).abs_path().as_ref())
+.into();
+
+cx.spawn(async move |this, cx| {
+let mut binary = adapter
+.get_binary(&delegate, &definition, user_installed_path, cx)
+.await?;
+
+let env = this
+.update(cx, |this, cx| {
+this.as_local()
+.unwrap()
+.environment
+.update(cx, |environment, cx| {
+environment.get_directory_environment(cwd, cx)
+})
+})?
+.await;
+
+if let Some(mut env) = env {
+env.extend(std::mem::take(&mut binary.envs));
+binary.envs = env;
+}
+
+Ok(binary)
+})
+}
+DapStoreMode::Ssh(ssh) => {
+let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
+project_id: ssh.upstream_project_id,
+task: Some(definition.to_proto()),
+});
+
+cx.background_spawn(async move {
+let response = request.await?;
+DebugAdapterBinary::from_proto(response)
+})
+}
+DapStoreMode::Collab => {
+Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
+}
 }
 }

-pub fn remote_event_queue(&mut self) -> Option<VecDeque<DapStoreEvent>> {
-if let DapStoreMode::Remote(remote) = &mut self.mode {
-remote.event_queue.take()
-} else {
-None
+pub fn run_debug_locator(
+&mut self,
+template: DebugTaskTemplate,
+cx: &mut Context<Self>,
+) -> Task<Result<DebugTaskDefinition>> {
+let Some(locator_name) = template.locator else {
+return Task::ready(Ok(template.definition));
+};
+
+match &self.mode {
+DapStoreMode::Local(local) => {
+if let Some(locator) = local.locators.get(&locator_name).cloned() {
+cx.background_spawn(
+async move { locator.run_locator(template.definition).await },
+)
+} else {
+Task::ready(Err(anyhow!("Couldn't find locator {}", locator_name)))
+}
+}
+DapStoreMode::Ssh(ssh) => {
+let request = ssh.upstream_client.request(proto::RunDebugLocator {
+project_id: ssh.upstream_project_id,
+locator: locator_name,
+task: Some(template.definition.to_proto()),
+});
+cx.background_spawn(async move {
+let response = request.await?;
+DebugTaskDefinition::from_proto(response)
+})
+}
+DapStoreMode::Collab => {
+Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
+}
 }
 }

-pub fn as_local(&self) -> Option<&LocalDapStore> {
+fn as_local(&self) -> Option<&LocalDapStore> {
 match &self.mode {
 DapStoreMode::Local(local_dap_store) => Some(local_dap_store),
 _ => None,
 }
 }

-pub fn as_local_mut(&mut self) -> Option<&mut LocalDapStore> {
-match &mut self.mode {
-DapStoreMode::Local(local_dap_store) => Some(local_dap_store),
-_ => None,
-}
-}
-
-pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
-match &self.mode {
-DapStoreMode::Remote(RemoteDapStore {
-upstream_client,
-upstream_project_id,
-..
-}) => Some((upstream_client.clone(), *upstream_project_id)),
-
-DapStoreMode::Local(_) => None,
-}
-}
-
-pub fn downstream_client(&self) -> Option<&(AnyProtoClient, u64)> {
-self.downstream_client.as_ref()
-}
-
 pub fn add_remote_client(
 &mut self,
 session_id: SessionId,
 ignore: Option<bool>,
 cx: &mut Context<Self>,
 ) {
-if let DapStoreMode::Remote(remote) = &self.mode {
+if let DapStoreMode::Ssh(remote) = &self.mode {
 self.sessions.insert(
 session_id,
 cx.new(|_| {
@@ -328,7 +389,7 @@ impl DapStore {
 Ok(())
 }

-pub fn delegate(&self, worktree: &Entity<Worktree>, cx: &mut App) -> DapAdapterDelegate {
+fn delegate(&self, worktree: &Entity<Worktree>, cx: &mut App) -> DapAdapterDelegate {
 let Some(local_store) = self.as_local() else {
 unimplemented!("Starting session on remote side");
 };

@@ -354,11 +415,7 @@ impl DapStore {
 parent_session: Option<Entity<Session>>,
 cx: &mut Context<Self>,
 ) -> (SessionId, Task<Result<Entity<Session>>>) {
-let Some(local_store) = self.as_local() else {
-unimplemented!("Starting session on remote side");
-};
-
-let session_id = local_store.next_session_id();
+let session_id = SessionId(util::post_inc(&mut self.next_session_id));

 if let Some(session) = &parent_session {
 session.update(cx, |session, _| {

@@ -368,7 +425,7 @@ impl DapStore {

 let (initialized_tx, initialized_rx) = oneshot::channel();

-let start_debugging_tx = local_store.start_debugging_tx.clone();
+let start_debugging_tx = self.start_debugging_tx.clone();

 let task = cx.spawn(async move |this, cx| {
 let start_client_task = this.update(cx, |this, cx| {

@@ -682,10 +739,6 @@ impl DapStore {
 session_id: SessionId,
 cx: &mut Context<Self>,
 ) -> Task<Result<()>> {
-let Some(_) = self.as_local_mut() else {
-return Task::ready(Err(anyhow!("Cannot shutdown session on remote side")));
-};
-
 let Some(session) = self.sessions.remove(&session_id) else {
 return Task::ready(Err(anyhow!("Could not find session: {:?}", session_id)));
 };

@@ -748,6 +801,45 @@ impl DapStore {

 cx.notify();
 }

+async fn handle_run_debug_locator(
+this: Entity<Self>,
+envelope: TypedEnvelope<proto::RunDebugLocator>,
+mut cx: AsyncApp,
+) -> Result<proto::DebugTaskDefinition> {
+let template = DebugTaskTemplate {
+locator: Some(envelope.payload.locator),
+definition: DebugTaskDefinition::from_proto(
+envelope
+.payload
+.task
+.ok_or_else(|| anyhow!("missing definition"))?,
+)?,
+};
+let definition = this
+.update(&mut cx, |this, cx| this.run_debug_locator(template, cx))?
+.await?;
+Ok(definition.to_proto())
+}
+
+async fn handle_get_debug_adapter_binary(
+this: Entity<Self>,
+envelope: TypedEnvelope<proto::GetDebugAdapterBinary>,
+mut cx: AsyncApp,
+) -> Result<proto::DebugAdapterBinary> {
+let definition = DebugTaskDefinition::from_proto(
+envelope
+.payload
+.task
+.ok_or_else(|| anyhow!("missing definition"))?,
+)?;
+let binary = this
+.update(&mut cx, |this, cx| {
+this.get_debug_adapter_binary(definition, cx)
+})?
+.await?;
+Ok(binary.to_proto())
+}
 }

 fn create_new_session(
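Taken together, the dap_store hunks above replace the old `locate_binary` path with two explicit steps: resolve the template through a locator (locally, or via the SSH proto client), then fetch the adapter binary, before a session is created. A hedged, stand-alone sketch of that ordering; the stand-in types below replace the real gpui/task/dap types, and in the actual code `Project::start_debug_session` performs only the last two steps (locator resolution happens before it is called):

use anyhow::Result;

// Stand-ins for task::DebugTaskTemplate / DebugTaskDefinition and friends.
pub struct DebugTaskTemplate {
    pub locator: Option<String>,
    pub definition: DebugTaskDefinition,
}
pub struct DebugTaskDefinition;
pub struct DebugAdapterBinary;
pub struct Session;

// Mirrors DapStore::run_debug_locator: no locator means the definition is
// already concrete; otherwise the named locator rewrites it.
async fn run_debug_locator(template: DebugTaskTemplate) -> Result<DebugTaskDefinition> {
    match template.locator {
        None => Ok(template.definition),
        // In Zed this is `locator.run_locator(template.definition).await`,
        // or a proto::RunDebugLocator round trip when running over SSH.
        Some(_name) => Ok(template.definition),
    }
}

// Mirrors DapStore::get_debug_adapter_binary: adapter lookup, optional
// user-installed path, then get_binary plus the directory environment.
async fn get_debug_adapter_binary(_definition: &DebugTaskDefinition) -> Result<DebugAdapterBinary> {
    Ok(DebugAdapterBinary)
}

async fn new_session(_binary: DebugAdapterBinary, _definition: DebugTaskDefinition) -> Result<Session> {
    Ok(Session)
}

pub async fn resolve_and_start(template: DebugTaskTemplate) -> Result<Session> {
    let definition = run_debug_locator(template).await?;
    let binary = get_debug_adapter_binary(&definition).await?;
    new_session(binary, definition).await
}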
@@ -3,10 +3,10 @@ use cargo::CargoLocator;
 use collections::HashMap;
 use gpui::SharedString;
 use locators::DapLocator;
-use task::DebugTaskDefinition;
+use task::{DebugTaskDefinition, DebugTaskTemplate};

 mod cargo;
-mod locators;
+pub mod locators;

 pub(super) struct LocatorStore {
 locators: HashMap<SharedString, Box<dyn DapLocator>>,

@@ -14,24 +14,19 @@ pub(super) struct LocatorStore {

 impl LocatorStore {
 pub(super) fn new() -> Self {
-let locators = HashMap::from_iter([(
-SharedString::new("cargo"),
-Box::new(CargoLocator {}) as Box<dyn DapLocator>,
-)]);
 Self { locators }
 }

 pub(super) async fn resolve_debug_config(
 &self,
-debug_config: &mut DebugTaskDefinition,
-) -> Result<()> {
-let Some(locator_name) = &debug_config.locator else {
-log::debug!("Attempted to resolve debug config without a locator field");
-return Ok(());
+template: DebugTaskTemplate,
+) -> Result<DebugTaskDefinition> {
+let Some(locator_name) = &template.locator else {
+return Ok(template.definition);
 };

 if let Some(locator) = self.locators.get(locator_name as &str) {
-locator.run_locator(debug_config).await
+locator.run_locator(template.definition).await
 } else {
 Err(anyhow!("Couldn't find locator {}", locator_name))
 }

@@ -2,7 +2,9 @@ use anyhow::Result;
 use async_trait::async_trait;
 use task::DebugTaskDefinition;

+pub(crate) mod cargo;
+
 #[async_trait]
 pub(super) trait DapLocator: Send + Sync {
-async fn run_locator(&self, debug_config: &mut DebugTaskDefinition) -> Result<()>;
+async fn run_locator(&self, debug_config: DebugTaskDefinition) -> Result<DebugTaskDefinition>;
 }
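With the trait change above, a locator takes the definition by value and returns a (possibly rewritten) definition instead of mutating through `&mut`. A minimal sketch of an implementation against the new signature; the trait is copied from the hunk, while `NoopLocator` and the single `label` field are invented for illustration:

use anyhow::Result;
use async_trait::async_trait;

// Stand-in for task::DebugTaskDefinition; only `label` is modelled here.
pub struct DebugTaskDefinition {
    pub label: String,
}

#[async_trait]
pub trait DapLocator: Send + Sync {
    async fn run_locator(&self, debug_config: DebugTaskDefinition) -> Result<DebugTaskDefinition>;
}

struct NoopLocator;

#[async_trait]
impl DapLocator for NoopLocator {
    // Take ownership, adjust, and hand the definition back -- no in-place edit.
    async fn run_locator(&self, mut debug_config: DebugTaskDefinition) -> Result<DebugTaskDefinition> {
        debug_config.label = format!("{} (located)", debug_config.label);
        Ok(debug_config)
    }
}

In the dap_store above such a locator would be registered as an `Arc<dyn DapLocator>` under a string key, the same way the cargo locator is.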
@@ -8,7 +8,7 @@ use smol::{
 };
 use task::DebugTaskDefinition;

-pub(super) struct CargoLocator;
+pub(crate) struct CargoLocator;

 async fn find_best_executable(executables: &[String], test_name: &str) -> Option<String> {
 if executables.len() == 1 {

@@ -37,9 +37,12 @@ async fn find_best_executable(executables: &[String], test_name: &str) -> Option
 }
 #[async_trait]
 impl DapLocator for CargoLocator {
-async fn run_locator(&self, debug_config: &mut DebugTaskDefinition) -> Result<()> {
+async fn run_locator(
+&self,
+mut debug_config: DebugTaskDefinition,
+) -> Result<DebugTaskDefinition> {
 let Some(launch_config) = (match &mut debug_config.request {
-task::DebugRequestType::Launch(launch_config) => Some(launch_config),
+task::DebugRequest::Launch(launch_config) => Some(launch_config),
 _ => None,
 }) else {
 return Err(anyhow!("Couldn't get launch config in locator"));

@@ -119,6 +122,6 @@ impl DapLocator for CargoLocator {
 if let Some(test_name) = test_name {
 launch_config.args.push(test_name);
 }
-Ok(())
+Ok(debug_config)
 }
 }

@@ -396,7 +396,7 @@ impl LocalMode {
 }

 fn request_initialization(&self, cx: &App) -> Task<Result<Capabilities>> {
-let adapter_id = self.binary.adapter_name.to_string();
+let adapter_id = self.definition.adapter.clone();

 self.request(Initialize { adapter_id }, cx.background_executor().clone())
 }
@@ -1,7 +1,7 @@
 use std::{path::Path, sync::Arc};

 use anyhow::Result;
-use dap::{DebugRequestType, client::DebugAdapterClient};
+use dap::{DebugRequest, client::DebugAdapterClient};
 use gpui::{App, AppContext, Entity, Subscription, Task};
 use task::DebugTaskDefinition;

@@ -53,11 +53,10 @@ pub fn start_debug_session<T: Fn(&Arc<DebugAdapterClient>) + 'static>(
 cx,
 DebugTaskDefinition {
 adapter: "fake-adapter".to_string(),
-request: DebugRequestType::Launch(Default::default()),
+request: DebugRequest::Launch(Default::default()),
 label: "test".to_string(),
 initialize_args: None,
 tcp_connection: None,
-locator: None,
 stop_on_entry: None,
 },
 configure,
@@ -39,7 +39,10 @@ use client::{
 };
 use clock::ReplicaId;

-use dap::{DapRegistry, client::DebugAdapterClient};
+use dap::{
+adapters::{DebugAdapterBinary, TcpArguments},
+client::DebugAdapterClient,
+};

 use collections::{BTreeSet, HashMap, HashSet};
 use debounced_delay::DebouncedDelay;

@@ -94,6 +97,7 @@ use snippet::Snippet;
 use snippet_provider::SnippetProvider;
 use std::{
 borrow::Cow,
+net::Ipv4Addr,
 ops::Range,
 path::{Component, Path, PathBuf},
 pin::pin,

@@ -103,7 +107,7 @@ use std::{
 };

 use task_store::TaskStore;
-use terminals::Terminals;
+use terminals::{SshCommand, Terminals, wrap_for_ssh};
 use text::{Anchor, BufferId};
 use toolchain_store::EmptyToolchainStore;
 use util::{

@@ -165,7 +169,6 @@ pub struct Project {
 active_entry: Option<ProjectEntryId>,
 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
 languages: Arc<LanguageRegistry>,
-debug_adapters: Arc<DapRegistry>,
 dap_store: Entity<DapStore>,

 breakpoint_store: Entity<BreakpointStore>,

@@ -834,7 +837,6 @@ impl Project {
 node: NodeRuntime,
 user_store: Entity<UserStore>,
 languages: Arc<LanguageRegistry>,
-debug_adapters: Arc<DapRegistry>,
 fs: Arc<dyn Fs>,
 env: Option<HashMap<String, String>>,
 cx: &mut App,

@@ -873,6 +875,7 @@ impl Project {
 languages.clone(),
 environment.clone(),
 toolchain_store.read(cx).as_language_toolchain_store(),
+worktree_store.clone(),
 breakpoint_store.clone(),
 cx,
 )

@@ -955,7 +958,6 @@ impl Project {
 active_entry: None,
 snippets,
 languages,
-debug_adapters,
 client,
 task_store,
 user_store,

@@ -1065,13 +1067,14 @@ impl Project {
 cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();

 let breakpoint_store =
-cx.new(|_| BreakpointStore::remote(SSH_PROJECT_ID, client.clone().into()));
+cx.new(|_| BreakpointStore::remote(SSH_PROJECT_ID, ssh_proto.clone()));

-let dap_store = cx.new(|_| {
-DapStore::new_remote(
+let dap_store = cx.new(|cx| {
+DapStore::new_ssh(
 SSH_PROJECT_ID,
-client.clone().into(),
+ssh_proto.clone(),
 breakpoint_store.clone(),
+cx,
 )
 });

@@ -1113,7 +1116,6 @@ impl Project {
 active_entry: None,
 snippets,
 languages,
-debug_adapters: Arc::new(DapRegistry::default()),
 client,
 task_store,
 user_store,

@@ -1251,8 +1253,13 @@ impl Project {

 let breakpoint_store =
 cx.new(|_| BreakpointStore::remote(remote_id, client.clone().into()))?;
-let dap_store = cx.new(|_cx| {
-DapStore::new_remote(remote_id, client.clone().into(), breakpoint_store.clone())
+let dap_store = cx.new(|cx| {
+DapStore::new_collab(
+remote_id,
+client.clone().into(),
+breakpoint_store.clone(),
+cx,
+)
 })?;

 let lsp_store = cx.new(|cx| {

@@ -1337,7 +1344,6 @@ impl Project {
 collaborators: Default::default(),
 join_project_response_message_id: response.message_id,
 languages,
-debug_adapters: Arc::new(DapRegistry::default()),
 user_store: user_store.clone(),
 task_store,
 snippets,
@@ -1459,49 +1465,68 @@ impl Project {

 pub fn start_debug_session(
 &mut self,
-config: DebugTaskDefinition,
+definition: DebugTaskDefinition,
 cx: &mut Context<Self>,
 ) -> Task<Result<Entity<Session>>> {
 let Some(worktree) = self.worktrees(cx).find(|tree| tree.read(cx).is_visible()) else {
 return Task::ready(Err(anyhow!("Failed to find a worktree")));
 };

-let Some(adapter) = self.debug_adapters.adapter(&config.adapter) else {
-return Task::ready(Err(anyhow!("Failed to find a debug adapter")));
-};
-
-let user_installed_path = ProjectSettings::get_global(cx)
-.dap
-.get(&adapter.name())
-.and_then(|s| s.binary.as_ref().map(PathBuf::from));
+let ssh_client = self.ssh_client().clone();

 let result = cx.spawn(async move |this, cx| {
-let delegate = this.update(cx, |project, cx| {
+let mut binary = this
-project
+.update(cx, |this, cx| {
-.dap_store
+this.dap_store.update(cx, |dap_store, cx| {
-.update(cx, |dap_store, cx| dap_store.delegate(&worktree, cx))
+dap_store.get_debug_adapter_binary(definition.clone(), cx)
-})?;
-
-let task = this.update(cx, |project, cx| {
-project.dap_store.read(cx).as_local().and_then(|local| {
-config.locator.is_some().then(|| {
-local.locate_binary(config.clone(), cx.background_executor().clone())
 })
-})
+})?
-})?;
-let config = if let Some(task) = task {
-task.await
-} else {
-config
-};
-let binary = adapter
-.get_binary(&delegate, &config, user_installed_path, cx)
 .await?;

+if let Some(ssh_client) = ssh_client {
+let mut ssh_command = ssh_client.update(cx, |ssh, _| {
+anyhow::Ok(SshCommand {
+arguments: ssh
+.ssh_args()
+.ok_or_else(|| anyhow!("SSH arguments not found"))?,
+})
+})??;
+
+let mut connection = None;
+if let Some(c) = binary.connection {
+let local_bind_addr = Ipv4Addr::new(127, 0, 0, 1);
+let port = dap::transport::TcpTransport::unused_port(local_bind_addr).await?;
+
+ssh_command.add_port_forwarding(port, c.host.to_string(), c.port);
+connection = Some(TcpArguments {
+port: c.port,
+host: local_bind_addr,
+timeout: c.timeout,
+})
+}
+
+let (program, args) = wrap_for_ssh(
+&ssh_command,
+Some((&binary.command, &binary.arguments)),
+binary.cwd.as_deref(),
+binary.envs,
+None,
+);
+
+binary = DebugAdapterBinary {
+command: program,
+arguments: args,
+envs: HashMap::default(),
+cwd: None,
+connection,
+request_args: binary.request_args,
+}
+};
+
 let ret = this
 .update(cx, |project, cx| {
 project.dap_store.update(cx, |dap_store, cx| {
-dap_store.new_session(binary, config, worktree.downgrade(), None, cx)
+dap_store.new_session(binary, definition, worktree.downgrade(), None, cx)
 })
 })?
 .1
@@ -1520,7 +1545,6 @@ impl Project {

 let fs = Arc::new(RealFs::new(None, cx.background_executor().clone()));
 let languages = LanguageRegistry::test(cx.background_executor().clone());
-let debug_adapters = DapRegistry::default().into();
 let clock = Arc::new(FakeSystemClock::new());
 let http_client = http_client::FakeHttpClient::with_404_response();
 let client = cx

@@ -1534,7 +1558,6 @@ impl Project {
 node_runtime::NodeRuntime::unavailable(),
 user_store,
 Arc::new(languages),
-debug_adapters,
 fs,
 None,
 cx,

@@ -1565,7 +1588,6 @@ impl Project {
 use clock::FakeSystemClock;

 let languages = LanguageRegistry::test(cx.executor());
-let debug_adapters = DapRegistry::fake();
 let clock = Arc::new(FakeSystemClock::new());
 let http_client = http_client::FakeHttpClient::with_404_response();
 let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));

@@ -1576,7 +1598,6 @@ impl Project {
 node_runtime::NodeRuntime::unavailable(),
 user_store,
 Arc::new(languages),
-Arc::new(debug_adapters),
 fs,
 None,
 cx,

@@ -1620,10 +1641,6 @@ impl Project {
 &self.languages
 }

-pub fn debug_adapters(&self) -> &Arc<DapRegistry> {
-&self.debug_adapters
-}
-
 pub fn client(&self) -> Arc<Client> {
 self.client.clone()
 }
@@ -19,7 +19,7 @@ use language::{
 use lsp::{LanguageServerId, LanguageServerName};
 use settings::{InvalidSettingsError, TaskKind, parse_json_with_comments};
 use task::{
-DebugTaskDefinition, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
+DebugTaskTemplate, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
 TaskVariables, VariableName,
 };
 use text::{BufferId, Point, ToPoint};

@@ -435,9 +435,9 @@ impl Inventory {
 .into_iter()
 .filter_map(|raw_template| match &task_kind {
 TaskKind::Script => serde_json::from_value::<TaskTemplate>(raw_template).log_err(),
-TaskKind::Debug => serde_json::from_value::<DebugTaskDefinition>(raw_template)
+TaskKind::Debug => serde_json::from_value::<DebugTaskTemplate>(raw_template)
 .log_err()
-.and_then(|content| content.to_zed_format().log_err()),
+.map(|content| content.to_zed_format()),
 });

 let parsed_templates = &mut self.templates_from_settings;
@@ -9,20 +9,16 @@ use smol::channel::bounded;
 use std::{
 borrow::Cow,
 env::{self},
-iter,
 path::{Path, PathBuf},
 sync::Arc,
 };
-use task::{Shell, ShellBuilder, SpawnInTerminal};
+use task::{DEFAULT_REMOTE_SHELL, Shell, ShellBuilder, SpawnInTerminal};
 use terminal::{
 TaskState, TaskStatus, Terminal, TerminalBuilder,
 terminal_settings::{self, TerminalSettings, VenvSettings},
 };
 use util::ResultExt;

-// #[cfg(target_os = "macos")]
-// use std::os::unix::ffi::OsStrExt;
-
 pub struct Terminals {
 pub(crate) local_handles: Vec<WeakEntity<terminal::Terminal>>,
 }

@@ -48,7 +44,15 @@ pub enum TerminalKind {
 /// SshCommand describes how to connect to a remote server
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct SshCommand {
-arguments: Vec<String>,
+pub arguments: Vec<String>,
+}
+
+impl SshCommand {
+pub fn add_port_forwarding(&mut self, local_port: u16, host: String, remote_port: u16) {
+self.arguments.push("-L".to_string());
+self.arguments
+.push(format!("{}:{}:{}", local_port, host, remote_port));
+}
 }

 impl Project {

@@ -551,7 +555,7 @@ impl Project {
 }
 }

-fn wrap_for_ssh(
+pub fn wrap_for_ssh(
 ssh_command: &SshCommand,
 command: Option<(&String, &Vec<String>)>,
 path: Option<&Path>,

@@ -559,9 +563,14 @@ fn wrap_for_ssh(
 venv_directory: Option<&Path>,
 ) -> (String, Vec<String>) {
 let to_run = if let Some((command, args)) = command {
-let command = Cow::Borrowed(command.as_str());
+// DEFAULT_REMOTE_SHELL is '"${SHELL:-sh}"' so must not be escaped
+let command: Option<Cow<str>> = if command == DEFAULT_REMOTE_SHELL {
+Some(command.into())
+} else {
+shlex::try_quote(command).ok()
+};
 let args = args.iter().filter_map(|arg| shlex::try_quote(arg).ok());
-iter::once(command).chain(args).join(" ")
+command.into_iter().chain(args).join(" ")
 } else {
 "exec ${SHELL:-sh} -l".to_string()
 };
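`add_port_forwarding` simply appends an `-L local:host:remote` tunnel to the stored ssh arguments, and `wrap_for_ssh` then quotes the remote adapter command behind those arguments. A small self-contained illustration of the argument shape this produces -- the struct and method mirror the hunk above, while the ssh arguments and ports in `main` are made up for the example:

// Mirrors the SshCommand type from the hunk above.
#[derive(Debug)]
pub struct SshCommand {
    pub arguments: Vec<String>,
}

impl SshCommand {
    pub fn add_port_forwarding(&mut self, local_port: u16, host: String, remote_port: u16) {
        self.arguments.push("-L".to_string());
        self.arguments
            .push(format!("{}:{}:{}", local_port, host, remote_port));
    }
}

fn main() {
    // In Zed these arguments come from the SSH client's ssh_args().
    let mut cmd = SshCommand {
        arguments: vec!["-p".into(), "2222".into(), "user@devbox".into()],
    };
    // Forward a free local port to the DAP adapter's TCP port on the remote host.
    cmd.add_port_forwarding(54321, "localhost".to_string(), 4711);
    assert_eq!(
        cmd.arguments,
        ["-p", "2222", "user@devbox", "-L", "54321:localhost:4711"]
    );
}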
@@ -508,7 +508,6 @@ enum DapStackPresentationHint {
 Subtle = 2;
 StackUnknown = 3;
 }

 message DapModule {
 DapModuleId id = 1;
 string name = 2;

@@ -522,9 +521,62 @@ message DapModule {
 optional string address_range = 10;
 }

+message DebugTaskDefinition {
+string adapter = 1;
+string label = 2;
+oneof request {
+DebugLaunchRequest debug_launch_request = 3;
+DebugAttachRequest debug_attach_request = 4;
+}
+optional string initialize_args = 5;
+optional TcpHost tcp_connection = 6;
+optional bool stop_on_entry = 7;
+}
+
+message TcpHost {
+optional uint32 port = 1;
+optional string host = 2;
+optional uint64 timeout = 3;
+}
+
+message DebugLaunchRequest {
+string program = 1;
+optional string cwd = 2;
+repeated string args = 3;
+}
+
+message DebugAttachRequest {
+uint32 process_id = 1;
+}
+
 message DapModuleId {
 oneof id {
 uint32 number = 1;
 string string = 2;
 }
 }

+message GetDebugAdapterBinary {
+uint64 project_id = 1;
+DebugTaskDefinition task = 2;
+}
+
+message DebugAdapterBinary {
+string command = 1;
+repeated string arguments = 2;
+map<string, string> envs = 3;
+optional string cwd = 4;
+optional TcpHost connection = 5;
+string configuration = 7;
+LaunchType launch_type = 8;
+enum LaunchType {
+Attach = 0;
+Launch = 1;
+}
+}
+
+message RunDebugLocator {
+uint64 project_id = 1;
+string locator = 2;
+DebugTaskDefinition task = 3;
+}
@@ -380,7 +380,12 @@ message Envelope {
 StopLanguageServers stop_language_servers = 336;

 LspExtRunnables lsp_ext_runnables = 337;
-LspExtRunnablesResponse lsp_ext_runnables_response = 338; // current max
+LspExtRunnablesResponse lsp_ext_runnables_response = 338;
+
+GetDebugAdapterBinary get_debug_adapter_binary = 339;
+DebugAdapterBinary debug_adapter_binary = 340;
+RunDebugLocator run_debug_locator = 341;
+DebugTaskDefinition debug_task_definition = 342; // current max
 }

 reserved 87 to 88;

@@ -302,6 +302,10 @@ messages!(
 (GitDiff, Background),
 (GitDiffResponse, Background),
 (GitInit, Background),
+(GetDebugAdapterBinary, Background),
+(DebugAdapterBinary, Background),
+(RunDebugLocator, Background),
+(DebugTaskDefinition, Background),
 );

 request_messages!(

@@ -460,6 +464,8 @@ request_messages!(
 (GitDiff, GitDiffResponse),
 (GitInit, Ack),
 (ToggleBreakpoint, Ack),
+(GetDebugAdapterBinary, DebugAdapterBinary),
+(RunDebugLocator, DebugTaskDefinition),
 );

 entity_messages!(

@@ -579,6 +585,8 @@ entity_messages!(
 GitInit,
 BreakpointsForFile,
 ToggleBreakpoint,
+RunDebugLocator,
+GetDebugAdapterBinary,
 );

 entity_messages!(
@@ -570,7 +570,6 @@ pub async fn open_ssh_project(
 app_state.node_runtime.clone(),
 app_state.user_store.clone(),
 app_state.languages.clone(),
-app_state.debug_adapters.clone(),
 app_state.fs.clone(),
 None,
 cx,

@@ -29,7 +29,7 @@ backtrace = "0.3"
 chrono.workspace = true
 clap.workspace = true
 client.workspace = true
-dap.workspace = true
+dap_adapters.workspace = true
 env_logger.workspace = true
 extension.workspace = true
 extension_host.workspace = true
@@ -1,6 +1,6 @@
 use ::proto::{FromProto, ToProto};
 use anyhow::{Result, anyhow};
-use dap::DapRegistry;
 use extension::ExtensionHostProxy;
 use extension_host::headless_host::HeadlessExtensionStore;
 use fs::Fs;
@@ -41,6 +41,7 @@ pub struct HeadlessProject {
     pub buffer_store: Entity<BufferStore>,
     pub lsp_store: Entity<LspStore>,
     pub task_store: Entity<TaskStore>,
+    pub dap_store: Entity<DapStore>,
     pub settings_observer: Entity<SettingsObserver>,
     pub next_entry_id: Arc<AtomicUsize>,
     pub languages: Arc<LanguageRegistry>,
@@ -54,7 +55,6 @@ pub struct HeadlessAppState {
     pub http_client: Arc<dyn HttpClient>,
     pub node_runtime: NodeRuntime,
     pub languages: Arc<LanguageRegistry>,
-    pub debug_adapters: Arc<DapRegistry>,
     pub extension_host_proxy: Arc<ExtensionHostProxy>,
 }

@@ -72,7 +72,6 @@ impl HeadlessProject {
             http_client,
             node_runtime,
             languages,
-            debug_adapters: _debug_adapters,
             extension_host_proxy: proxy,
         }: HeadlessAppState,
         cx: &mut Context<Self>,
@@ -114,6 +113,7 @@ impl HeadlessProject {
                 languages.clone(),
                 environment.clone(),
                 toolchain_store.read(cx).as_language_toolchain_store(),
+                worktree_store.clone(),
                 breakpoint_store.clone(),
                 cx,
             )
@@ -258,6 +258,7 @@ impl HeadlessProject {
             buffer_store,
             lsp_store,
             task_store,
+            dap_store,
             next_entry_id: Default::default(),
             languages,
             extensions,

@@ -4,7 +4,7 @@
 use crate::headless_project::HeadlessProject;
 use client::{Client, UserStore};
 use clock::FakeSystemClock;
-use dap::DapRegistry;
 use extension::ExtensionHostProxy;
 use fs::{FakeFs, Fs};
 use gpui::{AppContext as _, Entity, SemanticVersion, TestAppContext};
@@ -1566,7 +1566,6 @@ pub async fn init_test(
     let http_client = Arc::new(BlockedHttpClient);
     let node_runtime = NodeRuntime::unavailable();
     let languages = Arc::new(LanguageRegistry::new(cx.executor()));
-    let debug_adapters = DapRegistry::default().into();
     let proxy = Arc::new(ExtensionHostProxy::new());
     server_cx.update(HeadlessProject::init);
     let headless = server_cx.new(|cx| {
@@ -1579,7 +1578,6 @@ pub async fn init_test(
             http_client,
             node_runtime,
             languages,
-            debug_adapters,
             extension_host_proxy: proxy,
         },
         cx,

@@ -3,7 +3,7 @@ use crate::headless_project::HeadlessAppState;
 use anyhow::{Context as _, Result, anyhow};
 use chrono::Utc;
 use client::{ProxySettings, telemetry};
-use dap::DapRegistry;
 use extension::ExtensionHostProxy;
 use fs::{Fs, RealFs};
 use futures::channel::mpsc;
@@ -441,6 +441,7 @@ pub fn execute_run(

         GitHostingProviderRegistry::set_global(git_hosting_provider_registry, cx);
         git_hosting_providers::init(cx);
+        dap_adapters::init(cx);

         extension::init(cx);
         let extension_host_proxy = ExtensionHostProxy::global(cx);
@@ -472,7 +473,6 @@ pub fn execute_run(
         let mut languages = LanguageRegistry::new(cx.background_executor().clone());
         languages.set_language_server_download_dir(paths::languages_dir().clone());
         let languages = Arc::new(languages);
-        let debug_adapters = DapRegistry::default().into();

         HeadlessProject::new(
             HeadlessAppState {
@@ -481,7 +481,6 @@ pub fn execute_run(
                 http_client,
                 node_runtime,
                 languages,
-                debug_adapters,
                 extension_host_proxy,
             },
             cx,

@@ -17,11 +17,11 @@ workspace = true
 [dependencies]
 anyhow.workspace = true
 collections.workspace = true
-dap-types.workspace = true
 futures.workspace = true
 gpui.workspace = true
 hex.workspace = true
 parking_lot.workspace = true
+proto.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true

@@ -1,21 +1,14 @@
-use dap_types::StartDebuggingRequestArguments;
+use anyhow::Result;
 use schemars::{JsonSchema, r#gen::SchemaSettings};
 use serde::{Deserialize, Serialize};
-use std::net::Ipv4Addr;
 use std::path::PathBuf;
-use util::ResultExt;
+use std::{net::Ipv4Addr, path::Path};

-use crate::{TaskTemplate, TaskTemplates, TaskType, task_template::DebugArgs};
+use crate::{TaskTemplate, TaskType, task_template::DebugArgs};

-impl Default for DebugConnectionType {
-    fn default() -> Self {
-        DebugConnectionType::TCP(TCPHost::default())
-    }
-}
-
 /// Represents the host information of the debug adapter
 #[derive(Default, Deserialize, Serialize, PartialEq, Eq, JsonSchema, Clone, Debug)]
-pub struct TCPHost {
+pub struct TcpArgumentsTemplate {
     /// The port that the debug adapter is listening on
     ///
     /// Default: We will try to find an open port
@@ -30,23 +23,39 @@ pub struct TCPHost {
     pub timeout: Option<u64>,
 }

-impl TCPHost {
+impl TcpArgumentsTemplate {
     /// Get the host or fallback to the default host
     pub fn host(&self) -> Ipv4Addr {
         self.host.unwrap_or_else(|| Ipv4Addr::new(127, 0, 0, 1))
     }
+
+    pub fn from_proto(proto: proto::TcpHost) -> Result<Self> {
+        Ok(Self {
+            port: proto.port.map(|p| p.try_into()).transpose()?,
+            host: proto.host.map(|h| h.parse()).transpose()?,
+            timeout: proto.timeout,
+        })
+    }
+
+    pub fn to_proto(&self) -> proto::TcpHost {
+        proto::TcpHost {
+            port: self.port.map(|p| p.into()),
+            host: self.host.map(|h| h.to_string()),
+            timeout: self.timeout,
+        }
+    }
 }

 /// Represents the attach request information of the debug adapter
 #[derive(Default, Deserialize, Serialize, PartialEq, Eq, JsonSchema, Clone, Debug)]
-pub struct AttachConfig {
+pub struct AttachRequest {
     /// The processId to attach to, if left empty we will show a process picker
     pub process_id: Option<u32>,
 }

 /// Represents the launch request information of the debug adapter
 #[derive(Deserialize, Serialize, Default, PartialEq, Eq, JsonSchema, Clone, Debug)]
-pub struct LaunchConfig {
+pub struct LaunchRequest {
     /// The program that you trying to debug
     pub program: String,
     /// The current working directory of your project
@@ -59,47 +68,26 @@ pub struct LaunchConfig {
 /// Represents the type that will determine which request to call on the debug adapter
 #[derive(Deserialize, Serialize, PartialEq, Eq, JsonSchema, Clone, Debug)]
 #[serde(rename_all = "lowercase", untagged)]
-pub enum DebugRequestType {
+pub enum DebugRequest {
     /// Call the `launch` request on the debug adapter
-    Launch(LaunchConfig),
+    Launch(LaunchRequest),
     /// Call the `attach` request on the debug adapter
-    Attach(AttachConfig),
+    Attach(AttachRequest),
 }

-impl From<LaunchConfig> for DebugRequestType {
-    fn from(launch_config: LaunchConfig) -> Self {
-        DebugRequestType::Launch(launch_config)
+impl From<LaunchRequest> for DebugRequest {
+    fn from(launch_config: LaunchRequest) -> Self {
+        DebugRequest::Launch(launch_config)
     }
 }

-impl From<AttachConfig> for DebugRequestType {
-    fn from(attach_config: AttachConfig) -> Self {
-        DebugRequestType::Attach(attach_config)
-    }
-}
-
-/// Represents a request for starting the debugger.
-/// Contrary to `DebugRequestType`, `DebugRequestDisposition` is not Serializable.
-#[derive(PartialEq, Eq, Clone, Debug)]
-pub enum DebugRequestDisposition {
-    /// Debug session configured by the user.
-    UserConfigured(DebugRequestType),
-    /// Debug session configured by the debug adapter
-    ReverseRequest(StartDebuggingRequestArguments),
-}
-
-impl DebugRequestDisposition {
-    /// Get the current working directory from request if it's a launch request and exits
-    pub fn cwd(&self) -> Option<PathBuf> {
-        match self {
-            Self::UserConfigured(DebugRequestType::Launch(launch_config)) => {
-                launch_config.cwd.clone()
-            }
-            _ => None,
-        }
+impl From<AttachRequest> for DebugRequest {
+    fn from(attach_config: AttachRequest) -> Self {
+        DebugRequest::Attach(attach_config)
     }
 }

-impl TryFrom<TaskTemplate> for DebugTaskDefinition {
+impl TryFrom<TaskTemplate> for DebugTaskTemplate {
     type Error = ();

     fn try_from(value: TaskTemplate) -> Result<Self, Self::Error> {
@@ -108,40 +96,40 @@ impl TryFrom<TaskTemplate> for DebugTaskDefinition {
         };

         let request = match debug_args.request {
-            crate::DebugArgsRequest::Launch => DebugRequestType::Launch(LaunchConfig {
+            crate::DebugArgsRequest::Launch => DebugRequest::Launch(LaunchRequest {
                 program: value.command,
                 cwd: value.cwd.map(PathBuf::from),
                 args: value.args,
             }),
-            crate::DebugArgsRequest::Attach(attach_config) => {
-                DebugRequestType::Attach(attach_config)
-            }
+            crate::DebugArgsRequest::Attach(attach_config) => DebugRequest::Attach(attach_config),
         };

-        Ok(DebugTaskDefinition {
-            adapter: debug_args.adapter,
-            request,
-            label: value.label,
-            initialize_args: debug_args.initialize_args,
-            tcp_connection: debug_args.tcp_connection,
+        Ok(DebugTaskTemplate {
             locator: debug_args.locator,
-            stop_on_entry: debug_args.stop_on_entry,
+            definition: DebugTaskDefinition {
+                adapter: debug_args.adapter,
+                request,
+                label: value.label,
+                initialize_args: debug_args.initialize_args,
+                tcp_connection: debug_args.tcp_connection,
+                stop_on_entry: debug_args.stop_on_entry,
+            },
         })
     }
 }

-impl DebugTaskDefinition {
+impl DebugTaskTemplate {
     /// Translate from debug definition to a task template
-    pub fn to_zed_format(self) -> anyhow::Result<TaskTemplate> {
-        let (command, cwd, request) = match self.request {
-            DebugRequestType::Launch(launch_config) => (
+    pub fn to_zed_format(self) -> TaskTemplate {
+        let (command, cwd, request) = match self.definition.request {
+            DebugRequest::Launch(launch_config) => (
                 launch_config.program,
                 launch_config
                     .cwd
                     .map(|cwd| cwd.to_string_lossy().to_string()),
                 crate::task_template::DebugArgsRequest::Launch,
             ),
-            DebugRequestType::Attach(attach_config) => (
+            DebugRequest::Attach(attach_config) => (
                 "".to_owned(),
                 None,
                 crate::task_template::DebugArgsRequest::Attach(attach_config),
@@ -149,34 +137,33 @@ impl DebugTaskDefinition {
         };

         let task_type = TaskType::Debug(DebugArgs {
-            adapter: self.adapter,
+            adapter: self.definition.adapter,
             request,
-            initialize_args: self.initialize_args,
+            initialize_args: self.definition.initialize_args,
             locator: self.locator,
-            tcp_connection: self.tcp_connection,
-            stop_on_entry: self.stop_on_entry,
+            tcp_connection: self.definition.tcp_connection,
+            stop_on_entry: self.definition.stop_on_entry,
         });

-        let label = self.label.clone();
+        let label = self.definition.label.clone();

-        Ok(TaskTemplate {
+        TaskTemplate {
             label,
             command,
             args: vec![],
             task_type,
             cwd,
             ..Default::default()
-        })
+        }
     }
 }
-/// Represents the type of the debugger adapter connection
 #[derive(Deserialize, Serialize, PartialEq, Eq, JsonSchema, Clone, Debug)]
-#[serde(rename_all = "lowercase", tag = "connection")]
-pub enum DebugConnectionType {
-    /// Connect to the debug adapter via TCP
-    TCP(TCPHost),
-    /// Connect to the debug adapter via STDIO
-    STDIO,
+#[serde(rename_all = "snake_case")]
+pub struct DebugTaskTemplate {
+    pub locator: Option<String>,
+    #[serde(flatten)]
+    pub definition: DebugTaskDefinition,
 }

 /// This struct represent a user created debug task
@@ -187,7 +174,7 @@ pub struct DebugTaskDefinition {
     pub adapter: String,
     /// The type of request that should be called on the debug adapter
     #[serde(flatten)]
-    pub request: DebugRequestType,
+    pub request: DebugRequest,
     /// Name of the debug task
     pub label: String,
     /// Additional initialization arguments to be sent on DAP initialization
@@ -197,18 +184,83 @@ pub struct DebugTaskDefinition {
     /// If provided, this will be used to connect to the debug adapter instead of
     /// spawning a new process. This is useful for connecting to a debug adapter
     /// that is already running or is started by another process.
-    pub tcp_connection: Option<TCPHost>,
-    /// Locator to use
-    /// -- cargo
-    pub locator: Option<String>,
+    pub tcp_connection: Option<TcpArgumentsTemplate>,
     /// Whether to tell the debug adapter to stop on entry
     pub stop_on_entry: Option<bool>,
 }

+impl DebugTaskDefinition {
+    pub fn cwd(&self) -> Option<&Path> {
+        if let DebugRequest::Launch(config) = &self.request {
+            config.cwd.as_deref()
+        } else {
+            None
+        }
+    }
+
+    pub fn to_proto(&self) -> proto::DebugTaskDefinition {
+        proto::DebugTaskDefinition {
+            adapter: self.adapter.clone(),
+            request: Some(match &self.request {
+                DebugRequest::Launch(config) => {
+                    proto::debug_task_definition::Request::DebugLaunchRequest(
+                        proto::DebugLaunchRequest {
+                            program: config.program.clone(),
+                            cwd: config.cwd.as_ref().map(|c| c.to_string_lossy().to_string()),
+                            args: config.args.clone(),
+                        },
+                    )
+                }
+                DebugRequest::Attach(attach_request) => {
+                    proto::debug_task_definition::Request::DebugAttachRequest(
+                        proto::DebugAttachRequest {
+                            process_id: attach_request.process_id.unwrap_or_default(),
+                        },
+                    )
+                }
+            }),
+            label: self.label.clone(),
+            initialize_args: self.initialize_args.as_ref().map(|v| v.to_string()),
+            tcp_connection: self.tcp_connection.as_ref().map(|t| t.to_proto()),
+            stop_on_entry: self.stop_on_entry,
+        }
+    }
+
+    pub fn from_proto(proto: proto::DebugTaskDefinition) -> Result<Self> {
+        let request = proto
+            .request
+            .ok_or_else(|| anyhow::anyhow!("request is required"))?;
+        Ok(Self {
+            label: proto.label,
+            initialize_args: proto.initialize_args.map(|v| v.into()),
+            tcp_connection: proto
+                .tcp_connection
+                .map(TcpArgumentsTemplate::from_proto)
+                .transpose()?,
+            stop_on_entry: proto.stop_on_entry,
+            adapter: proto.adapter.clone(),
+            request: match request {
+                proto::debug_task_definition::Request::DebugAttachRequest(config) => {
+                    DebugRequest::Attach(AttachRequest {
+                        process_id: Some(config.process_id),
+                    })
+                }
+
+                proto::debug_task_definition::Request::DebugLaunchRequest(config) => {
+                    DebugRequest::Launch(LaunchRequest {
+                        program: config.program,
+                        cwd: config.cwd.map(|cwd| cwd.into()),
+                        args: config.args,
+                    })
+                }
+            },
+        })
+    }
+}
+
 /// A group of Debug Tasks defined in a JSON file.
 #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
 #[serde(transparent)]
-pub struct DebugTaskFile(pub Vec<DebugTaskDefinition>);
+pub struct DebugTaskFile(pub Vec<DebugTaskTemplate>);

 impl DebugTaskFile {
     /// Generates JSON schema of Tasks JSON template format.
@@ -222,31 +274,17 @@ impl DebugTaskFile {
     }
 }

-impl TryFrom<DebugTaskFile> for TaskTemplates {
-    type Error = anyhow::Error;
-
-    fn try_from(value: DebugTaskFile) -> Result<Self, Self::Error> {
-        let templates = value
-            .0
-            .into_iter()
-            .filter_map(|debug_definition| debug_definition.to_zed_format().log_err())
-            .collect();
-
-        Ok(Self(templates))
-    }
-}
-
 #[cfg(test)]
 mod tests {
-    use crate::{DebugRequestType, LaunchConfig};
+    use crate::{DebugRequest, LaunchRequest};

     #[test]
     fn test_can_deserialize_non_attach_task() {
-        let deserialized: DebugRequestType =
+        let deserialized: DebugRequest =
             serde_json::from_str(r#"{"program": "cafebabe"}"#).unwrap();
         assert_eq!(
             deserialized,
-            DebugRequestType::Launch(LaunchConfig {
+            DebugRequest::Launch(LaunchRequest {
                 program: "cafebabe".to_owned(),
                 ..Default::default()
             })

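The hunks above split the old all-in-one `DebugTaskDefinition` into a `DebugTaskTemplate` that owns the `locator`, wrapping a `DebugTaskDefinition` flattened via `#[serde(flatten)]`. As a rough orientation only (not code from this commit; the adapter name, program path and label below are made up), the new shapes compose like this:

    use task::{DebugRequest, DebugTaskDefinition, DebugTaskTemplate, LaunchRequest};

    fn example_template() -> DebugTaskTemplate {
        DebugTaskTemplate {
            // The locator now lives on the template, not on the definition
            // that eventually reaches the debug adapter.
            locator: Some("cargo".to_string()),
            definition: DebugTaskDefinition {
                adapter: "gdb".to_string(),
                request: DebugRequest::Launch(LaunchRequest {
                    program: "target/debug/my_app".to_string(),
                    ..Default::default()
                }),
                label: "Debug my_app".to_string(),
                initialize_args: None,
                tcp_connection: None,
                stop_on_entry: Some(false),
            },
        }
    }

Only the `definition` half round-trips through `to_proto`/`from_proto`; the locator stays on the client side, which is what makes it clearer when location needs to happen.
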
@@ -1,5 +1,4 @@
 //! Baseline interface of Tasks in Zed: all tasks in Zed are intended to use those for implementing their own logic.
-#![deny(missing_docs)]

 mod debug_format;
 mod serde_helpers;
@@ -16,8 +15,8 @@ use std::path::PathBuf;
 use std::str::FromStr;

 pub use debug_format::{
-    AttachConfig, DebugConnectionType, DebugRequestDisposition, DebugRequestType,
-    DebugTaskDefinition, DebugTaskFile, LaunchConfig, TCPHost,
+    AttachRequest, DebugRequest, DebugTaskDefinition, DebugTaskFile, DebugTaskTemplate,
+    LaunchRequest, TcpArgumentsTemplate,
 };
 pub use task_template::{
     DebugArgs, DebugArgsRequest, HideStrategy, RevealStrategy, TaskModal, TaskTemplate,
@@ -104,7 +103,7 @@ impl ResolvedTask {
     }

     /// Get the configuration for the debug adapter that should be used for this task.
-    pub fn resolved_debug_adapter_config(&self) -> Option<DebugTaskDefinition> {
+    pub fn resolved_debug_adapter_config(&self) -> Option<DebugTaskTemplate> {
         match self.original_task.task_type.clone() {
             TaskType::Debug(debug_args) if self.resolved.is_some() => {
                 let resolved = self
@@ -127,25 +126,27 @@ impl ResolvedTask {
                     })
                     .collect();

-                Some(DebugTaskDefinition {
-                    label: resolved.label.clone(),
-                    adapter: debug_args.adapter.clone(),
-                    request: match debug_args.request {
-                        crate::task_template::DebugArgsRequest::Launch => {
-                            DebugRequestType::Launch(LaunchConfig {
-                                program: resolved.command.clone(),
-                                cwd: resolved.cwd.clone(),
-                                args,
-                            })
-                        }
-                        crate::task_template::DebugArgsRequest::Attach(attach_config) => {
-                            DebugRequestType::Attach(attach_config)
-                        }
-                    },
-                    initialize_args: debug_args.initialize_args,
-                    tcp_connection: debug_args.tcp_connection,
+                Some(DebugTaskTemplate {
                     locator: debug_args.locator.clone(),
-                    stop_on_entry: debug_args.stop_on_entry,
+                    definition: DebugTaskDefinition {
+                        label: resolved.label.clone(),
+                        adapter: debug_args.adapter.clone(),
+                        request: match debug_args.request {
+                            crate::task_template::DebugArgsRequest::Launch => {
+                                DebugRequest::Launch(LaunchRequest {
+                                    program: resolved.command.clone(),
+                                    cwd: resolved.cwd.clone(),
+                                    args,
+                                })
+                            }
+                            crate::task_template::DebugArgsRequest::Attach(attach_config) => {
+                                DebugRequest::Attach(attach_config)
+                            }
+                        },
+                        initialize_args: debug_args.initialize_args,
+                        tcp_connection: debug_args.tcp_connection,
+                        stop_on_entry: debug_args.stop_on_entry,
+                    },
                 })
             }
             _ => None,
@@ -366,6 +367,8 @@ pub struct ShellBuilder {
     args: Vec<String>,
 }

+pub static DEFAULT_REMOTE_SHELL: &str = "\"${SHELL:-sh}\"";
+
 impl ShellBuilder {
     /// Create a new ShellBuilder as configured.
     pub fn new(is_local: bool, shell: &Shell) -> Self {
@@ -374,7 +377,7 @@ impl ShellBuilder {
                 if is_local {
                     (Self::system_shell(), Vec::new())
                 } else {
-                    ("\"${SHELL:-sh}\"".to_string(), Vec::new())
+                    (DEFAULT_REMOTE_SHELL.to_string(), Vec::new())
                 }
             }
             Shell::Program(shell) => (shell.clone(), Vec::new()),

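The `DEFAULT_REMOTE_SHELL` constant introduced above is just the quoted POSIX parameter expansion `${SHELL:-sh}`: on the remote host it expands to `$SHELL` when that variable is set and falls back to `sh` otherwise. A trivial sketch of what the constant contains (not a test from the commit; the quoting is part of the value so the expansion survives being embedded in a command line):

    // Hypothetical check; the string literal matches the hunk above.
    assert_eq!(DEFAULT_REMOTE_SHELL, "\"${SHELL:-sh}\"");
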
@@ -7,8 +7,9 @@ use std::path::PathBuf;
 use util::serde::default_true;
 use util::{ResultExt, truncate_and_remove_front};

+use crate::debug_format::TcpArgumentsTemplate;
 use crate::{
-    AttachConfig, ResolvedTask, RevealTarget, Shell, SpawnInTerminal, TCPHost, TaskContext, TaskId,
+    AttachRequest, ResolvedTask, RevealTarget, Shell, SpawnInTerminal, TaskContext, TaskId,
     VariableName, ZED_VARIABLE_NAME_PREFIX,
     serde_helpers::{non_empty_string_vec, non_empty_string_vec_json_schema},
 };
@@ -83,7 +84,7 @@ pub enum DebugArgsRequest {
     /// launch (program, cwd) are stored in TaskTemplate as (command, cwd)
     Launch,
     /// Attach
-    Attach(AttachConfig),
+    Attach(AttachRequest),
 }

 #[derive(Deserialize, Eq, PartialEq, Clone, Debug)]
@@ -94,7 +95,7 @@ pub struct DebugArgs {
     /// Adapter choice
     pub adapter: String,
     /// TCP connection to make with debug adapter
-    pub tcp_connection: Option<TCPHost>,
+    pub tcp_connection: Option<TcpArgumentsTemplate>,
     /// Args to send to debug adapter
     pub initialize_args: Option<serde_json::value::Value>,
     /// the locator to use

@@ -11,7 +11,7 @@ use itertools::Itertools;
 use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
 use project::{TaskSourceKind, task_store::TaskStore};
 use task::{
-    DebugRequestType, DebugTaskDefinition, ResolvedTask, RevealTarget, TaskContext, TaskModal,
+    DebugRequest, DebugTaskDefinition, ResolvedTask, RevealTarget, TaskContext, TaskModal,
     TaskTemplate, TaskType,
 };
 use ui::{
@@ -21,7 +21,7 @@
 };

 use util::{ResultExt, truncate_and_trailoff};
-use workspace::{ModalView, Workspace, tasks::schedule_resolved_task};
+use workspace::{ModalView, Workspace};
 pub use zed_actions::{Rerun, Spawn};

 /// A modal used to spawn new tasks.
@@ -334,7 +334,7 @@ impl PickerDelegate for TasksModalDelegate {
     fn confirm(
         &mut self,
         omit_history_entry: bool,
-        _: &mut Window,
+        window: &mut Window,
         cx: &mut Context<picker::Picker<Self>>,
     ) {
         let current_match_index = self.selected_index();
@@ -360,17 +360,14 @@ impl PickerDelegate for TasksModalDelegate {
         }

         match task.task_type() {
-            TaskType::Debug(config) if config.locator.is_none() => {
-                let Some(config): Option<DebugTaskDefinition> =
-                    task.resolved_debug_adapter_config()
-                else {
+            TaskType::Debug(_) => {
+                let Some(config) = task.resolved_debug_adapter_config() else {
                     return;
                 };
+                let config = config.definition;

                 match &config.request {
-                    DebugRequestType::Attach(attach_config)
-                        if attach_config.process_id.is_none() =>
-                    {
+                    DebugRequest::Attach(attach_config) if attach_config.process_id.is_none() => {
                         cx.emit(ShowAttachModal {
                             debug_config: config.clone(),
                         });
@@ -379,24 +376,20 @@ impl PickerDelegate for TasksModalDelegate {
                     _ => {
                         self.workspace
                             .update(cx, |workspace, cx| {
-                                workspace.project().update(cx, |project, cx| {
-                                    project
-                                        .start_debug_session(config, cx)
-                                        .detach_and_log_err(cx);
-                                });
+                                workspace.schedule_debug_task(task, window, cx);
                             })
                             .ok();
                     }
                 }
             }
-            _ => {
+            TaskType::Script => {
                 self.workspace
                     .update(cx, |workspace, cx| {
-                        schedule_resolved_task(
-                            workspace,
+                        workspace.schedule_resolved_task(
                             task_source_kind,
                             task,
                             omit_history_entry,
+                            window,
                             cx,
                         );
                     })
@@ -566,7 +559,7 @@ impl PickerDelegate for TasksModalDelegate {
     fn confirm_input(
         &mut self,
         omit_history_entry: bool,
-        _: &mut Window,
+        window: &mut Window,
         cx: &mut Context<Picker<Self>>,
     ) {
         let Some((task_source_kind, mut task)) = self.spawn_oneshot() else {
@@ -584,36 +577,17 @@ impl PickerDelegate for TasksModalDelegate {
         self.workspace
             .update(cx, |workspace, cx| {
                 match task.task_type() {
-                    TaskType::Script => schedule_resolved_task(
-                        workspace,
+                    TaskType::Script => workspace.schedule_resolved_task(
                         task_source_kind,
                         task,
                         omit_history_entry,
+                        window,
                         cx,
                     ),
                     // todo(debugger): Should create a schedule_resolved_debug_task function
                     // This would allow users to access to debug history and other issues
-                    TaskType::Debug(debug_args) => {
-                        let Some(debug_config) = task.resolved_debug_adapter_config() else {
-                            // todo(debugger) log an error, this should never happen
-                            return;
-                        };
-
-                        if debug_args.locator.is_some() {
-                            schedule_resolved_task(
-                                workspace,
-                                task_source_kind,
-                                task,
-                                omit_history_entry,
-                                cx,
-                            );
-                        } else {
-                            workspace.project().update(cx, |project, cx| {
-                                project
-                                    .start_debug_session(debug_config, cx)
-                                    .detach_and_log_err(cx);
-                            });
-                        }
+                    TaskType::Debug(_) => {
+                        workspace.schedule_debug_task(task, window, cx);
                     }
                 };
             })

@@ -8,8 +8,7 @@ use project::{Location, TaskContexts, TaskSourceKind, Worktree};
 use task::{
     RevealTarget, TaskContext, TaskId, TaskModal, TaskTemplate, TaskVariables, VariableName,
 };
-use workspace::tasks::schedule_task;
-use workspace::{Workspace, tasks::schedule_resolved_task};
+use workspace::Workspace;

 mod modal;

@@ -50,15 +49,15 @@ pub fn init(cx: &mut App) {
                     let task_contexts = task_contexts.await;
                     let default_context = TaskContext::default();
                     workspace
-                        .update_in(cx, |workspace, _, cx| {
-                            schedule_task(
-                                workspace,
+                        .update_in(cx, |workspace, window, cx| {
+                            workspace.schedule_task(
                                 task_source_kind,
                                 &original_task,
                                 task_contexts
                                     .active_context()
                                     .unwrap_or(&default_context),
                                 false,
+                                window,
                                 cx,
                             )
                         })
@@ -75,11 +74,11 @@ pub fn init(cx: &mut App) {
                     }
                 }

-                schedule_resolved_task(
-                    workspace,
+                workspace.schedule_resolved_task(
                     task_source_kind,
                     last_scheduled_task,
                     false,
+                    window,
                     cx,
                 );
             }
@@ -217,7 +216,7 @@ where
         })?;

     let did_spawn = workspace
-        .update(cx, |workspace, cx| {
+        .update_in(cx, |workspace, window, cx| {
             let default_context = TaskContext::default();
             let active_context = task_contexts.active_context().unwrap_or(&default_context);

@@ -228,12 +227,12 @@ where
                     target_task.reveal_target = target_override;
                 }
            }
-            schedule_task(
-                workspace,
+            workspace.schedule_task(
                 task_source_kind.clone(),
                 target_task,
                 active_context,
                 false,
+                window,
                 cx,
             );
             true

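As the call sites above show, task scheduling is no longer done through free functions in `workspace::tasks`; `schedule_task`, `schedule_resolved_task` and the new `schedule_debug_task` are now methods on `Workspace` and take the window handle explicitly (the full method bodies appear in the last hunk of this commit). A condensed sketch of the new call shape, assuming a `workspace: &mut Workspace`, a `TaskSourceKind`, a `ResolvedTask`, and a `&mut Window` are already in scope:

    // Not code from the commit; the signature follows the hunks above.
    workspace.schedule_resolved_task(
        task_source_kind,
        resolved_task,
        false, // omit_history: false keeps the task in the inventory history
        window,
        cx,
    );
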
@@ -46,13 +46,14 @@ use smol::channel::{Receiver, Sender};
 use task::{HideStrategy, Shell, TaskId};
 use terminal_settings::{AlternateScroll, CursorShape, TerminalSettings};
 use theme::{ActiveTheme, Theme};
-use util::{paths::home_dir, truncate_and_trailoff};
+use util::{ResultExt, paths::home_dir, truncate_and_trailoff};

 use std::{
     cmp::{self, min},
     fmt::Display,
     ops::{Deref, Index, RangeInclusive},
     path::PathBuf,
+    process::ExitStatus,
     sync::{Arc, LazyLock},
     time::Duration,
 };
@@ -109,7 +110,6 @@ pub enum Event {
     SelectionsChanged,
     NewNavigationTarget(Option<MaybeNavigationTarget>),
     Open(MaybeNavigationTarget),
-    TaskLocatorReady { task_id: TaskId, success: bool },
 }

 #[derive(Clone, Debug)]
@@ -351,7 +351,7 @@ impl TerminalBuilder {
         max_scroll_history_lines: Option<usize>,
         is_ssh_terminal: bool,
         window: AnyWindowHandle,
-        completion_tx: Sender<()>,
+        completion_tx: Sender<Option<ExitStatus>>,
         debug_terminal: bool,
         cx: &App,
     ) -> Result<TerminalBuilder> {
@@ -639,7 +639,7 @@ pub enum SelectionPhase {

 pub struct Terminal {
     pty_tx: Notifier,
-    completion_tx: Sender<()>,
+    completion_tx: Sender<Option<ExitStatus>>,
     term: Arc<FairMutex<Term<ZedListener>>>,
     term_config: Config,
     events: VecDeque<InternalEvent>,
@@ -670,7 +670,7 @@ pub struct TaskState {
     pub label: String,
     pub command_label: String,
     pub status: TaskStatus,
-    pub completion_rx: Receiver<()>,
+    pub completion_rx: Receiver<Option<ExitStatus>>,
     pub hide: HideStrategy,
     pub show_summary: bool,
     pub show_command: bool,
@@ -1859,20 +1859,30 @@ impl Terminal {
         self.debug_terminal
     }

-    pub fn wait_for_completed_task(&self, cx: &App) -> Task<()> {
+    pub fn wait_for_completed_task(&self, cx: &App) -> Task<Option<ExitStatus>> {
         if let Some(task) = self.task() {
             if task.status == TaskStatus::Running {
                 let completion_receiver = task.completion_rx.clone();
-                return cx.spawn(async move |_| {
-                    let _ = completion_receiver.recv().await;
-                });
+                return cx
+                    .spawn(async move |_| completion_receiver.recv().await.log_err().flatten());
             }
         }
-        Task::ready(())
+        Task::ready(None)
     }

     fn register_task_finished(&mut self, error_code: Option<i32>, cx: &mut Context<Terminal>) {
-        self.completion_tx.try_send(()).ok();
+        let e: Option<ExitStatus> = error_code.map(|code| {
+            #[cfg(unix)]
+            {
+                return std::os::unix::process::ExitStatusExt::from_raw(code);
+            }
+            #[cfg(windows)]
+            {
+                return std::os::windows::process::ExitStatusExt::from_raw(code as u32);
+            }
+        });
+
+        self.completion_tx.try_send(e).ok();
         let task = match &mut self.task {
             Some(task) => task,
             None => {
@@ -1911,11 +1921,6 @@ impl Terminal {
             unsafe { append_text_to_term(&mut self.term.lock(), &lines_to_show) };
         }

-        cx.emit(Event::TaskLocatorReady {
-            task_id: task.id.clone(),
-            success: finished_successfully,
-        });
-
         match task.hide {
             HideStrategy::Never => {}
             HideStrategy::Always => {

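The terminal hunks above change task completion from a unit signal to `Option<ExitStatus>`, so callers can tell success, failure, and cancellation apart instead of getting a bare "done" event. As a sketch of consuming the new value (not code from the commit; it assumes a `terminal: Entity<Terminal>` and an async GPUI context, mirroring the `read_with` + `await` pattern used by the new `TerminalProvider` later in this commit):

    // Sketch only: await the task's exit status through the new channel.
    let status = terminal
        .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
        .await;
    match status {
        Some(status) if status.success() => { /* the task exited with code 0 */ }
        Some(status) => log::warn!("task failed: {status:?}"),
        None => log::warn!("task finished without reporting an exit status"),
    }
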
@@ -1,4 +1,4 @@
-use std::{cmp, ops::ControlFlow, path::PathBuf, sync::Arc, time::Duration};
+use std::{cmp, ops::ControlFlow, path::PathBuf, process::ExitStatus, sync::Arc, time::Duration};

 use crate::{
     TerminalView, default_working_directory,
@@ -9,7 +9,7 @@ use crate::{
 use breadcrumbs::Breadcrumbs;
 use collections::HashMap;
 use db::kvp::KEY_VALUE_STORE;
-use futures::future::join_all;
+use futures::{channel::oneshot, future::join_all};
 use gpui::{
     Action, AnyView, App, AsyncApp, AsyncWindowContext, Context, Corner, Entity, EventEmitter,
     ExternalPaths, FocusHandle, Focusable, IntoElement, ParentElement, Pixels, Render, Styled,
@@ -279,17 +279,9 @@ impl TerminalPanel {
         };

         if let Some(workspace) = workspace.upgrade() {
-            terminal_panel
-                .update_in(&mut cx, |_, window, cx| {
-                    cx.subscribe_in(&workspace, window, |terminal_panel, _, e, window, cx| {
-                        if let workspace::Event::SpawnTask {
-                            action: spawn_in_terminal,
-                        } = e
-                        {
-                            terminal_panel.spawn_task(spawn_in_terminal, window, cx);
-                        };
-                    })
-                    .detach();
+            workspace
+                .update(&mut cx, |workspace, _| {
+                    workspace.set_terminal_provider(TerminalProvider(terminal_panel.clone()))
                 })
                 .ok();
         }
@@ -486,12 +478,17 @@ impl TerminalPanel {
             .detach_and_log_err(cx);
     }

-    fn spawn_task(&mut self, task: &SpawnInTerminal, window: &mut Window, cx: &mut Context<Self>) {
+    fn spawn_task(
+        &mut self,
+        task: &SpawnInTerminal,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Task<Result<Entity<Terminal>>> {
         let Ok(is_local) = self
             .workspace
             .update(cx, |workspace, cx| workspace.project().read(cx).is_local())
         else {
-            return;
+            return Task::ready(Err(anyhow!("Project is not local")));
         };

         let builder = ShellBuilder::new(is_local, &task.shell);
@@ -506,58 +503,53 @@ impl TerminalPanel {
         };

         if task.allow_concurrent_runs && task.use_new_terminal {
-            self.spawn_in_new_terminal(task, window, cx)
-                .detach_and_log_err(cx);
-            return;
+            return self.spawn_in_new_terminal(task, window, cx);
         }

         let mut terminals_for_task = self.terminals_for_task(&task.full_label, cx);
         let Some(existing) = terminals_for_task.pop() else {
-            self.spawn_in_new_terminal(task, window, cx)
-                .detach_and_log_err(cx);
-            return;
+            return self.spawn_in_new_terminal(task, window, cx);
         };

         let (existing_item_index, task_pane, existing_terminal) = existing;
         if task.allow_concurrent_runs {
-            self.replace_terminal(
+            return self.replace_terminal(
                 task,
                 task_pane,
                 existing_item_index,
                 existing_terminal,
                 window,
                 cx,
-            )
-            .detach();
-            return;
+            );
         }

+        let (tx, rx) = oneshot::channel();
+
         self.deferred_tasks.insert(
             task.id.clone(),
             cx.spawn_in(window, async move |terminal_panel, cx| {
                 wait_for_terminals_tasks(terminals_for_task, cx).await;
                 let task = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
                     if task.use_new_terminal {
-                        terminal_panel
-                            .spawn_in_new_terminal(task, window, cx)
-                            .detach_and_log_err(cx);
-                        None
+                        terminal_panel.spawn_in_new_terminal(task, window, cx)
                     } else {
-                        Some(terminal_panel.replace_terminal(
+                        terminal_panel.replace_terminal(
                             task,
                             task_pane,
                             existing_item_index,
                             existing_terminal,
                             window,
                             cx,
-                        ))
+                        )
                     }
                 });
-                if let Ok(Some(task)) = task {
-                    task.await;
+                if let Ok(task) = task {
+                    tx.send(task.await).ok();
                 }
             }),
         );
+
+        cx.spawn(async move |_, _| rx.await?)
     }

     pub fn spawn_in_new_terminal(
@@ -810,60 +802,47 @@ impl TerminalPanel {
         terminal_to_replace: Entity<TerminalView>,
         window: &mut Window,
         cx: &mut Context<Self>,
-    ) -> Task<Option<()>> {
+    ) -> Task<Result<Entity<Terminal>>> {
         let reveal = spawn_task.reveal;
         let reveal_target = spawn_task.reveal_target;
         let window_handle = window.window_handle();
         let task_workspace = self.workspace.clone();
         cx.spawn_in(window, async move |terminal_panel, cx| {
-            let project = terminal_panel
-                .update(cx, |this, cx| {
-                    this.workspace
-                        .update(cx, |workspace, _| workspace.project().clone())
-                        .ok()
-                })
-                .ok()
-                .flatten()?;
+            let project = terminal_panel.update(cx, |this, cx| {
+                this.workspace
+                    .update(cx, |workspace, _| workspace.project().clone())
+            })??;
             let new_terminal = project
                 .update(cx, |project, cx| {
                     project.create_terminal(TerminalKind::Task(spawn_task), window_handle, cx)
-                })
-                .ok()?
-                .await
-                .log_err()?;
-            terminal_to_replace
-                .update_in(cx, |terminal_to_replace, window, cx| {
-                    terminal_to_replace.set_terminal(new_terminal, window, cx);
-                })
-                .ok()?;
+                })?
+                .await?;
+            terminal_to_replace.update_in(cx, |terminal_to_replace, window, cx| {
+                terminal_to_replace.set_terminal(new_terminal.clone(), window, cx);
+            })?;

             match reveal {
                 RevealStrategy::Always => match reveal_target {
                     RevealTarget::Center => {
-                        task_workspace
-                            .update_in(cx, |workspace, window, cx| {
-                                workspace
-                                    .active_item(cx)
-                                    .context("retrieving active terminal item in the workspace")
-                                    .log_err()?
-                                    .item_focus_handle(cx)
-                                    .focus(window);
-                                Some(())
-                            })
-                            .ok()??;
+                        task_workspace.update_in(cx, |workspace, window, cx| {
+                            workspace
+                                .active_item(cx)
+                                .context("retrieving active terminal item in the workspace")?
+                                .item_focus_handle(cx)
+                                .focus(window);
+                            anyhow::Ok(())
+                        })??;
                     }
                     RevealTarget::Dock => {
-                        terminal_panel
-                            .update_in(cx, |terminal_panel, window, cx| {
-                                terminal_panel.activate_terminal_view(
-                                    &task_pane,
-                                    terminal_item_index,
-                                    true,
-                                    window,
-                                    cx,
-                                )
-                            })
-                            .ok()?;
+                        terminal_panel.update_in(cx, |terminal_panel, window, cx| {
+                            terminal_panel.activate_terminal_view(
+                                &task_pane,
+                                terminal_item_index,
+                                true,
+                                window,
+                                cx,
+                            )
+                        })?;

                         cx.spawn(async move |cx| {
                             task_workspace
@@ -877,24 +856,20 @@ impl TerminalPanel {
                 },
                 RevealStrategy::NoFocus => match reveal_target {
                     RevealTarget::Center => {
-                        task_workspace
-                            .update_in(cx, |workspace, window, cx| {
-                                workspace.active_pane().focus_handle(cx).focus(window);
-                            })
-                            .ok()?;
+                        task_workspace.update_in(cx, |workspace, window, cx| {
+                            workspace.active_pane().focus_handle(cx).focus(window);
+                        })?;
                     }
                     RevealTarget::Dock => {
-                        terminal_panel
-                            .update_in(cx, |terminal_panel, window, cx| {
-                                terminal_panel.activate_terminal_view(
-                                    &task_pane,
-                                    terminal_item_index,
-                                    false,
-                                    window,
-                                    cx,
-                                )
-                            })
-                            .ok()?;
+                        terminal_panel.update_in(cx, |terminal_panel, window, cx| {
+                            terminal_panel.activate_terminal_view(
+                                &task_pane,
+                                terminal_item_index,
+                                false,
+                                window,
+                                cx,
+                            )
+                        })?;

                         cx.spawn(async move |cx| {
                             task_workspace
@@ -909,7 +884,7 @@ impl TerminalPanel {
                 RevealStrategy::Never => {}
             }

-            Some(())
+            Ok(new_terminal)
         })
     }

@@ -1158,7 +1133,7 @@ async fn wait_for_terminals_tasks(
         })
         .ok()
     });
-    let _: Vec<()> = join_all(pending_tasks).await;
+    let _: Vec<_> = join_all(pending_tasks).await;
 }

 fn add_paths_to_terminal(
@@ -1475,6 +1450,34 @@ impl Panel for TerminalPanel {
     }
 }

+struct TerminalProvider(Entity<TerminalPanel>);
+
+impl workspace::TerminalProvider for TerminalProvider {
+    fn spawn(
+        &self,
+        task: SpawnInTerminal,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> Task<Result<ExitStatus>> {
+        let this = self.0.clone();
+        window.spawn(cx, async move |cx| {
+            let terminal = this
+                .update_in(cx, |terminal_panel, window, cx| {
+                    terminal_panel.spawn_task(&task, window, cx)
+                })?
+                .await?;
+            let Some(exit_code) = terminal
+                .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?
+                .await
+            else {
+                return Err(anyhow!("Task cancelled"));
+            };

+            Ok(exit_code)
+        })
+    }
+}
+
 struct InlineAssistTabBarButton {
     focus_handle: FocusHandle,
 }

@@ -982,15 +982,6 @@ fn subscribe_for_terminal_events(
                     window.invalidate_character_coordinates();
                     cx.emit(SearchEvent::ActiveMatchChanged)
                 }
-                Event::TaskLocatorReady { task_id, success } => {
-                    if *success {
-                        workspace
-                            .update(cx, |workspace, cx| {
-                                workspace.debug_task_ready(task_id, cx);
-                            })
-                            .log_err();
-                    }
-                }
             },
         );
         vec![terminal_subscription, terminal_events_subscription]

@@ -1446,27 +1446,29 @@ impl ShellExec {
             let project = workspace.project().read(cx);
             let cwd = project.first_project_directory(cx);
             let shell = project.terminal_settings(&cwd, cx).shell.clone();
-            cx.emit(workspace::Event::SpawnTask {
-                action: Box::new(SpawnInTerminal {
+            let spawn_in_terminal = SpawnInTerminal {
                 id: TaskId("vim".to_string()),
                 full_label: command.clone(),
                 label: command.clone(),
                 command: command.clone(),
                 args: Vec::new(),
                 command_label: command.clone(),
                 cwd,
                 env: HashMap::default(),
                 use_new_terminal: true,
                 allow_concurrent_runs: true,
                 reveal: RevealStrategy::NoFocus,
                 reveal_target: RevealTarget::Dock,
                 hide: HideStrategy::Never,
                 shell,
                 show_summary: false,
                 show_command: false,
                 show_rerun: false,
-                }),
-            });
+            };
+            workspace
+                .spawn_in_terminal(spawn_in_terminal, window, cx)
+                .detach_and_log_err(cx);
         });
         return;
     };

@@ -35,7 +35,6 @@ client.workspace = true
 clock.workspace = true
 collections.workspace = true
 component.workspace = true
-dap.workspace = true
 db.workspace = true
 derive_more.workspace = true
 fs.workspace = true

@@ -1,75 +1,132 @@
-use gpui::Context;
+use std::process::ExitStatus;
 
+use anyhow::{Result, anyhow};
+use gpui::{Context, Task};
 use project::TaskSourceKind;
 use remote::ConnectionState;
-use task::{ResolvedTask, TaskContext, TaskTemplate};
+use task::{ResolvedTask, SpawnInTerminal, TaskContext, TaskTemplate};
+use ui::Window;
 
 use crate::Workspace;
 
-pub fn schedule_task(
-    workspace: &mut Workspace,
-    task_source_kind: TaskSourceKind,
-    task_to_resolve: &TaskTemplate,
-    task_cx: &TaskContext,
-    omit_history: bool,
-    cx: &mut Context<Workspace>,
-) {
-    match workspace.project.read(cx).ssh_connection_state(cx) {
-        None | Some(ConnectionState::Connected) => {}
-        Some(
-            ConnectionState::Connecting
-            | ConnectionState::Disconnected
-            | ConnectionState::HeartbeatMissed
-            | ConnectionState::Reconnecting,
-        ) => {
-            log::warn!("Cannot schedule tasks when disconnected from a remote host");
-            return;
-        }
-    }
-
-    if let Some(spawn_in_terminal) =
-        task_to_resolve.resolve_task(&task_source_kind.to_id_base(), task_cx)
-    {
-        schedule_resolved_task(
-            workspace,
-            task_source_kind,
-            spawn_in_terminal,
-            omit_history,
-            cx,
-        );
-    }
-}
-
-pub fn schedule_resolved_task(
-    workspace: &mut Workspace,
-    task_source_kind: TaskSourceKind,
-    mut resolved_task: ResolvedTask,
-    omit_history: bool,
-    cx: &mut Context<Workspace>,
-) {
-    let debug_config = resolved_task.resolved_debug_adapter_config();
-
-    if let Some(spawn_in_terminal) = resolved_task.resolved.take() {
-        if let Some(debug_config) = debug_config {
-            workspace
-                .debug_task_queue
-                .insert(resolved_task.id.clone(), debug_config);
-        }
-
-        if !omit_history {
-            resolved_task.resolved = Some(spawn_in_terminal.clone());
-            workspace.project().update(cx, |project, cx| {
-                if let Some(task_inventory) =
-                    project.task_store().read(cx).task_inventory().cloned()
-                {
-                    task_inventory.update(cx, |inventory, _| {
-                        inventory.task_scheduled(task_source_kind, resolved_task);
-                    })
-                }
-            });
-        }
-
-        cx.emit(crate::Event::SpawnTask {
-            action: Box::new(spawn_in_terminal),
-        });
-    }
-}
+impl Workspace {
+    pub fn schedule_task(
+        self: &mut Workspace,
+        task_source_kind: TaskSourceKind,
+        task_to_resolve: &TaskTemplate,
+        task_cx: &TaskContext,
+        omit_history: bool,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        match self.project.read(cx).ssh_connection_state(cx) {
+            None | Some(ConnectionState::Connected) => {}
+            Some(
+                ConnectionState::Connecting
+                | ConnectionState::Disconnected
+                | ConnectionState::HeartbeatMissed
+                | ConnectionState::Reconnecting,
+            ) => {
+                log::warn!("Cannot schedule tasks when disconnected from a remote host");
+                return;
+            }
+        }
+
+        if let Some(spawn_in_terminal) =
+            task_to_resolve.resolve_task(&task_source_kind.to_id_base(), task_cx)
+        {
+            self.schedule_resolved_task(
+                task_source_kind,
+                spawn_in_terminal,
+                omit_history,
+                window,
+                cx,
+            );
+        }
+    }
+
+    pub fn schedule_resolved_task(
+        self: &mut Workspace,
+        task_source_kind: TaskSourceKind,
+        mut resolved_task: ResolvedTask,
+        omit_history: bool,
+        window: &mut Window,
+        cx: &mut Context<Workspace>,
+    ) {
+        if let Some(spawn_in_terminal) = resolved_task.resolved.take() {
+            if !omit_history {
+                resolved_task.resolved = Some(spawn_in_terminal.clone());
+                self.project().update(cx, |project, cx| {
+                    if let Some(task_inventory) =
+                        project.task_store().read(cx).task_inventory().cloned()
+                    {
+                        task_inventory.update(cx, |inventory, _| {
+                            inventory.task_scheduled(task_source_kind, resolved_task);
+                        })
+                    }
+                });
+            }
+
+            if let Some(terminal_provider) = self.terminal_provider.as_ref() {
+                terminal_provider
+                    .spawn(spawn_in_terminal, window, cx)
+                    .detach_and_log_err(cx);
+            }
+        }
+    }
+
+    pub fn schedule_debug_task(
+        &mut self,
+        task: ResolvedTask,
+        window: &mut Window,
+        cx: &mut Context<Workspace>,
+    ) {
+        let Some(debug_config) = task.resolved_debug_adapter_config() else {
+            log::error!("Debug task has no debug adapter config");
+            return;
+        };
+
+        let project = self.project().clone();
+        cx.spawn_in(window, async move |workspace, cx| {
+            let config = if debug_config.locator.is_some() {
+                let task = workspace.update_in(cx, |workspace, window, cx| {
+                    workspace.spawn_in_terminal(task.resolved.unwrap(), window, cx)
+                })?;
+
+                let exit_code = task.await?;
+                if !exit_code.success() {
+                    return anyhow::Ok(());
+                }
+
+                let ret = project
+                    .update(cx, |project, cx| {
+                        project.dap_store().update(cx, |dap_store, cx| {
+                            dap_store.run_debug_locator(debug_config, cx)
+                        })
+                    })?
+                    .await?;
+                ret
+            } else {
+                debug_config.definition
+            };
+
+            project
+                .update(cx, |project, cx| project.start_debug_session(config, cx))?
+                .await?;
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
+    pub fn spawn_in_terminal(
+        self: &mut Workspace,
+        spawn_in_terminal: SpawnInTerminal,
+        window: &mut Window,
+        cx: &mut Context<Workspace>,
+    ) -> Task<Result<ExitStatus>> {
+        if let Some(terminal_provider) = self.terminal_provider.as_ref() {
+            terminal_provider.spawn(spawn_in_terminal, window, cx)
+        } else {
+            Task::ready(Err(anyhow!("No terminal provider")))
+        }
+    }
+}
 
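A caller-side sketch of the reworked flow above (assumed illustrative code, not part of this change): a resolved task with a debug adapter config is now handed to `Workspace::schedule_debug_task`, and the workspace itself decides whether the task's `locator` requires a terminal run before the debug session starts.

    use gpui::{Context, Window};
    use task::ResolvedTask;
    use workspace::Workspace;

    // Hypothetical caller (e.g. a debugger UI action handler); the name
    // `start_debugging` is made up for illustration.
    fn start_debugging(
        workspace: &mut Workspace,
        resolved: ResolvedTask,
        window: &mut Window,
        cx: &mut Context<Workspace>,
    ) {
        // Tasks whose config carries a locator run in a terminal first and only
        // start a debug session on a successful exit; tasks without a locator
        // start the session directly.
        workspace.schedule_debug_task(resolved, window, cx);
    }
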
@@ -15,7 +15,6 @@ mod toolbar;
 mod toolbar;
 mod workspace_settings;
 
-use dap::DapRegistry;
 pub use toast_layer::{RunAction, ToastAction, ToastLayer, ToastView};
 
 use anyhow::{Context as _, Result, anyhow};
 
@@ -92,11 +91,12 @@ use std::{
     env,
     hash::{Hash, Hasher},
     path::{Path, PathBuf},
+    process::ExitStatus,
     rc::Rc,
     sync::{Arc, LazyLock, Weak, atomic::AtomicUsize},
     time::Duration,
 };
-use task::{DebugTaskDefinition, SpawnInTerminal, TaskId};
+use task::SpawnInTerminal;
 use theme::{ActiveTheme, SystemAppearance, ThemeSettings};
 pub use toolbar::{Toolbar, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView};
 pub use ui;
 
@@ -130,6 +130,15 @@ static ZED_WINDOW_POSITION: LazyLock<Option<Point<Pixels>>> = LazyLock::new(|| {
         .and_then(parse_pixel_position_env_var)
 });
 
+pub trait TerminalProvider {
+    fn spawn(
+        &self,
+        task: SpawnInTerminal,
+        window: &mut Window,
+        cx: &mut App,
+    ) -> Task<Result<ExitStatus>>;
+}
+
 actions!(
     workspace,
     [
 
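For illustration, a minimal implementor of the new `TerminalProvider` trait might look like the sketch below. `NullTerminalProvider` is a made-up type (the real provider is the terminal panel); it fails every spawn, mirroring the `Task::ready(Err(...))` fallback that `spawn_in_terminal` uses when no provider is registered.

    use std::process::ExitStatus;

    use anyhow::{Result, anyhow};
    use gpui::{App, Task, Window};
    use task::SpawnInTerminal;
    use workspace::TerminalProvider;

    // Made-up provider used only to show the trait surface; a real
    // implementation would run the task in a terminal and resolve with the
    // command's exit status.
    struct NullTerminalProvider;

    impl TerminalProvider for NullTerminalProvider {
        fn spawn(
            &self,
            _task: SpawnInTerminal,
            _window: &mut Window,
            _cx: &mut App,
        ) -> Task<Result<ExitStatus>> {
            Task::ready(Err(anyhow!("no terminal available in this sketch")))
        }
    }

A concrete provider is registered once per workspace through the new `set_terminal_provider` method, after which `spawn_in_terminal` and `schedule_resolved_task` route their commands through it.
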
@@ -626,7 +635,6 @@ pub fn register_serializable_item<I: SerializableItem>(cx: &mut App) {
 
 pub struct AppState {
     pub languages: Arc<LanguageRegistry>,
-    pub debug_adapters: Arc<DapRegistry>,
     pub client: Arc<Client>,
     pub user_store: Entity<UserStore>,
     pub workspace_store: Entity<WorkspaceStore>,
 
@@ -678,7 +686,6 @@ impl AppState {
 
         let fs = fs::FakeFs::new(cx.background_executor().clone());
         let languages = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
-        let debug_adapters = Arc::new(DapRegistry::fake());
         let clock = Arc::new(clock::FakeSystemClock::new());
         let http_client = http_client::FakeHttpClient::with_404_response();
         let client = Client::new(clock, http_client.clone(), cx);
 
@@ -694,7 +701,6 @@ impl AppState {
             client,
             fs,
             languages,
-            debug_adapters,
             user_store,
             workspace_store,
             node_runtime: NodeRuntime::unavailable(),
 
@@ -772,9 +778,6 @@ pub enum Event {
     },
     ContactRequestedJoin(u64),
     WorkspaceCreated(WeakEntity<Workspace>),
-    SpawnTask {
-        action: Box<SpawnInTerminal>,
-    },
     OpenBundledFile {
         text: Cow<'static, str>,
         title: &'static str,
 
@@ -856,11 +859,11 @@ pub struct Workspace {
     bounds_save_task_queued: Option<Task<()>>,
     on_prompt_for_new_path: Option<PromptForNewPath>,
     on_prompt_for_open_path: Option<PromptForOpenPath>,
+    terminal_provider: Option<Box<dyn TerminalProvider>>,
     serializable_items_tx: UnboundedSender<Box<dyn SerializableItemHandle>>,
     serialized_ssh_project: Option<SerializedSshProject>,
     _items_serializer: Task<Result<()>>,
     session_id: Option<String>,
-    debug_task_queue: HashMap<task::TaskId, DebugTaskDefinition>,
 }
 
 impl EventEmitter<Event> for Workspace {}
 
@@ -1182,11 +1185,11 @@ impl Workspace {
             bounds_save_task_queued: None,
             on_prompt_for_new_path: None,
             on_prompt_for_open_path: None,
+            terminal_provider: None,
             serializable_items_tx,
             _items_serializer,
             session_id: Some(session_id),
             serialized_ssh_project: None,
-            debug_task_queue: Default::default(),
         }
     }
 
@@ -1207,7 +1210,6 @@ impl Workspace {
             app_state.node_runtime.clone(),
             app_state.user_store.clone(),
             app_state.languages.clone(),
-            app_state.debug_adapters.clone(),
             app_state.fs.clone(),
             env,
             cx,
 
@@ -1699,6 +1701,10 @@ impl Workspace {
         self.on_prompt_for_open_path = Some(prompt)
     }
 
+    pub fn set_terminal_provider(&mut self, provider: impl TerminalProvider + 'static) {
+        self.terminal_provider = Some(Box::new(provider));
+    }
+
     pub fn serialized_ssh_project(&self) -> Option<SerializedSshProject> {
         self.serialized_ssh_project.clone()
     }
 
@@ -5082,7 +5088,6 @@ impl Workspace {
         window.activate_window();
         let app_state = Arc::new(AppState {
             languages: project.read(cx).languages().clone(),
-            debug_adapters: project.read(cx).debug_adapters().clone(),
             workspace_store,
             client,
             user_store,
 
@@ -5238,16 +5243,6 @@ impl Workspace {
             .update(cx, |_, window, _| window.activate_window())
             .ok();
     }
-
-    pub fn debug_task_ready(&mut self, task_id: &TaskId, cx: &mut App) {
-        if let Some(debug_config) = self.debug_task_queue.remove(task_id) {
-            self.project.update(cx, |project, cx| {
-                project
-                    .start_debug_session(debug_config, cx)
-                    .detach_and_log_err(cx);
-            })
-        }
-    }
 }
 
 fn leader_border_for_pane(
 
@@ -42,7 +42,6 @@ command_palette.workspace = true
 command_palette_hooks.workspace = true
 component_preview.workspace = true
 copilot.workspace = true
-dap.workspace = true
 dap_adapters.workspace = true
 debugger_ui.workspace = true
 debugger_tools.workspace = true
 
@@ -10,7 +10,6 @@ use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
 use client::{Client, ProxySettings, UserStore, parse_zed_link};
 use collab_ui::channel_view::ChannelView;
 use collections::HashMap;
-use dap::DapRegistry;
 use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE};
 use editor::Editor;
 use extension::ExtensionHostProxy;
 
@@ -449,7 +448,6 @@ fn main() {
 
         let app_state = Arc::new(AppState {
             languages: languages.clone(),
-            debug_adapters: DapRegistry::default().into(),
             client: client.clone(),
             user_store: user_store.clone(),
             fs: fs.clone(),
 
@@ -461,7 +459,7 @@ fn main() {
         AppState::set_global(Arc::downgrade(&app_state), cx);
 
         auto_update::init(client.http_client(), cx);
-        dap_adapters::init(app_state.debug_adapters.clone());
+        dap_adapters::init(cx);
         auto_update_ui::init(cx);
         reliability::init(
             client.http_client(),
 
@@ -14,7 +14,7 @@ use log;
 static ENV_FILTER: OnceLock<env_config::EnvFilter> = OnceLock::new();
 static SCOPE_MAP: RwLock<Option<ScopeMap>> = RwLock::new(None);
 
-const LEVEL_ENABLED_MAX_DEFAULT: log::LevelFilter = log::LevelFilter::Info;
+pub const LEVEL_ENABLED_MAX_DEFAULT: log::LevelFilter = log::LevelFilter::Info;
 /// The maximum log level of verbosity that is enabled by default.
 /// All messages more verbose than this level will be discarded
 /// by default unless specially configured.
 
|
||||||
/// `trace` logs will be discarded.
|
/// `trace` logs will be discarded.
|
||||||
/// Therefore, it should always be `>= LEVEL_ENABLED_MAX_STATIC`
|
/// Therefore, it should always be `>= LEVEL_ENABLED_MAX_STATIC`
|
||||||
// PERF: this doesn't need to be an atomic, we don't actually care about race conditions here
|
// PERF: this doesn't need to be an atomic, we don't actually care about race conditions here
|
||||||
static LEVEL_ENABLED_MAX_CONFIG: AtomicU8 = AtomicU8::new(LEVEL_ENABLED_MAX_DEFAULT as u8);
|
pub static LEVEL_ENABLED_MAX_CONFIG: AtomicU8 = AtomicU8::new(LEVEL_ENABLED_MAX_DEFAULT as u8);
|
||||||
|
|
||||||
pub fn init_env_filter(filter: env_config::EnvFilter) {
|
pub fn init_env_filter(filter: env_config::EnvFilter) {
|
||||||
if let Some(level_max) = filter.level_global {
|
if let Some(level_max) = filter.level_global {
|
||||||
|
|