diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 25a1ed8670..23f0b3915a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -800,7 +800,8 @@ jobs: - name: Upload Artifacts to release uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview + # Re-enable when we are ready to publish windows preview releases + if: false && ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview with: draft: true prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} diff --git a/Cargo.lock b/Cargo.lock index 38bb7819ca..db798f40c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3043,6 +3043,7 @@ dependencies = [ "context_server", "ctor", "dap", + "dap-types", "dap_adapters", "dashmap 6.1.0", "debugger_ui", @@ -19972,7 +19973,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.195.0" +version = "0.195.1" dependencies = [ "activity_indicator", "agent", diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 55c15cac5a..7b536a2d24 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -94,6 +94,7 @@ context_server.workspace = true ctor.workspace = true dap = { workspace = true, features = ["test-support"] } dap_adapters = { workspace = true, features = ["test-support"] } +dap-types.workspace = true debugger_ui = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } extension.workspace = true diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 7aeb381c02..8ab6e6910c 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -2,6 +2,7 @@ use crate::tests::TestServer; use call::ActiveCall; use collections::{HashMap, HashSet}; +use dap::{Capabilities, adapters::DebugTaskDefinition, transport::RequestHandling}; use debugger_ui::debugger_panel::DebugPanel; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs as _, RemoveOptions}; @@ -22,6 +23,7 @@ use language::{ use node_runtime::NodeRuntime; use project::{ ProjectPath, + debugger::session::ThreadId, lsp_store::{FormatTrigger, LspFormatTarget}, }; use remote::SshRemoteClient; @@ -29,7 +31,11 @@ use remote_server::{HeadlessAppState, HeadlessProject}; use rpc::proto; use serde_json::json; use settings::SettingsStore; -use std::{path::Path, sync::Arc}; +use std::{ + path::Path, + sync::{Arc, atomic::AtomicUsize}, +}; +use task::TcpArgumentsTemplate; use util::path; #[gpui::test(iterations = 10)] @@ -688,3 +694,162 @@ async fn test_remote_server_debugger( shutdown_session.await.unwrap(); } + +#[gpui::test] +async fn test_slow_adapter_startup_retries( + cx_a: &mut TestAppContext, + server_cx: &mut TestAppContext, + executor: BackgroundExecutor, +) { + cx_a.update(|cx| { + release_channel::init(SemanticVersion::default(), cx); + command_palette_hooks::init(cx); + zlog::init_test(); + dap_adapters::init(cx); + }); + server_cx.update(|cx| { + release_channel::init(SemanticVersion::default(), cx); + dap_adapters::init(cx); + }); + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree( + path!("/code"), + json!({ + "lib.rs": "fn one() 
-> usize { 1 }" + }), + ) + .await; + + // User A connects to the remote project via SSH. + server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let _headless_project = server_cx.new(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + extension_host_proxy: Arc::new(ExtensionHostProxy::new()), + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let mut server = TestServer::start(server_cx.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + cx_a.update(|cx| { + debugger_ui::init(cx); + command_palette_hooks::init(cx); + }); + let (project_a, _) = client_a + .build_ssh_project(path!("/code"), client_ssh.clone(), cx_a) + .await; + + let (workspace, cx_a) = client_a.build_workspace(&project_a, cx_a); + + let debugger_panel = workspace + .update_in(cx_a, |_workspace, window, cx| { + cx.spawn_in(window, DebugPanel::load) + }) + .await + .unwrap(); + + workspace.update_in(cx_a, |workspace, window, cx| { + workspace.add_panel(debugger_panel, window, cx); + }); + + cx_a.run_until_parked(); + let debug_panel = workspace + .update(cx_a, |workspace, cx| workspace.panel::(cx)) + .unwrap(); + + let workspace_window = cx_a + .window_handle() + .downcast::() + .unwrap(); + + let count = Arc::new(AtomicUsize::new(0)); + let session = debugger_ui::tests::start_debug_session_with( + &workspace_window, + cx_a, + DebugTaskDefinition { + adapter: "fake-adapter".into(), + label: "test".into(), + config: json!({ + "request": "launch" + }), + tcp_connection: Some(TcpArgumentsTemplate { + port: None, + host: None, + timeout: None, + }), + }, + move |client| { + let count = count.clone(); + client.on_request_ext::(move |_seq, _request| { + if count.fetch_add(1, std::sync::atomic::Ordering::SeqCst) < 5 { + return RequestHandling::Exit; + } + RequestHandling::Respond(Ok(Capabilities::default())) + }); + }, + ) + .unwrap(); + cx_a.run_until_parked(); + + let client = session.update(cx_a, |session, _| session.adapter_client().unwrap()); + client + .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { + reason: dap::StoppedEventReason::Pause, + description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + })) + .await; + + cx_a.run_until_parked(); + + let active_session = debug_panel + .update(cx_a, |this, _| this.active_session()) + .unwrap(); + + let running_state = active_session.update(cx_a, |active_session, _| { + active_session.running_state().clone() + }); + + assert_eq!( + client.id(), + running_state.read_with(cx_a, |running_state, _| running_state.session_id()) + ); + assert_eq!( + ThreadId(1), + running_state.read_with(cx_a, |running_state, _| running_state + .selected_thread_id() + .unwrap()) + ); + + let shutdown_session = workspace.update(cx_a, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.dap_store().update(cx, |dap_store, cx| { + dap_store.shutdown_session(session.read(cx).session_id(), cx) + }) + }) + }); + + client_ssh.update(cx_a, |a, _| { + a.shutdown_processes(Some(proto::ShutdownRemoteServer {}), executor) + }); + + shutdown_session.await.unwrap(); +} diff --git a/crates/dap/src/adapters.rs 
b/crates/dap/src/adapters.rs index d9f26b3b34..bd36b07387 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -442,10 +442,18 @@ impl DebugAdapter for FakeAdapter { _: Option>, _: &mut AsyncApp, ) -> Result { + let connection = task_definition + .tcp_connection + .as_ref() + .map(|connection| TcpArguments { + host: connection.host(), + port: connection.port.unwrap_or(17), + timeout: connection.timeout, + }); Ok(DebugAdapterBinary { command: Some("command".into()), arguments: vec![], - connection: None, + connection, envs: HashMap::default(), cwd: None, request_args: StartDebuggingRequestArguments { diff --git a/crates/dap/src/client.rs b/crates/dap/src/client.rs index ff082e3b76..86a15b2d8a 100644 --- a/crates/dap/src/client.rs +++ b/crates/dap/src/client.rs @@ -2,7 +2,7 @@ use crate::{ adapters::DebugAdapterBinary, transport::{IoKind, LogKind, TransportDelegate}, }; -use anyhow::{Context as _, Result}; +use anyhow::Result; use dap_types::{ messages::{Message, Response}, requests::Request, @@ -110,9 +110,7 @@ impl DebugAdapterClient { self.transport_delegate .pending_requests .lock() - .as_mut() - .context("client is closed")? - .insert(sequence_id, callback_tx); + .insert(sequence_id, callback_tx)?; log::debug!( "Client {} send `{}` request with sequence_id: {}", @@ -170,6 +168,7 @@ impl DebugAdapterClient { pub fn kill(&self) { log::debug!("Killing DAP process"); self.transport_delegate.transport.lock().kill(); + self.transport_delegate.pending_requests.lock().shutdown(); } pub fn has_adapter_logs(&self) -> bool { @@ -184,11 +183,34 @@ impl DebugAdapterClient { } #[cfg(any(test, feature = "test-support"))] - pub fn on_request(&self, handler: F) + pub fn on_request(&self, mut handler: F) where F: 'static + Send + FnMut(u64, R::Arguments) -> Result, + { + use crate::transport::RequestHandling; + + self.transport_delegate + .transport + .lock() + .as_fake() + .on_request::(move |seq, request| { + RequestHandling::Respond(handler(seq, request)) + }); + } + + #[cfg(any(test, feature = "test-support"))] + pub fn on_request_ext(&self, handler: F) + where + F: 'static + + Send + + FnMut( + u64, + R::Arguments, + ) -> crate::transport::RequestHandling< + Result, + >, { self.transport_delegate .transport diff --git a/crates/dap/src/transport.rs b/crates/dap/src/transport.rs index 14370f66e4..6dadf1cf35 100644 --- a/crates/dap/src/transport.rs +++ b/crates/dap/src/transport.rs @@ -49,6 +49,12 @@ pub enum IoKind { StdErr, } +#[cfg(any(test, feature = "test-support"))] +pub enum RequestHandling { + Respond(T), + Exit, +} + type LogHandlers = Arc>>; pub trait Transport: Send + Sync { @@ -76,7 +82,11 @@ async fn start( ) -> Result> { #[cfg(any(test, feature = "test-support"))] if cfg!(any(test, feature = "test-support")) { - return Ok(Box::new(FakeTransport::start(cx).await?)); + if let Some(connection) = binary.connection.clone() { + return Ok(Box::new(FakeTransport::start_tcp(connection, cx).await?)); + } else { + return Ok(Box::new(FakeTransport::start_stdio(cx).await?)); + } } if binary.connection.is_some() { @@ -90,11 +100,57 @@ async fn start( } } +pub(crate) struct PendingRequests { + inner: Option>>>, +} + +impl PendingRequests { + fn new() -> Self { + Self { + inner: Some(HashMap::default()), + } + } + + fn flush(&mut self, e: anyhow::Error) { + let Some(inner) = self.inner.as_mut() else { + return; + }; + for (_, sender) in inner.drain() { + sender.send(Err(e.cloned())).ok(); + } + } + + pub(crate) fn insert( + &mut self, + sequence_id: u64, + callback_tx: 
oneshot::Sender>, + ) -> anyhow::Result<()> { + let Some(inner) = self.inner.as_mut() else { + bail!("client is closed") + }; + inner.insert(sequence_id, callback_tx); + Ok(()) + } + + pub(crate) fn remove( + &mut self, + sequence_id: u64, + ) -> anyhow::Result>>> { + let Some(inner) = self.inner.as_mut() else { + bail!("client is closed"); + }; + Ok(inner.remove(&sequence_id)) + } + + pub(crate) fn shutdown(&mut self) { + self.flush(anyhow!("transport shutdown")); + self.inner = None; + } +} + pub(crate) struct TransportDelegate { log_handlers: LogHandlers, - // TODO this should really be some kind of associative channel - pub(crate) pending_requests: - Arc>>>>>, + pub(crate) pending_requests: Arc>, pub(crate) transport: Mutex>, pub(crate) server_tx: smol::lock::Mutex>>, tasks: Mutex>>, @@ -108,7 +164,7 @@ impl TransportDelegate { transport: Mutex::new(transport), log_handlers, server_tx: Default::default(), - pending_requests: Arc::new(Mutex::new(Some(HashMap::default()))), + pending_requests: Arc::new(Mutex::new(PendingRequests::new())), tasks: Default::default(), }) } @@ -151,24 +207,10 @@ impl TransportDelegate { Ok(()) => { pending_requests .lock() - .take() - .into_iter() - .flatten() - .for_each(|(_, request)| { - request - .send(Err(anyhow!("debugger shutdown unexpectedly"))) - .ok(); - }); + .flush(anyhow!("debugger shutdown unexpectedly")); } Err(e) => { - pending_requests - .lock() - .take() - .into_iter() - .flatten() - .for_each(|(_, request)| { - request.send(Err(e.cloned())).ok(); - }); + pending_requests.lock().flush(e); } } })); @@ -286,7 +328,7 @@ impl TransportDelegate { async fn recv_from_server( server_stdout: Stdout, mut message_handler: DapMessageHandler, - pending_requests: Arc>>>>>, + pending_requests: Arc>, log_handlers: Option, ) -> Result<()> where @@ -303,14 +345,10 @@ impl TransportDelegate { ConnectionResult::Timeout => anyhow::bail!("Timed out when connecting to debugger"), ConnectionResult::ConnectionReset => { log::info!("Debugger closed the connection"); - break Ok(()); + return Ok(()); } ConnectionResult::Result(Ok(Message::Response(res))) => { - let tx = pending_requests - .lock() - .as_mut() - .context("client is closed")? 
- .remove(&res.request_seq); + let tx = pending_requests.lock().remove(res.request_seq)?; if let Some(tx) = tx { if let Err(e) = tx.send(Self::process_response(res)) { log::trace!("Did not send response `{:?}` for a cancelled", e); @@ -704,8 +742,7 @@ impl Drop for StdioTransport { } #[cfg(any(test, feature = "test-support"))] -type RequestHandler = - Box dap_types::messages::Response>; +type RequestHandler = Box RequestHandling>; #[cfg(any(test, feature = "test-support"))] type ResponseHandler = Box; @@ -716,23 +753,38 @@ pub struct FakeTransport { request_handlers: Arc>>, // for reverse request responses response_handlers: Arc>>, - - stdin_writer: Option, - stdout_reader: Option, message_handler: Option>>, + kind: FakeTransportKind, +} + +#[cfg(any(test, feature = "test-support"))] +pub enum FakeTransportKind { + Stdio { + stdin_writer: Option, + stdout_reader: Option, + }, + Tcp { + connection: TcpArguments, + executor: BackgroundExecutor, + }, } #[cfg(any(test, feature = "test-support"))] impl FakeTransport { pub fn on_request(&self, mut handler: F) where - F: 'static + Send + FnMut(u64, R::Arguments) -> Result, + F: 'static + + Send + + FnMut(u64, R::Arguments) -> RequestHandling>, { self.request_handlers.lock().insert( R::COMMAND, Box::new(move |seq, args| { let result = handler(seq, serde_json::from_value(args).unwrap()); - let response = match result { + let RequestHandling::Respond(response) = result else { + return RequestHandling::Exit; + }; + let response = match response { Ok(response) => Response { seq: seq + 1, request_seq: seq, @@ -750,7 +802,7 @@ impl FakeTransport { message: None, }, }; - response + RequestHandling::Respond(response) }), ); } @@ -764,86 +816,75 @@ impl FakeTransport { .insert(R::COMMAND, Box::new(handler)); } - async fn start(cx: &mut AsyncApp) -> Result { + async fn start_tcp(connection: TcpArguments, cx: &mut AsyncApp) -> Result { + Ok(Self { + request_handlers: Arc::new(Mutex::new(HashMap::default())), + response_handlers: Arc::new(Mutex::new(HashMap::default())), + message_handler: None, + kind: FakeTransportKind::Tcp { + connection, + executor: cx.background_executor().clone(), + }, + }) + } + + async fn handle_messages( + request_handlers: Arc>>, + response_handlers: Arc>>, + stdin_reader: PipeReader, + stdout_writer: PipeWriter, + ) -> Result<()> { use dap_types::requests::{Request, RunInTerminal, StartDebugging}; use serde_json::json; - let (stdin_writer, stdin_reader) = async_pipe::pipe(); - let (stdout_writer, stdout_reader) = async_pipe::pipe(); - - let mut this = Self { - request_handlers: Arc::new(Mutex::new(HashMap::default())), - response_handlers: Arc::new(Mutex::new(HashMap::default())), - stdin_writer: Some(stdin_writer), - stdout_reader: Some(stdout_reader), - message_handler: None, - }; - - let request_handlers = this.request_handlers.clone(); - let response_handlers = this.response_handlers.clone(); + let mut reader = BufReader::new(stdin_reader); let stdout_writer = Arc::new(smol::lock::Mutex::new(stdout_writer)); + let mut buffer = String::new(); - this.message_handler = Some(cx.background_spawn(async move { - let mut reader = BufReader::new(stdin_reader); - let mut buffer = String::new(); - - loop { - match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None) - .await - { - ConnectionResult::Timeout => { - anyhow::bail!("Timed out when connecting to debugger"); - } - ConnectionResult::ConnectionReset => { - log::info!("Debugger closed the connection"); - break Ok(()); - } - ConnectionResult::Result(Err(e)) => 
break Err(e), - ConnectionResult::Result(Ok(message)) => { - match message { - Message::Request(request) => { - // redirect reverse requests to stdout writer/reader - if request.command == RunInTerminal::COMMAND - || request.command == StartDebugging::COMMAND - { - let message = - serde_json::to_string(&Message::Request(request)).unwrap(); - - let mut writer = stdout_writer.lock().await; - writer - .write_all( - TransportDelegate::build_rpc_message(message) - .as_bytes(), - ) - .await - .unwrap(); - writer.flush().await.unwrap(); - } else { - let response = if let Some(handle) = - request_handlers.lock().get_mut(request.command.as_str()) - { - handle(request.seq, request.arguments.unwrap_or(json!({}))) - } else { - panic!("No request handler for {}", request.command); - }; - let message = - serde_json::to_string(&Message::Response(response)) - .unwrap(); - - let mut writer = stdout_writer.lock().await; - writer - .write_all( - TransportDelegate::build_rpc_message(message) - .as_bytes(), - ) - .await - .unwrap(); - writer.flush().await.unwrap(); - } - } - Message::Event(event) => { + loop { + match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None).await { + ConnectionResult::Timeout => { + anyhow::bail!("Timed out when connecting to debugger"); + } + ConnectionResult::ConnectionReset => { + log::info!("Debugger closed the connection"); + break Ok(()); + } + ConnectionResult::Result(Err(e)) => break Err(e), + ConnectionResult::Result(Ok(message)) => { + match message { + Message::Request(request) => { + // redirect reverse requests to stdout writer/reader + if request.command == RunInTerminal::COMMAND + || request.command == StartDebugging::COMMAND + { let message = - serde_json::to_string(&Message::Event(event)).unwrap(); + serde_json::to_string(&Message::Request(request)).unwrap(); + + let mut writer = stdout_writer.lock().await; + writer + .write_all( + TransportDelegate::build_rpc_message(message).as_bytes(), + ) + .await + .unwrap(); + writer.flush().await.unwrap(); + } else { + let response = if let Some(handle) = + request_handlers.lock().get_mut(request.command.as_str()) + { + handle(request.seq, request.arguments.unwrap_or(json!({}))) + } else { + panic!("No request handler for {}", request.command); + }; + let response = match response { + RequestHandling::Respond(response) => response, + RequestHandling::Exit => { + break Err(anyhow!("exit in response to request")); + } + }; + let message = + serde_json::to_string(&Message::Response(response)).unwrap(); let mut writer = stdout_writer.lock().await; writer @@ -854,20 +895,56 @@ impl FakeTransport { .unwrap(); writer.flush().await.unwrap(); } - Message::Response(response) => { - if let Some(handle) = - response_handlers.lock().get(response.command.as_str()) - { - handle(response); - } else { - log::error!("No response handler for {}", response.command); - } + } + Message::Event(event) => { + let message = serde_json::to_string(&Message::Event(event)).unwrap(); + + let mut writer = stdout_writer.lock().await; + writer + .write_all(TransportDelegate::build_rpc_message(message).as_bytes()) + .await + .unwrap(); + writer.flush().await.unwrap(); + } + Message::Response(response) => { + if let Some(handle) = + response_handlers.lock().get(response.command.as_str()) + { + handle(response); + } else { + log::error!("No response handler for {}", response.command); } } } } } - })); + } + } + + async fn start_stdio(cx: &mut AsyncApp) -> Result { + let (stdin_writer, stdin_reader) = async_pipe::pipe(); + let 
(stdout_writer, stdout_reader) = async_pipe::pipe(); + let kind = FakeTransportKind::Stdio { + stdin_writer: Some(stdin_writer), + stdout_reader: Some(stdout_reader), + }; + + let mut this = Self { + request_handlers: Arc::new(Mutex::new(HashMap::default())), + response_handlers: Arc::new(Mutex::new(HashMap::default())), + message_handler: None, + kind, + }; + + let request_handlers = this.request_handlers.clone(); + let response_handlers = this.response_handlers.clone(); + + this.message_handler = Some(cx.background_spawn(Self::handle_messages( + request_handlers, + response_handlers, + stdin_reader, + stdout_writer, + ))); Ok(this) } @@ -876,7 +953,10 @@ impl FakeTransport { #[cfg(any(test, feature = "test-support"))] impl Transport for FakeTransport { fn tcp_arguments(&self) -> Option { - None + match &self.kind { + FakeTransportKind::Stdio { .. } => None, + FakeTransportKind::Tcp { connection, .. } => Some(connection.clone()), + } } fn connect( @@ -887,12 +967,33 @@ impl Transport for FakeTransport { Box, )>, > { - let result = util::maybe!({ - Ok(( - Box::new(self.stdin_writer.take().context("Cannot reconnect")?) as _, - Box::new(self.stdout_reader.take().context("Cannot reconnect")?) as _, - )) - }); + let result = match &mut self.kind { + FakeTransportKind::Stdio { + stdin_writer, + stdout_reader, + } => util::maybe!({ + Ok(( + Box::new(stdin_writer.take().context("Cannot reconnect")?) as _, + Box::new(stdout_reader.take().context("Cannot reconnect")?) as _, + )) + }), + FakeTransportKind::Tcp { executor, .. } => { + let (stdin_writer, stdin_reader) = async_pipe::pipe(); + let (stdout_writer, stdout_reader) = async_pipe::pipe(); + + let request_handlers = self.request_handlers.clone(); + let response_handlers = self.response_handlers.clone(); + + self.message_handler = Some(executor.spawn(Self::handle_messages( + request_handlers, + response_handlers, + stdin_reader, + stdout_writer, + ))); + + Ok((Box::new(stdin_writer) as _, Box::new(stdout_reader) as _)) + } + }; Task::ready(result) } diff --git a/crates/debugger_ui/src/session.rs b/crates/debugger_ui/src/session.rs index 482297b136..4cc2602909 100644 --- a/crates/debugger_ui/src/session.rs +++ b/crates/debugger_ui/src/session.rs @@ -122,7 +122,7 @@ impl DebugSession { .to_owned() } - pub(crate) fn running_state(&self) -> &Entity { + pub fn running_state(&self) -> &Entity { &self.running_state } diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index af8c14aef7..d308fc9bd2 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -1459,7 +1459,7 @@ impl RunningState { } } - pub(crate) fn selected_thread_id(&self) -> Option { + pub fn selected_thread_id(&self) -> Option { self.thread_id } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c5fe0db74c..263544dba6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -482,9 +482,7 @@ pub enum SelectMode { #[derive(Clone, PartialEq, Eq, Debug)] pub enum EditorMode { - SingleLine { - auto_width: bool, - }, + SingleLine, AutoHeight { min_lines: usize, max_lines: Option, @@ -1662,13 +1660,7 @@ impl Editor { pub fn single_line(window: &mut Window, cx: &mut Context) -> Self { let buffer = cx.new(|cx| Buffer::local("", cx)); let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new( - EditorMode::SingleLine { auto_width: false }, - buffer, - None, - window, - cx, - ) + Self::new(EditorMode::SingleLine, buffer, None, window, cx) } 
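    // With `auto_width` removed, `EditorMode::SingleLine` carries no fields and a
    // single-line editor always lays out at the full available width (see the
    // element.rs hunk below). Former `Editor::auto_width` call sites move to
    // `Editor::single_line`, as rules_library does later in this diff, e.g.:
    //
    //     let title_editor = cx.new(|cx| Editor::single_line(window, cx));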
pub fn multi_line(window: &mut Window, cx: &mut Context) -> Self { @@ -1677,18 +1669,6 @@ impl Editor { Self::new(EditorMode::full(), buffer, None, window, cx) } - pub fn auto_width(window: &mut Window, cx: &mut Context) -> Self { - let buffer = cx.new(|cx| Buffer::local("", cx)); - let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx)); - Self::new( - EditorMode::SingleLine { auto_width: true }, - buffer, - None, - window, - cx, - ) - } - pub fn auto_height( min_lines: usize, max_lines: usize, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 8a5bfb3bab..8db65189f8 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7777,46 +7777,13 @@ impl Element for EditorElement { editor.set_style(self.style.clone(), window, cx); let layout_id = match editor.mode { - EditorMode::SingleLine { auto_width } => { + EditorMode::SingleLine => { let rem_size = window.rem_size(); - let height = self.style.text.line_height_in_pixels(rem_size); - if auto_width { - let editor_handle = cx.entity().clone(); - let style = self.style.clone(); - window.request_measured_layout( - Style::default(), - move |_, _, window, cx| { - let editor_snapshot = editor_handle - .update(cx, |editor, cx| editor.snapshot(window, cx)); - let line = Self::layout_lines( - DisplayRow(0)..DisplayRow(1), - &editor_snapshot, - &style, - px(f32::MAX), - |_| false, // Single lines never soft wrap - window, - cx, - ) - .pop() - .unwrap(); - - let font_id = - window.text_system().resolve_font(&style.text.font()); - let font_size = - style.text.font_size.to_pixels(window.rem_size()); - let em_width = - window.text_system().em_width(font_id, font_size).unwrap(); - - size(line.width + em_width, height) - }, - ) - } else { - let mut style = Style::default(); - style.size.height = height.into(); - style.size.width = relative(1.).into(); - window.request_layout(style, None, cx) - } + let mut style = Style::default(); + style.size.height = height.into(); + style.size.width = relative(1.).into(); + window.request_layout(style, None, cx) } EditorMode::AutoHeight { min_lines, @@ -10388,7 +10355,7 @@ mod tests { }); for editor_mode_without_invisibles in [ - EditorMode::SingleLine { auto_width: false }, + EditorMode::SingleLine, EditorMode::AutoHeight { min_lines: 1, max_lines: Some(100), diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 9b7fee228a..518f386ebe 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -166,46 +166,9 @@ impl State { } let response = Self::fetch_models(client, llm_api_token, use_cloud).await?; - cx.update(|cx| { - this.update(cx, |this, cx| { - let mut models = Vec::new(); - - for model in response.models { - models.push(Arc::new(model.clone())); - - // Right now we represent thinking variants of models as separate models on the client, - // so we need to insert variants for any model that supports thinking. 
- if model.supports_thinking { - models.push(Arc::new(zed_llm_client::LanguageModel { - id: zed_llm_client::LanguageModelId( - format!("{}-thinking", model.id).into(), - ), - display_name: format!("{} Thinking", model.display_name), - ..model - })); - } - } - - this.default_model = models - .iter() - .find(|model| model.id == response.default_model) - .cloned(); - this.default_fast_model = models - .iter() - .find(|model| model.id == response.default_fast_model) - .cloned(); - this.recommended_models = response - .recommended_models - .iter() - .filter_map(|id| models.iter().find(|model| &model.id == id)) - .cloned() - .collect(); - this.models = models; - cx.notify(); - }) - })??; - - anyhow::Ok(()) + this.update(cx, |this, cx| { + this.update_models(response, cx); + }) }) .await .context("failed to fetch Zed models") @@ -216,12 +179,15 @@ impl State { }), _llm_token_subscription: cx.subscribe( &refresh_llm_token_listener, - |this, _listener, _event, cx| { + move |this, _listener, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); - cx.spawn(async move |_this, _cx| { + cx.spawn(async move |this, cx| { llm_api_token.refresh(&client).await?; - anyhow::Ok(()) + let response = Self::fetch_models(client, llm_api_token, use_cloud).await?; + this.update(cx, |this, cx| { + this.update_models(response, cx); + }) }) .detach_and_log_err(cx); }, @@ -264,6 +230,41 @@ impl State { })); } + fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context) { + let mut models = Vec::new(); + + for model in response.models { + models.push(Arc::new(model.clone())); + + // Right now we represent thinking variants of models as separate models on the client, + // so we need to insert variants for any model that supports thinking. 
+ if model.supports_thinking { + models.push(Arc::new(zed_llm_client::LanguageModel { + id: zed_llm_client::LanguageModelId(format!("{}-thinking", model.id).into()), + display_name: format!("{} Thinking", model.display_name), + ..model + })); + } + } + + self.default_model = models + .iter() + .find(|model| model.id == response.default_model) + .cloned(); + self.default_fast_model = models + .iter() + .find(|model| model.id == response.default_fast_model) + .cloned(); + self.recommended_models = response + .recommended_models + .iter() + .filter_map(|id| models.iter().find(|model| &model.id == id)) + .cloned() + .collect(); + self.models = models; + cx.notify(); + } + async fn fetch_models( client: Arc, llm_api_token: LlmApiToken, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 8e1026421e..85036eca86 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3362,8 +3362,14 @@ impl Project { cx: &mut Context, ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.definitions(buffer, position, cx) + }); + cx.spawn(async move |_, _| { + let result = task.await; + drop(guard); + result }) } @@ -3374,8 +3380,14 @@ impl Project { cx: &mut Context, ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.declarations(buffer, position, cx) + }); + cx.spawn(async move |_, _| { + let result = task.await; + drop(guard); + result }) } @@ -3386,8 +3398,14 @@ impl Project { cx: &mut Context, ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.type_definitions(buffer, position, cx) + }); + cx.spawn(async move |_, _| { + let result = task.await; + drop(guard); + result }) } @@ -3398,8 +3416,14 @@ impl Project { cx: &mut Context, ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.implementations(buffer, position, cx) + }); + cx.spawn(async move |_, _| { + let result = task.await; + drop(guard); + result }) } @@ -3410,8 +3434,14 @@ impl Project { cx: &mut Context, ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.references(buffer, position, cx) + }); + cx.spawn(async move |_, _| { + let result = task.await; + drop(guard); + result }) } diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index 66f589bfd3..49eca26838 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -611,7 +611,7 @@ impl RulesLibrary { this.update_in(cx, |this, window, cx| match rule { Ok(rule) => { let title_editor = cx.new(|cx| { - let mut editor = Editor::auto_width(window, cx); + let mut editor = Editor::single_line(window, cx); editor.set_placeholder_text("Untitled", 
cx); editor.set_text(rule_metadata.title.unwrap_or_default(), window, cx); if prompt_id.is_built_in() { diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index c34d892644..7f435c67ac 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -127,7 +127,7 @@ impl BatchedTextRun { cx: &mut App, ) { let pos = Point::new( - (origin.x + self.start_point.column as f32 * dimensions.cell_width).floor(), + origin.x + self.start_point.column as f32 * dimensions.cell_width, origin.y + self.start_point.line as f32 * dimensions.line_height, ); diff --git a/crates/vim/src/normal/scroll.rs b/crates/vim/src/normal/scroll.rs index 47b9fe92fd..e2ae74b52b 100644 --- a/crates/vim/src/normal/scroll.rs +++ b/crates/vim/src/normal/scroll.rs @@ -230,7 +230,11 @@ fn scroll_editor( // column position, or the right-most column in the current // line, seeing as the cursor might be in a short line, in which // case we don't want to go past its last column. - let max_row_column = map.line_len(new_row); + let max_row_column = if new_row <= map.max_point().row() { + map.line_len(new_row) + } else { + 0 + }; let max_column = match min_column + visible_column_count as u32 { max_column if max_column >= max_row_column => max_row_column, max_column => max_column, diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 884443e770..3abc07f36f 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.195.0" +version = "0.195.1" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] diff --git a/crates/zed/RELEASE_CHANNEL b/crates/zed/RELEASE_CHANNEL index 38f8e886e1..4de2f126df 100644 --- a/crates/zed/RELEASE_CHANNEL +++ b/crates/zed/RELEASE_CHANNEL @@ -1 +1 @@ -dev +preview \ No newline at end of file diff --git a/script/bundle-windows.ps1 b/script/bundle-windows.ps1 index 9b61d220cf..dc0bdb9b7d 100644 --- a/script/bundle-windows.ps1 +++ b/script/bundle-windows.ps1 @@ -44,8 +44,6 @@ function CheckEnvironmentVariables { } } -$innoDir = "$env:ZED_WORKSPACE\inno" - function PrepareForBundle { if (Test-Path "$innoDir") { Remove-Item -Path "$innoDir" -Recurse -Force @@ -236,6 +234,8 @@ function BuildInstaller { } ParseZedWorkspace +$innoDir = "$env:ZED_WORKSPACE\inno" + CheckEnvironmentVariables PrepareForBundle BuildZedAndItsFriends
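
The `RequestHandling` enum, the `on_request_ext` test hook, and the TCP variant of `FakeTransport` added above exist so the new `test_slow_adapter_startup_retries` collab test can simulate an adapter that exits during its first few `initialize` requests and only then comes up, forcing the session to retry startup over the fake TCP transport. A condensed sketch of that hook, assuming `dap::{Capabilities, transport::RequestHandling}` plus `Arc`/`AtomicUsize` are in scope as in the test; the turbofish was stripped from this copy of the diff, so `Initialize` (the DAP request whose response type is `Capabilities`) is an assumption:

    let count = Arc::new(AtomicUsize::new(0));
    // Make the fake adapter exit on the first five `initialize` requests,
    // then respond normally so the retried session can finish starting up.
    client.on_request_ext::<dap::requests::Initialize, _>(move |_seq, _request| {
        if count.fetch_add(1, std::sync::atomic::Ordering::SeqCst) < 5 {
            return RequestHandling::Exit;
        }
        RequestHandling::Respond(Ok(Capabilities::default()))
    });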
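
`DebugAdapterClient::kill()` now also calls `pending_requests.lock().shutdown()`, and the transport's pending-request map is wrapped in a small `PendingRequests` type: once shut down it rejects new `insert` calls, and `flush` fails every in-flight request instead of leaving callers hanging. A minimal standalone sketch of that lifecycle, not Zed's exact code (the real type appears to store `oneshot::Sender<Result<Response>>` and clones a shared error):

    use anyhow::{Result, anyhow, bail};
    use futures::channel::oneshot;
    use std::collections::HashMap;

    struct PendingRequests<T> {
        inner: Option<HashMap<u64, oneshot::Sender<Result<T>>>>,
    }

    impl<T> PendingRequests<T> {
        fn new() -> Self {
            Self { inner: Some(HashMap::new()) }
        }

        // Register an in-flight request; refused once the client is closed.
        fn insert(&mut self, seq: u64, tx: oneshot::Sender<Result<T>>) -> Result<()> {
            let Some(inner) = self.inner.as_mut() else {
                bail!("client is closed")
            };
            inner.insert(seq, tx);
            Ok(())
        }

        // Fail every in-flight request, e.g. when the adapter exits unexpectedly.
        fn flush(&mut self, reason: &str) {
            if let Some(inner) = self.inner.as_mut() {
                for (_, tx) in inner.drain() {
                    tx.send(Err(anyhow!("{reason}"))).ok();
                }
            }
        }

        // Fail everything and refuse all future requests; `kill()` ends up here.
        fn shutdown(&mut self) {
            self.flush("transport shutdown");
            self.inner = None;
        }
    }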
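
In `project.rs`, each of the five LSP queries (`definitions`, `declarations`, `type_definitions`, `implementations`, `references`) now holds the guard returned by `retain_remotely_created_models` until its task resolves, presumably so remotely created models are not dropped while a response that refers to them is still in flight. The shape of the change, shown for `definitions` (the other four are identical):

    let guard = self.retain_remotely_created_models(cx);
    let task = self.lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.definitions(buffer, position, cx)
    });
    cx.spawn(async move |_, _| {
        let result = task.await;
        drop(guard); // keep the retention guard alive for the whole request
        result
    })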