Compare commits


10 commits

Author SHA1 Message Date
Cole Miller
7f2283749b
Remove auto-width editor type (#34438)
Closes #34044

`EditorMode::SingleLine { auto_width: true }` was only used for the
title editor in the rules library, and following
https://github.com/zed-industries/zed/pull/31994 we can replace it
with a normal single-line editor without problems. The auto-width editor
was interacting badly with the recently added newline visualization
code, causing a panic during layout; by switching it to
`Editor::single_line`, the newline visualization works there too.
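
For illustration, a minimal standalone sketch of the variant change (simplified
stand-in types, not the actual gpui/editor API): `SingleLine` no longer carries
an `auto_width` flag, so every single-line editor takes the same fixed-height,
relative-width layout path.

```rust
// Simplified stand-ins for the real types; only the shape of the change matters.
#[derive(Clone, PartialEq, Eq, Debug)]
enum EditorMode {
    SingleLine, // previously `SingleLine { auto_width: bool }`
    AutoHeight { min_lines: usize, max_lines: Option<usize> },
    Full,
}

fn describe_layout(mode: &EditorMode) -> &'static str {
    match mode {
        EditorMode::SingleLine => "fixed height, width follows the container",
        EditorMode::AutoHeight { .. } => "height grows with the content",
        EditorMode::Full => "fills the available space",
    }
}

fn main() {
    // The rules library title editor now uses the plain single-line mode.
    println!("{}", describe_layout(&EditorMode::SingleLine));
}
```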

Release Notes:

- Fixed a panic that could occur when opening the rules library.

---------

Co-authored-by: Finn <finn@zed.dev>
2025-07-15 14:57:56 -04:00
Zed Bot
2d724520bc Bump to 0.195.1 for @ConradIrwin 2025-07-14 19:06:38 +00:00
gcp-cherry-pick-bot[bot]
473062aeef
debugger: Fix endless restarts when connecting to TCP adapters over SSH (cherry-pick #34328) (#34343)
Cherry-picked debugger: Fix endless restarts when connecting to TCP
adapters over SSH (#34328)

Closes #34323
Closes #34313

The previous PR #33932 introduced a way to "close" the
`pending_requests` buffer of the `TransportDelegate`, preventing any
further requests from being added. This keeps pending requests from
accumulating without ever being drained during the shutdown sequence;
without it, some of our tests hang during shutdown (because they use a
single-threaded executor).

The bug occurred because we closed `pending_requests` whenever we
detected that the server side of the transport had shut down, and this
closed state stuck around and interfered with the retry logic for
SSH+TCP adapter connections.

This PR fixes the bug by only closing `pending_requests` on session
shutdown, and adds a regression test covering the SSH retry logic.
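
A minimal, self-contained sketch of the approach (simplified from the real
`PendingRequests` shown in the diff below; response senders and error values
are stubbed out): a transport-level reset only flushes in-flight requests,
while `shutdown` is the single place that closes the buffer for good.

```rust
use std::collections::HashMap;

// Simplified stand-in for the real map of sequence id -> response sender.
struct PendingRequests<T> {
    inner: Option<HashMap<u64, T>>,
}

impl<T> PendingRequests<T> {
    fn new() -> Self {
        Self { inner: Some(HashMap::new()) }
    }

    /// Drop all in-flight requests (e.g. the server hung up) but keep
    /// accepting new ones, so an SSH+TCP retry can still register requests.
    fn flush(&mut self) {
        if let Some(inner) = self.inner.as_mut() {
            inner.clear();
        }
    }

    /// Close permanently; called only on session shutdown.
    fn shutdown(&mut self) {
        self.flush();
        self.inner = None;
    }

    fn insert(&mut self, seq: u64, value: T) -> Result<(), &'static str> {
        match self.inner.as_mut() {
            Some(inner) => {
                inner.insert(seq, value);
                Ok(())
            }
            None => Err("client is closed"),
        }
    }
}

fn main() {
    let mut pending = PendingRequests::new();
    pending.insert(1, "initialize").unwrap();
    pending.flush(); // transport reset: a retry can still send requests
    assert!(pending.insert(2, "launch").is_ok());
    pending.shutdown(); // session shutdown: closed for good
    assert!(pending.insert(3, "attach").is_err());
}
```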

Release Notes:

- debugger: Fixed a bug causing SSH connections to some adapters
(Python, Go, JavaScript) to fail and restart endlessly.

Co-authored-by: Cole Miller <cole@zed.dev>
2025-07-12 17:13:27 -04:00
gcp-cherry-pick-bot[bot]
612c9addff
Return back the guards when goto targets are queried for (cherry-pick #34340) (#34344) 2025-07-12 19:49:32 +03:00
gcp-cherry-pick-bot[bot]
19a60dbf9c
Fix bad kerning in integrated terminal (cherry-pick #34292) (#34298)
Cherry-picked Fix bad kerning in integrated terminal (#34292)

Closes #16869

Release Notes:

- (preview only): Fix bad kerning in integrated terminal.

Co-authored-by: Alisina Bahadori <alisina.bm@gmail.com>
2025-07-11 12:13:36 -06:00
gcp-cherry-pick-bot[bot]
acba38dabd
language_models: Refresh the list of models when the LLM token is refreshed (cherry-pick #34222) (#34294)
Cherry-picked language_models: Refresh the list of models when the LLM
token is refreshed (#34222)

This PR refreshes the list of models whenever the LLM token is
refreshed.

This allows us to add or remove models based on the plan in the new
token.
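
Illustrative-only sketch of the flow (names simplified, not the actual
Zed/gpui entity APIs): the token-refresh subscription now reuses the same
fetch-and-update path as startup, so the model list always reflects the plan
in the current token.

```rust
#[derive(Default)]
struct State {
    models: Vec<String>,
}

struct ListModelsResponse {
    models: Vec<String>,
}

impl State {
    // Stub for the real API call made with the refreshed LLM token.
    fn fetch_models(token: &str) -> ListModelsResponse {
        ListModelsResponse { models: vec![format!("models-for-{token}")] }
    }

    fn update_models(&mut self, response: ListModelsResponse) {
        self.models = response.models;
    }

    // Runs both at startup and from the token-refresh subscription.
    fn on_token_refreshed(&mut self, new_token: &str) {
        let response = Self::fetch_models(new_token);
        self.update_models(response);
    }
}

fn main() {
    let mut state = State::default();
    state.on_token_refreshed("free-plan-token");
    state.on_token_refreshed("pro-plan-token");
    assert_eq!(state.models, vec!["models-for-pro-plan-token".to_string()]);
}
```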

Release Notes:

- Fixed model list not refreshing when subscribing to Zed Pro.

---------

Co-authored-by: Marshall Bowers <git@maxdeviant.com>
Co-authored-by: Bennet Bo Fenner <bennetbo@gmx.de>
2025-07-11 11:48:51 -04:00
gcp-cherry-pick-bot[bot]
c1b3111c15
vim: Fix panic when scrolling beyond last line (cherry-pick #34172) (#34174)
Cherry-picked vim: Fix panic when scrolling beyond last line (#34172)

cc @dinocosta

Release Notes:

- (preview only) vim: Fix panic when scrolling down at end of file

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-07-09 20:25:03 -06:00
localcc
623388ad80
Fix inno dir (#34116)
Fix inno dir for nightly builds

Release Notes:

- N/A
2025-07-09 14:45:49 -04:00
Max Brunsfeld
eb89e9a572
Don't upload windows installer to preview releases for now (#34147)
Release Notes:

- N/A
2025-07-09 14:45:46 -04:00
Peter Tripp
e306a55073
v0.195.x preview 2025-07-09 11:02:11 -04:00
19 changed files with 534 additions and 253 deletions

View file

@ -800,7 +800,8 @@ jobs:
- name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview
# Re-enable when we are ready to publish windows preview releases
if: false && ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) && env.RELEASE_CHANNEL == 'preview' }} # upload only preview
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}

Cargo.lock (generated)
View file

@ -3043,6 +3043,7 @@ dependencies = [
"context_server",
"ctor",
"dap",
"dap-types",
"dap_adapters",
"dashmap 6.1.0",
"debugger_ui",
@ -19972,7 +19973,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.195.0"
version = "0.195.1"
dependencies = [
"activity_indicator",
"agent",

View file

@ -94,6 +94,7 @@ context_server.workspace = true
ctor.workspace = true
dap = { workspace = true, features = ["test-support"] }
dap_adapters = { workspace = true, features = ["test-support"] }
dap-types.workspace = true
debugger_ui = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
extension.workspace = true

View file

@ -2,6 +2,7 @@ use crate::tests::TestServer;
use call::ActiveCall;
use collections::{HashMap, HashSet};
use dap::{Capabilities, adapters::DebugTaskDefinition, transport::RequestHandling};
use debugger_ui::debugger_panel::DebugPanel;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs as _, RemoveOptions};
@ -22,6 +23,7 @@ use language::{
use node_runtime::NodeRuntime;
use project::{
ProjectPath,
debugger::session::ThreadId,
lsp_store::{FormatTrigger, LspFormatTarget},
};
use remote::SshRemoteClient;
@ -29,7 +31,11 @@ use remote_server::{HeadlessAppState, HeadlessProject};
use rpc::proto;
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};
use std::{
path::Path,
sync::{Arc, atomic::AtomicUsize},
};
use task::TcpArgumentsTemplate;
use util::path;
#[gpui::test(iterations = 10)]
@ -688,3 +694,162 @@ async fn test_remote_server_debugger(
shutdown_session.await.unwrap();
}
#[gpui::test]
async fn test_slow_adapter_startup_retries(
cx_a: &mut TestAppContext,
server_cx: &mut TestAppContext,
executor: BackgroundExecutor,
) {
cx_a.update(|cx| {
release_channel::init(SemanticVersion::default(), cx);
command_palette_hooks::init(cx);
zlog::init_test();
dap_adapters::init(cx);
});
server_cx.update(|cx| {
release_channel::init(SemanticVersion::default(), cx);
dap_adapters::init(cx);
});
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
path!("/code"),
json!({
"lib.rs": "fn one() -> usize { 1 }"
}),
)
.await;
// User A connects to the remote project via SSH.
server_cx.update(HeadlessProject::init);
let remote_http_client = Arc::new(BlockedHttpClient);
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
fs: remote_fs.clone(),
http_client: remote_http_client,
node_runtime: node,
languages,
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
},
cx,
)
});
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
let mut server = TestServer::start(server_cx.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
cx_a.update(|cx| {
debugger_ui::init(cx);
command_palette_hooks::init(cx);
});
let (project_a, _) = client_a
.build_ssh_project(path!("/code"), client_ssh.clone(), cx_a)
.await;
let (workspace, cx_a) = client_a.build_workspace(&project_a, cx_a);
let debugger_panel = workspace
.update_in(cx_a, |_workspace, window, cx| {
cx.spawn_in(window, DebugPanel::load)
})
.await
.unwrap();
workspace.update_in(cx_a, |workspace, window, cx| {
workspace.add_panel(debugger_panel, window, cx);
});
cx_a.run_until_parked();
let debug_panel = workspace
.update(cx_a, |workspace, cx| workspace.panel::<DebugPanel>(cx))
.unwrap();
let workspace_window = cx_a
.window_handle()
.downcast::<workspace::Workspace>()
.unwrap();
let count = Arc::new(AtomicUsize::new(0));
let session = debugger_ui::tests::start_debug_session_with(
&workspace_window,
cx_a,
DebugTaskDefinition {
adapter: "fake-adapter".into(),
label: "test".into(),
config: json!({
"request": "launch"
}),
tcp_connection: Some(TcpArgumentsTemplate {
port: None,
host: None,
timeout: None,
}),
},
move |client| {
let count = count.clone();
client.on_request_ext::<dap::requests::Initialize, _>(move |_seq, _request| {
if count.fetch_add(1, std::sync::atomic::Ordering::SeqCst) < 5 {
return RequestHandling::Exit;
}
RequestHandling::Respond(Ok(Capabilities::default()))
});
},
)
.unwrap();
cx_a.run_until_parked();
let client = session.update(cx_a, |session, _| session.adapter_client().unwrap());
client
.fake_event(dap::messages::Events::Stopped(dap::StoppedEvent {
reason: dap::StoppedEventReason::Pause,
description: None,
thread_id: Some(1),
preserve_focus_hint: None,
text: None,
all_threads_stopped: None,
hit_breakpoint_ids: None,
}))
.await;
cx_a.run_until_parked();
let active_session = debug_panel
.update(cx_a, |this, _| this.active_session())
.unwrap();
let running_state = active_session.update(cx_a, |active_session, _| {
active_session.running_state().clone()
});
assert_eq!(
client.id(),
running_state.read_with(cx_a, |running_state, _| running_state.session_id())
);
assert_eq!(
ThreadId(1),
running_state.read_with(cx_a, |running_state, _| running_state
.selected_thread_id()
.unwrap())
);
let shutdown_session = workspace.update(cx_a, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
project.dap_store().update(cx, |dap_store, cx| {
dap_store.shutdown_session(session.read(cx).session_id(), cx)
})
})
});
client_ssh.update(cx_a, |a, _| {
a.shutdown_processes(Some(proto::ShutdownRemoteServer {}), executor)
});
shutdown_session.await.unwrap();
}

View file

@ -442,10 +442,18 @@ impl DebugAdapter for FakeAdapter {
_: Option<Vec<String>>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let connection = task_definition
.tcp_connection
.as_ref()
.map(|connection| TcpArguments {
host: connection.host(),
port: connection.port.unwrap_or(17),
timeout: connection.timeout,
});
Ok(DebugAdapterBinary {
command: Some("command".into()),
arguments: vec![],
connection: None,
connection,
envs: HashMap::default(),
cwd: None,
request_args: StartDebuggingRequestArguments {

View file

@ -2,7 +2,7 @@ use crate::{
adapters::DebugAdapterBinary,
transport::{IoKind, LogKind, TransportDelegate},
};
use anyhow::{Context as _, Result};
use anyhow::Result;
use dap_types::{
messages::{Message, Response},
requests::Request,
@ -110,9 +110,7 @@ impl DebugAdapterClient {
self.transport_delegate
.pending_requests
.lock()
.as_mut()
.context("client is closed")?
.insert(sequence_id, callback_tx);
.insert(sequence_id, callback_tx)?;
log::debug!(
"Client {} send `{}` request with sequence_id: {}",
@ -170,6 +168,7 @@ impl DebugAdapterClient {
pub fn kill(&self) {
log::debug!("Killing DAP process");
self.transport_delegate.transport.lock().kill();
self.transport_delegate.pending_requests.lock().shutdown();
}
pub fn has_adapter_logs(&self) -> bool {
@ -184,11 +183,34 @@ impl DebugAdapterClient {
}
#[cfg(any(test, feature = "test-support"))]
pub fn on_request<R: dap_types::requests::Request, F>(&self, handler: F)
pub fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
where
F: 'static
+ Send
+ FnMut(u64, R::Arguments) -> Result<R::Response, dap_types::ErrorResponse>,
{
use crate::transport::RequestHandling;
self.transport_delegate
.transport
.lock()
.as_fake()
.on_request::<R, _>(move |seq, request| {
RequestHandling::Respond(handler(seq, request))
});
}
#[cfg(any(test, feature = "test-support"))]
pub fn on_request_ext<R: dap_types::requests::Request, F>(&self, handler: F)
where
F: 'static
+ Send
+ FnMut(
u64,
R::Arguments,
) -> crate::transport::RequestHandling<
Result<R::Response, dap_types::ErrorResponse>,
>,
{
self.transport_delegate
.transport

View file

@ -49,6 +49,12 @@ pub enum IoKind {
StdErr,
}
#[cfg(any(test, feature = "test-support"))]
pub enum RequestHandling<T> {
Respond(T),
Exit,
}
type LogHandlers = Arc<Mutex<SmallVec<[(LogKind, IoHandler); 2]>>>;
pub trait Transport: Send + Sync {
@ -76,7 +82,11 @@ async fn start(
) -> Result<Box<dyn Transport>> {
#[cfg(any(test, feature = "test-support"))]
if cfg!(any(test, feature = "test-support")) {
return Ok(Box::new(FakeTransport::start(cx).await?));
if let Some(connection) = binary.connection.clone() {
return Ok(Box::new(FakeTransport::start_tcp(connection, cx).await?));
} else {
return Ok(Box::new(FakeTransport::start_stdio(cx).await?));
}
}
if binary.connection.is_some() {
@ -90,11 +100,57 @@ async fn start(
}
}
pub(crate) struct PendingRequests {
inner: Option<HashMap<u64, oneshot::Sender<Result<Response>>>>,
}
impl PendingRequests {
fn new() -> Self {
Self {
inner: Some(HashMap::default()),
}
}
fn flush(&mut self, e: anyhow::Error) {
let Some(inner) = self.inner.as_mut() else {
return;
};
for (_, sender) in inner.drain() {
sender.send(Err(e.cloned())).ok();
}
}
pub(crate) fn insert(
&mut self,
sequence_id: u64,
callback_tx: oneshot::Sender<Result<Response>>,
) -> anyhow::Result<()> {
let Some(inner) = self.inner.as_mut() else {
bail!("client is closed")
};
inner.insert(sequence_id, callback_tx);
Ok(())
}
pub(crate) fn remove(
&mut self,
sequence_id: u64,
) -> anyhow::Result<Option<oneshot::Sender<Result<Response>>>> {
let Some(inner) = self.inner.as_mut() else {
bail!("client is closed");
};
Ok(inner.remove(&sequence_id))
}
pub(crate) fn shutdown(&mut self) {
self.flush(anyhow!("transport shutdown"));
self.inner = None;
}
}
pub(crate) struct TransportDelegate {
log_handlers: LogHandlers,
// TODO this should really be some kind of associative channel
pub(crate) pending_requests:
Arc<Mutex<Option<HashMap<u64, oneshot::Sender<Result<Response>>>>>>,
pub(crate) pending_requests: Arc<Mutex<PendingRequests>>,
pub(crate) transport: Mutex<Box<dyn Transport>>,
pub(crate) server_tx: smol::lock::Mutex<Option<Sender<Message>>>,
tasks: Mutex<Vec<Task<()>>>,
@ -108,7 +164,7 @@ impl TransportDelegate {
transport: Mutex::new(transport),
log_handlers,
server_tx: Default::default(),
pending_requests: Arc::new(Mutex::new(Some(HashMap::default()))),
pending_requests: Arc::new(Mutex::new(PendingRequests::new())),
tasks: Default::default(),
})
}
@ -151,24 +207,10 @@ impl TransportDelegate {
Ok(()) => {
pending_requests
.lock()
.take()
.into_iter()
.flatten()
.for_each(|(_, request)| {
request
.send(Err(anyhow!("debugger shutdown unexpectedly")))
.ok();
});
.flush(anyhow!("debugger shutdown unexpectedly"));
}
Err(e) => {
pending_requests
.lock()
.take()
.into_iter()
.flatten()
.for_each(|(_, request)| {
request.send(Err(e.cloned())).ok();
});
pending_requests.lock().flush(e);
}
}
}));
@ -286,7 +328,7 @@ impl TransportDelegate {
async fn recv_from_server<Stdout>(
server_stdout: Stdout,
mut message_handler: DapMessageHandler,
pending_requests: Arc<Mutex<Option<HashMap<u64, oneshot::Sender<Result<Response>>>>>>,
pending_requests: Arc<Mutex<PendingRequests>>,
log_handlers: Option<LogHandlers>,
) -> Result<()>
where
@ -303,14 +345,10 @@ impl TransportDelegate {
ConnectionResult::Timeout => anyhow::bail!("Timed out when connecting to debugger"),
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
return Ok(());
}
ConnectionResult::Result(Ok(Message::Response(res))) => {
let tx = pending_requests
.lock()
.as_mut()
.context("client is closed")?
.remove(&res.request_seq);
let tx = pending_requests.lock().remove(res.request_seq)?;
if let Some(tx) = tx {
if let Err(e) = tx.send(Self::process_response(res)) {
log::trace!("Did not send response `{:?}` for a cancelled", e);
@ -704,8 +742,7 @@ impl Drop for StdioTransport {
}
#[cfg(any(test, feature = "test-support"))]
type RequestHandler =
Box<dyn Send + FnMut(u64, serde_json::Value) -> dap_types::messages::Response>;
type RequestHandler = Box<dyn Send + FnMut(u64, serde_json::Value) -> RequestHandling<Response>>;
#[cfg(any(test, feature = "test-support"))]
type ResponseHandler = Box<dyn Send + Fn(Response)>;
@ -716,23 +753,38 @@ pub struct FakeTransport {
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
// for reverse request responses
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
stdin_writer: Option<PipeWriter>,
stdout_reader: Option<PipeReader>,
message_handler: Option<Task<Result<()>>>,
kind: FakeTransportKind,
}
#[cfg(any(test, feature = "test-support"))]
pub enum FakeTransportKind {
Stdio {
stdin_writer: Option<PipeWriter>,
stdout_reader: Option<PipeReader>,
},
Tcp {
connection: TcpArguments,
executor: BackgroundExecutor,
},
}
#[cfg(any(test, feature = "test-support"))]
impl FakeTransport {
pub fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
where
F: 'static + Send + FnMut(u64, R::Arguments) -> Result<R::Response, ErrorResponse>,
F: 'static
+ Send
+ FnMut(u64, R::Arguments) -> RequestHandling<Result<R::Response, ErrorResponse>>,
{
self.request_handlers.lock().insert(
R::COMMAND,
Box::new(move |seq, args| {
let result = handler(seq, serde_json::from_value(args).unwrap());
let response = match result {
let RequestHandling::Respond(response) = result else {
return RequestHandling::Exit;
};
let response = match response {
Ok(response) => Response {
seq: seq + 1,
request_seq: seq,
@ -750,7 +802,7 @@ impl FakeTransport {
message: None,
},
};
response
RequestHandling::Respond(response)
}),
);
}
@ -764,86 +816,75 @@ impl FakeTransport {
.insert(R::COMMAND, Box::new(handler));
}
async fn start(cx: &mut AsyncApp) -> Result<Self> {
async fn start_tcp(connection: TcpArguments, cx: &mut AsyncApp) -> Result<Self> {
Ok(Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
message_handler: None,
kind: FakeTransportKind::Tcp {
connection,
executor: cx.background_executor().clone(),
},
})
}
async fn handle_messages(
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
stdin_reader: PipeReader,
stdout_writer: PipeWriter,
) -> Result<()> {
use dap_types::requests::{Request, RunInTerminal, StartDebugging};
use serde_json::json;
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let mut this = Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
stdin_writer: Some(stdin_writer),
stdout_reader: Some(stdout_reader),
message_handler: None,
};
let request_handlers = this.request_handlers.clone();
let response_handlers = this.response_handlers.clone();
let mut reader = BufReader::new(stdin_reader);
let stdout_writer = Arc::new(smol::lock::Mutex::new(stdout_writer));
let mut buffer = String::new();
this.message_handler = Some(cx.background_spawn(async move {
let mut reader = BufReader::new(stdin_reader);
let mut buffer = String::new();
loop {
match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None)
.await
{
ConnectionResult::Timeout => {
anyhow::bail!("Timed out when connecting to debugger");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
}
ConnectionResult::Result(Err(e)) => break Err(e),
ConnectionResult::Result(Ok(message)) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Request(request)).unwrap();
let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
} else {
let response = if let Some(handle) =
request_handlers.lock().get_mut(request.command.as_str())
{
handle(request.seq, request.arguments.unwrap_or(json!({})))
} else {
panic!("No request handler for {}", request.command);
};
let message =
serde_json::to_string(&Message::Response(response))
.unwrap();
let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
}
}
Message::Event(event) => {
loop {
match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None).await {
ConnectionResult::Timeout => {
anyhow::bail!("Timed out when connecting to debugger");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
}
ConnectionResult::Result(Err(e)) => break Err(e),
ConnectionResult::Result(Ok(message)) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Event(event)).unwrap();
serde_json::to_string(&Message::Request(request)).unwrap();
let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message).as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
} else {
let response = if let Some(handle) =
request_handlers.lock().get_mut(request.command.as_str())
{
handle(request.seq, request.arguments.unwrap_or(json!({})))
} else {
panic!("No request handler for {}", request.command);
};
let response = match response {
RequestHandling::Respond(response) => response,
RequestHandling::Exit => {
break Err(anyhow!("exit in response to request"));
}
};
let message =
serde_json::to_string(&Message::Response(response)).unwrap();
let mut writer = stdout_writer.lock().await;
writer
@ -854,20 +895,56 @@ impl FakeTransport {
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
{
handle(response);
} else {
log::error!("No response handler for {}", response.command);
}
}
Message::Event(event) => {
let message = serde_json::to_string(&Message::Event(event)).unwrap();
let mut writer = stdout_writer.lock().await;
writer
.write_all(TransportDelegate::build_rpc_message(message).as_bytes())
.await
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
{
handle(response);
} else {
log::error!("No response handler for {}", response.command);
}
}
}
}
}
}));
}
}
async fn start_stdio(cx: &mut AsyncApp) -> Result<Self> {
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let kind = FakeTransportKind::Stdio {
stdin_writer: Some(stdin_writer),
stdout_reader: Some(stdout_reader),
};
let mut this = Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
message_handler: None,
kind,
};
let request_handlers = this.request_handlers.clone();
let response_handlers = this.response_handlers.clone();
this.message_handler = Some(cx.background_spawn(Self::handle_messages(
request_handlers,
response_handlers,
stdin_reader,
stdout_writer,
)));
Ok(this)
}
@ -876,7 +953,10 @@ impl FakeTransport {
#[cfg(any(test, feature = "test-support"))]
impl Transport for FakeTransport {
fn tcp_arguments(&self) -> Option<TcpArguments> {
None
match &self.kind {
FakeTransportKind::Stdio { .. } => None,
FakeTransportKind::Tcp { connection, .. } => Some(connection.clone()),
}
}
fn connect(
@ -887,12 +967,33 @@ impl Transport for FakeTransport {
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
> {
let result = util::maybe!({
Ok((
Box::new(self.stdin_writer.take().context("Cannot reconnect")?) as _,
Box::new(self.stdout_reader.take().context("Cannot reconnect")?) as _,
))
});
let result = match &mut self.kind {
FakeTransportKind::Stdio {
stdin_writer,
stdout_reader,
} => util::maybe!({
Ok((
Box::new(stdin_writer.take().context("Cannot reconnect")?) as _,
Box::new(stdout_reader.take().context("Cannot reconnect")?) as _,
))
}),
FakeTransportKind::Tcp { executor, .. } => {
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let request_handlers = self.request_handlers.clone();
let response_handlers = self.response_handlers.clone();
self.message_handler = Some(executor.spawn(Self::handle_messages(
request_handlers,
response_handlers,
stdin_reader,
stdout_writer,
)));
Ok((Box::new(stdin_writer) as _, Box::new(stdout_reader) as _))
}
};
Task::ready(result)
}

View file

@ -122,7 +122,7 @@ impl DebugSession {
.to_owned()
}
pub(crate) fn running_state(&self) -> &Entity<RunningState> {
pub fn running_state(&self) -> &Entity<RunningState> {
&self.running_state
}

View file

@ -1459,7 +1459,7 @@ impl RunningState {
}
}
pub(crate) fn selected_thread_id(&self) -> Option<ThreadId> {
pub fn selected_thread_id(&self) -> Option<ThreadId> {
self.thread_id
}

View file

@ -482,9 +482,7 @@ pub enum SelectMode {
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EditorMode {
SingleLine {
auto_width: bool,
},
SingleLine,
AutoHeight {
min_lines: usize,
max_lines: Option<usize>,
@ -1662,13 +1660,7 @@ impl Editor {
pub fn single_line(window: &mut Window, cx: &mut Context<Self>) -> Self {
let buffer = cx.new(|cx| Buffer::local("", cx));
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(
EditorMode::SingleLine { auto_width: false },
buffer,
None,
window,
cx,
)
Self::new(EditorMode::SingleLine, buffer, None, window, cx)
}
pub fn multi_line(window: &mut Window, cx: &mut Context<Self>) -> Self {
@ -1677,18 +1669,6 @@ impl Editor {
Self::new(EditorMode::full(), buffer, None, window, cx)
}
pub fn auto_width(window: &mut Window, cx: &mut Context<Self>) -> Self {
let buffer = cx.new(|cx| Buffer::local("", cx));
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
Self::new(
EditorMode::SingleLine { auto_width: true },
buffer,
None,
window,
cx,
)
}
pub fn auto_height(
min_lines: usize,
max_lines: usize,

View file

@ -7777,46 +7777,13 @@ impl Element for EditorElement {
editor.set_style(self.style.clone(), window, cx);
let layout_id = match editor.mode {
EditorMode::SingleLine { auto_width } => {
EditorMode::SingleLine => {
let rem_size = window.rem_size();
let height = self.style.text.line_height_in_pixels(rem_size);
if auto_width {
let editor_handle = cx.entity().clone();
let style = self.style.clone();
window.request_measured_layout(
Style::default(),
move |_, _, window, cx| {
let editor_snapshot = editor_handle
.update(cx, |editor, cx| editor.snapshot(window, cx));
let line = Self::layout_lines(
DisplayRow(0)..DisplayRow(1),
&editor_snapshot,
&style,
px(f32::MAX),
|_| false, // Single lines never soft wrap
window,
cx,
)
.pop()
.unwrap();
let font_id =
window.text_system().resolve_font(&style.text.font());
let font_size =
style.text.font_size.to_pixels(window.rem_size());
let em_width =
window.text_system().em_width(font_id, font_size).unwrap();
size(line.width + em_width, height)
},
)
} else {
let mut style = Style::default();
style.size.height = height.into();
style.size.width = relative(1.).into();
window.request_layout(style, None, cx)
}
let mut style = Style::default();
style.size.height = height.into();
style.size.width = relative(1.).into();
window.request_layout(style, None, cx)
}
EditorMode::AutoHeight {
min_lines,
@ -10388,7 +10355,7 @@ mod tests {
});
for editor_mode_without_invisibles in [
EditorMode::SingleLine { auto_width: false },
EditorMode::SingleLine,
EditorMode::AutoHeight {
min_lines: 1,
max_lines: Some(100),

View file

@ -166,46 +166,9 @@ impl State {
}
let response = Self::fetch_models(client, llm_api_token, use_cloud).await?;
cx.update(|cx| {
this.update(cx, |this, cx| {
let mut models = Vec::new();
for model in response.models {
models.push(Arc::new(model.clone()));
// Right now we represent thinking variants of models as separate models on the client,
// so we need to insert variants for any model that supports thinking.
if model.supports_thinking {
models.push(Arc::new(zed_llm_client::LanguageModel {
id: zed_llm_client::LanguageModelId(
format!("{}-thinking", model.id).into(),
),
display_name: format!("{} Thinking", model.display_name),
..model
}));
}
}
this.default_model = models
.iter()
.find(|model| model.id == response.default_model)
.cloned();
this.default_fast_model = models
.iter()
.find(|model| model.id == response.default_fast_model)
.cloned();
this.recommended_models = response
.recommended_models
.iter()
.filter_map(|id| models.iter().find(|model| &model.id == id))
.cloned()
.collect();
this.models = models;
cx.notify();
})
})??;
anyhow::Ok(())
this.update(cx, |this, cx| {
this.update_models(response, cx);
})
})
.await
.context("failed to fetch Zed models")
@ -216,12 +179,15 @@ impl State {
}),
_llm_token_subscription: cx.subscribe(
&refresh_llm_token_listener,
|this, _listener, _event, cx| {
move |this, _listener, _event, cx| {
let client = this.client.clone();
let llm_api_token = this.llm_api_token.clone();
cx.spawn(async move |_this, _cx| {
cx.spawn(async move |this, cx| {
llm_api_token.refresh(&client).await?;
anyhow::Ok(())
let response = Self::fetch_models(client, llm_api_token, use_cloud).await?;
this.update(cx, |this, cx| {
this.update_models(response, cx);
})
})
.detach_and_log_err(cx);
},
@ -264,6 +230,41 @@ impl State {
}));
}
fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context<Self>) {
let mut models = Vec::new();
for model in response.models {
models.push(Arc::new(model.clone()));
// Right now we represent thinking variants of models as separate models on the client,
// so we need to insert variants for any model that supports thinking.
if model.supports_thinking {
models.push(Arc::new(zed_llm_client::LanguageModel {
id: zed_llm_client::LanguageModelId(format!("{}-thinking", model.id).into()),
display_name: format!("{} Thinking", model.display_name),
..model
}));
}
}
self.default_model = models
.iter()
.find(|model| model.id == response.default_model)
.cloned();
self.default_fast_model = models
.iter()
.find(|model| model.id == response.default_fast_model)
.cloned();
self.recommended_models = response
.recommended_models
.iter()
.filter_map(|id| models.iter().find(|model| &model.id == id))
.cloned()
.collect();
self.models = models;
cx.notify();
}
async fn fetch_models(
client: Arc<Client>,
llm_api_token: LlmApiToken,

View file

@ -3362,8 +3362,14 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
self.lsp_store.update(cx, |lsp_store, cx| {
let guard = self.retain_remotely_created_models(cx);
let task = self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.definitions(buffer, position, cx)
});
cx.spawn(async move |_, _| {
let result = task.await;
drop(guard);
result
})
}
@ -3374,8 +3380,14 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
self.lsp_store.update(cx, |lsp_store, cx| {
let guard = self.retain_remotely_created_models(cx);
let task = self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.declarations(buffer, position, cx)
});
cx.spawn(async move |_, _| {
let result = task.await;
drop(guard);
result
})
}
@ -3386,8 +3398,14 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
self.lsp_store.update(cx, |lsp_store, cx| {
let guard = self.retain_remotely_created_models(cx);
let task = self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.type_definitions(buffer, position, cx)
});
cx.spawn(async move |_, _| {
let result = task.await;
drop(guard);
result
})
}
@ -3398,8 +3416,14 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
self.lsp_store.update(cx, |lsp_store, cx| {
let guard = self.retain_remotely_created_models(cx);
let task = self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.implementations(buffer, position, cx)
});
cx.spawn(async move |_, _| {
let result = task.await;
drop(guard);
result
})
}
@ -3410,8 +3434,14 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<Location>>> {
let position = position.to_point_utf16(buffer.read(cx));
self.lsp_store.update(cx, |lsp_store, cx| {
let guard = self.retain_remotely_created_models(cx);
let task = self.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.references(buffer, position, cx)
});
cx.spawn(async move |_, _| {
let result = task.await;
drop(guard);
result
})
}

View file

@ -611,7 +611,7 @@ impl RulesLibrary {
this.update_in(cx, |this, window, cx| match rule {
Ok(rule) => {
let title_editor = cx.new(|cx| {
let mut editor = Editor::auto_width(window, cx);
let mut editor = Editor::single_line(window, cx);
editor.set_placeholder_text("Untitled", cx);
editor.set_text(rule_metadata.title.unwrap_or_default(), window, cx);
if prompt_id.is_built_in() {

View file

@ -127,7 +127,7 @@ impl BatchedTextRun {
cx: &mut App,
) {
let pos = Point::new(
(origin.x + self.start_point.column as f32 * dimensions.cell_width).floor(),
origin.x + self.start_point.column as f32 * dimensions.cell_width,
origin.y + self.start_point.line as f32 * dimensions.line_height,
);

View file

@ -230,7 +230,11 @@ fn scroll_editor(
// column position, or the right-most column in the current
// line, seeing as the cursor might be in a short line, in which
// case we don't want to go past its last column.
let max_row_column = map.line_len(new_row);
let max_row_column = if new_row <= map.max_point().row() {
map.line_len(new_row)
} else {
0
};
let max_column = match min_column + visible_column_count as u32 {
max_column if max_column >= max_row_column => max_row_column,
max_column => max_column,

View file

@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
version = "0.195.0"
version = "0.195.1"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]

View file

@ -1 +1 @@
dev
preview

View file

@ -44,8 +44,6 @@ function CheckEnvironmentVariables {
}
}
$innoDir = "$env:ZED_WORKSPACE\inno"
function PrepareForBundle {
if (Test-Path "$innoDir") {
Remove-Item -Path "$innoDir" -Recurse -Force
@ -236,6 +234,8 @@ function BuildInstaller {
}
ParseZedWorkspace
$innoDir = "$env:ZED_WORKSPACE\inno"
CheckEnvironmentVariables
PrepareForBundle
BuildZedAndItsFriends