diff --git a/.github/actionlint.yml b/.github/actionlint.yml index 6bfbc27705..0ee6af8a1d 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -5,25 +5,28 @@ self-hosted-runner: # GitHub-hosted Runners - github-8vcpu-ubuntu-2404 - github-16vcpu-ubuntu-2404 + - github-32vcpu-ubuntu-2404 + - github-8vcpu-ubuntu-2204 + - github-16vcpu-ubuntu-2204 + - github-32vcpu-ubuntu-2204 + - github-16vcpu-ubuntu-2204-arm - windows-2025-16 - windows-2025-32 - windows-2025-64 - # Buildjet Ubuntu 20.04 - AMD x86_64 - - buildjet-2vcpu-ubuntu-2004 - - buildjet-4vcpu-ubuntu-2004 - - buildjet-8vcpu-ubuntu-2004 - - buildjet-16vcpu-ubuntu-2004 - - buildjet-32vcpu-ubuntu-2004 - # Buildjet Ubuntu 22.04 - AMD x86_64 - - buildjet-2vcpu-ubuntu-2204 - - buildjet-4vcpu-ubuntu-2204 - - buildjet-8vcpu-ubuntu-2204 - - buildjet-16vcpu-ubuntu-2204 - - buildjet-32vcpu-ubuntu-2204 - # Buildjet Ubuntu 22.04 - Graviton aarch64 - - buildjet-8vcpu-ubuntu-2204-arm - - buildjet-16vcpu-ubuntu-2204-arm - - buildjet-32vcpu-ubuntu-2204-arm + # Namespace Ubuntu 20.04 (Release builds) + - namespace-profile-16x32-ubuntu-2004 + - namespace-profile-32x64-ubuntu-2004 + - namespace-profile-16x32-ubuntu-2004-arm + - namespace-profile-32x64-ubuntu-2004-arm + # Namespace Ubuntu 22.04 (Everything else) + - namespace-profile-2x4-ubuntu-2204 + - namespace-profile-4x8-ubuntu-2204 + - namespace-profile-8x16-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 + - namespace-profile-32x64-ubuntu-2204 + # Namespace Limited Preview + - namespace-profile-8x16-ubuntu-2004-arm-m4 + - namespace-profile-8x32-ubuntu-2004-arm-m4 # Self Hosted Runners - self-mini-macos - self-32vcpu-windows-2022 diff --git a/.github/actions/build_docs/action.yml b/.github/actions/build_docs/action.yml index a7effad247..d2e62d5b22 100644 --- a/.github/actions/build_docs/action.yml +++ b/.github/actions/build_docs/action.yml @@ -13,7 +13,7 @@ runs: uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - cache-provider: "buildjet" + # cache-provider: "buildjet" - name: Install Linux dependencies shell: bash -euxo pipefail {0} diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index 8a48ff96f1..bfaf7a271b 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -16,7 +16,7 @@ jobs: bump_patch_version: if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 43d305faae..e53b64552b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -137,7 +137,7 @@ jobs: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_tests == 'true' runs-on: - - buildjet-8vcpu-ubuntu-2204 + - namespace-profile-8x16-ubuntu-2204 steps: - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 @@ -168,7 +168,7 @@ jobs: needs: [job_spec] if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-8vcpu-ubuntu-2204 + - namespace-profile-4x8-ubuntu-2204 steps: - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 @@ -221,7 +221,7 @@ jobs: github.repository_owner == 'zed-industries' && (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true') runs-on: - - 
buildjet-8vcpu-ubuntu-2204 + - namespace-profile-8x16-ubuntu-2204 steps: - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 @@ -328,7 +328,7 @@ jobs: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_tests == 'true' runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" @@ -342,7 +342,7 @@ jobs: uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - cache-provider: "buildjet" + # cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux @@ -380,7 +380,7 @@ jobs: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_tests == 'true' runs-on: - - buildjet-8vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" @@ -394,7 +394,7 @@ jobs: uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - cache-provider: "buildjet" + # cache-provider: "buildjet" - name: Install Clang & Mold run: ./script/remote-server && ./script/install-mold 2.34.0 @@ -511,8 +511,8 @@ jobs: runs-on: - self-mini-macos if: | - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') + ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [macos_tests] env: MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} @@ -597,10 +597,10 @@ jobs: timeout-minutes: 60 name: Linux x86_x64 release bundle runs-on: - - buildjet-16vcpu-ubuntu-2004 # ubuntu 20.04 for minimal glibc + - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc if: | - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') + ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [linux_tests] steps: - name: Checkout repo @@ -650,7 +650,7 @@ jobs: timeout-minutes: 60 name: Linux arm64 release bundle runs-on: - - buildjet-32vcpu-ubuntu-2204-arm + - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc if: | startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') @@ -703,10 +703,8 @@ jobs: timeout-minutes: 60 runs-on: github-8vcpu-ubuntu-2404 if: | - false && ( - startsWith(github.ref, 'refs/tags/v') - || contains(github.event.pull_request.labels.*.name, 'run-bundling') - ) + false && ( startsWith(github.ref, 'refs/tags/v') + || contains(github.event.pull_request.labels.*.name, 'run-bundling') ) needs: [linux_tests] name: Build Zed on FreeBSD steps: diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index fe443d493e..df35d44ca9 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -9,7 +9,7 @@ jobs: deploy-docs: name: Deploy Docs if: github.repository_owner == 'zed-industries' - runs-on: buildjet-16vcpu-ubuntu-2204 + runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: Checkout repo diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index f7348a1069..8f56cac5d9 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,7 +61,7 @@ jobs: - style 
- tests runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Install doctl uses: digitalocean/action-doctl@v2 @@ -94,7 +94,7 @@ jobs: needs: - publish runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Checkout repo diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml index 2ad302a602..b5da9e7b7c 100644 --- a/.github/workflows/eval.yml +++ b/.github/workflows/eval.yml @@ -32,7 +32,7 @@ jobs: github.repository_owner == 'zed-industries' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" @@ -46,7 +46,7 @@ jobs: uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - cache-provider: "buildjet" + # cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index 6c3a97c163..e682ce5890 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -20,7 +20,7 @@ jobs: matrix: system: - os: x86 Linux - runner: buildjet-16vcpu-ubuntu-2204 + runner: namespace-profile-16x32-ubuntu-2204 install_nix: true - os: arm Mac runner: [macOS, ARM64, test] diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index db4d44318e..de96c3df78 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -20,7 +20,7 @@ jobs: name: Run randomized tests if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Install Node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index c847149984..ed9f4c8450 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -128,7 +128,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-16vcpu-ubuntu-2004 + - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc needs: tests steps: - name: Checkout repo @@ -168,7 +168,7 @@ jobs: name: Create a Linux *.tar.gz bundle for ARM if: github.repository_owner == 'zed-industries' runs-on: - - buildjet-32vcpu-ubuntu-2204-arm + - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc needs: tests steps: - name: Checkout repo diff --git a/.github/workflows/unit_evals.yml b/.github/workflows/unit_evals.yml index cb4e39d151..2e03fb028f 100644 --- a/.github/workflows/unit_evals.yml +++ b/.github/workflows/unit_evals.yml @@ -23,7 +23,7 @@ jobs: timeout-minutes: 60 name: Run unit evals runs-on: - - buildjet-16vcpu-ubuntu-2204 + - namespace-profile-16x32-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" @@ -37,7 +37,7 @@ jobs: uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - cache-provider: "buildjet" + # cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux diff --git a/Cargo.lock b/Cargo.lock index e034212748..8f72e783c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1411,7 +1411,7 @@ dependencies = [ 
"anyhow", "arrayvec", "log", - "nom", + "nom 7.1.3", "num-rational", "v_frame", ] @@ -2785,7 +2785,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ - "nom", + "nom 7.1.3", ] [[package]] @@ -3071,17 +3071,22 @@ dependencies = [ "anyhow", "cloud_api_types", "futures 0.3.31", + "gpui", + "gpui_tokio", "http_client", "parking_lot", "serde_json", "workspace-hack", + "yawc", ] [[package]] name = "cloud_api_types" version = "0.1.0" dependencies = [ + "anyhow", "chrono", + "ciborium", "cloud_llm_client", "pretty_assertions", "serde", @@ -9118,6 +9123,7 @@ dependencies = [ "anyhow", "base64 0.22.1", "client", + "cloud_api_types", "cloud_llm_client", "collections", "futures 0.3.31", @@ -10580,6 +10586,15 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nom" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" +dependencies = [ + "memchr", +] + [[package]] name = "noop_proc_macro" version = "0.3.0" @@ -11169,6 +11184,7 @@ dependencies = [ "anyhow", "futures 0.3.31", "http_client", + "log", "schemars", "serde", "serde_json", @@ -15401,7 +15417,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" dependencies = [ - "nom", + "nom 7.1.3", "unicode_categories", ] @@ -16604,9 +16620,8 @@ dependencies = [ [[package]] name = "tiktoken-rs" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25563eeba904d770acf527e8b370fe9a5547bacd20ff84a0b6c3bc41288e5625" +version = "0.8.0" +source = "git+https://github.com/zed-industries/tiktoken-rs?rev=30c32a4522751699adeda0d5840c71c3b75ae73d#30c32a4522751699adeda0d5840c71c3b75ae73d" dependencies = [ "anyhow", "base64 0.22.1", @@ -19977,7 +19992,7 @@ dependencies = [ "naga", "nix 0.28.0", "nix 0.29.0", - "nom", + "nom 7.1.3", "num-bigint", "num-bigint-dig", "num-integer", @@ -20312,6 +20327,34 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yawc" +version = "0.2.4" +source = "git+https://github.com/deviant-forks/yawc?rev=1899688f3e69ace4545aceb97b2a13881cf26142#1899688f3e69ace4545aceb97b2a13881cf26142" +dependencies = [ + "base64 0.22.1", + "bytes 1.10.1", + "flate2", + "futures 0.3.31", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "js-sys", + "nom 8.0.0", + "pin-project", + "rand 0.8.5", + "sha1", + "thiserror 1.0.69", + "tokio", + "tokio-rustls 0.26.2", + "tokio-util", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", +] + [[package]] name = "yazi" version = "0.2.1" @@ -20418,7 +20461,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.199.0" +version = "0.199.7" dependencies = [ "activity_indicator", "agent", @@ -20821,6 +20864,7 @@ dependencies = [ "menu", "postage", "project", + "rand 0.8.5", "regex", "release_channel", "reqwest_client", diff --git a/Cargo.toml b/Cargo.toml index 7b82fd1910..86f1b8b0a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -461,6 +461,7 @@ bytes = "1.0" cargo_metadata = "0.19" cargo_toml = "0.21" chrono = { version = "0.4", features = ["serde"] } +ciborium = "0.2" circular-buffer = "1.0" clap = { version = "4.4", features = ["derive"] } cocoa = 
"0.26" @@ -600,7 +601,7 @@ sysinfo = "0.31.0" take-until = "0.2.0" tempfile = "3.20.0" thiserror = "2.0.12" -tiktoken-rs = "0.7.0" +tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" } time = { version = "0.3", features = [ "macros", "parsing", @@ -660,6 +661,9 @@ which = "6.0.0" windows-core = "0.61" wit-component = "0.221" workspace-hack = "0.1.0" +# We can switch back to the published version once https://github.com/infinitefield/yawc/pull/16 is merged and a new +# version is released. +yawc = { git = "https://github.com/deviant-forks/yawc", rev = "1899688f3e69ace4545aceb97b2a13881cf26142" } zstd = "0.11" [workspace.dependencies.async-stripe] diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index b4894cddcf..f09c012a85 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -14,7 +14,9 @@ use async_tungstenite::tungstenite::{ }; use clock::SystemClock; use cloud_api_client::CloudApiClient; +use cloud_api_client::websocket_protocol::MessageToClient; use credentials_provider::CredentialsProvider; +use feature_flags::FeatureFlagAppExt as _; use futures::{ AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, TryFutureExt as _, TryStreamExt, channel::oneshot, future::BoxFuture, @@ -191,6 +193,8 @@ pub fn init(client: &Arc, cx: &mut App) { }); } +pub type MessageToClientHandler = Box; + struct GlobalClient(Arc); impl Global for GlobalClient {} @@ -204,6 +208,7 @@ pub struct Client { credentials_provider: ClientCredentialsProvider, state: RwLock, handler_set: parking_lot::Mutex, + message_to_client_handlers: parking_lot::Mutex>, #[allow(clippy::type_complexity)] #[cfg(any(test, feature = "test-support"))] @@ -553,6 +558,7 @@ impl Client { credentials_provider: ClientCredentialsProvider::new(cx), state: Default::default(), handler_set: Default::default(), + message_to_client_handlers: parking_lot::Mutex::new(Vec::new()), #[cfg(any(test, feature = "test-support"))] authenticate: Default::default(), @@ -933,23 +939,77 @@ impl Client { } } - /// Performs a sign-in and also connects to Collab. + /// Establishes a WebSocket connection with Cloud for receiving updates from the server. + async fn connect_to_cloud(self: &Arc, cx: &AsyncApp) -> Result<()> { + let connect_task = cx.update({ + let cloud_client = self.cloud_client.clone(); + move |cx| cloud_client.connect(cx) + })??; + let connection = connect_task.await?; + + let (mut messages, task) = cx.update(|cx| connection.spawn(cx))?; + task.detach(); + + cx.spawn({ + let this = self.clone(); + async move |cx| { + while let Some(message) = messages.next().await { + if let Some(message) = message.log_err() { + this.handle_message_to_client(message, cx); + } + } + } + }) + .detach(); + + Ok(()) + } + + /// Performs a sign-in and also (optionally) connects to Collab. /// - /// This is called in places where we *don't* need to connect in the future. We will replace these calls with calls - /// to `sign_in` when we're ready to remove auto-connection to Collab. + /// Only Zed staff automatically connect to Collab. 
pub async fn sign_in_with_optional_connect( self: &Arc, try_provider: bool, cx: &AsyncApp, ) -> Result<()> { + let (is_staff_tx, is_staff_rx) = oneshot::channel::(); + let mut is_staff_tx = Some(is_staff_tx); + cx.update(|cx| { + cx.on_flags_ready(move |state, _cx| { + if let Some(is_staff_tx) = is_staff_tx.take() { + is_staff_tx.send(state.is_staff).log_err(); + } + }) + .detach(); + }) + .log_err(); + let credentials = self.sign_in(try_provider, cx).await?; - let connect_result = match self.connect_with_credentials(credentials, cx).await { - ConnectionResult::Timeout => Err(anyhow!("connection timed out")), - ConnectionResult::ConnectionReset => Err(anyhow!("connection reset")), - ConnectionResult::Result(result) => result.context("client auth and connect"), - }; - connect_result.log_err(); + self.connect_to_cloud(cx).await.log_err(); + + cx.update(move |cx| { + cx.spawn({ + let client = self.clone(); + async move |cx| { + let is_staff = is_staff_rx.await?; + if is_staff { + match client.connect_with_credentials(credentials, cx).await { + ConnectionResult::Timeout => Err(anyhow!("connection timed out")), + ConnectionResult::ConnectionReset => Err(anyhow!("connection reset")), + ConnectionResult::Result(result) => { + result.context("client auth and connect") + } + } + } else { + Ok(()) + } + } + }) + .detach_and_log_err(cx); + }) + .log_err(); Ok(()) } @@ -1622,6 +1682,24 @@ impl Client { } } + pub fn add_message_to_client_handler( + self: &Arc, + handler: impl Fn(&MessageToClient, &mut App) + Send + Sync + 'static, + ) { + self.message_to_client_handlers + .lock() + .push(Box::new(handler)); + } + + fn handle_message_to_client(self: &Arc, message: MessageToClient, cx: &AsyncApp) { + cx.update(|cx| { + for handler in self.message_to_client_handlers.lock().iter() { + handler(&message, cx); + } + }) + .ok(); + } + pub fn telemetry(&self) -> &Arc { &self.telemetry } diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 3c125a0882..faf46945d8 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -1,6 +1,7 @@ use super::{Client, Status, TypedEnvelope, proto}; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; +use cloud_api_client::websocket_protocol::MessageToClient; use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo}; use cloud_llm_client::{ EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, @@ -181,6 +182,12 @@ impl UserStore { client.add_message_handler(cx.weak_entity(), Self::handle_update_invite_info), client.add_message_handler(cx.weak_entity(), Self::handle_show_contacts), ]; + + client.add_message_to_client_handler({ + let this = cx.weak_entity(); + move |message, cx| Self::handle_message_to_client(this.clone(), message, cx) + }); + Self { users: Default::default(), by_github_login: Default::default(), @@ -219,17 +226,35 @@ impl UserStore { match status { Status::Authenticated | Status::Connected { .. 
} => { if let Some(user_id) = client.user_id() { - let response = client.cloud_client().get_authenticated_user().await; - let mut current_user = None; + let response = client + .cloud_client() + .get_authenticated_user() + .await + .log_err(); + + let current_user_and_response = if let Some(response) = response { + let user = Arc::new(User { + id: user_id, + github_login: response.user.github_login.clone().into(), + avatar_uri: response.user.avatar_url.clone().into(), + name: response.user.name.clone(), + }); + + Some((user, response)) + } else { + None + }; + current_user_tx + .send( + current_user_and_response + .as_ref() + .map(|(user, _)| user.clone()), + ) + .await + .ok(); + cx.update(|cx| { - if let Some(response) = response.log_err() { - let user = Arc::new(User { - id: user_id, - github_login: response.user.github_login.clone().into(), - avatar_uri: response.user.avatar_url.clone().into(), - name: response.user.name.clone(), - }); - current_user = Some(user.clone()); + if let Some((user, response)) = current_user_and_response { this.update(cx, |this, cx| { this.by_github_login .insert(user.github_login.clone(), user_id); @@ -240,7 +265,6 @@ impl UserStore { anyhow::Ok(()) } })??; - current_user_tx.send(current_user).await.ok(); this.update(cx, |_, cx| cx.notify())?; } @@ -813,6 +837,32 @@ impl UserStore { cx.emit(Event::PrivateUserInfoUpdated); } + fn handle_message_to_client(this: WeakEntity, message: &MessageToClient, cx: &App) { + cx.spawn(async move |cx| { + match message { + MessageToClient::UserUpdated => { + let cloud_client = cx + .update(|cx| { + this.read_with(cx, |this, _cx| { + this.client.upgrade().map(|client| client.cloud_client()) + }) + })?? + .ok_or(anyhow::anyhow!("Failed to get Cloud client"))?; + + let response = cloud_client.get_authenticated_user().await?; + cx.update(|cx| { + this.update(cx, |this, cx| { + this.update_authenticated_user(response, cx); + }) + })??; + } + } + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + pub fn watch_current_user(&self) -> watch::Receiver>> { self.current_user.clone() } diff --git a/crates/cloud_api_client/Cargo.toml b/crates/cloud_api_client/Cargo.toml index d56aa94c6e..8e50ccb191 100644 --- a/crates/cloud_api_client/Cargo.toml +++ b/crates/cloud_api_client/Cargo.toml @@ -15,7 +15,10 @@ path = "src/cloud_api_client.rs" anyhow.workspace = true cloud_api_types.workspace = true futures.workspace = true +gpui.workspace = true +gpui_tokio.workspace = true http_client.workspace = true parking_lot.workspace = true serde_json.workspace = true workspace-hack.workspace = true +yawc.workspace = true diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index edac051a0e..ef9a1a9a55 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -1,11 +1,19 @@ +mod websocket; + use std::sync::Arc; use anyhow::{Context, Result, anyhow}; +use cloud_api_types::websocket_protocol::{PROTOCOL_VERSION, PROTOCOL_VERSION_HEADER_NAME}; pub use cloud_api_types::*; use futures::AsyncReadExt as _; +use gpui::{App, Task}; +use gpui_tokio::Tokio; use http_client::http::request; use http_client::{AsyncBody, HttpClientWithUrl, Method, Request, StatusCode}; use parking_lot::RwLock; +use yawc::WebSocket; + +use crate::websocket::Connection; struct Credentials { user_id: u32, @@ -78,6 +86,41 @@ impl CloudApiClient { Ok(serde_json::from_str(&body)?) 
} + pub fn connect(&self, cx: &App) -> Result>> { + let mut connect_url = self + .http_client + .build_zed_cloud_url("/client/users/connect", &[])?; + connect_url + .set_scheme(match connect_url.scheme() { + "https" => "wss", + "http" => "ws", + scheme => Err(anyhow!("invalid URL scheme: {scheme}"))?, + }) + .map_err(|_| anyhow!("failed to set URL scheme"))?; + + let credentials = self.credentials.read(); + let credentials = credentials.as_ref().context("no credentials provided")?; + let authorization_header = format!("{} {}", credentials.user_id, credentials.access_token); + + Ok(cx.spawn(async move |cx| { + let handle = cx + .update(|cx| Tokio::handle(cx)) + .ok() + .context("failed to get Tokio handle")?; + let _guard = handle.enter(); + + let ws = WebSocket::connect(connect_url) + .with_request( + request::Builder::new() + .header("Authorization", authorization_header) + .header(PROTOCOL_VERSION_HEADER_NAME, PROTOCOL_VERSION.to_string()), + ) + .await?; + + Ok(Connection::new(ws)) + })) + } + pub async fn accept_terms_of_service(&self) -> Result { let request = self.build_request( Request::builder().method(Method::POST).uri( diff --git a/crates/cloud_api_client/src/websocket.rs b/crates/cloud_api_client/src/websocket.rs new file mode 100644 index 0000000000..48a628db78 --- /dev/null +++ b/crates/cloud_api_client/src/websocket.rs @@ -0,0 +1,73 @@ +use std::pin::Pin; +use std::time::Duration; + +use anyhow::Result; +use cloud_api_types::websocket_protocol::MessageToClient; +use futures::channel::mpsc::unbounded; +use futures::stream::{SplitSink, SplitStream}; +use futures::{FutureExt as _, SinkExt as _, Stream, StreamExt as _, TryStreamExt as _, pin_mut}; +use gpui::{App, BackgroundExecutor, Task}; +use yawc::WebSocket; +use yawc::frame::{FrameView, OpCode}; + +const KEEPALIVE_INTERVAL: Duration = Duration::from_secs(1); + +pub type MessageStream = Pin>>>; + +pub struct Connection { + tx: SplitSink, + rx: SplitStream, +} + +impl Connection { + pub fn new(ws: WebSocket) -> Self { + let (tx, rx) = ws.split(); + + Self { tx, rx } + } + + pub fn spawn(self, cx: &App) -> (MessageStream, Task<()>) { + let (mut tx, rx) = (self.tx, self.rx); + + let (message_tx, message_rx) = unbounded(); + + let handle_io = |executor: BackgroundExecutor| async move { + // Send messages on this frequency so the connection isn't closed. + let keepalive_timer = executor.timer(KEEPALIVE_INTERVAL).fuse(); + futures::pin_mut!(keepalive_timer); + + let rx = rx.fuse(); + pin_mut!(rx); + + loop { + futures::select_biased! 
{ + _ = keepalive_timer => { + let _ = tx.send(FrameView::ping(Vec::new())).await; + + keepalive_timer.set(executor.timer(KEEPALIVE_INTERVAL).fuse()); + } + frame = rx.next() => { + let Some(frame) = frame else { + break; + }; + + match frame.opcode { + OpCode::Binary => { + let message_result = MessageToClient::deserialize(&frame.payload); + message_tx.unbounded_send(message_result).ok(); + } + OpCode::Close => { + break; + } + _ => {} + } + } + } + } + }; + + let task = cx.spawn(async move |cx| handle_io(cx.background_executor().clone()).await); + + (message_rx.into_stream().boxed(), task) + } +} diff --git a/crates/cloud_api_types/Cargo.toml b/crates/cloud_api_types/Cargo.toml index 868797df3b..28e0a36a44 100644 --- a/crates/cloud_api_types/Cargo.toml +++ b/crates/cloud_api_types/Cargo.toml @@ -12,7 +12,9 @@ workspace = true path = "src/cloud_api_types.rs" [dependencies] +anyhow.workspace = true chrono.workspace = true +ciborium.workspace = true cloud_llm_client.workspace = true serde.workspace = true workspace-hack.workspace = true diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index b38b38cde1..fa189cd3b5 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -1,4 +1,5 @@ mod timestamp; +pub mod websocket_protocol; use serde::{Deserialize, Serialize}; diff --git a/crates/cloud_api_types/src/websocket_protocol.rs b/crates/cloud_api_types/src/websocket_protocol.rs new file mode 100644 index 0000000000..75f6a73b43 --- /dev/null +++ b/crates/cloud_api_types/src/websocket_protocol.rs @@ -0,0 +1,28 @@ +use anyhow::{Context as _, Result}; +use serde::{Deserialize, Serialize}; + +/// The version of the Cloud WebSocket protocol. +pub const PROTOCOL_VERSION: u32 = 0; + +/// The name of the header used to indicate the protocol version in use. +pub const PROTOCOL_VERSION_HEADER_NAME: &str = "x-zed-protocol-version"; + +/// A message from Cloud to the Zed client. +#[derive(Debug, Serialize, Deserialize)] +pub enum MessageToClient { + /// The user was updated and should be refreshed. 
+ UserUpdated, +} + +impl MessageToClient { + pub fn serialize(&self) -> Result> { + let mut buffer = Vec::new(); + ciborium::into_writer(self, &mut buffer).context("failed to serialize message")?; + + Ok(buffer) + } + + pub fn deserialize(data: &[u8]) -> Result { + ciborium::from_reader(data).context("failed to deserialize message") + } +} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 31635575a8..82f74d910b 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -699,7 +699,10 @@ impl Database { language_server::Column::ProjectId, language_server::Column::Id, ]) - .update_column(language_server::Column::Name) + .update_columns([ + language_server::Column::Name, + language_server::Column::Capabilities, + ]) .to_owned(), ) .exec(&*tx) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index bb7c2ba1cd..51e4ff8965 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -3053,7 +3053,7 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::move_channel_down)) .track_focus(&self.focus_handle) .size_full() - .child(if self.user_store.read(cx).current_user().is_none() { + .child(if !self.client.status().borrow().is_connected() { self.render_signed_out(cx) } else { self.render_signed_in(window, cx) diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 49ae2b9d9c..166a582c70 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -21,7 +21,7 @@ use language::{ point_from_lsp, point_to_lsp, }; use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId, LanguageServerName}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, VersionCheck}; use parking_lot::Mutex; use project::DisableAiSettings; use request::StatusNotification; @@ -1169,9 +1169,8 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: const SERVER_PATH: &str = "node_modules/@github/copilot-language-server/dist/language-server.js"; - let latest_version = node_runtime - .npm_package_latest_version(PACKAGE_NAME) - .await?; + // pinning it: https://github.com/zed-industries/zed/issues/36093 + const PINNED_VERSION: &str = "1.354"; let server_path = paths::copilot_dir().join(SERVER_PATH); fs.create_dir(paths::copilot_dir()).await?; @@ -1181,12 +1180,13 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: PACKAGE_NAME, &server_path, paths::copilot_dir(), - &latest_version, + &PINNED_VERSION, + VersionCheck::VersionMismatch, ) .await; if should_install { node_runtime - .npm_install_packages(paths::copilot_dir(), &[(PACKAGE_NAME, &latest_version)]) + .npm_install_packages(paths::copilot_dir(), &[(PACKAGE_NAME, &PINNED_VERSION)]) .await?; } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 461ce6fbb3..a2bd934311 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -152,6 +152,9 @@ impl PythonDebugAdapter { maybe!(async move { let response = latest_release.filter(|response| response.status().is_success())?; + let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME); + std::fs::create_dir_all(&download_dir).ok()?; + let mut output = String::new(); response .into_body() diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e1647215bc..6980037664 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8028,12 
+8028,20 @@ impl Element for EditorElement { autoscroll_containing_element, needs_horizontal_autoscroll, ) = self.editor.update(cx, |editor, cx| { - let autoscroll_request = editor.autoscroll_request(); + let autoscroll_request = editor.scroll_manager.take_autoscroll_request(); + let autoscroll_containing_element = autoscroll_request.is_some() || editor.has_pending_selection(); let (needs_horizontal_autoscroll, was_scrolled) = editor - .autoscroll_vertically(bounds, line_height, max_scroll_top, window, cx); + .autoscroll_vertically( + bounds, + line_height, + max_scroll_top, + autoscroll_request, + window, + cx, + ); if was_scrolled.0 { snapshot = editor.snapshot(window, cx); } @@ -8423,7 +8431,11 @@ impl Element for EditorElement { Ok(blocks) => blocks, Err(resized_blocks) => { self.editor.update(cx, |editor, cx| { - editor.resize_blocks(resized_blocks, autoscroll_request, cx) + editor.resize_blocks( + resized_blocks, + autoscroll_request.map(|(autoscroll, _)| autoscroll), + cx, + ) }); return self.prepaint(None, _inspector_id, bounds, &mut (), window, cx); } @@ -8468,6 +8480,7 @@ impl Element for EditorElement { scroll_width, em_advance, &line_layouts, + autoscroll_request, window, cx, ) diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index ecaf7c11e4..08ff23f8f7 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -348,8 +348,8 @@ impl ScrollManager { self.show_scrollbars } - pub fn autoscroll_request(&self) -> Option { - self.autoscroll_request.map(|(autoscroll, _)| autoscroll) + pub fn take_autoscroll_request(&mut self) -> Option<(Autoscroll, bool)> { + self.autoscroll_request.take() } pub fn active_scrollbar_state(&self) -> Option<&ActiveScrollbarState> { diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs index e8a1f8da73..88d3b52d76 100644 --- a/crates/editor/src/scroll/autoscroll.rs +++ b/crates/editor/src/scroll/autoscroll.rs @@ -102,15 +102,12 @@ impl AutoscrollStrategy { pub(crate) struct NeedsHorizontalAutoscroll(pub(crate) bool); impl Editor { - pub fn autoscroll_request(&self) -> Option { - self.scroll_manager.autoscroll_request() - } - pub(crate) fn autoscroll_vertically( &mut self, bounds: Bounds, line_height: Pixels, max_scroll_top: f32, + autoscroll_request: Option<(Autoscroll, bool)>, window: &mut Window, cx: &mut Context, ) -> (NeedsHorizontalAutoscroll, WasScrolled) { @@ -137,7 +134,7 @@ impl Editor { WasScrolled(false) }; - let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else { + let Some((autoscroll, local)) = autoscroll_request else { return (NeedsHorizontalAutoscroll(false), editor_was_scrolled); }; @@ -284,9 +281,12 @@ impl Editor { scroll_width: Pixels, em_advance: Pixels, layouts: &[LineWithInvisibles], + autoscroll_request: Option<(Autoscroll, bool)>, window: &mut Window, cx: &mut Context, ) -> Option> { + let (_, local) = autoscroll_request?; + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let selections = self.selections.all::(cx); let mut scroll_position = self.scroll_manager.scroll_position(&display_map); @@ -335,10 +335,10 @@ impl Editor { let was_scrolled = if target_left < scroll_left { scroll_position.x = target_left / em_advance; - self.set_scroll_position_internal(scroll_position, true, true, window, cx) + self.set_scroll_position_internal(scroll_position, local, true, window, cx) } else if target_right > scroll_right { scroll_position.x = (target_right - viewport_width) / em_advance; - 
self.set_scroll_position_internal(scroll_position, true, true, window, cx) + self.set_scroll_position_internal(scroll_position, local, true, window, cx) } else { WasScrolled(false) }; diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs index 631bafc841..ef357adf35 100644 --- a/crates/feature_flags/src/feature_flags.rs +++ b/crates/feature_flags/src/feature_flags.rs @@ -158,6 +158,11 @@ where } } +#[derive(Debug)] +pub struct OnFlagsReady { + pub is_staff: bool, +} + pub trait FeatureFlagAppExt { fn wait_for_flag(&mut self) -> WaitForFlag; @@ -169,6 +174,10 @@ pub trait FeatureFlagAppExt { fn has_flag(&self) -> bool; fn is_staff(&self) -> bool; + fn on_flags_ready(&mut self, callback: F) -> Subscription + where + F: FnMut(OnFlagsReady, &mut App) + 'static; + fn observe_flag(&mut self, callback: F) -> Subscription where F: FnMut(bool, &mut App) + 'static; @@ -198,6 +207,21 @@ impl FeatureFlagAppExt for App { .unwrap_or(false) } + fn on_flags_ready(&mut self, mut callback: F) -> Subscription + where + F: FnMut(OnFlagsReady, &mut App) + 'static, + { + self.observe_global::(move |cx| { + let feature_flags = cx.global::(); + callback( + OnFlagsReady { + is_staff: feature_flags.staff, + }, + cx, + ); + }) + } + fn observe_flag(&mut self, mut callback: F) -> Subscription where F: FnMut(bool, &mut App) + 'static, diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 841be60b0e..f9920623b5 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -20,6 +20,7 @@ anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true base64.workspace = true client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true futures.workspace = true diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 8ae5893410..3b4c1fa269 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -3,11 +3,9 @@ use std::sync::Arc; use anyhow::Result; use client::Client; +use cloud_api_types::websocket_protocol::MessageToClient; use cloud_llm_client::Plan; -use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal as _, -}; -use proto::TypedEnvelope; +use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _}; use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use thiserror::Error; @@ -82,9 +80,7 @@ impl Global for GlobalRefreshLlmTokenListener {} pub struct RefreshLlmTokenEvent; -pub struct RefreshLlmTokenListener { - _llm_token_subscription: client::Subscription, -} +pub struct RefreshLlmTokenListener; impl EventEmitter for RefreshLlmTokenListener {} @@ -99,17 +95,21 @@ impl RefreshLlmTokenListener { } fn new(client: Arc, cx: &mut Context) -> Self { - Self { - _llm_token_subscription: client - .add_message_handler(cx.weak_entity(), Self::handle_refresh_llm_token), - } + client.add_message_to_client_handler({ + let this = cx.entity(); + move |message, cx| { + Self::handle_refresh_llm_token(this.clone(), message, cx); + } + }); + + Self } - async fn handle_refresh_llm_token( - this: Entity, - _: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result<()> { - this.update(&mut cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)) + fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { + match message { + 
MessageToClient::UserUpdated => { + this.update(cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)); + } + } } } diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 40dd120761..8aa5c54844 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -941,6 +941,8 @@ impl LanguageModel for CloudLanguageModel { request, model.id(), model.supports_parallel_tool_calls(), + model.supports_prompt_cache_key(), + None, None, ); let llm_api_token = self.llm_api_token.clone(); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 5185e979b7..64114d5a3e 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -14,7 +14,7 @@ use language_model::{ RateLimiter, Role, StopReason, TokenUsage, }; use menu; -use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion}; +use open_ai::{ImageUrl, Model, ReasoningEffort, ResponseStreamEvent, stream_completion}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; @@ -45,6 +45,7 @@ pub struct AvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + pub reasoning_effort: Option, } pub struct OpenAiLanguageModelProvider { @@ -213,6 +214,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider { max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, max_completion_tokens: model.max_completion_tokens, + reasoning_effort: model.reasoning_effort.clone(), }, ); } @@ -301,7 +303,25 @@ impl LanguageModel for OpenAiLanguageModel { } fn supports_images(&self) -> bool { - false + use open_ai::Model; + match &self.model { + Model::FourOmni + | Model::FourOmniMini + | Model::FourPointOne + | Model::FourPointOneMini + | Model::FourPointOneNano + | Model::Five + | Model::FiveMini + | Model::FiveNano + | Model::O1 + | Model::O3 + | Model::O4Mini => true, + Model::ThreePointFiveTurbo + | Model::Four + | Model::FourTurbo + | Model::O3Mini + | Model::Custom { .. 
} => false, + } } fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { @@ -350,7 +370,9 @@ impl LanguageModel for OpenAiLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + self.model.reasoning_effort(), ); let completions = self.stream_completion(request, cx); async move { @@ -365,7 +387,9 @@ pub fn into_open_ai( request: LanguageModelRequest, model_id: &str, supports_parallel_tool_calls: bool, + supports_prompt_cache_key: bool, max_output_tokens: Option, + reasoning_effort: Option, ) -> open_ai::Request { let stream = !model_id.starts_with("o1-"); @@ -455,6 +479,11 @@ pub fn into_open_ai( } else { None }, + prompt_cache_key: if supports_prompt_cache_key { + request.thread_id + } else { + None + }, tools: request .tools .into_iter() @@ -471,6 +500,7 @@ pub fn into_open_ai( LanguageModelToolChoice::Any => open_ai::ToolChoice::Required, LanguageModelToolChoice::None => open_ai::ToolChoice::None, }), + reasoning_effort, } } @@ -674,6 +704,10 @@ pub fn count_open_ai_tokens( | Model::O3 | Model::O3Mini | Model::O4Mini => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), + // GPT-5 models don't have tiktoken support yet; fall back on gpt-4o tokenizer + Model::Five | Model::FiveMini | Model::FiveNano => { + tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages) + } } .map(|tokens| tokens as u64) }) diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 38bd7cee06..5f546f5219 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -355,7 +355,16 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { LanguageModelCompletionError, >, > { - let request = into_open_ai(request, &self.model.name, true, self.max_output_tokens()); + let supports_parallel_tool_call = true; + let supports_prompt_cache_key = false; + let request = into_open_ai( + request, + &self.model.name, + supports_parallel_tool_call, + supports_prompt_cache_key, + self.max_output_tokens(), + None, + ); let completions = self.stream_completion(request, cx); async move { let mapper = OpenAiEventMapper::new(); diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 037ce467d0..9f447cb68b 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -355,7 +355,9 @@ impl LanguageModel for VercelLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + None, ); let completions = self.stream_completion(request, cx); async move { diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 5f6034571b..fed6fe92bf 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -359,7 +359,9 @@ impl LanguageModel for XAiLanguageModel { request, self.model.id(), self.model.supports_parallel_tool_calls(), + self.model.supports_prompt_cache_key(), self.max_output_tokens(), + None, ); let completions = self.stream_completion(request, cx); async move { diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 7725e079be..19329fcc6e 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -103,7 +103,13 @@ impl 
LspAdapter for CssLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index ca82bb2431..019b45d396 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -340,7 +340,13 @@ impl LspAdapter for JsonLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 0524c02fd5..5513324487 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -206,6 +206,7 @@ impl LspAdapter for PythonLspAdapter { &server_path, &container_dir, &version, + Default::default(), ) .await; diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index a7edbb148c..6f03eeda8d 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -108,7 +108,13 @@ impl LspAdapter for TailwindLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index f976b62614..a8ba880889 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -589,6 +589,7 @@ impl LspAdapter for TypeScriptLspAdapter { &server_path, &container_dir, version.typescript_version.as_str(), + Default::default(), ) .await; diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 33751f733e..73498fc579 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -116,6 +116,7 @@ impl LspAdapter for VtslsLspAdapter { &server_path, &container_dir, &latest_version.server_version, + Default::default(), ) .await { @@ -129,6 +130,7 @@ impl LspAdapter for VtslsLspAdapter { &container_dir.join(Self::TYPESCRIPT_TSDK_PATH), &container_dir, &latest_version.typescript_version, + Default::default(), ) .await { diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 815605d524..28be2cc1a4 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -104,7 +104,13 @@ impl LspAdapter for YamlLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/lsp/src/input_handler.rs b/crates/lsp/src/input_handler.rs index db3f1190fc..001ebf1fc9 100644 --- a/crates/lsp/src/input_handler.rs +++ b/crates/lsp/src/input_handler.rs @@ -13,14 +13,15 @@ use parking_lot::Mutex; use smol::io::BufReader; use crate::{ - AnyNotification, AnyResponse, CONTENT_LEN_HEADER, IoHandler, IoKind, 
RequestId, ResponseHandler, + AnyResponse, CONTENT_LEN_HEADER, IoHandler, IoKind, NotificationOrRequest, RequestId, + ResponseHandler, }; const HEADER_DELIMITER: &[u8; 4] = b"\r\n\r\n"; /// Handler for stdout of language server. pub struct LspStdoutHandler { pub(super) loop_handle: Task>, - pub(super) notifications_channel: UnboundedReceiver, + pub(super) incoming_messages: UnboundedReceiver, } async fn read_headers(reader: &mut BufReader, buffer: &mut Vec) -> Result<()> @@ -54,13 +55,13 @@ impl LspStdoutHandler { let loop_handle = cx.spawn(Self::handler(stdout, tx, response_handlers, io_handlers)); Self { loop_handle, - notifications_channel, + incoming_messages: notifications_channel, } } async fn handler( stdout: Input, - notifications_sender: UnboundedSender, + notifications_sender: UnboundedSender, response_handlers: Arc>>>, io_handlers: Arc>>, ) -> anyhow::Result<()> @@ -96,7 +97,7 @@ impl LspStdoutHandler { } } - if let Ok(msg) = serde_json::from_slice::(&buffer) { + if let Ok(msg) = serde_json::from_slice::(&buffer) { notifications_sender.unbounded_send(msg)?; } else if let Ok(AnyResponse { id, error, result, .. diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index b9701a83d2..3f45d2e6fc 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -242,7 +242,7 @@ struct Notification<'a, T> { /// Language server RPC notification message before it is deserialized into a concrete type. #[derive(Debug, Clone, Deserialize)] -struct AnyNotification { +struct NotificationOrRequest { #[serde(default)] id: Option, method: String, @@ -252,7 +252,10 @@ struct AnyNotification { #[derive(Debug, Serialize, Deserialize)] struct Error { + code: i64, message: String, + #[serde(default)] + data: Option, } pub trait LspRequestFuture: Future> { @@ -364,6 +367,7 @@ impl LanguageServer { notification.method, serde_json::to_string_pretty(¬ification.params).unwrap(), ); + false }, ); @@ -389,7 +393,7 @@ impl LanguageServer { Stdin: AsyncWrite + Unpin + Send + 'static, Stdout: AsyncRead + Unpin + Send + 'static, Stderr: AsyncRead + Unpin + Send + 'static, - F: FnMut(AnyNotification) + 'static + Send + Sync + Clone, + F: Fn(&NotificationOrRequest) -> bool + 'static + Send + Sync + Clone, { let (outbound_tx, outbound_rx) = channel::unbounded::(); let (output_done_tx, output_done_rx) = barrier::channel(); @@ -400,14 +404,34 @@ impl LanguageServer { let io_handlers = Arc::new(Mutex::new(HashMap::default())); let stdout_input_task = cx.spawn({ - let on_unhandled_notification = on_unhandled_notification.clone(); + let unhandled_notification_wrapper = { + let response_channel = outbound_tx.clone(); + async move |msg: NotificationOrRequest| { + let did_handle = on_unhandled_notification(&msg); + if !did_handle && let Some(message_id) = msg.id { + let response = AnyResponse { + jsonrpc: JSON_RPC_VERSION, + id: message_id, + error: Some(Error { + code: -32601, + message: format!("Unrecognized method `{}`", msg.method), + data: None, + }), + result: None, + }; + if let Ok(response) = serde_json::to_string(&response) { + response_channel.send(response).await.ok(); + } + } + } + }; let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); let io_handlers = io_handlers.clone(); async move |cx| { - Self::handle_input( + Self::handle_incoming_messages( stdout, - on_unhandled_notification, + unhandled_notification_wrapper, notification_handlers, response_handlers, io_handlers, @@ -433,7 +457,7 @@ impl LanguageServer { stdout.or(stderr) }); let output_task 
= cx.background_spawn({ - Self::handle_output( + Self::handle_outgoing_messages( stdin, outbound_rx, output_done_tx, @@ -479,9 +503,9 @@ impl LanguageServer { self.code_action_kinds.clone() } - async fn handle_input( + async fn handle_incoming_messages( stdout: Stdout, - mut on_unhandled_notification: F, + on_unhandled_notification: impl AsyncFn(NotificationOrRequest) + 'static + Send, notification_handlers: Arc>>, response_handlers: Arc>>>, io_handlers: Arc>>, @@ -489,7 +513,6 @@ impl LanguageServer { ) -> anyhow::Result<()> where Stdout: AsyncRead + Unpin + Send + 'static, - F: FnMut(AnyNotification) + 'static + Send, { use smol::stream::StreamExt; let stdout = BufReader::new(stdout); @@ -506,15 +529,19 @@ impl LanguageServer { cx.background_executor().clone(), ); - while let Some(msg) = input_handler.notifications_channel.next().await { - { + while let Some(msg) = input_handler.incoming_messages.next().await { + let unhandled_message = { let mut notification_handlers = notification_handlers.lock(); if let Some(handler) = notification_handlers.get_mut(msg.method.as_str()) { handler(msg.id, msg.params.unwrap_or(Value::Null), cx); + None } else { - drop(notification_handlers); - on_unhandled_notification(msg); + Some(msg) } + }; + + if let Some(msg) = unhandled_message { + on_unhandled_notification(msg).await; } // Don't starve the main thread when receiving lots of notifications at once. @@ -558,7 +585,7 @@ impl LanguageServer { } } - async fn handle_output( + async fn handle_outgoing_messages( stdin: Stdin, outbound_rx: channel::Receiver, output_done_tx: barrier::Sender, @@ -1036,7 +1063,9 @@ impl LanguageServer { jsonrpc: JSON_RPC_VERSION, id, value: LspResult::Error(Some(Error { + code: lsp_types::error_codes::REQUEST_FAILED, message: error.to_string(), + data: None, })), }, }; @@ -1057,7 +1086,9 @@ impl LanguageServer { id, result: None, error: Some(Error { + code: -32700, // Parse error message: error.to_string(), + data: None, }), }; if let Some(response) = serde_json::to_string(&response).log_err() { @@ -1559,7 +1590,7 @@ impl FakeLanguageServer { root, Some(workspace_folders.clone()), cx, - |_| {}, + |_| false, ); server.process_name = process_name; let fake = FakeLanguageServer { @@ -1582,9 +1613,10 @@ impl FakeLanguageServer { notifications_tx .try_send(( msg.method.to_string(), - msg.params.unwrap_or(Value::Null).to_string(), + msg.params.as_ref().unwrap_or(&Value::Null).to_string(), )) .ok(); + true }, ); server.process_name = name.as_str().into(); @@ -1862,7 +1894,7 @@ mod tests { #[gpui::test] fn test_deserialize_string_digit_id() { let json = r#"{"jsonrpc":"2.0","id":"2","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Str("2".to_string()); assert_eq!(notification.id, Some(expected_id)); @@ -1871,7 +1903,7 @@ mod tests { #[gpui::test] fn test_deserialize_string_id() { let json = r#"{"jsonrpc":"2.0","id":"anythingAtAll","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Str("anythingAtAll".to_string()); assert_eq!(notification.id, 
Some(expected_id)); @@ -1880,7 +1912,7 @@ mod tests { #[gpui::test] fn test_deserialize_int_id() { let json = r#"{"jsonrpc":"2.0","id":2,"method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Int(2); assert_eq!(notification.id, Some(expected_id)); diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 08698a1d6c..6fcc3a728a 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -29,6 +29,15 @@ pub struct NodeBinaryOptions { pub use_paths: Option<(PathBuf, PathBuf)>, } +#[derive(Default)] +pub enum VersionCheck { + /// Check whether the installed and requested version have a mismatch + VersionMismatch, + /// Only check whether the currently installed version is older than the newest one + #[default] + OlderVersion, +} + #[derive(Clone)] pub struct NodeRuntime(Arc>); @@ -287,6 +296,7 @@ impl NodeRuntime { local_executable_path: &Path, local_package_directory: &Path, latest_version: &str, + version_check: VersionCheck, ) -> bool { // In the case of the local system not having the package installed, // or in the instances where we fail to parse package.json data, @@ -311,7 +321,10 @@ impl NodeRuntime { return true; }; - installed_version < latest_version + match version_check { + VersionCheck::VersionMismatch => installed_version != latest_version, + VersionCheck::OlderVersion => installed_version < latest_version, + } } } diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index 2d40cd2735..bae00f0a8e 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -20,6 +20,7 @@ anyhow.workspace = true futures.workspace = true http_client.workspace = true schemars = { workspace = true, optional = true } +log.workspace = true serde.workspace = true serde_json.workspace = true strum.workspace = true diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 12a5cf52d2..604e8fe622 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -74,6 +74,12 @@ pub enum Model { O3, #[serde(rename = "o4-mini")] O4Mini, + #[serde(rename = "gpt-5")] + Five, + #[serde(rename = "gpt-5-mini")] + FiveMini, + #[serde(rename = "gpt-5-nano")] + FiveNano, #[serde(rename = "custom")] Custom { @@ -83,11 +89,13 @@ pub enum Model { max_tokens: u64, max_output_tokens: Option, max_completion_tokens: Option, + reasoning_effort: Option, }, } impl Model { pub fn default_fast() -> Self { + // TODO: Replace with FiveMini since all other models are deprecated Self::FourPointOneMini } @@ -105,6 +113,9 @@ impl Model { "o3-mini" => Ok(Self::O3Mini), "o3" => Ok(Self::O3), "o4-mini" => Ok(Self::O4Mini), + "gpt-5" => Ok(Self::Five), + "gpt-5-mini" => Ok(Self::FiveMini), + "gpt-5-nano" => Ok(Self::FiveNano), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -123,6 +134,9 @@ impl Model { Self::O3Mini => "o3-mini", Self::O3 => "o3", Self::O4Mini => "o4-mini", + Self::Five => "gpt-5", + Self::FiveMini => "gpt-5-mini", + Self::FiveNano => "gpt-5-nano", Self::Custom { name, .. 
} => name, } } @@ -141,6 +155,9 @@ impl Model { Self::O3Mini => "o3-mini", Self::O3 => "o3", Self::O4Mini => "o4-mini", + Self::Five => "gpt-5", + Self::FiveMini => "gpt-5-mini", + Self::FiveNano => "gpt-5-nano", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name), @@ -161,6 +178,9 @@ impl Model { Self::O3Mini => 200_000, Self::O3 => 200_000, Self::O4Mini => 200_000, + Self::Five => 272_000, + Self::FiveMini => 272_000, + Self::FiveNano => 272_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -182,6 +202,18 @@ impl Model { Self::O3Mini => Some(100_000), Self::O3 => Some(100_000), Self::O4Mini => Some(100_000), + Self::Five => Some(128_000), + Self::FiveMini => Some(128_000), + Self::FiveNano => Some(128_000), + } + } + + pub fn reasoning_effort(&self) -> Option { + match self { + Self::Custom { + reasoning_effort, .. + } => reasoning_effort.to_owned(), + _ => None, } } @@ -197,10 +229,20 @@ impl Model { | Self::FourOmniMini | Self::FourPointOne | Self::FourPointOneMini - | Self::FourPointOneNano => true, + | Self::FourPointOneNano + | Self::Five + | Self::FiveMini + | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false, } } + + /// Returns whether the given model supports the `prompt_cache_key` parameter. + /// + /// If the model does not support the parameter, do not pass it up. + pub fn supports_prompt_cache_key(&self) -> bool { + return true; + } } #[derive(Debug, Serialize, Deserialize)] @@ -220,6 +262,10 @@ pub struct Request { pub parallel_tool_calls: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub tools: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub prompt_cache_key: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub reasoning_effort: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -231,6 +277,16 @@ pub enum ToolChoice { Other(ToolDefinition), } +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningEffort { + Minimal, + Low, + Medium, + High, +} + #[derive(Clone, Deserialize, Serialize, Debug)] #[serde(tag = "type", rename_all = "snake_case")] pub enum ToolDefinition { @@ -421,7 +477,15 @@ pub async fn stream_completion( Ok(ResponseStreamResult::Err { error }) => { Some(Err(anyhow!(error))) } - Err(error) => Some(Err(anyhow!(error))), + Err(error) => { + log::error!( + "Failed to parse OpenAI response into ResponseStreamResult: `{}`\n\ + Response: `{}`", + error, + line, + ); + Some(Err(anyhow!(error))) + } } } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4489f9f043..a4c04cd06a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7722,12 +7722,19 @@ impl LspStore { pub(crate) fn set_language_server_statuses_from_proto( &mut self, language_servers: Vec, + server_capabilities: Vec, ) { self.language_server_statuses = language_servers .into_iter() - .map(|server| { + .zip(server_capabilities) + .map(|(server, server_capabilities)| { + let server_id = LanguageServerId(server.id as usize); + if let Ok(server_capabilities) = serde_json::from_str(&server_capabilities) { + self.lsp_server_capabilities + .insert(server_id, server_capabilities); + } ( - LanguageServerId(server.id as usize), + server_id, LanguageServerStatus { name: LanguageServerName::from_proto(server.name), pending_work: Default::default(), diff --git 
a/crates/project/src/project.rs b/crates/project/src/project.rs index cca026ec87..7838f5744a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1488,7 +1488,10 @@ impl Project { fs.clone(), cx, ); - lsp_store.set_language_server_statuses_from_proto(response.payload.language_servers); + lsp_store.set_language_server_statuses_from_proto( + response.payload.language_servers, + response.payload.language_server_capabilities, + ); lsp_store })?; @@ -2319,7 +2322,10 @@ impl Project { self.set_worktrees_from_proto(message.worktrees, cx)?; self.set_collaborators_from_proto(message.collaborators, cx)?; self.lsp_store.update(cx, |lsp_store, _| { - lsp_store.set_language_server_statuses_from_proto(message.language_servers) + lsp_store.set_language_server_statuses_from_proto( + message.language_servers, + message.language_server_capabilities, + ) }); self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync) .unwrap(); diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs index 1ae22c5fef..8686fda53f 100644 --- a/crates/vercel/src/vercel.rs +++ b/crates/vercel/src/vercel.rs @@ -71,4 +71,8 @@ impl Model { Model::Custom { .. } => false, } } + + pub fn supports_prompt_cache_key(&self) -> bool { + false + } } diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index ac116b2f8f..23cd5b9320 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -105,6 +105,10 @@ impl Model { } } + pub fn supports_prompt_cache_key(&self) -> bool { + false + } + pub fn supports_tool(&self) -> bool { match self { Self::Grok2Vision diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 5bd6d981fa..04af522e18 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.199.0" +version = "0.199.7" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] diff --git a/crates/zed/RELEASE_CHANNEL b/crates/zed/RELEASE_CHANNEL index 38f8e886e1..870bbe4e50 100644 --- a/crates/zed/RELEASE_CHANNEL +++ b/crates/zed/RELEASE_CHANNEL @@ -1 +1 @@ -dev +stable \ No newline at end of file diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index b9f561c0e7..da4b6e78c6 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -5,11 +5,9 @@ use editor::Editor; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; use settings::SettingsStore; -use smol::stream::StreamExt; use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; use ui::Window; -use util::ResultExt; use workspace::Workspace; use zeta::{ProviderDataCollection, ZetaEditPredictionProvider}; @@ -59,25 +57,20 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { cx.on_action(clear_zeta_edit_history); let mut provider = all_language_settings(None, cx).edit_predictions.provider; - cx.spawn({ - let user_store = user_store.clone(); + cx.subscribe(&user_store, { let editors = editors.clone(); let client = client.clone(); - - async move |cx| { - let mut status = client.status(); - while let Some(_status) = status.next().await { - cx.update(|cx| { - assign_edit_prediction_providers( - &editors, - provider, - &client, - user_store.clone(), - cx, - ); - }) - .log_err(); + move |user_store, event, cx| match event { + client::user::Event::PrivateUserInfoUpdated => { + assign_edit_prediction_providers( + &editors, + provider, + &client, + user_store.clone(), + cx, + ); } + _ => {} } }) .detach(); diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 9f1d02b790..ee76308ff3 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -26,6 +26,7 @@ collections.workspace = true command_palette_hooks.workspace = true copilot.workspace = true db.workspace = true +edit_prediction.workspace = true editor.workspace = true feature_flags.workspace = true fs.workspace = true @@ -33,13 +34,13 @@ futures.workspace = true gpui.workspace = true http_client.workspace = true indoc.workspace = true -edit_prediction.workspace = true language.workspace = true language_model.workspace = true log.workspace = true menu.workspace = true postage.workspace = true project.workspace = true +rand.workspace = true regex.workspace = true release_channel.workspace = true serde.workspace = true diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index b1bd737dbf..c711b2734c 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -432,6 +432,7 @@ impl Zeta { body, editable_range, } = gather_task.await?; + let done_gathering_context_at = Instant::now(); log::debug!( "Events:\n{}\nExcerpt:\n{:?}", @@ -484,6 +485,7 @@ impl Zeta { } }; + let received_response_at = Instant::now(); log::debug!("completion response: {}", &response.output_excerpt); if let Some(usage) = usage { @@ -495,7 +497,7 @@ impl Zeta { .ok(); } - Self::process_completion_response( + let edit_prediction = Self::process_completion_response( response, buffer, &snapshot, @@ -508,7 +510,25 @@ impl Zeta { buffer_snapshotted_at, &cx, ) - .await + .await; + + let finished_at = Instant::now(); + + // record 
latency for ~1% of requests + if rand::random::() <= 2 { + telemetry::event!( + "Edit Prediction Request", + context_latency = done_gathering_context_at + .duration_since(buffer_snapshotted_at) + .as_millis(), + request_latency = received_response_at + .duration_since(done_gathering_context_at) + .as_millis(), + process_latency = finished_at.duration_since(received_response_at).as_millis() + ); + } + + edit_prediction }) } diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index 5678e46236..338985ed95 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -305,7 +305,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -334,7 +334,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -362,7 +362,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -391,7 +391,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -429,7 +429,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = 
["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -468,7 +468,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -509,7 +509,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -548,7 +548,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -568,7 +568,7 @@ ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -592,7 +592,7 @@ ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls 
= { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -636,7 +636,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -675,7 +675,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] }