diff --git a/.github/actionlint.yml b/.github/actionlint.yml
index 6bfbc27705..0ee6af8a1d 100644
--- a/.github/actionlint.yml
+++ b/.github/actionlint.yml
@@ -5,25 +5,28 @@ self-hosted-runner:
     # GitHub-hosted Runners
     - github-8vcpu-ubuntu-2404
     - github-16vcpu-ubuntu-2404
+    - github-32vcpu-ubuntu-2404
+    - github-8vcpu-ubuntu-2204
+    - github-16vcpu-ubuntu-2204
+    - github-32vcpu-ubuntu-2204
+    - github-16vcpu-ubuntu-2204-arm
     - windows-2025-16
    - windows-2025-32
     - windows-2025-64
-    # Buildjet Ubuntu 20.04 - AMD x86_64
-    - buildjet-2vcpu-ubuntu-2004
-    - buildjet-4vcpu-ubuntu-2004
-    - buildjet-8vcpu-ubuntu-2004
-    - buildjet-16vcpu-ubuntu-2004
-    - buildjet-32vcpu-ubuntu-2004
-    # Buildjet Ubuntu 22.04 - AMD x86_64
-    - buildjet-2vcpu-ubuntu-2204
-    - buildjet-4vcpu-ubuntu-2204
-    - buildjet-8vcpu-ubuntu-2204
-    - buildjet-16vcpu-ubuntu-2204
-    - buildjet-32vcpu-ubuntu-2204
-    # Buildjet Ubuntu 22.04 - Graviton aarch64
-    - buildjet-8vcpu-ubuntu-2204-arm
-    - buildjet-16vcpu-ubuntu-2204-arm
-    - buildjet-32vcpu-ubuntu-2204-arm
+    # Namespace Ubuntu 20.04 (Release builds)
+    - namespace-profile-16x32-ubuntu-2004
+    - namespace-profile-32x64-ubuntu-2004
+    - namespace-profile-16x32-ubuntu-2004-arm
+    - namespace-profile-32x64-ubuntu-2004-arm
+    # Namespace Ubuntu 22.04 (Everything else)
+    - namespace-profile-2x4-ubuntu-2204
+    - namespace-profile-4x8-ubuntu-2204
+    - namespace-profile-8x16-ubuntu-2204
+    - namespace-profile-16x32-ubuntu-2204
+    - namespace-profile-32x64-ubuntu-2204
+    # Namespace Limited Preview
+    - namespace-profile-8x16-ubuntu-2004-arm-m4
+    - namespace-profile-8x32-ubuntu-2004-arm-m4
     # Self Hosted Runners
     - self-mini-macos
     - self-32vcpu-windows-2022
diff --git a/.github/actions/build_docs/action.yml b/.github/actions/build_docs/action.yml
index a7effad247..d2e62d5b22 100644
--- a/.github/actions/build_docs/action.yml
+++ b/.github/actions/build_docs/action.yml
@@ -13,7 +13,7 @@ runs:
       uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
       with:
         save-if: ${{ github.ref == 'refs/heads/main' }}
-        cache-provider: "buildjet"
+        # cache-provider: "buildjet"

    - name: Install Linux dependencies
      shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml
index 8a48ff96f1..bfaf7a271b 100644
--- a/.github/workflows/bump_patch_version.yml
+++ b/.github/workflows/bump_patch_version.yml
@@ -16,7 +16,7 @@ jobs:
   bump_patch_version:
     if: github.repository_owner == 'zed-industries'
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 43d305faae..f4ba227168 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -137,7 +137,7 @@ jobs:
       github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
     runs-on:
-      - buildjet-8vcpu-ubuntu-2204
+      - namespace-profile-8x16-ubuntu-2204
     steps:
       - name: Checkout repo
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -168,7 +168,7 @@ jobs:
     needs: [job_spec]
     if: github.repository_owner == 'zed-industries'
     runs-on:
-      - buildjet-8vcpu-ubuntu-2204
+      - namespace-profile-4x8-ubuntu-2204
     steps:
       - name: Checkout repo
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -221,7 +221,7 @@ jobs:
       github.repository_owner == 'zed-industries' &&
       (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true')
     runs-on:
-      - buildjet-8vcpu-ubuntu-2204
+      - namespace-profile-8x16-ubuntu-2204
     steps:
       - name: Checkout repo
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -328,7 +328,7 @@ jobs:
       github.repository_owner == 'zed-industries' &&
       needs.job_spec.outputs.run_tests == 'true'
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Add Rust to the PATH
         run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
@@ -342,7 +342,7 @@ jobs:
         uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
-          cache-provider: "buildjet"
+          # cache-provider: "buildjet"

       - name: Install Linux dependencies
         run: ./script/linux
@@ -380,7 +380,7 @@ jobs:
       github.repository_owner == 'zed-industries' &&
       needs.job_spec.outputs.run_tests == 'true'
     runs-on:
-      - buildjet-8vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Add Rust to the PATH
         run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
@@ -394,7 +394,7 @@ jobs:
         uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
-          cache-provider: "buildjet"
+          # cache-provider: "buildjet"

       - name: Install Clang & Mold
         run: ./script/remote-server && ./script/install-mold 2.34.0
@@ -511,8 +511,8 @@ jobs:
     runs-on:
       - self-mini-macos
     if: |
-      startsWith(github.ref, 'refs/tags/v')
-      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
+      ( startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
     needs: [macos_tests]
     env:
       MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
@@ -526,6 +526,11 @@ jobs:
         with:
           node-version: "18"

+      - name: Setup Sentry CLI
+        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
+        with:
+          token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+
       - name: Checkout repo
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
@@ -597,10 +602,10 @@ jobs:
     timeout-minutes: 60
     name: Linux x86_x64 release bundle
     runs-on:
-      - buildjet-16vcpu-ubuntu-2004 # ubuntu 20.04 for minimal glibc
+      - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
     if: |
-      startsWith(github.ref, 'refs/tags/v')
-      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
+      ( startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
     needs: [linux_tests]
     steps:
       - name: Checkout repo
@@ -611,6 +616,11 @@ jobs:
       - name: Install Linux dependencies
         run: ./script/linux && ./script/install-mold 2.34.0

+      - name: Setup Sentry CLI
+        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
+        with:
+          token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+
       - name: Determine version and release channel
         if: startsWith(github.ref, 'refs/tags/v')
         run: |
@@ -650,7 +660,7 @@ jobs:
     timeout-minutes: 60
     name: Linux arm64 release bundle
     runs-on:
-      - buildjet-32vcpu-ubuntu-2204-arm
+      - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
     if: |
       startsWith(github.ref, 'refs/tags/v')
       || contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -664,6 +674,11 @@ jobs:
       - name: Install Linux dependencies
         run: ./script/linux

+      - name: Setup Sentry CLI
+        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
+        with:
+          token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+
       - name: Determine version and release channel
         if: startsWith(github.ref, 'refs/tags/v')
         run: |
@@ -703,10 +718,8 @@ jobs:
     timeout-minutes: 60
     runs-on: github-8vcpu-ubuntu-2404
     if: |
-      false && (
-      startsWith(github.ref, 'refs/tags/v')
-      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
-      )
+      false && ( startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
     needs: [linux_tests]
     name: Build Zed on FreeBSD
     steps:
@@ -791,6 +804,11 @@ jobs:
         with:
           clean: false

+      - name: Setup Sentry CLI
+        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
+        with:
+          token: ${{ secrets.SENTRY_AUTH_TOKEN }}
+
       - name: Determine version and release channel
         working-directory: ${{ env.ZED_WORKSPACE }}
         if: ${{ startsWith(github.ref, 'refs/tags/v') }}
@@ -833,3 +851,12 @@ jobs:
         run: gh release edit "$GITHUB_REF_NAME" --draft=false
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Create Sentry release
+        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
+        env:
+          SENTRY_ORG: zed-dev
+          SENTRY_PROJECT: zed
+          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+        with:
+          environment: production
diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml
index fe443d493e..df35d44ca9 100644
--- a/.github/workflows/deploy_cloudflare.yml
+++ b/.github/workflows/deploy_cloudflare.yml
@@ -9,7 +9,7 @@ jobs:
   deploy-docs:
     name: Deploy Docs
     if: github.repository_owner == 'zed-industries'
-    runs-on: buildjet-16vcpu-ubuntu-2204
+    runs-on: namespace-profile-16x32-ubuntu-2204

     steps:
       - name: Checkout repo
diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml
index f7348a1069..8f56cac5d9 100644
--- a/.github/workflows/deploy_collab.yml
+++ b/.github/workflows/deploy_collab.yml
@@ -61,7 +61,7 @@ jobs:
       - style
       - tests
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Install doctl
         uses: digitalocean/action-doctl@v2
@@ -94,7 +94,7 @@ jobs:
     needs:
       - publish
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204

     steps:
       - name: Checkout repo
diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml
index 2ad302a602..b5da9e7b7c 100644
--- a/.github/workflows/eval.yml
+++ b/.github/workflows/eval.yml
@@ -32,7 +32,7 @@ jobs:
       github.repository_owner == 'zed-industries' &&
       (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Add Rust to the PATH
         run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
@@ -46,7 +46,7 @@ jobs:
         uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
-          cache-provider: "buildjet"
+          # cache-provider: "buildjet"

       - name: Install Linux dependencies
         run: ./script/linux
diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml
index 6c3a97c163..e682ce5890 100644
--- a/.github/workflows/nix.yml
+++ b/.github/workflows/nix.yml
@@ -20,7 +20,7 @@ jobs:
       matrix:
         system:
           - os: x86 Linux
-            runner: buildjet-16vcpu-ubuntu-2204
+            runner: namespace-profile-16x32-ubuntu-2204
             install_nix: true
           - os: arm Mac
             runner: [macOS, ARM64, test]
diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml
index db4d44318e..de96c3df78 100644
--- a/.github/workflows/randomized_tests.yml
+++ b/.github/workflows/randomized_tests.yml
@@ -20,7 +20,7 @@ jobs:
     name: Run randomized tests
     if: github.repository_owner == 'zed-industries'
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Install Node
         uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml
index c847149984..0cc6737a45 100644
--- a/.github/workflows/release_nightly.yml
+++ b/.github/workflows/release_nightly.yml
@@ -128,7 +128,7 @@ jobs:
     name: Create a Linux *.tar.gz bundle for x86
     if: github.repository_owner == 'zed-industries'
     runs-on:
-      - buildjet-16vcpu-ubuntu-2004
+      - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
     needs: tests
     steps:
       - name: Checkout repo
@@ -168,7 +168,7 @@ jobs:
     name: Create a Linux *.tar.gz bundle for ARM
     if: github.repository_owner == 'zed-industries'
     runs-on:
-      - buildjet-32vcpu-ubuntu-2204-arm
+      - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
     needs: tests
     steps:
       - name: Checkout repo
@@ -316,3 +316,12 @@ jobs:
           git config user.email github-actions@github.com
           git tag -f nightly
           git push origin nightly --force
+
+      - name: Create Sentry release
+        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
+        env:
+          SENTRY_ORG: zed-dev
+          SENTRY_PROJECT: zed
+          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
+        with:
+          environment: production
diff --git a/.github/workflows/unit_evals.yml b/.github/workflows/unit_evals.yml
index cb4e39d151..2e03fb028f 100644
--- a/.github/workflows/unit_evals.yml
+++ b/.github/workflows/unit_evals.yml
@@ -23,7 +23,7 @@ jobs:
     timeout-minutes: 60
     name: Run unit evals
     runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - namespace-profile-16x32-ubuntu-2204
     steps:
       - name: Add Rust to the PATH
         run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
@@ -37,7 +37,7 @@ jobs:
         uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
-          cache-provider: "buildjet"
+          # cache-provider: "buildjet"

       - name: Install Linux dependencies
         run: ./script/linux
"serde_json", @@ -15401,7 +15420,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" dependencies = [ - "nom", + "nom 7.1.3", "unicode_categories", ] @@ -16604,9 +16623,8 @@ dependencies = [ [[package]] name = "tiktoken-rs" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25563eeba904d770acf527e8b370fe9a5547bacd20ff84a0b6c3bc41288e5625" +version = "0.8.0" +source = "git+https://github.com/zed-industries/tiktoken-rs?rev=30c32a4522751699adeda0d5840c71c3b75ae73d#30c32a4522751699adeda0d5840c71c3b75ae73d" dependencies = [ "anyhow", "base64 0.22.1", @@ -19977,7 +19995,7 @@ dependencies = [ "naga", "nix 0.28.0", "nix 0.29.0", - "nom", + "nom 7.1.3", "num-bigint", "num-bigint-dig", "num-integer", @@ -20312,6 +20330,34 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yawc" +version = "0.2.4" +source = "git+https://github.com/deviant-forks/yawc?rev=1899688f3e69ace4545aceb97b2a13881cf26142#1899688f3e69ace4545aceb97b2a13881cf26142" +dependencies = [ + "base64 0.22.1", + "bytes 1.10.1", + "flate2", + "futures 0.3.31", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "js-sys", + "nom 8.0.0", + "pin-project", + "rand 0.8.5", + "sha1", + "thiserror 1.0.69", + "tokio", + "tokio-rustls 0.26.2", + "tokio-util", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", +] + [[package]] name = "yazi" version = "0.2.1" @@ -20418,7 +20464,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.199.0" +version = "0.199.10" dependencies = [ "activity_indicator", "agent", @@ -20821,6 +20867,7 @@ dependencies = [ "menu", "postage", "project", + "rand 0.8.5", "regex", "release_channel", "reqwest_client", diff --git a/Cargo.toml b/Cargo.toml index 7b82fd1910..86f1b8b0a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -461,6 +461,7 @@ bytes = "1.0" cargo_metadata = "0.19" cargo_toml = "0.21" chrono = { version = "0.4", features = ["serde"] } +ciborium = "0.2" circular-buffer = "1.0" clap = { version = "4.4", features = ["derive"] } cocoa = "0.26" @@ -600,7 +601,7 @@ sysinfo = "0.31.0" take-until = "0.2.0" tempfile = "3.20.0" thiserror = "2.0.12" -tiktoken-rs = "0.7.0" +tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" } time = { version = "0.3", features = [ "macros", "parsing", @@ -660,6 +661,9 @@ which = "6.0.0" windows-core = "0.61" wit-component = "0.221" workspace-hack = "0.1.0" +# We can switch back to the published version once https://github.com/infinitefield/yawc/pull/16 is merged and a new +# version is released. 
+yawc = { git = "https://github.com/deviant-forks/yawc", rev = "1899688f3e69ace4545aceb97b2a13881cf26142" } zstd = "0.11" [workspace.dependencies.async-stripe] diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index b4894cddcf..f09c012a85 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -14,7 +14,9 @@ use async_tungstenite::tungstenite::{ }; use clock::SystemClock; use cloud_api_client::CloudApiClient; +use cloud_api_client::websocket_protocol::MessageToClient; use credentials_provider::CredentialsProvider; +use feature_flags::FeatureFlagAppExt as _; use futures::{ AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, TryFutureExt as _, TryStreamExt, channel::oneshot, future::BoxFuture, @@ -191,6 +193,8 @@ pub fn init(client: &Arc, cx: &mut App) { }); } +pub type MessageToClientHandler = Box; + struct GlobalClient(Arc); impl Global for GlobalClient {} @@ -204,6 +208,7 @@ pub struct Client { credentials_provider: ClientCredentialsProvider, state: RwLock, handler_set: parking_lot::Mutex, + message_to_client_handlers: parking_lot::Mutex>, #[allow(clippy::type_complexity)] #[cfg(any(test, feature = "test-support"))] @@ -553,6 +558,7 @@ impl Client { credentials_provider: ClientCredentialsProvider::new(cx), state: Default::default(), handler_set: Default::default(), + message_to_client_handlers: parking_lot::Mutex::new(Vec::new()), #[cfg(any(test, feature = "test-support"))] authenticate: Default::default(), @@ -933,23 +939,77 @@ impl Client { } } - /// Performs a sign-in and also connects to Collab. + /// Establishes a WebSocket connection with Cloud for receiving updates from the server. + async fn connect_to_cloud(self: &Arc, cx: &AsyncApp) -> Result<()> { + let connect_task = cx.update({ + let cloud_client = self.cloud_client.clone(); + move |cx| cloud_client.connect(cx) + })??; + let connection = connect_task.await?; + + let (mut messages, task) = cx.update(|cx| connection.spawn(cx))?; + task.detach(); + + cx.spawn({ + let this = self.clone(); + async move |cx| { + while let Some(message) = messages.next().await { + if let Some(message) = message.log_err() { + this.handle_message_to_client(message, cx); + } + } + } + }) + .detach(); + + Ok(()) + } + + /// Performs a sign-in and also (optionally) connects to Collab. /// - /// This is called in places where we *don't* need to connect in the future. We will replace these calls with calls - /// to `sign_in` when we're ready to remove auto-connection to Collab. + /// Only Zed staff automatically connect to Collab. 
pub async fn sign_in_with_optional_connect( self: &Arc, try_provider: bool, cx: &AsyncApp, ) -> Result<()> { + let (is_staff_tx, is_staff_rx) = oneshot::channel::(); + let mut is_staff_tx = Some(is_staff_tx); + cx.update(|cx| { + cx.on_flags_ready(move |state, _cx| { + if let Some(is_staff_tx) = is_staff_tx.take() { + is_staff_tx.send(state.is_staff).log_err(); + } + }) + .detach(); + }) + .log_err(); + let credentials = self.sign_in(try_provider, cx).await?; - let connect_result = match self.connect_with_credentials(credentials, cx).await { - ConnectionResult::Timeout => Err(anyhow!("connection timed out")), - ConnectionResult::ConnectionReset => Err(anyhow!("connection reset")), - ConnectionResult::Result(result) => result.context("client auth and connect"), - }; - connect_result.log_err(); + self.connect_to_cloud(cx).await.log_err(); + + cx.update(move |cx| { + cx.spawn({ + let client = self.clone(); + async move |cx| { + let is_staff = is_staff_rx.await?; + if is_staff { + match client.connect_with_credentials(credentials, cx).await { + ConnectionResult::Timeout => Err(anyhow!("connection timed out")), + ConnectionResult::ConnectionReset => Err(anyhow!("connection reset")), + ConnectionResult::Result(result) => { + result.context("client auth and connect") + } + } + } else { + Ok(()) + } + } + }) + .detach_and_log_err(cx); + }) + .log_err(); Ok(()) } @@ -1622,6 +1682,24 @@ impl Client { } } + pub fn add_message_to_client_handler( + self: &Arc, + handler: impl Fn(&MessageToClient, &mut App) + Send + Sync + 'static, + ) { + self.message_to_client_handlers + .lock() + .push(Box::new(handler)); + } + + fn handle_message_to_client(self: &Arc, message: MessageToClient, cx: &AsyncApp) { + cx.update(|cx| { + for handler in self.message_to_client_handlers.lock().iter() { + handler(&message, cx); + } + }) + .ok(); + } + pub fn telemetry(&self) -> &Arc { &self.telemetry } diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 3c125a0882..faf46945d8 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -1,6 +1,7 @@ use super::{Client, Status, TypedEnvelope, proto}; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; +use cloud_api_client::websocket_protocol::MessageToClient; use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo}; use cloud_llm_client::{ EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, @@ -181,6 +182,12 @@ impl UserStore { client.add_message_handler(cx.weak_entity(), Self::handle_update_invite_info), client.add_message_handler(cx.weak_entity(), Self::handle_show_contacts), ]; + + client.add_message_to_client_handler({ + let this = cx.weak_entity(); + move |message, cx| Self::handle_message_to_client(this.clone(), message, cx) + }); + Self { users: Default::default(), by_github_login: Default::default(), @@ -219,17 +226,35 @@ impl UserStore { match status { Status::Authenticated | Status::Connected { .. 
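// ---------------------------------------------------------------------------
// A usage sketch (an illustration, not part of the diff): subscribing to
// Cloud-pushed updates through the `add_message_to_client_handler` API added
// above. `client` is assumed to be the `Arc<Client>` built during startup;
// the handler runs on the main thread with `&mut App` whenever a message
// arrives over the Cloud WebSocket connection.
//
// use std::sync::Arc;
// use cloud_api_client::websocket_protocol::MessageToClient;
//
// fn subscribe_to_cloud_updates(client: &Arc<client::Client>) {
//     client.add_message_to_client_handler(|message, _cx| match message {
//         // Today the protocol has a single variant: re-fetch the user.
//         MessageToClient::UserUpdated => {
//             log::info!("cloud: authenticated user changed; refreshing");
//         }
//     });
// }
// ---------------------------------------------------------------------------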
diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs
index 3c125a0882..faf46945d8 100644
--- a/crates/client/src/user.rs
+++ b/crates/client/src/user.rs
@@ -1,6 +1,7 @@
 use super::{Client, Status, TypedEnvelope, proto};
 use anyhow::{Context as _, Result, anyhow};
 use chrono::{DateTime, Utc};
+use cloud_api_client::websocket_protocol::MessageToClient;
 use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo};
 use cloud_llm_client::{
     EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME,
@@ -181,6 +182,12 @@ impl UserStore {
             client.add_message_handler(cx.weak_entity(), Self::handle_update_invite_info),
             client.add_message_handler(cx.weak_entity(), Self::handle_show_contacts),
         ];
+
+        client.add_message_to_client_handler({
+            let this = cx.weak_entity();
+            move |message, cx| Self::handle_message_to_client(this.clone(), message, cx)
+        });
+
         Self {
             users: Default::default(),
             by_github_login: Default::default(),
@@ -219,17 +226,35 @@ impl UserStore {
                     match status {
                         Status::Authenticated | Status::Connected { .. } => {
                             if let Some(user_id) = client.user_id() {
-                                let response = client.cloud_client().get_authenticated_user().await;
-                                let mut current_user = None;
+                                let response = client
+                                    .cloud_client()
+                                    .get_authenticated_user()
+                                    .await
+                                    .log_err();
+
+                                let current_user_and_response = if let Some(response) = response {
+                                    let user = Arc::new(User {
+                                        id: user_id,
+                                        github_login: response.user.github_login.clone().into(),
+                                        avatar_uri: response.user.avatar_url.clone().into(),
+                                        name: response.user.name.clone(),
+                                    });
+
+                                    Some((user, response))
+                                } else {
+                                    None
+                                };
+
+                                current_user_tx
+                                    .send(
+                                        current_user_and_response
+                                            .as_ref()
+                                            .map(|(user, _)| user.clone()),
+                                    )
+                                    .await
+                                    .ok();
+
                                 cx.update(|cx| {
-                                    if let Some(response) = response.log_err() {
-                                        let user = Arc::new(User {
-                                            id: user_id,
-                                            github_login: response.user.github_login.clone().into(),
-                                            avatar_uri: response.user.avatar_url.clone().into(),
-                                            name: response.user.name.clone(),
-                                        });
-                                        current_user = Some(user.clone());
+                                    if let Some((user, response)) = current_user_and_response {
                                         this.update(cx, |this, cx| {
                                             this.by_github_login
                                                 .insert(user.github_login.clone(), user_id);
@@ -240,7 +265,6 @@ impl UserStore {
                                         anyhow::Ok(())
                                     }
                                 })??;
-                                current_user_tx.send(current_user).await.ok();

                                 this.update(cx, |_, cx| cx.notify())?;
                             }
@@ -813,6 +837,32 @@ impl UserStore {
         cx.emit(Event::PrivateUserInfoUpdated);
     }

+    fn handle_message_to_client(this: WeakEntity<Self>, message: &MessageToClient, cx: &App) {
+        cx.spawn(async move |cx| {
+            match message {
+                MessageToClient::UserUpdated => {
+                    let cloud_client = cx
+                        .update(|cx| {
+                            this.read_with(cx, |this, _cx| {
+                                this.client.upgrade().map(|client| client.cloud_client())
+                            })
+                        })??
+                        .ok_or(anyhow::anyhow!("Failed to get Cloud client"))?;
+
+                    let response = cloud_client.get_authenticated_user().await?;
+                    cx.update(|cx| {
+                        this.update(cx, |this, cx| {
+                            this.update_authenticated_user(response, cx);
+                        })
+                    })??;
+                }
+            }
+
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
+    }
+
     pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
         self.current_user.clone()
     }
diff --git a/crates/cloud_api_client/Cargo.toml b/crates/cloud_api_client/Cargo.toml
index d56aa94c6e..8e50ccb191 100644
--- a/crates/cloud_api_client/Cargo.toml
+++ b/crates/cloud_api_client/Cargo.toml
@@ -15,7 +15,10 @@ path = "src/cloud_api_client.rs"
 anyhow.workspace = true
 cloud_api_types.workspace = true
 futures.workspace = true
+gpui.workspace = true
+gpui_tokio.workspace = true
 http_client.workspace = true
 parking_lot.workspace = true
 serde_json.workspace = true
 workspace-hack.workspace = true
+yawc.workspace = true
diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs
index edac051a0e..ef9a1a9a55 100644
--- a/crates/cloud_api_client/src/cloud_api_client.rs
+++ b/crates/cloud_api_client/src/cloud_api_client.rs
@@ -1,11 +1,19 @@
+mod websocket;
+
 use std::sync::Arc;

 use anyhow::{Context, Result, anyhow};
+use cloud_api_types::websocket_protocol::{PROTOCOL_VERSION, PROTOCOL_VERSION_HEADER_NAME};
 pub use cloud_api_types::*;
 use futures::AsyncReadExt as _;
+use gpui::{App, Task};
+use gpui_tokio::Tokio;
 use http_client::http::request;
 use http_client::{AsyncBody, HttpClientWithUrl, Method, Request, StatusCode};
 use parking_lot::RwLock;
+use yawc::WebSocket;
+
+use crate::websocket::Connection;

 struct Credentials {
     user_id: u32,
@@ -78,6 +86,41 @@ impl CloudApiClient {
         Ok(serde_json::from_str(&body)?)
     }

+    pub fn connect(&self, cx: &App) -> Result<Task<Result<Connection>>> {
+        let mut connect_url = self
+            .http_client
+            .build_zed_cloud_url("/client/users/connect", &[])?;
+        connect_url
+            .set_scheme(match connect_url.scheme() {
+                "https" => "wss",
+                "http" => "ws",
+                scheme => Err(anyhow!("invalid URL scheme: {scheme}"))?,
+            })
+            .map_err(|_| anyhow!("failed to set URL scheme"))?;
+
+        let credentials = self.credentials.read();
+        let credentials = credentials.as_ref().context("no credentials provided")?;
+        let authorization_header = format!("{} {}", credentials.user_id, credentials.access_token);
+
+        Ok(cx.spawn(async move |cx| {
+            let handle = cx
+                .update(|cx| Tokio::handle(cx))
+                .ok()
+                .context("failed to get Tokio handle")?;
+            let _guard = handle.enter();
+
+            let ws = WebSocket::connect(connect_url)
+                .with_request(
+                    request::Builder::new()
+                        .header("Authorization", authorization_header)
+                        .header(PROTOCOL_VERSION_HEADER_NAME, PROTOCOL_VERSION.to_string()),
+                )
+                .await?;
+
+            Ok(Connection::new(ws))
+        }))
+    }
+
     pub async fn accept_terms_of_service(&self) -> Result<AcceptTermsOfServiceResponse> {
         let request = self.build_request(
             Request::builder().method(Method::POST).uri(
diff --git a/crates/cloud_api_client/src/websocket.rs b/crates/cloud_api_client/src/websocket.rs
new file mode 100644
index 0000000000..48a628db78
--- /dev/null
+++ b/crates/cloud_api_client/src/websocket.rs
@@ -0,0 +1,73 @@
+use std::pin::Pin;
+use std::time::Duration;
+
+use anyhow::Result;
+use cloud_api_types::websocket_protocol::MessageToClient;
+use futures::channel::mpsc::unbounded;
+use futures::stream::{SplitSink, SplitStream};
+use futures::{FutureExt as _, SinkExt as _, Stream, StreamExt as _, TryStreamExt as _, pin_mut};
+use gpui::{App, BackgroundExecutor, Task};
+use yawc::WebSocket;
+use yawc::frame::{FrameView, OpCode};
+
+const KEEPALIVE_INTERVAL: Duration = Duration::from_secs(1);
+
+pub type MessageStream = Pin<Box<dyn Stream<Item = Result<MessageToClient>>>>;
+
+pub struct Connection {
+    tx: SplitSink<WebSocket, FrameView>,
+    rx: SplitStream<WebSocket>,
+}
+
+impl Connection {
+    pub fn new(ws: WebSocket) -> Self {
+        let (tx, rx) = ws.split();
+
+        Self { tx, rx }
+    }
+
+    pub fn spawn(self, cx: &App) -> (MessageStream, Task<()>) {
+        let (mut tx, rx) = (self.tx, self.rx);
+
+        let (message_tx, message_rx) = unbounded();
+
+        let handle_io = |executor: BackgroundExecutor| async move {
+            // Send messages on this frequency so the connection isn't closed.
+            let keepalive_timer = executor.timer(KEEPALIVE_INTERVAL).fuse();
+            futures::pin_mut!(keepalive_timer);
+
+            let rx = rx.fuse();
+            pin_mut!(rx);
+
+            loop {
+                futures::select_biased! {
+                    _ = keepalive_timer => {
+                        let _ = tx.send(FrameView::ping(Vec::new())).await;
+
+                        keepalive_timer.set(executor.timer(KEEPALIVE_INTERVAL).fuse());
+                    }
+                    frame = rx.next() => {
+                        let Some(frame) = frame else {
+                            break;
+                        };
+
+                        match frame.opcode {
+                            OpCode::Binary => {
+                                let message_result = MessageToClient::deserialize(&frame.payload);
+                                message_tx.unbounded_send(message_result).ok();
+                            }
+                            OpCode::Close => {
+                                break;
+                            }
+                            _ => {}
+                        }
+                    }
+                }
+            }
+        };
+
+        let task = cx.spawn(async move |cx| handle_io(cx.background_executor().clone()).await);
+
+        (message_rx.into_stream().boxed(), task)
+    }
+}
diff --git a/crates/cloud_api_types/Cargo.toml b/crates/cloud_api_types/Cargo.toml
index 868797df3b..28e0a36a44 100644
--- a/crates/cloud_api_types/Cargo.toml
+++ b/crates/cloud_api_types/Cargo.toml
@@ -12,7 +12,9 @@ workspace = true
 path = "src/cloud_api_types.rs"

 [dependencies]
+anyhow.workspace = true
 chrono.workspace = true
+ciborium.workspace = true
 cloud_llm_client.workspace = true
 serde.workspace = true
 workspace-hack.workspace = true
diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs
index b38b38cde1..fa189cd3b5 100644
--- a/crates/cloud_api_types/src/cloud_api_types.rs
+++ b/crates/cloud_api_types/src/cloud_api_types.rs
@@ -1,4 +1,5 @@
 mod timestamp;
+pub mod websocket_protocol;

 use serde::{Deserialize, Serialize};
diff --git a/crates/cloud_api_types/src/websocket_protocol.rs b/crates/cloud_api_types/src/websocket_protocol.rs
new file mode 100644
index 0000000000..75f6a73b43
--- /dev/null
+++ b/crates/cloud_api_types/src/websocket_protocol.rs
@@ -0,0 +1,28 @@
+use anyhow::{Context as _, Result};
+use serde::{Deserialize, Serialize};
+
+/// The version of the Cloud WebSocket protocol.
+pub const PROTOCOL_VERSION: u32 = 0;
+
+/// The name of the header used to indicate the protocol version in use.
+pub const PROTOCOL_VERSION_HEADER_NAME: &str = "x-zed-protocol-version";
+
+/// A message from Cloud to the Zed client.
+#[derive(Debug, Serialize, Deserialize)]
+pub enum MessageToClient {
+    /// The user was updated and should be refreshed.
+    UserUpdated,
+}
+
+impl MessageToClient {
+    pub fn serialize(&self) -> Result<Vec<u8>> {
+        let mut buffer = Vec::new();
+        ciborium::into_writer(self, &mut buffer).context("failed to serialize message")?;
+
+        Ok(buffer)
+    }
+
+    pub fn deserialize(data: &[u8]) -> Result<Self> {
+        ciborium::from_reader(data).context("failed to deserialize message")
+    }
+}
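// ---------------------------------------------------------------------------
// A round-trip sketch (an illustration, not part of the diff) of the wire
// format defined above: `MessageToClient` values are CBOR-encoded with
// `ciborium` via the `serialize`/`deserialize` helpers, and travel as binary
// WebSocket frames.
//
// #[cfg(test)]
// mod wire_format_example {
//     use super::MessageToClient;
//
//     #[test]
//     fn message_to_client_round_trips_through_cbor() {
//         let bytes = MessageToClient::UserUpdated
//             .serialize()
//             .expect("CBOR encoding succeeds");
//         let decoded =
//             MessageToClient::deserialize(&bytes).expect("CBOR decoding succeeds");
//         assert!(matches!(decoded, MessageToClient::UserUpdated));
//     }
// }
// ---------------------------------------------------------------------------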
diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs
index 31635575a8..82f74d910b 100644
--- a/crates/collab/src/db/queries/projects.rs
+++ b/crates/collab/src/db/queries/projects.rs
@@ -699,7 +699,10 @@ impl Database {
                         language_server::Column::ProjectId,
                         language_server::Column::Id,
                     ])
-                    .update_column(language_server::Column::Name)
+                    .update_columns([
+                        language_server::Column::Name,
+                        language_server::Column::Capabilities,
+                    ])
                     .to_owned(),
             )
             .exec(&*tx)
diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs
index bb7c2ba1cd..51e4ff8965 100644
--- a/crates/collab_ui/src/collab_panel.rs
+++ b/crates/collab_ui/src/collab_panel.rs
@@ -3053,7 +3053,7 @@ impl Render for CollabPanel {
             .on_action(cx.listener(CollabPanel::move_channel_down))
             .track_focus(&self.focus_handle)
             .size_full()
-            .child(if self.user_store.read(cx).current_user().is_none() {
+            .child(if !self.client.status().borrow().is_connected() {
                 self.render_signed_out(cx)
             } else {
                 self.render_signed_in(window, cx)
diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs
index 49ae2b9d9c..166a582c70 100644
--- a/crates/copilot/src/copilot.rs
+++ b/crates/copilot/src/copilot.rs
@@ -21,7 +21,7 @@ use language::{
     point_from_lsp, point_to_lsp,
 };
 use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId, LanguageServerName};
-use node_runtime::NodeRuntime;
+use node_runtime::{NodeRuntime, VersionCheck};
 use parking_lot::Mutex;
 use project::DisableAiSettings;
 use request::StatusNotification;
@@ -1169,9 +1169,8 @@ async fn get_copilot_lsp(fs: Arc<dyn Fs>, node_runtime: NodeRuntime) -> anyhow::
     const SERVER_PATH: &str =
         "node_modules/@github/copilot-language-server/dist/language-server.js";

-    let latest_version = node_runtime
-        .npm_package_latest_version(PACKAGE_NAME)
-        .await?;
+    // pinning it: https://github.com/zed-industries/zed/issues/36093
+    const PINNED_VERSION: &str = "1.354";

     let server_path = paths::copilot_dir().join(SERVER_PATH);

     fs.create_dir(paths::copilot_dir()).await?;
@@ -1181,12 +1180,13 @@ async fn get_copilot_lsp(fs: Arc<dyn Fs>, node_runtime: NodeRuntime) -> anyhow::
             PACKAGE_NAME,
             &server_path,
             paths::copilot_dir(),
-            &latest_version,
+            &PINNED_VERSION,
+            VersionCheck::VersionMismatch,
         )
         .await;
     if should_install {
         node_runtime
-            .npm_install_packages(paths::copilot_dir(), &[(PACKAGE_NAME, &latest_version)])
+            .npm_install_packages(paths::copilot_dir(), &[(PACKAGE_NAME, &PINNED_VERSION)])
             .await?;
     }
diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml
index 641a97765a..2420b499f8 100644
--- a/crates/crashes/Cargo.toml
+++ b/crates/crashes/Cargo.toml
@@ -10,7 +10,10 @@ crash-handler.workspace = true
 log.workspace = true
 minidumper.workspace = true
 paths.workspace = true
+release_channel.workspace = true
 smol.workspace = true
+serde.workspace = true
+serde_json.workspace = true
 workspace-hack.workspace = true

 [lints]
diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs
index cfb4b57d5d..ddf6468be8 100644
--- a/crates/crashes/src/crashes.rs
+++ b/crates/crashes/src/crashes.rs
@@ -1,15 +1,18 @@
 use crash_handler::CrashHandler;
 use log::info;
 use minidumper::{Client, LoopAction, MinidumpBinary};
+use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
+use serde::{Deserialize, Serialize};

 use std::{
     env,
-    fs::File,
+    fs::{self, File},
     io,
+    panic::Location,
     path::{Path, PathBuf},
     process::{self, Command},
     sync::{
-        OnceLock,
+        Arc, OnceLock,
         atomic::{AtomicBool, Ordering},
     },
     thread,
@@ -17,12 +20,17 @@ use std::{
 };

 // set once the crash handler has initialized and the client has connected to it
-pub static CRASH_HANDLER: AtomicBool = AtomicBool::new(false);
+pub static CRASH_HANDLER: OnceLock<Arc<Client>> = OnceLock::new();
 // set when the first minidump request is made to avoid generating duplicate crash reports
 pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false);
-const CRASH_HANDLER_TIMEOUT: Duration = Duration::from_secs(60);
+const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60);
+const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10);
+
+pub async fn init(crash_init: InitCrashHandler) {
+    if *RELEASE_CHANNEL == ReleaseChannel::Dev && env::var("ZED_GENERATE_MINIDUMPS").is_err() {
+        return;
+    }

-pub async fn init(id: String) {
     let exe = env::current_exe().expect("unable to find ourselves");
     let zed_pid = process::id();
     // TODO: we should be able to get away with using 1 crash-handler process per machine,
@@ -53,9 +61,11 @@ pub async fn init(id: String) {
         smol::Timer::after(retry_frequency).await;
     }
     let client = maybe_client.unwrap();
-    client.send_message(1, id).unwrap(); // set session id on the server
+    client
+        .send_message(1, serde_json::to_vec(&crash_init).unwrap())
+        .unwrap();

-    let client = std::sync::Arc::new(client);
+    let client = Arc::new(client);
     let handler = crash_handler::CrashHandler::attach(unsafe {
         let client = client.clone();
         crash_handler::make_crash_event(move |crash_context: &crash_handler::CrashContext| {
@@ -64,7 +74,6 @@ pub async fn init(id: String) {
                 .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
                 .is_ok()
             {
-                client.send_message(2, "mistakes were made").unwrap();
                 client.ping().unwrap();
                 client.request_dump(crash_context).is_ok()
             } else {
@@ -79,7 +88,7 @@ pub async fn init(id: String) {
     {
         handler.set_ptracer(Some(server_pid));
     }
-    CRASH_HANDLER.store(true, Ordering::Release);
+    CRASH_HANDLER.set(client.clone()).ok();
     std::mem::forget(handler);
     info!("crash handler registered");

@@ -90,14 +99,43 @@ pub async fn init(id: String) {
 }

 pub struct CrashServer {
-    session_id: OnceLock<String>,
+    initialization_params: OnceLock<InitCrashHandler>,
+    panic_info: OnceLock<CrashPanic>,
+    has_connection: Arc<AtomicBool>,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+pub struct CrashInfo {
+    pub init: InitCrashHandler,
+    pub panic: Option<CrashPanic>,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+pub struct InitCrashHandler {
+    pub session_id: String,
+    pub zed_version: String,
+    pub release_channel: String,
+    pub commit_sha: String,
+    // pub gpu: String,
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+pub struct CrashPanic {
+    pub message: String,
+    pub span: String,
 }

 impl minidumper::ServerHandler for CrashServer {
     fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> {
-        let err_message = "Need to send a message with the ID upon starting the crash handler";
+        let err_message = "Missing initialization data";
         let dump_path = paths::logs_dir()
-            .join(self.session_id.get().expect(err_message))
+            .join(
+                &self
+                    .initialization_params
+                    .get()
+                    .expect(err_message)
+                    .session_id,
+            )
             .with_extension("dmp");
         let file = File::create(&dump_path)?;
         Ok((file, dump_path))
     }
@@ -114,35 +152,71 @@ impl minidumper::ServerHandler for CrashServer {
                 info!("failed to write minidump: {:#}", e);
             }
         }
+
+        let crash_info = CrashInfo {
+            init: self
+                .initialization_params
+                .get()
+                .expect("not initialized")
+                .clone(),
+            panic: self.panic_info.get().cloned(),
+        };
+
+        let crash_data_path = paths::logs_dir()
+            .join(&crash_info.init.session_id)
+            .with_extension("json");
+
+        fs::write(crash_data_path, serde_json::to_vec(&crash_info).unwrap()).ok();
+
         LoopAction::Exit
     }

     fn on_message(&self, kind: u32, buffer: Vec<u8>) {
-        let message = String::from_utf8(buffer).expect("invalid utf-8");
-        info!("kind: {kind}, message: {message}",);
-        if kind == 1 {
-            self.session_id
-                .set(message)
-                .expect("session id already initialized");
+        match kind {
+            1 => {
+                let init_data =
+                    serde_json::from_slice::<InitCrashHandler>(&buffer).expect("invalid init data");
+                self.initialization_params
+                    .set(init_data)
+                    .expect("already initialized");
+            }
+            2 => {
+                let panic_data =
+                    serde_json::from_slice::<CrashPanic>(&buffer).expect("invalid panic data");
+                self.panic_info.set(panic_data).expect("already panicked");
+            }
+            _ => {
+                panic!("invalid message kind");
+            }
         }
     }

-    fn on_client_disconnected(&self, clients: usize) -> LoopAction {
-        info!("client disconnected, {clients} remaining");
-        if clients == 0 {
-            LoopAction::Exit
-        } else {
-            LoopAction::Continue
-        }
+    fn on_client_disconnected(&self, _clients: usize) -> LoopAction {
+        LoopAction::Exit
+    }
+
+    fn on_client_connected(&self, _clients: usize) -> LoopAction {
+        self.has_connection.store(true, Ordering::SeqCst);
+        LoopAction::Continue
     }
 }

-pub fn handle_panic() {
+pub fn handle_panic(message: String, span: Option<&Location>) {
+    let span = span
+        .map(|loc| format!("{}:{}", loc.file(), loc.line()))
+        .unwrap_or_default();
+
     // wait 500ms for the crash handler process to start up
     // if it's still not there just write panic info and no minidump
     let retry_frequency = Duration::from_millis(100);
     for _ in 0..5 {
-        if CRASH_HANDLER.load(Ordering::Acquire) {
+        if let Some(client) = CRASH_HANDLER.get() {
+            client
+                .send_message(
+                    2,
+                    serde_json::to_vec(&CrashPanic { message, span }).unwrap(),
+                )
+                .ok();
             log::error!("triggering a crash to generate a minidump...");
             #[cfg(target_os = "linux")]
             CrashHandler.simulate_signal(crash_handler::Signal::Trap as u32);
@@ -159,14 +233,30 @@ pub fn crash_server(socket: &Path) {
         log::info!("Couldn't create socket, there may already be a running crash server");
         return;
     };
-    let ab = AtomicBool::new(false);
+
+    let shutdown = Arc::new(AtomicBool::new(false));
+    let has_connection = Arc::new(AtomicBool::new(false));
+
+    std::thread::spawn({
+        let shutdown = shutdown.clone();
+        let has_connection = has_connection.clone();
+        move || {
+            std::thread::sleep(CRASH_HANDLER_CONNECT_TIMEOUT);
+            if !has_connection.load(Ordering::SeqCst) {
+                shutdown.store(true, Ordering::SeqCst);
+            }
+        }
+    });
+
     server
         .run(
             Box::new(CrashServer {
-                session_id: OnceLock::new(),
+                initialization_params: OnceLock::new(),
+                panic_info: OnceLock::new(),
+                has_connection,
             }),
-            &ab,
-            Some(CRASH_HANDLER_TIMEOUT),
+            &shutdown,
+            Some(CRASH_HANDLER_PING_TIMEOUT),
         )
         .expect("failed to run server");
 }
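// ---------------------------------------------------------------------------
// A sketch (an illustration, not part of the diff) of the crash-handler IPC
// protocol as revised above: message kind 1 carries the JSON-encoded
// `InitCrashHandler` metadata at startup, and kind 2 carries a JSON-encoded
// `CrashPanic` when a panic fires. All field values below are placeholders.
//
// fn send_crash_metadata(client: &minidumper::Client) -> anyhow::Result<()> {
//     let init = InitCrashHandler {
//         session_id: "11111111-2222-3333-4444-555555555555".into(),
//         zed_version: "0.199.10".into(),
//         release_channel: "stable".into(),
//         commit_sha: "0000000000000000000000000000000000000000".into(),
//     };
//     client.send_message(1, serde_json::to_vec(&init)?)?; // kind 1: init
//
//     let panic = CrashPanic {
//         message: "called `Option::unwrap()` on a `None` value".into(),
//         span: "crates/example/src/lib.rs:42".into(),
//     };
//     client.send_message(2, serde_json::to_vec(&panic)?)?; // kind 2: panic
//     Ok(())
// }
// ---------------------------------------------------------------------------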
diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs
index 461ce6fbb3..a2bd934311 100644
--- a/crates/dap_adapters/src/python.rs
+++ b/crates/dap_adapters/src/python.rs
@@ -152,6 +152,9 @@ impl PythonDebugAdapter {
         maybe!(async move {
             let response = latest_release.filter(|response| response.status().is_success())?;

+            let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME);
+            std::fs::create_dir_all(&download_dir).ok()?;
+
             let mut output = String::new();
             response
                 .into_body()
diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs
index e1647215bc..6980037664 100644
--- a/crates/editor/src/element.rs
+++ b/crates/editor/src/element.rs
@@ -8028,12 +8028,20 @@ impl Element for EditorElement {
                     autoscroll_containing_element,
                     needs_horizontal_autoscroll,
                 ) = self.editor.update(cx, |editor, cx| {
-                    let autoscroll_request = editor.autoscroll_request();
+                    let autoscroll_request = editor.scroll_manager.take_autoscroll_request();
+
                     let autoscroll_containing_element =
                         autoscroll_request.is_some() || editor.has_pending_selection();

                     let (needs_horizontal_autoscroll, was_scrolled) = editor
-                        .autoscroll_vertically(bounds, line_height, max_scroll_top, window, cx);
+                        .autoscroll_vertically(
+                            bounds,
+                            line_height,
+                            max_scroll_top,
+                            autoscroll_request,
+                            window,
+                            cx,
+                        );
                     if was_scrolled.0 {
                         snapshot = editor.snapshot(window, cx);
                     }
@@ -8423,7 +8431,11 @@ impl Element for EditorElement {
                     Ok(blocks) => blocks,
                     Err(resized_blocks) => {
                         self.editor.update(cx, |editor, cx| {
-                            editor.resize_blocks(resized_blocks, autoscroll_request, cx)
+                            editor.resize_blocks(
+                                resized_blocks,
+                                autoscroll_request.map(|(autoscroll, _)| autoscroll),
+                                cx,
+                            )
                         });
                         return self.prepaint(None, _inspector_id, bounds, &mut (), window, cx);
                     }
@@ -8468,6 +8480,7 @@ impl Element for EditorElement {
                         scroll_width,
                         em_advance,
                         &line_layouts,
+                        autoscroll_request,
                         window,
                         cx,
                     )
diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs
index ecaf7c11e4..08ff23f8f7 100644
--- a/crates/editor/src/scroll.rs
+++ b/crates/editor/src/scroll.rs
@@ -348,8 +348,8 @@ impl ScrollManager {
         self.show_scrollbars
     }

-    pub fn autoscroll_request(&self) -> Option<Autoscroll> {
-        self.autoscroll_request.map(|(autoscroll, _)| autoscroll)
+    pub fn take_autoscroll_request(&mut self) -> Option<(Autoscroll, bool)> {
+        self.autoscroll_request.take()
     }

     pub fn active_scrollbar_state(&self) -> Option<&ActiveScrollbarState> {
diff --git a/crates/editor/src/scroll/autoscroll.rs b/crates/editor/src/scroll/autoscroll.rs
index e8a1f8da73..88d3b52d76 100644
--- a/crates/editor/src/scroll/autoscroll.rs
+++ b/crates/editor/src/scroll/autoscroll.rs
@@ -102,15 +102,12 @@ impl AutoscrollStrategy {
 pub(crate) struct NeedsHorizontalAutoscroll(pub(crate) bool);

 impl Editor {
-    pub fn autoscroll_request(&self) -> Option<Autoscroll> {
-        self.scroll_manager.autoscroll_request()
-    }
-
     pub(crate) fn autoscroll_vertically(
         &mut self,
         bounds: Bounds<Pixels>,
         line_height: Pixels,
         max_scroll_top: f32,
+        autoscroll_request: Option<(Autoscroll, bool)>,
         window: &mut Window,
         cx: &mut Context<Editor>,
     ) -> (NeedsHorizontalAutoscroll, WasScrolled) {
@@ -137,7 +134,7 @@ impl Editor {
             WasScrolled(false)
         };

-        let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else {
+        let Some((autoscroll, local)) = autoscroll_request else {
             return (NeedsHorizontalAutoscroll(false), editor_was_scrolled);
         };

@@ -284,9 +281,12 @@ impl Editor {
         scroll_width: Pixels,
         em_advance: Pixels,
         layouts: &[LineWithInvisibles],
+        autoscroll_request: Option<(Autoscroll, bool)>,
         window: &mut Window,
         cx: &mut Context<Editor>,
     ) -> Option<WasScrolled> {
+        let (_, local) = autoscroll_request?;
+
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
         let selections = self.selections.all::<Point>(cx);
         let mut scroll_position = self.scroll_manager.scroll_position(&display_map);
@@ -335,10 +335,10 @@ impl Editor {
         let was_scrolled = if target_left < scroll_left {
             scroll_position.x = target_left / em_advance;
-            self.set_scroll_position_internal(scroll_position, true, true, window, cx)
+            self.set_scroll_position_internal(scroll_position, local, true, window, cx)
         } else if target_right > scroll_right {
             scroll_position.x = (target_right - viewport_width) / em_advance;
-            self.set_scroll_position_internal(scroll_position, true, true, window, cx)
+            self.set_scroll_position_internal(scroll_position, local, true, window, cx)
         } else {
             WasScrolled(false)
         };
diff --git a/crates/feature_flags/src/feature_flags.rs b/crates/feature_flags/src/feature_flags.rs
index 631bafc841..ef357adf35 100644
--- a/crates/feature_flags/src/feature_flags.rs
+++ b/crates/feature_flags/src/feature_flags.rs
@@ -158,6 +158,11 @@ where
     }
 }

+#[derive(Debug)]
+pub struct OnFlagsReady {
+    pub is_staff: bool,
+}
+
 pub trait FeatureFlagAppExt {
     fn wait_for_flag<T: FeatureFlag>(&mut self) -> WaitForFlag;

@@ -169,6 +174,10 @@ pub trait FeatureFlagAppExt {
     fn has_flag<T: FeatureFlag>(&self) -> bool;
     fn is_staff(&self) -> bool;

+    fn on_flags_ready<F>(&mut self, callback: F) -> Subscription
+    where
+        F: FnMut(OnFlagsReady, &mut App) + 'static;
+
     fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
     where
         F: FnMut(bool, &mut App) + 'static;
@@ -198,6 +207,21 @@ impl FeatureFlagAppExt for App {
             .unwrap_or(false)
     }

+    fn on_flags_ready<F>(&mut self, mut callback: F) -> Subscription
+    where
+        F: FnMut(OnFlagsReady, &mut App) + 'static,
+    {
+        self.observe_global::<FeatureFlags>(move |cx| {
+            let feature_flags = cx.global::<FeatureFlags>();
+            callback(
+                OnFlagsReady {
+                    is_staff: feature_flags.staff,
+                },
+                cx,
+            );
+        })
+    }
+
     fn observe_flag<T: FeatureFlag, F>(&mut self, mut callback: F) -> Subscription
     where
         F: FnMut(bool, &mut App) + 'static,
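// ---------------------------------------------------------------------------
// A usage sketch (an illustration, not part of the diff) for the new
// `on_flags_ready` hook above: it observes the `FeatureFlags` global and hands
// the callback the staff bit, which `sign_in_with_optional_connect` uses to
// decide whether to auto-connect to Collab.
//
// fn log_staff_status(cx: &mut gpui::App) {
//     use feature_flags::FeatureFlagAppExt as _;
//
//     cx.on_flags_ready(|state, _cx| {
//         log::info!("feature flags loaded; is_staff = {}", state.is_staff);
//     })
//     .detach();
// }
// ---------------------------------------------------------------------------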
diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs
index 629654014d..a686d8c45b 100644
--- a/crates/gpui/src/platform/mac/metal_renderer.rs
+++ b/crates/gpui/src/platform/mac/metal_renderer.rs
@@ -314,6 +314,15 @@ impl MetalRenderer {
     }

     fn update_path_intermediate_textures(&mut self, size: Size<DevicePixels>) {
+        // We are uncertain when this happens, but sometimes size can be 0 here. Most likely before
+        // the layout pass on window creation. Zero-sized texture creation causes SIGABRT.
+        // https://github.com/zed-industries/zed/issues/36229
+        if size.width.0 <= 0 || size.height.0 <= 0 {
+            self.path_intermediate_texture = None;
+            self.path_intermediate_msaa_texture = None;
+            return;
+        }
+
         let texture_descriptor = metal::TextureDescriptor::new();
         texture_descriptor.set_width(size.width.0 as u64);
         texture_descriptor.set_height(size.height.0 as u64);
diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs
index a19c13b0ff..89309ff344 100644
--- a/crates/http_client/src/github.rs
+++ b/crates/http_client/src/github.rs
@@ -71,11 +71,19 @@ pub async fn latest_github_release(
         }
     };

-    releases
+    let mut release = releases
         .into_iter()
         .filter(|release| !require_assets || !release.assets.is_empty())
         .find(|release| release.pre_release == pre_release)
-        .context("finding a prerelease")
+        .context("finding a prerelease")?;
+    release.assets.iter_mut().for_each(|asset| {
+        if let Some(digest) = &mut asset.digest {
+            if let Some(stripped) = digest.strip_prefix("sha256:") {
+                *digest = stripped.to_owned();
+            }
+        }
+    });
+    Ok(release)
 }

 pub async fn get_release_by_tag_name(
diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml
index 841be60b0e..f9920623b5 100644
--- a/crates/language_model/Cargo.toml
+++ b/crates/language_model/Cargo.toml
@@ -20,6 +20,7 @@ anthropic = { workspace = true, features = ["schemars"] }
 anyhow.workspace = true
 base64.workspace = true
 client.workspace = true
+cloud_api_types.workspace = true
 cloud_llm_client.workspace = true
 collections.workspace = true
 futures.workspace = true
diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs
index 8ae5893410..3b4c1fa269 100644
--- a/crates/language_model/src/model/cloud_model.rs
+++ b/crates/language_model/src/model/cloud_model.rs
@@ -3,11 +3,9 @@ use std::sync::Arc;

 use anyhow::Result;
 use client::Client;
+use cloud_api_types::websocket_protocol::MessageToClient;
 use cloud_llm_client::Plan;
-use gpui::{
-    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal as _,
-};
-use proto::TypedEnvelope;
+use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _};
 use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
 use thiserror::Error;

@@ -82,9 +80,7 @@ impl Global for GlobalRefreshLlmTokenListener {}

 pub struct RefreshLlmTokenEvent;

-pub struct RefreshLlmTokenListener {
-    _llm_token_subscription: client::Subscription,
-}
+pub struct RefreshLlmTokenListener;

 impl EventEmitter<RefreshLlmTokenEvent> for RefreshLlmTokenListener {}

@@ -99,17 +95,21 @@ impl RefreshLlmTokenListener {
     }

     fn new(client: Arc<Client>, cx: &mut Context<Self>) -> Self {
-        Self {
-            _llm_token_subscription: client
-                .add_message_handler(cx.weak_entity(), Self::handle_refresh_llm_token),
-        }
+        client.add_message_to_client_handler({
+            let this = cx.entity();
+            move |message, cx| {
+                Self::handle_refresh_llm_token(this.clone(), message, cx);
+            }
+        });
+
+        Self
     }

-    async fn handle_refresh_llm_token(
-        this: Entity<Self>,
-        _: TypedEnvelope<proto::RefreshLlmToken>,
-        mut cx: AsyncApp,
-    ) -> Result<()> {
-        this.update(&mut cx, |_this, cx| cx.emit(RefreshLlmTokenEvent))
+    fn handle_refresh_llm_token(this: Entity<Self>, message: &MessageToClient, cx: &mut App) {
+        match message {
+            MessageToClient::UserUpdated => {
+                this.update(cx, |_this, cx| cx.emit(RefreshLlmTokenEvent));
+            }
+        }
     }
 }
diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs
index 40dd120761..8aa5c54844 100644
--- a/crates/language_models/src/provider/cloud.rs
+++ b/crates/language_models/src/provider/cloud.rs
@@ -941,6 +941,8 @@ impl LanguageModel for CloudLanguageModel {
                     request,
                     model.id(),
                     model.supports_parallel_tool_calls(),
+                    model.supports_prompt_cache_key(),
+                    None,
                     None,
                 );
                 let llm_api_token = self.llm_api_token.clone();
diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs
index 5185e979b7..64114d5a3e 100644
--- a/crates/language_models/src/provider/open_ai.rs
+++ b/crates/language_models/src/provider/open_ai.rs
@@ -14,7 +14,7 @@ use language_model::{
     RateLimiter, Role, StopReason, TokenUsage,
 };
 use menu;
-use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
+use open_ai::{ImageUrl, Model, ReasoningEffort, ResponseStreamEvent, stream_completion};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{Settings, SettingsStore};
@@ -45,6 +45,7 @@ pub struct AvailableModel {
     pub max_tokens: u64,
     pub max_output_tokens: Option<u64>,
     pub max_completion_tokens: Option<u64>,
+    pub reasoning_effort: Option<ReasoningEffort>,
 }

 pub struct OpenAiLanguageModelProvider {
@@ -213,6 +214,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider {
                     max_tokens: model.max_tokens,
                     max_output_tokens: model.max_output_tokens,
                     max_completion_tokens: model.max_completion_tokens,
+                    reasoning_effort: model.reasoning_effort.clone(),
                 },
             );
         }
@@ -301,7 +303,25 @@ impl LanguageModel for OpenAiLanguageModel {
     }

     fn supports_images(&self) -> bool {
-        false
+        use open_ai::Model;
+        match &self.model {
+            Model::FourOmni
+            | Model::FourOmniMini
+            | Model::FourPointOne
+            | Model::FourPointOneMini
+            | Model::FourPointOneNano
+            | Model::Five
+            | Model::FiveMini
+            | Model::FiveNano
+            | Model::O1
+            | Model::O3
+            | Model::O4Mini => true,
+            Model::ThreePointFiveTurbo
+            | Model::Four
+            | Model::FourTurbo
+            | Model::O3Mini
+            | Model::Custom { .. } => false,
+        }
     }

     fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
@@ -350,7 +370,9 @@ impl LanguageModel for OpenAiLanguageModel {
             request,
             self.model.id(),
             self.model.supports_parallel_tool_calls(),
+            self.model.supports_prompt_cache_key(),
             self.max_output_tokens(),
+            self.model.reasoning_effort(),
         );
         let completions = self.stream_completion(request, cx);
         async move {
@@ -365,7 +387,9 @@ pub fn into_open_ai(
     request: LanguageModelRequest,
     model_id: &str,
     supports_parallel_tool_calls: bool,
+    supports_prompt_cache_key: bool,
     max_output_tokens: Option<u64>,
+    reasoning_effort: Option<ReasoningEffort>,
 ) -> open_ai::Request {
     let stream = !model_id.starts_with("o1-");

@@ -455,6 +479,11 @@ pub fn into_open_ai(
         } else {
             None
         },
+        prompt_cache_key: if supports_prompt_cache_key {
+            request.thread_id
+        } else {
+            None
+        },
         tools: request
             .tools
             .into_iter()
@@ -471,6 +500,7 @@ pub fn into_open_ai(
             LanguageModelToolChoice::Any => open_ai::ToolChoice::Required,
             LanguageModelToolChoice::None => open_ai::ToolChoice::None,
         }),
+        reasoning_effort,
     }
 }

@@ -674,6 +704,10 @@ pub fn count_open_ai_tokens(
                 | Model::O3
                 | Model::O3Mini
                 | Model::O4Mini => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+                // GPT-5 models don't have tiktoken support yet; fall back on gpt-4o tokenizer
+                Model::Five | Model::FiveMini | Model::FiveNano => {
+                    tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
+                }
             }
             .map(|tokens| tokens as u64)
         })
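// ---------------------------------------------------------------------------
// A call-site sketch (an illustration, not part of the diff) of the expanded
// `into_open_ai` signature above. The model id and token budget are made up,
// and `ReasoningEffort::Low` assumes the enum mirrors OpenAI's documented
// effort levels; only the parameter order and meaning come from this patch.
//
// fn example_request(request: LanguageModelRequest) -> open_ai::Request {
//     let supports_parallel_tool_calls = false;
//     // When true, `request.thread_id` is forwarded as `prompt_cache_key` so
//     // OpenAI can route repeated prompts from the same thread to a warm cache.
//     let supports_prompt_cache_key = true;
//     into_open_ai(
//         request,
//         "gpt-5-mini",                 // hypothetical model id
//         supports_parallel_tool_calls,
//         supports_prompt_cache_key,
//         Some(32_768),                 // max_output_tokens
//         Some(ReasoningEffort::Low),   // reasoning_effort
//     )
// }
// ---------------------------------------------------------------------------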
super::LspAdapter for CLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let GitHubLspBinaryVersion { name, url, digest } = - &*version.downcast::().unwrap(); + let GitHubLspBinaryVersion { + name, + url, + digest: expected_digest, + } = *version.downcast::().unwrap(); let version_dir = container_dir.join(format!("clangd_{name}")); let binary_path = version_dir.join("bin/clangd"); @@ -99,7 +102,9 @@ impl super::LspAdapter for CLspAdapter { log::warn!("Unable to run {binary_path:?} asset, redownloading: {err}",) }) }; - if let (Some(actual_digest), Some(expected_digest)) = (&metadata.digest, digest) { + if let (Some(actual_digest), Some(expected_digest)) = + (&metadata.digest, &expected_digest) + { if actual_digest == expected_digest { if validity_check().await.is_ok() { return Ok(binary); @@ -115,8 +120,8 @@ impl super::LspAdapter for CLspAdapter { } download_server_binary( delegate, - url, - digest.as_deref(), + &url, + expected_digest.as_deref(), &container_dir, AssetKind::Zip, ) @@ -125,7 +130,7 @@ impl super::LspAdapter for CLspAdapter { GithubBinaryMetadata::write_to_file( &GithubBinaryMetadata { metadata_version: 1, - digest: digest.clone(), + digest: expected_digest, }, &metadata_path, ) diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index 7725e079be..19329fcc6e 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -103,7 +103,13 @@ impl LspAdapter for CssLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/github_download.rs b/crates/languages/src/github_download.rs index a3cd0a964b..5b0f1d0729 100644 --- a/crates/languages/src/github_download.rs +++ b/crates/languages/src/github_download.rs @@ -18,9 +18,8 @@ impl GithubBinaryMetadata { let metadata_content = async_fs::read_to_string(metadata_path) .await .with_context(|| format!("reading metadata file at {metadata_path:?}"))?; - let metadata: GithubBinaryMetadata = serde_json::from_str(&metadata_content) - .with_context(|| format!("parsing metadata file at {metadata_path:?}"))?; - Ok(metadata) + serde_json::from_str(&metadata_content) + .with_context(|| format!("parsing metadata file at {metadata_path:?}")) } pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> { @@ -62,6 +61,7 @@ pub(crate) async fn download_server_binary( format!("saving archive contents into the temporary file for {url}",) })?; let asset_sha_256 = format!("{:x}", writer.hasher.finalize()); + anyhow::ensure!( asset_sha_256 == expected_sha_256, "{url} asset got SHA-256 mismatch. 
Expected: {expected_sha_256}, Got: {asset_sha_256}", diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index ca82bb2431..019b45d396 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -340,7 +340,13 @@ impl LspAdapter for JsonLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 0524c02fd5..5513324487 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -206,6 +206,7 @@ impl LspAdapter for PythonLspAdapter { &server_path, &container_dir, &version, + Default::default(), ) .await; diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 6545bf64a2..2761181a50 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -22,7 +22,7 @@ use std::{ sync::{Arc, LazyLock}, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; -use util::fs::make_file_executable; +use util::fs::{make_file_executable, remove_matching}; use util::merge_json_value_into; use util::{ResultExt, maybe}; @@ -161,13 +161,13 @@ impl LspAdapter for RustLspAdapter { let asset_name = Self::build_asset_name(); let asset = release .assets - .iter() + .into_iter() .find(|asset| asset.name == asset_name) .with_context(|| format!("no asset found matching `{asset_name:?}`"))?; Ok(Box::new(GitHubLspBinaryVersion { name: release.tag_name, - url: asset.browser_download_url.clone(), - digest: asset.digest.clone(), + url: asset.browser_download_url, + digest: asset.digest, })) } @@ -177,11 +177,11 @@ impl LspAdapter for RustLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let GitHubLspBinaryVersion { name, url, digest } = - &*version.downcast::().unwrap(); - let expected_digest = digest - .as_ref() - .and_then(|digest| digest.strip_prefix("sha256:")); + let GitHubLspBinaryVersion { + name, + url, + digest: expected_digest, + } = *version.downcast::().unwrap(); let destination_path = container_dir.join(format!("rust-analyzer-{name}")); let server_path = match Self::GITHUB_ASSET_KIND { AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place. 
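The `anyhow::ensure!` in github_download.rs is the integrity gate for downloaded assets. A self-contained sketch of the same check, assuming the `sha2` and `anyhow` crates (the real code hashes incrementally while streaming the archive to a temporary file):

```rust
use sha2::{Digest, Sha256};

fn verify_asset(url: &str, contents: &[u8], expected_sha_256: &str) -> anyhow::Result<()> {
    // Hash the downloaded bytes; the real implementation feeds the
    // hasher as chunks are written to disk.
    let mut hasher = Sha256::new();
    hasher.update(contents);
    let asset_sha_256 = format!("{:x}", hasher.finalize());

    anyhow::ensure!(
        asset_sha_256 == expected_sha_256,
        "{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
    );
    Ok(())
}
```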
@@ -212,7 +212,7 @@ impl LspAdapter for RustLspAdapter { }) }; if let (Some(actual_digest), Some(expected_digest)) = - (&metadata.digest, expected_digest) + (&metadata.digest, &expected_digest) { if actual_digest == expected_digest { if validity_check().await.is_ok() { @@ -228,20 +228,20 @@ impl LspAdapter for RustLspAdapter { } } - _ = fs::remove_dir_all(&destination_path).await; download_server_binary( delegate, - url, - expected_digest, + &url, + expected_digest.as_deref(), &destination_path, Self::GITHUB_ASSET_KIND, ) .await?; make_file_executable(&server_path).await?; + remove_matching(&container_dir, |path| server_path != path).await; GithubBinaryMetadata::write_to_file( &GithubBinaryMetadata { metadata_version: 1, - digest: expected_digest.map(ToString::to_string), + digest: expected_digest, }, &metadata_path, ) diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index a7edbb148c..6f03eeda8d 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -108,7 +108,13 @@ impl LspAdapter for TailwindLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index f976b62614..a8ba880889 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -589,6 +589,7 @@ impl LspAdapter for TypeScriptLspAdapter { &server_path, &container_dir, version.typescript_version.as_str(), + Default::default(), ) .await; diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 33751f733e..73498fc579 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -116,6 +116,7 @@ impl LspAdapter for VtslsLspAdapter { &server_path, &container_dir, &latest_version.server_version, + Default::default(), ) .await { @@ -129,6 +130,7 @@ impl LspAdapter for VtslsLspAdapter { &container_dir.join(Self::TYPESCRIPT_TSDK_PATH), &container_dir, &latest_version.typescript_version, + Default::default(), ) .await { diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 815605d524..28be2cc1a4 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -104,7 +104,13 @@ impl LspAdapter for YamlLspAdapter { let should_install_language_server = self .node - .should_install_npm_package(Self::PACKAGE_NAME, &server_path, &container_dir, &version) + .should_install_npm_package( + Self::PACKAGE_NAME, + &server_path, + &container_dir, + &version, + Default::default(), + ) .await; if should_install_language_server { diff --git a/crates/lsp/src/input_handler.rs b/crates/lsp/src/input_handler.rs index db3f1190fc..001ebf1fc9 100644 --- a/crates/lsp/src/input_handler.rs +++ b/crates/lsp/src/input_handler.rs @@ -13,14 +13,15 @@ use parking_lot::Mutex; use smol::io::BufReader; use crate::{ - AnyNotification, AnyResponse, CONTENT_LEN_HEADER, IoHandler, IoKind, RequestId, ResponseHandler, + AnyResponse, CONTENT_LEN_HEADER, IoHandler, IoKind, NotificationOrRequest, RequestId, + ResponseHandler, }; const HEADER_DELIMITER: &[u8; 4] = b"\r\n\r\n"; /// Handler for stdout of language server. 
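The `remove_matching` call added in the rust.rs hunk prunes stale rust-analyzer installs after a successful download, keeping only the fresh binary. A hypothetical blocking stand-in for the async helper, to make the predicate's direction explicit (`true` means delete):

```rust
use std::path::Path;

// Hypothetical synchronous equivalent of `util::fs::remove_matching`:
// delete every directory entry the predicate selects.
fn remove_matching(dir: &Path, predicate: impl Fn(&Path) -> bool) -> std::io::Result<()> {
    for entry in std::fs::read_dir(dir)? {
        let path = entry?.path();
        if predicate(&path) {
            // Ignore individual failures, mirroring best-effort cleanup.
            let _ = if path.is_dir() {
                std::fs::remove_dir_all(&path)
            } else {
                std::fs::remove_file(&path)
            };
        }
    }
    Ok(())
}
```

In the hunk above the predicate is `|path| server_path != path`, so everything in `container_dir` except the newly installed server binary is removed.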
pub struct LspStdoutHandler { pub(super) loop_handle: Task>, - pub(super) notifications_channel: UnboundedReceiver, + pub(super) incoming_messages: UnboundedReceiver, } async fn read_headers(reader: &mut BufReader, buffer: &mut Vec) -> Result<()> @@ -54,13 +55,13 @@ impl LspStdoutHandler { let loop_handle = cx.spawn(Self::handler(stdout, tx, response_handlers, io_handlers)); Self { loop_handle, - notifications_channel, + incoming_messages: notifications_channel, } } async fn handler( stdout: Input, - notifications_sender: UnboundedSender, + notifications_sender: UnboundedSender, response_handlers: Arc>>>, io_handlers: Arc>>, ) -> anyhow::Result<()> @@ -96,7 +97,7 @@ impl LspStdoutHandler { } } - if let Ok(msg) = serde_json::from_slice::(&buffer) { + if let Ok(msg) = serde_json::from_slice::(&buffer) { notifications_sender.unbounded_send(msg)?; } else if let Ok(AnyResponse { id, error, result, .. diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index b9701a83d2..3f45d2e6fc 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -242,7 +242,7 @@ struct Notification<'a, T> { /// Language server RPC notification message before it is deserialized into a concrete type. #[derive(Debug, Clone, Deserialize)] -struct AnyNotification { +struct NotificationOrRequest { #[serde(default)] id: Option, method: String, @@ -252,7 +252,10 @@ struct AnyNotification { #[derive(Debug, Serialize, Deserialize)] struct Error { + code: i64, message: String, + #[serde(default)] + data: Option, } pub trait LspRequestFuture: Future> { @@ -364,6 +367,7 @@ impl LanguageServer { notification.method, serde_json::to_string_pretty(¬ification.params).unwrap(), ); + false }, ); @@ -389,7 +393,7 @@ impl LanguageServer { Stdin: AsyncWrite + Unpin + Send + 'static, Stdout: AsyncRead + Unpin + Send + 'static, Stderr: AsyncRead + Unpin + Send + 'static, - F: FnMut(AnyNotification) + 'static + Send + Sync + Clone, + F: Fn(&NotificationOrRequest) -> bool + 'static + Send + Sync + Clone, { let (outbound_tx, outbound_rx) = channel::unbounded::(); let (output_done_tx, output_done_rx) = barrier::channel(); @@ -400,14 +404,34 @@ impl LanguageServer { let io_handlers = Arc::new(Mutex::new(HashMap::default())); let stdout_input_task = cx.spawn({ - let on_unhandled_notification = on_unhandled_notification.clone(); + let unhandled_notification_wrapper = { + let response_channel = outbound_tx.clone(); + async move |msg: NotificationOrRequest| { + let did_handle = on_unhandled_notification(&msg); + if !did_handle && let Some(message_id) = msg.id { + let response = AnyResponse { + jsonrpc: JSON_RPC_VERSION, + id: message_id, + error: Some(Error { + code: -32601, + message: format!("Unrecognized method `{}`", msg.method), + data: None, + }), + result: None, + }; + if let Ok(response) = serde_json::to_string(&response) { + response_channel.send(response).await.ok(); + } + } + } + }; let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); let io_handlers = io_handlers.clone(); async move |cx| { - Self::handle_input( + Self::handle_incoming_messages( stdout, - on_unhandled_notification, + unhandled_notification_wrapper, notification_handlers, response_handlers, io_handlers, @@ -433,7 +457,7 @@ impl LanguageServer { stdout.or(stderr) }); let output_task = cx.background_spawn({ - Self::handle_output( + Self::handle_outgoing_messages( stdin, outbound_rx, output_done_tx, @@ -479,9 +503,9 @@ impl LanguageServer { self.code_action_kinds.clone() } - async fn handle_input( + async fn 
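With the wrapper introduced above, a server-to-client request whose method has no registered handler now gets a spec-compliant reply instead of silence. The response shape, sketched with `serde_json` directly (the crate's own `AnyResponse` and `Error` types carry the same fields; -32601 is JSON-RPC's "Method not found" code):

```rust
use serde_json::json;

fn method_not_found(id: i64, method: &str) -> String {
    json!({
        "jsonrpc": "2.0",
        "id": id,
        "error": {
            "code": -32601,
            "message": format!("Unrecognized method `{method}`"),
        }
    })
    .to_string()
}
```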
handle_incoming_messages( stdout: Stdout, - mut on_unhandled_notification: F, + on_unhandled_notification: impl AsyncFn(NotificationOrRequest) + 'static + Send, notification_handlers: Arc>>, response_handlers: Arc>>>, io_handlers: Arc>>, @@ -489,7 +513,6 @@ impl LanguageServer { ) -> anyhow::Result<()> where Stdout: AsyncRead + Unpin + Send + 'static, - F: FnMut(AnyNotification) + 'static + Send, { use smol::stream::StreamExt; let stdout = BufReader::new(stdout); @@ -506,15 +529,19 @@ impl LanguageServer { cx.background_executor().clone(), ); - while let Some(msg) = input_handler.notifications_channel.next().await { - { + while let Some(msg) = input_handler.incoming_messages.next().await { + let unhandled_message = { let mut notification_handlers = notification_handlers.lock(); if let Some(handler) = notification_handlers.get_mut(msg.method.as_str()) { handler(msg.id, msg.params.unwrap_or(Value::Null), cx); + None } else { - drop(notification_handlers); - on_unhandled_notification(msg); + Some(msg) } + }; + + if let Some(msg) = unhandled_message { + on_unhandled_notification(msg).await; } // Don't starve the main thread when receiving lots of notifications at once. @@ -558,7 +585,7 @@ impl LanguageServer { } } - async fn handle_output( + async fn handle_outgoing_messages( stdin: Stdin, outbound_rx: channel::Receiver, output_done_tx: barrier::Sender, @@ -1036,7 +1063,9 @@ impl LanguageServer { jsonrpc: JSON_RPC_VERSION, id, value: LspResult::Error(Some(Error { + code: lsp_types::error_codes::REQUEST_FAILED, message: error.to_string(), + data: None, })), }, }; @@ -1057,7 +1086,9 @@ impl LanguageServer { id, result: None, error: Some(Error { + code: -32700, // Parse error message: error.to_string(), + data: None, }), }; if let Some(response) = serde_json::to_string(&response).log_err() { @@ -1559,7 +1590,7 @@ impl FakeLanguageServer { root, Some(workspace_folders.clone()), cx, - |_| {}, + |_| false, ); server.process_name = process_name; let fake = FakeLanguageServer { @@ -1582,9 +1613,10 @@ impl FakeLanguageServer { notifications_tx .try_send(( msg.method.to_string(), - msg.params.unwrap_or(Value::Null).to_string(), + msg.params.as_ref().unwrap_or(&Value::Null).to_string(), )) .ok(); + true }, ); server.process_name = name.as_str().into(); @@ -1862,7 +1894,7 @@ mod tests { #[gpui::test] fn test_deserialize_string_digit_id() { let json = r#"{"jsonrpc":"2.0","id":"2","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Str("2".to_string()); assert_eq!(notification.id, Some(expected_id)); @@ -1871,7 +1903,7 @@ mod tests { #[gpui::test] fn test_deserialize_string_id() { let json = r#"{"jsonrpc":"2.0","id":"anythingAtAll","method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Str("anythingAtAll".to_string()); assert_eq!(notification.id, Some(expected_id)); @@ -1880,7 +1912,7 @@ mod tests { #[gpui::test] fn test_deserialize_int_id() { let json = 
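The reshaped dispatch loop also fixes a subtle hazard: the notification-handlers mutex is now released before the async fallback runs, so the lock is never held across an await point. The pattern in isolation, with simplified stand-in types:

```rust
use std::collections::HashMap;
use std::sync::Mutex;

type Handler = Box<dyn FnMut(String) + Send>;

async fn dispatch(
    handlers: &Mutex<HashMap<&'static str, Handler>>,
    method: &str,
    params: String,
    on_unhandled: impl AsyncFn(String),
) {
    // Decide under the lock, but move any unhandled message out of the
    // critical section instead of awaiting while the guard is alive.
    let unhandled = {
        let mut handlers = handlers.lock().unwrap();
        if let Some(handler) = handlers.get_mut(method) {
            handler(params);
            None
        } else {
            Some(params)
        }
    }; // guard dropped here

    if let Some(params) = unhandled {
        on_unhandled(params).await;
    }
}
```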
r#"{"jsonrpc":"2.0","id":2,"method":"workspace/configuration","params":{"items":[{"scopeUri":"file:///Users/mph/Devel/personal/hello-scala/","section":"metals"}]}}"#; - let notification = serde_json::from_str::(json) + let notification = serde_json::from_str::(json) .expect("message with string id should be parsed"); let expected_id = RequestId::Int(2); assert_eq!(notification.id, Some(expected_id)); diff --git a/crates/node_runtime/src/node_runtime.rs b/crates/node_runtime/src/node_runtime.rs index 08698a1d6c..6fcc3a728a 100644 --- a/crates/node_runtime/src/node_runtime.rs +++ b/crates/node_runtime/src/node_runtime.rs @@ -29,6 +29,15 @@ pub struct NodeBinaryOptions { pub use_paths: Option<(PathBuf, PathBuf)>, } +#[derive(Default)] +pub enum VersionCheck { + /// Check whether the installed and requested version have a mismatch + VersionMismatch, + /// Only check whether the currently installed version is older than the newest one + #[default] + OlderVersion, +} + #[derive(Clone)] pub struct NodeRuntime(Arc>); @@ -287,6 +296,7 @@ impl NodeRuntime { local_executable_path: &Path, local_package_directory: &Path, latest_version: &str, + version_check: VersionCheck, ) -> bool { // In the case of the local system not having the package installed, // or in the instances where we fail to parse package.json data, @@ -311,7 +321,10 @@ impl NodeRuntime { return true; }; - installed_version < latest_version + match version_check { + VersionCheck::VersionMismatch => installed_version != latest_version, + VersionCheck::OlderVersion => installed_version < latest_version, + } } } diff --git a/crates/open_ai/Cargo.toml b/crates/open_ai/Cargo.toml index 2d40cd2735..bae00f0a8e 100644 --- a/crates/open_ai/Cargo.toml +++ b/crates/open_ai/Cargo.toml @@ -20,6 +20,7 @@ anyhow.workspace = true futures.workspace = true http_client.workspace = true schemars = { workspace = true, optional = true } +log.workspace = true serde.workspace = true serde_json.workspace = true strum.workspace = true diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 12a5cf52d2..604e8fe622 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -74,6 +74,12 @@ pub enum Model { O3, #[serde(rename = "o4-mini")] O4Mini, + #[serde(rename = "gpt-5")] + Five, + #[serde(rename = "gpt-5-mini")] + FiveMini, + #[serde(rename = "gpt-5-nano")] + FiveNano, #[serde(rename = "custom")] Custom { @@ -83,11 +89,13 @@ pub enum Model { max_tokens: u64, max_output_tokens: Option, max_completion_tokens: Option, + reasoning_effort: Option, }, } impl Model { pub fn default_fast() -> Self { + // TODO: Replace with FiveMini since all other models are deprecated Self::FourPointOneMini } @@ -105,6 +113,9 @@ impl Model { "o3-mini" => Ok(Self::O3Mini), "o3" => Ok(Self::O3), "o4-mini" => Ok(Self::O4Mini), + "gpt-5" => Ok(Self::Five), + "gpt-5-mini" => Ok(Self::FiveMini), + "gpt-5-nano" => Ok(Self::FiveNano), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } @@ -123,6 +134,9 @@ impl Model { Self::O3Mini => "o3-mini", Self::O3 => "o3", Self::O4Mini => "o4-mini", + Self::Five => "gpt-5", + Self::FiveMini => "gpt-5-mini", + Self::FiveNano => "gpt-5-nano", Self::Custom { name, .. } => name, } } @@ -141,6 +155,9 @@ impl Model { Self::O3Mini => "o3-mini", Self::O3 => "o3", Self::O4Mini => "o4-mini", + Self::Five => "gpt-5", + Self::FiveMini => "gpt-5-mini", + Self::FiveNano => "gpt-5-nano", Self::Custom { name, display_name, .. 
} => display_name.as_ref().unwrap_or(name), @@ -161,6 +178,9 @@ impl Model { Self::O3Mini => 200_000, Self::O3 => 200_000, Self::O4Mini => 200_000, + Self::Five => 272_000, + Self::FiveMini => 272_000, + Self::FiveNano => 272_000, Self::Custom { max_tokens, .. } => *max_tokens, } } @@ -182,6 +202,18 @@ impl Model { Self::O3Mini => Some(100_000), Self::O3 => Some(100_000), Self::O4Mini => Some(100_000), + Self::Five => Some(128_000), + Self::FiveMini => Some(128_000), + Self::FiveNano => Some(128_000), + } + } + + pub fn reasoning_effort(&self) -> Option { + match self { + Self::Custom { + reasoning_effort, .. + } => reasoning_effort.to_owned(), + _ => None, } } @@ -197,10 +229,20 @@ impl Model { | Self::FourOmniMini | Self::FourPointOne | Self::FourPointOneMini - | Self::FourPointOneNano => true, + | Self::FourPointOneNano + | Self::Five + | Self::FiveMini + | Self::FiveNano => true, Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false, } } + + /// Returns whether the given model supports the `prompt_cache_key` parameter. + /// + /// If the model does not support the parameter, do not pass it up. + pub fn supports_prompt_cache_key(&self) -> bool { + return true; + } } #[derive(Debug, Serialize, Deserialize)] @@ -220,6 +262,10 @@ pub struct Request { pub parallel_tool_calls: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub tools: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub prompt_cache_key: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub reasoning_effort: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -231,6 +277,16 @@ pub enum ToolChoice { Other(ToolDefinition), } +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "lowercase")] +pub enum ReasoningEffort { + Minimal, + Low, + Medium, + High, +} + #[derive(Clone, Deserialize, Serialize, Debug)] #[serde(tag = "type", rename_all = "snake_case")] pub enum ToolDefinition { @@ -421,7 +477,15 @@ pub async fn stream_completion( Ok(ResponseStreamResult::Err { error }) => { Some(Err(anyhow!(error))) } - Err(error) => Some(Err(anyhow!(error))), + Err(error) => { + log::error!( + "Failed to parse OpenAI response into ResponseStreamResult: `{}`\n\ + Response: `{}`", + error, + line, + ); + Some(Err(anyhow!(error))) + } } } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4489f9f043..a4c04cd06a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7722,12 +7722,19 @@ impl LspStore { pub(crate) fn set_language_server_statuses_from_proto( &mut self, language_servers: Vec, + server_capabilities: Vec, ) { self.language_server_statuses = language_servers .into_iter() - .map(|server| { + .zip(server_capabilities) + .map(|(server, server_capabilities)| { + let server_id = LanguageServerId(server.id as usize); + if let Ok(server_capabilities) = serde_json::from_str(&server_capabilities) { + self.lsp_server_capabilities + .insert(server_id, server_capabilities); + } ( - LanguageServerId(server.id as usize), + server_id, LanguageServerStatus { name: LanguageServerName::from_proto(server.name), pending_work: Default::default(), diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index cca026ec87..7838f5744a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1488,7 +1488,10 @@ impl Project { fs.clone(), cx, ); - 
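Since the hunk above spans both the new GPT-5 variants and `ReasoningEffort`, the wire format is worth pinning down: `rename_all = "lowercase"` serializes the enum to exactly the strings the API accepts, and `skip_serializing_if = "Option::is_none"` keeps absent parameters out of the request body. A quick check:

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "lowercase")]
enum ReasoningEffort {
    Minimal,
    Low,
    Medium,
    High,
}

fn main() {
    // "minimal", "low", "medium", "high" on the wire.
    assert_eq!(
        serde_json::to_string(&ReasoningEffort::Medium).unwrap(),
        "\"medium\""
    );
}
```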
lsp_store.set_language_server_statuses_from_proto(response.payload.language_servers); + lsp_store.set_language_server_statuses_from_proto( + response.payload.language_servers, + response.payload.language_server_capabilities, + ); lsp_store })?; @@ -2319,7 +2322,10 @@ impl Project { self.set_worktrees_from_proto(message.worktrees, cx)?; self.set_collaborators_from_proto(message.collaborators, cx)?; self.lsp_store.update(cx, |lsp_store, _| { - lsp_store.set_language_server_statuses_from_proto(message.language_servers) + lsp_store.set_language_server_statuses_from_proto( + message.language_servers, + message.language_server_capabilities, + ) }); self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync) .unwrap(); diff --git a/crates/proto/proto/app.proto b/crates/proto/proto/app.proto index 353f19adb2..6b42ac9c40 100644 --- a/crates/proto/proto/app.proto +++ b/crates/proto/proto/app.proto @@ -84,11 +84,13 @@ message GetCrashFiles { message GetCrashFilesResponse { repeated CrashReport crashes = 1; + repeated string legacy_panics = 2; } message CrashReport { - optional string panic_contents = 1; - optional bytes minidump_contents = 2; + reserved 1, 2; + string metadata = 3; + bytes minidump_contents = 4; } message Extension { diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4306251e44..d23b4f8cd6 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1484,20 +1484,17 @@ impl RemoteConnection for SshRemoteConnection { identifier = &unique_identifier, ); - if let Some(rust_log) = std::env::var("RUST_LOG").ok() { - start_proxy_command = format!( - "RUST_LOG={} {}", - shlex::try_quote(&rust_log).unwrap(), - start_proxy_command - ) - } - if let Some(rust_backtrace) = std::env::var("RUST_BACKTRACE").ok() { - start_proxy_command = format!( - "RUST_BACKTRACE={} {}", - shlex::try_quote(&rust_backtrace).unwrap(), - start_proxy_command - ) + for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { + if let Some(value) = std::env::var(env_var).ok() { + start_proxy_command = format!( + "{}={} {} ", + env_var, + shlex::try_quote(&value).unwrap(), + start_proxy_command, + ); + } } + if reconnect { start_proxy_command.push_str(" --reconnect"); } @@ -2229,8 +2226,7 @@ impl SshRemoteConnection { #[cfg(not(target_os = "windows"))] { - run_cmd(Command::new("gzip").args(["-9", "-f", &bin_path.to_string_lossy()])) - .await?; + run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?; } #[cfg(target_os = "windows")] { @@ -2462,7 +2458,7 @@ impl ChannelClient { }, async { smol::Timer::after(timeout).await; - anyhow::bail!("Timeout detected") + anyhow::bail!("Timed out resyncing remote client") }, ) .await @@ -2476,7 +2472,7 @@ impl ChannelClient { }, async { smol::Timer::after(timeout).await; - anyhow::bail!("Timeout detected") + anyhow::bail!("Timed out pinging remote client") }, ) .await diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index 9bb5645dc7..dc7fab8c3c 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -34,10 +34,10 @@ use smol::io::AsyncReadExt; use smol::Async; use smol::{net::unix::UnixListener, stream::StreamExt as _}; -use std::collections::HashMap; use std::ffi::OsStr; use std::ops::ControlFlow; use std::str::FromStr; +use std::sync::LazyLock; use std::{env, thread}; use std::{ io::Write, @@ -48,6 +48,13 @@ use std::{ use telemetry_events::LocationData; use util::ResultExt; +pub static VERSION: LazyLock<&str> = 
LazyLock::new(|| match *RELEASE_CHANNEL { + ReleaseChannel::Stable | ReleaseChannel::Preview => env!("ZED_PKG_VERSION"), + ReleaseChannel::Nightly | ReleaseChannel::Dev => { + option_env!("ZED_COMMIT_SHA").unwrap_or("missing-zed-commit-sha") + } +}); + fn init_logging_proxy() { env_logger::builder() .format(|buf, record| { @@ -113,7 +120,6 @@ fn init_logging_server(log_file_path: PathBuf) -> Result>> { fn init_panic_hook(session_id: String) { std::panic::set_hook(Box::new(move |info| { - crashes::handle_panic(); let payload = info .payload() .downcast_ref::<&str>() @@ -121,6 +127,8 @@ fn init_panic_hook(session_id: String) { .or_else(|| info.payload().downcast_ref::().cloned()) .unwrap_or_else(|| "Box".to_string()); + crashes::handle_panic(payload.clone(), info.location()); + let backtrace = backtrace::Backtrace::new(); let mut backtrace = backtrace .frames() @@ -150,14 +158,6 @@ fn init_panic_hook(session_id: String) { (&backtrace).join("\n") ); - let release_channel = *RELEASE_CHANNEL; - let version = match release_channel { - ReleaseChannel::Stable | ReleaseChannel::Preview => env!("ZED_PKG_VERSION"), - ReleaseChannel::Nightly | ReleaseChannel::Dev => { - option_env!("ZED_COMMIT_SHA").unwrap_or("missing-zed-commit-sha") - } - }; - let panic_data = telemetry_events::Panic { thread: thread_name.into(), payload: payload.clone(), @@ -165,9 +165,9 @@ fn init_panic_hook(session_id: String) { file: location.file().into(), line: location.line(), }), - app_version: format!("remote-server-{version}"), + app_version: format!("remote-server-{}", *VERSION), app_commit_sha: option_env!("ZED_COMMIT_SHA").map(|sha| sha.into()), - release_channel: release_channel.dev_name().into(), + release_channel: RELEASE_CHANNEL.dev_name().into(), target: env!("TARGET").to_owned().into(), os_name: telemetry::os_name(), os_version: Some(telemetry::os_version()), @@ -204,8 +204,8 @@ fn handle_crash_files_requests(project: &Entity, client: &Arc, _cx| async move { + let mut legacy_panics = Vec::new(); let mut crashes = Vec::new(); - let mut minidumps_by_session_id = HashMap::new(); let mut children = smol::fs::read_dir(paths::logs_dir()).await?; while let Some(child) = children.next().await { let child = child?; @@ -227,41 +227,31 @@ fn handle_crash_files_requests(project: &Entity, client: &Arc Result<()> { let server_paths = ServerPaths::new(&identifier)?; let id = std::process::id().to_string(); - smol::spawn(crashes::init(id.clone())).detach(); + smol::spawn(crashes::init(crashes::InitCrashHandler { + session_id: id.clone(), + zed_version: VERSION.to_owned(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), + })) + .detach(); init_panic_hook(id); log::info!("starting proxy process. 
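Hoisting the version string into a `LazyLock` static deduplicates the panic hook and the crash-handler init. A sketch of the pattern with a local stand-in for the release-channel global, and `CARGO_PKG_VERSION` standing in for the build-time `ZED_PKG_VERSION` variable:

```rust
use std::sync::LazyLock;

#[derive(Clone, Copy)]
enum ReleaseChannel { Stable, Preview, Nightly, Dev }

// Stand-in for the crate's global from release_channel.
static RELEASE_CHANNEL: ReleaseChannel = ReleaseChannel::Dev;

// Computed once, on first use: released builds report the package
// version, nightly/dev builds report the commit SHA they were built from.
pub static VERSION: LazyLock<&str> = LazyLock::new(|| match RELEASE_CHANNEL {
    ReleaseChannel::Stable | ReleaseChannel::Preview => env!("CARGO_PKG_VERSION"),
    ReleaseChannel::Nightly | ReleaseChannel::Dev => {
        option_env!("ZED_COMMIT_SHA").unwrap_or("missing-zed-commit-sha")
    }
});
```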
PID: {}", std::process::id()); diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index 7802671fec..fb03662290 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -928,14 +928,14 @@ impl<'a> KeybindUpdateTarget<'a> { } let action_name: Value = self.action_name.into(); let value = match self.action_arguments { - Some(args) => { + Some(args) if !args.is_empty() => { let args = serde_json::from_str::(args) .context("Failed to parse action arguments as JSON")?; serde_json::json!([action_name, args]) } - None => action_name, + _ => action_name, }; - return Ok(value); + Ok(value) } fn keystrokes_unparsed(&self) -> String { @@ -1084,6 +1084,24 @@ mod tests { .unindent(), ); + check_keymap_update( + "[]", + KeybindUpdateOperation::add(KeybindUpdateTarget { + keystrokes: &parse_keystrokes("ctrl-a"), + action_name: "zed::SomeAction", + context: None, + action_arguments: Some(""), + }), + r#"[ + { + "bindings": { + "ctrl-a": "zed::SomeAction" + } + } + ]"# + .unindent(), + ); + check_keymap_update( r#"[ { diff --git a/crates/settings_ui/src/keybindings.rs b/crates/settings_ui/src/keybindings.rs index 81c461fed6..0713bc129f 100644 --- a/crates/settings_ui/src/keybindings.rs +++ b/crates/settings_ui/src/keybindings.rs @@ -2148,7 +2148,8 @@ impl KeybindingEditorModal { let action_arguments = self .action_arguments_editor .as_ref() - .map(|editor| editor.read(cx).editor.read(cx).text(cx)); + .map(|arguments_editor| arguments_editor.read(cx).editor.read(cx).text(cx)) + .filter(|args| !args.is_empty()); let value = action_arguments .as_ref() @@ -2259,29 +2260,11 @@ impl KeybindingEditorModal { let create = self.creating; - let status_toast = StatusToast::new( - format!( - "Saved edits to the {} action.", - &self.editing_keybind.action().humanized_name - ), - cx, - move |this, _cx| { - this.icon(ToastIcon::new(IconName::Check).color(Color::Success)) - .dismiss_button(true) - // .action("Undo", f) todo: wire the undo functionality - }, - ); - - self.workspace - .update(cx, |workspace, cx| { - workspace.toggle_status_toast(status_toast, cx); - }) - .log_err(); - cx.spawn(async move |this, cx| { let action_name = existing_keybind.action().name; + let humanized_action_name = existing_keybind.action().humanized_name.clone(); - if let Err(err) = save_keybinding_update( + match save_keybinding_update( create, existing_keybind, &action_mapping, @@ -2291,25 +2274,43 @@ impl KeybindingEditorModal { ) .await { - this.update(cx, |this, cx| { - this.set_error(InputError::error(err), cx); - }) - .log_err(); - } else { - this.update(cx, |this, cx| { - this.keymap_editor.update(cx, |keymap, cx| { - keymap.previous_edit = Some(PreviousEdit::Keybinding { - action_mapping, - action_name, - fallback: keymap - .table_interaction_state - .read(cx) - .get_scrollbar_offset(Axis::Vertical), - }) - }); - cx.emit(DismissEvent); - }) - .ok(); + Ok(_) => { + this.update(cx, |this, cx| { + this.keymap_editor.update(cx, |keymap, cx| { + keymap.previous_edit = Some(PreviousEdit::Keybinding { + action_mapping, + action_name, + fallback: keymap + .table_interaction_state + .read(cx) + .get_scrollbar_offset(Axis::Vertical), + }); + let status_toast = StatusToast::new( + format!("Saved edits to the {} action.", humanized_action_name), + cx, + move |this, _cx| { + this.icon(ToastIcon::new(IconName::Check).color(Color::Success)) + .dismiss_button(true) + // .action("Undo", f) todo: wire the undo functionality + }, + ); + + this.workspace + .update(cx, |workspace, cx| { + 
workspace.toggle_status_toast(status_toast, cx); + }) + .log_err(); + }); + cx.emit(DismissEvent); + }) + .ok(); + } + Err(err) => { + this.update(cx, |this, cx| { + this.set_error(InputError::error(err), cx); + }) + .log_err(); + } } }) .detach(); @@ -2906,7 +2907,7 @@ async fn save_keybinding_update( let updated_keymap_contents = settings::KeymapFile::update_keybinding(operation, keymap_contents, tab_size) - .context("Failed to update keybinding")?; + .map_err(|err| anyhow::anyhow!("Could not save updated keybinding: {}", err))?; fs.write( paths::keymap_file().as_path(), updated_keymap_contents.as_bytes(), diff --git a/crates/ui/src/styles/animation.rs b/crates/ui/src/styles/animation.rs index ee5352d454..acea834548 100644 --- a/crates/ui/src/styles/animation.rs +++ b/crates/ui/src/styles/animation.rs @@ -31,7 +31,7 @@ pub enum AnimationDirection { FromTop, } -pub trait DefaultAnimations: Styled + Sized { +pub trait DefaultAnimations: Styled + Sized + Element { fn animate_in( self, animation_type: AnimationDirection, @@ -44,8 +44,13 @@ pub trait DefaultAnimations: Styled + Sized { AnimationDirection::FromTop => "animate_from_top", }; + let animation_id = self.id().map_or_else( + || ElementId::from(animation_name), + |id| (id, animation_name).into(), + ); + self.with_animation( - animation_name, + animation_id, gpui::Animation::new(AnimationDuration::Fast.into()).with_easing(ease_out_quint()), move |mut this, delta| { let start_opacity = 0.4; @@ -91,7 +96,7 @@ pub trait DefaultAnimations: Styled + Sized { } } -impl DefaultAnimations for E {} +impl DefaultAnimations for E {} // Don't use this directly, it only exists to show animation previews #[derive(RegisterComponent)] @@ -132,7 +137,7 @@ impl Component for Animation { .left(px(offset)) .rounded_md() .bg(gpui::red()) - .animate_in(AnimationDirection::FromBottom, false), + .animate_in_from_bottom(false), ) .into_any_element(), ), @@ -151,7 +156,7 @@ impl Component for Animation { .left(px(offset)) .rounded_md() .bg(gpui::blue()) - .animate_in(AnimationDirection::FromTop, false), + .animate_in_from_top(false), ) .into_any_element(), ), @@ -170,7 +175,7 @@ impl Component for Animation { .top(px(offset)) .rounded_md() .bg(gpui::green()) - .animate_in(AnimationDirection::FromLeft, false), + .animate_in_from_left(false), ) .into_any_element(), ), @@ -189,7 +194,7 @@ impl Component for Animation { .top(px(offset)) .rounded_md() .bg(gpui::yellow()) - .animate_in(AnimationDirection::FromRight, false), + .animate_in_from_right(false), ) .into_any_element(), ), @@ -214,7 +219,7 @@ impl Component for Animation { .left(px(offset)) .rounded_md() .bg(gpui::red()) - .animate_in(AnimationDirection::FromBottom, true), + .animate_in_from_bottom(true), ) .into_any_element(), ), @@ -233,7 +238,7 @@ impl Component for Animation { .left(px(offset)) .rounded_md() .bg(gpui::blue()) - .animate_in(AnimationDirection::FromTop, true), + .animate_in_from_top(true), ) .into_any_element(), ), @@ -252,7 +257,7 @@ impl Component for Animation { .top(px(offset)) .rounded_md() .bg(gpui::green()) - .animate_in(AnimationDirection::FromLeft, true), + .animate_in_from_left(true), ) .into_any_element(), ), @@ -271,7 +276,7 @@ impl Component for Animation { .top(px(offset)) .rounded_md() .bg(gpui::yellow()) - .animate_in(AnimationDirection::FromRight, true), + .animate_in_from_right(true), ) .into_any_element(), ), diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs index 1ae22c5fef..8686fda53f 100644 --- a/crates/vercel/src/vercel.rs +++ 
b/crates/vercel/src/vercel.rs @@ -71,4 +71,8 @@ impl Model { Model::Custom { .. } => false, } } + + pub fn supports_prompt_cache_key(&self) -> bool { + false + } } diff --git a/crates/workspace/src/toast_layer.rs b/crates/workspace/src/toast_layer.rs index 28be3e7e47..5157945548 100644 --- a/crates/workspace/src/toast_layer.rs +++ b/crates/workspace/src/toast_layer.rs @@ -3,7 +3,7 @@ use std::{ time::{Duration, Instant}, }; -use gpui::{AnyView, DismissEvent, Entity, FocusHandle, ManagedView, Subscription, Task}; +use gpui::{AnyView, DismissEvent, Entity, EntityId, FocusHandle, ManagedView, Subscription, Task}; use ui::{animation::DefaultAnimations, prelude::*}; use zed_actions::toast; @@ -76,6 +76,7 @@ impl ToastViewHandle for Entity { } pub struct ActiveToast { + id: EntityId, toast: Box, action: Option, _subscriptions: [Subscription; 1], @@ -113,9 +114,9 @@ impl ToastLayer { V: ToastView, { if let Some(active_toast) = &self.active_toast { - let is_close = active_toast.toast.view().downcast::().is_ok(); - let did_close = self.hide_toast(cx); - if is_close || !did_close { + let show_new = active_toast.id != new_toast.entity_id(); + self.hide_toast(cx); + if !show_new { return; } } @@ -130,11 +131,12 @@ impl ToastLayer { let focus_handle = cx.focus_handle(); self.active_toast = Some(ActiveToast { - toast: Box::new(new_toast.clone()), - action, _subscriptions: [cx.subscribe(&new_toast, |this, _, _: &DismissEvent, cx| { this.hide_toast(cx); })], + id: new_toast.entity_id(), + toast: Box::new(new_toast), + action, focus_handle, }); @@ -143,11 +145,9 @@ impl ToastLayer { cx.notify(); } - pub fn hide_toast(&mut self, cx: &mut Context) -> bool { + pub fn hide_toast(&mut self, cx: &mut Context) { self.active_toast.take(); cx.notify(); - - true } pub fn active_toast(&self) -> Option> @@ -218,11 +218,10 @@ impl Render for ToastLayer { let Some(active_toast) = &self.active_toast else { return div(); }; - let handle = cx.weak_entity(); div().absolute().size_full().bottom_0().left_0().child( v_flex() - .id("toast-layer-container") + .id(("toast-layer-container", active_toast.id)) .absolute() .w_full() .bottom(px(0.)) @@ -234,17 +233,14 @@ impl Render for ToastLayer { h_flex() .id("active-toast-container") .occlude() - .on_hover(move |hover_start, _window, cx| { - let Some(this) = handle.upgrade() else { - return; - }; + .on_hover(cx.listener(|this, hover_start, _window, cx| { if *hover_start { - this.update(cx, |this, _| this.pause_dismiss_timer()); + this.pause_dismiss_timer(); } else { - this.update(cx, |this, cx| this.restart_dismiss_timer(cx)); + this.restart_dismiss_timer(cx); } cx.stop_propagation(); - }) + })) .on_click(|_, _, cx| { cx.stop_propagation(); }) diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index ac116b2f8f..23cd5b9320 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -105,6 +105,10 @@ impl Model { } } + pub fn supports_prompt_cache_key(&self) -> bool { + false + } + pub fn supports_tool(&self) -> bool { match self { Self::Grok2Vision diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 5bd6d981fa..f21a1bc185 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
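The `ToastLayer` change above replaces a type-based check with an identity check: showing a toast that is literally the same entity as the visible one becomes a no-op, while a different toast of the same view type now swaps in correctly. The decision, reduced to its core:

```rust
// `u64` stands in for gpui's `EntityId` here.
struct ActiveToast {
    id: u64,
}

fn should_show(active: Option<&ActiveToast>, new_id: u64) -> bool {
    match active {
        // Same entity re-shown: hide it and stop.
        Some(active) if active.id == new_id => false,
        // Different toast (or none visible): show the new one.
        _ => true,
    }
}
```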
edition.workspace = true name = "zed" -version = "0.199.0" +version = "0.199.10" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] diff --git a/crates/zed/RELEASE_CHANNEL b/crates/zed/RELEASE_CHANNEL index 38f8e886e1..870bbe4e50 100644 --- a/crates/zed/RELEASE_CHANNEL +++ b/crates/zed/RELEASE_CHANNEL @@ -1 +1 @@ -dev +stable \ No newline at end of file diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index e4a14b5d32..dd25ac2f1c 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -8,6 +8,7 @@ use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use client::{Client, ProxySettings, UserStore, parse_zed_link}; use collab_ui::channel_view::ChannelView; use collections::HashMap; +use crashes::InitCrashHandler; use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; use extension::ExtensionHostProxy; @@ -271,7 +272,15 @@ pub fn main() { let session = app.background_executor().block(Session::new()); app.background_executor() - .spawn(crashes::init(session_id.clone())) + .spawn(crashes::init(InitCrashHandler { + session_id: session_id.clone(), + zed_version: app_version.to_string(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: app_commit_sha + .as_ref() + .map(|sha| sha.full()) + .unwrap_or_else(|| "no sha".to_owned()), + })) .detach(); reliability::init_panic_hook( app_version, diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 53539699cc..c27f4cb0a8 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -12,6 +12,7 @@ use gpui::{App, AppContext as _, SemanticVersion}; use http_client::{self, HttpClient, HttpClientWithUrl, HttpRequestExt, Method}; use paths::{crashes_dir, crashes_retired_dir}; use project::Project; +use proto::{CrashReport, GetCrashFilesResponse}; use release_channel::{AppCommitSha, RELEASE_CHANNEL, ReleaseChannel}; use reqwest::multipart::{Form, Part}; use settings::Settings; @@ -51,10 +52,6 @@ pub fn init_panic_hook( thread::yield_now(); } } - crashes::handle_panic(); - - let thread = thread::current(); - let thread_name = thread.name().unwrap_or(""); let payload = info .payload() @@ -63,6 +60,11 @@ pub fn init_panic_hook( .or_else(|| info.payload().downcast_ref::().cloned()) .unwrap_or_else(|| "Box".to_string()); + crashes::handle_panic(payload.clone(), info.location()); + + let thread = thread::current(); + let thread_name = thread.name().unwrap_or(""); + if *release_channel::RELEASE_CHANNEL == ReleaseChannel::Dev { let location = info.location().unwrap(); let backtrace = Backtrace::new(); @@ -214,45 +216,53 @@ pub fn init( let installation_id = installation_id.clone(); let system_id = system_id.clone(); - if let Some(ssh_client) = project.ssh_client() { - ssh_client.update(cx, |client, cx| { - if TelemetrySettings::get_global(cx).diagnostics { - let request = client.proto_client().request(proto::GetCrashFiles {}); - cx.background_spawn(async move { - let crash_files = request.await?; - for crash in crash_files.crashes { - let mut panic: Option = crash - .panic_contents - .and_then(|s| serde_json::from_str(&s).log_err()); + let Some(ssh_client) = project.ssh_client() else { + return; + }; + ssh_client.update(cx, |client, cx| { + if !TelemetrySettings::get_global(cx).diagnostics { + return; + } + let request = client.proto_client().request(proto::GetCrashFiles {}); + cx.background_spawn(async move { + let GetCrashFilesResponse { + legacy_panics, + crashes, + } = request.await?; - if let Some(panic) = panic.as_mut() { - 
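Both panic hooks now extract the payload and location first and hand them to `crashes::handle_panic` before any telemetry work. The payload extraction as a standalone function (panic payloads are `&str` or `String` in practice):

```rust
fn panic_payload(info: &std::panic::PanicHookInfo) -> String {
    info.payload()
        .downcast_ref::<&str>()
        .map(|s| s.to_string())
        .or_else(|| info.payload().downcast_ref::<String>().cloned())
        .unwrap_or_else(|| "Box<dyn Any>".to_string())
}
```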
panic.session_id = session_id.clone(); - panic.system_id = system_id.clone(); - panic.installation_id = installation_id.clone(); - } - - if let Some(minidump) = crash.minidump_contents { - upload_minidump( - http_client.clone(), - minidump.clone(), - panic.as_ref(), - ) - .await - .log_err(); - } - - if let Some(panic) = panic { - upload_panic(&http_client, &panic_report_url, panic, &mut None) - .await?; - } - } - - anyhow::Ok(()) - }) - .detach_and_log_err(cx); + for panic in legacy_panics { + if let Some(mut panic) = serde_json::from_str::(&panic).log_err() { + panic.session_id = session_id.clone(); + panic.system_id = system_id.clone(); + panic.installation_id = installation_id.clone(); + upload_panic(&http_client, &panic_report_url, panic, &mut None).await?; + } } + + let Some(endpoint) = MINIDUMP_ENDPOINT.as_ref() else { + return Ok(()); + }; + for CrashReport { + metadata, + minidump_contents, + } in crashes + { + if let Some(metadata) = serde_json::from_str(&metadata).log_err() { + upload_minidump( + http_client.clone(), + endpoint, + minidump_contents, + &metadata, + ) + .await + .log_err(); + } + } + + anyhow::Ok(()) }) - } + .detach_and_log_err(cx); + }) }) .detach(); } @@ -466,16 +476,18 @@ fn upload_panics_and_crashes( installation_id: Option, cx: &App, ) { - let telemetry_settings = *client::TelemetrySettings::get_global(cx); + if !client::TelemetrySettings::get_global(cx).diagnostics { + return; + } cx.background_spawn(async move { - let most_recent_panic = - upload_previous_panics(http.clone(), &panic_report_url, telemetry_settings) - .await - .log_err() - .flatten(); - upload_previous_crashes(http, most_recent_panic, installation_id, telemetry_settings) + upload_previous_minidumps(http.clone()).await.warn_on_err(); + let most_recent_panic = upload_previous_panics(http.clone(), &panic_report_url) .await .log_err() + .flatten(); + upload_previous_crashes(http, most_recent_panic, installation_id) + .await + .log_err(); }) .detach() } @@ -484,7 +496,6 @@ fn upload_panics_and_crashes( async fn upload_previous_panics( http: Arc, panic_report_url: &Url, - telemetry_settings: client::TelemetrySettings, ) -> anyhow::Result> { let mut children = smol::fs::read_dir(paths::logs_dir()).await?; @@ -507,58 +518,41 @@ async fn upload_previous_panics( continue; } - if telemetry_settings.diagnostics { - let panic_file_content = smol::fs::read_to_string(&child_path) - .await - .context("error reading panic file")?; + let panic_file_content = smol::fs::read_to_string(&child_path) + .await + .context("error reading panic file")?; - let panic: Option = serde_json::from_str(&panic_file_content) - .log_err() - .or_else(|| { - panic_file_content - .lines() - .next() - .and_then(|line| serde_json::from_str(line).ok()) - }) - .unwrap_or_else(|| { - log::error!("failed to deserialize panic file {:?}", panic_file_content); - None - }); + let panic: Option = serde_json::from_str(&panic_file_content) + .log_err() + .or_else(|| { + panic_file_content + .lines() + .next() + .and_then(|line| serde_json::from_str(line).ok()) + }) + .unwrap_or_else(|| { + log::error!("failed to deserialize panic file {:?}", panic_file_content); + None + }); - if let Some(panic) = panic { - let minidump_path = paths::logs_dir() - .join(&panic.session_id) - .with_extension("dmp"); - if minidump_path.exists() { - let minidump = smol::fs::read(&minidump_path) - .await - .context("Failed to read minidump")?; - if upload_minidump(http.clone(), minidump, Some(&panic)) - .await - .log_err() - .is_some() - { - 
fs::remove_file(minidump_path).ok(); - } - } - - if !upload_panic(&http, &panic_report_url, panic, &mut most_recent_panic).await? { - continue; - } - } + if let Some(panic) = panic + && upload_panic(&http, &panic_report_url, panic, &mut most_recent_panic).await? + { + // We've done what we can, delete the file + fs::remove_file(child_path) + .context("error removing panic") + .log_err(); } - - // We've done what we can, delete the file - fs::remove_file(child_path) - .context("error removing panic") - .log_err(); } - if MINIDUMP_ENDPOINT.is_none() { - return Ok(most_recent_panic); - } + Ok(most_recent_panic) +} + +pub async fn upload_previous_minidumps(http: Arc) -> anyhow::Result<()> { + let Some(minidump_endpoint) = MINIDUMP_ENDPOINT.as_ref() else { + return Err(anyhow::anyhow!("Minidump endpoint not set")); + }; - // loop back over the directory again to upload any minidumps that are missing panics let mut children = smol::fs::read_dir(paths::logs_dir()).await?; while let Some(child) = children.next().await { let child = child?; @@ -566,33 +560,35 @@ async fn upload_previous_panics( if child_path.extension() != Some(OsStr::new("dmp")) { continue; } - if upload_minidump( - http.clone(), - smol::fs::read(&child_path) - .await - .context("Failed to read minidump")?, - None, - ) - .await - .log_err() - .is_some() - { - fs::remove_file(child_path).ok(); + let mut json_path = child_path.clone(); + json_path.set_extension("json"); + if let Ok(metadata) = serde_json::from_slice(&smol::fs::read(&json_path).await?) { + if upload_minidump( + http.clone(), + &minidump_endpoint, + smol::fs::read(&child_path) + .await + .context("Failed to read minidump")?, + &metadata, + ) + .await + .log_err() + .is_some() + { + fs::remove_file(child_path).ok(); + fs::remove_file(json_path).ok(); + } } } - - Ok(most_recent_panic) + Ok(()) } async fn upload_minidump( http: Arc, + endpoint: &str, minidump: Vec, - panic: Option<&Panic>, + metadata: &crashes::CrashInfo, ) -> Result<()> { - let minidump_endpoint = MINIDUMP_ENDPOINT - .to_owned() - .ok_or_else(|| anyhow::anyhow!("Minidump endpoint not set"))?; - let mut form = Form::new() .part( "upload_file_minidump", @@ -600,18 +596,22 @@ async fn upload_minidump( .file_name("minidump.dmp") .mime_str("application/octet-stream")?, ) + .text( + "sentry[tags][channel]", + metadata.init.release_channel.clone(), + ) + .text("sentry[tags][version]", metadata.init.zed_version.clone()) + .text("sentry[release]", metadata.init.commit_sha.clone()) .text("platform", "rust"); - if let Some(panic) = panic { - form = form - .text( - "sentry[release]", - format!("{}-{}", panic.release_channel, panic.app_version), - ) - .text("sentry[logentry][formatted]", panic.payload.clone()); + if let Some(panic_info) = metadata.panic.as_ref() { + form = form.text("sentry[logentry][formatted]", panic_info.message.clone()); + form = form.text("span", panic_info.span.clone()); + // TODO: add gpu-context, feature-flag-context, and more of device-context like gpu + // name, screen resolution, available ram, device model, etc } let mut response_text = String::new(); - let mut response = http.send_multipart_form(&minidump_endpoint, form).await?; + let mut response = http.send_multipart_form(endpoint, form).await?; response .body_mut() .read_to_string(&mut response_text) @@ -661,11 +661,7 @@ async fn upload_previous_crashes( http: Arc, most_recent_panic: Option<(i64, String)>, installation_id: Option, - telemetry_settings: client::TelemetrySettings, ) -> Result<()> { - if !telemetry_settings.diagnostics 
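`upload_minidump` now takes the endpoint and crash metadata explicitly and attaches the release channel, version, and commit SHA as Sentry tags. A sketch of the form construction using `reqwest`'s multipart API, with field names as in the hunk above:

```rust
use reqwest::multipart::{Form, Part};

fn minidump_form(
    minidump: Vec<u8>,
    channel: &str,
    version: &str,
    commit_sha: &str,
) -> anyhow::Result<Form> {
    Ok(Form::new()
        .part(
            "upload_file_minidump",
            Part::bytes(minidump)
                .file_name("minidump.dmp")
                .mime_str("application/octet-stream")?,
        )
        .text("sentry[tags][channel]", channel.to_string())
        .text("sentry[tags][version]", version.to_string())
        .text("sentry[release]", commit_sha.to_string())
        .text("platform", "rust"))
}
```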
{ - return Ok(()); - } let last_uploaded = KEY_VALUE_STORE .read_kvp(LAST_CRASH_UPLOADED)? .unwrap_or("zed-2024-01-17-221900.ips".to_string()); // don't upload old crash reports from before we had this. diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index b9f561c0e7..da4b6e78c6 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -5,11 +5,9 @@ use editor::Editor; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; use settings::SettingsStore; -use smol::stream::StreamExt; use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; use ui::Window; -use util::ResultExt; use workspace::Workspace; use zeta::{ProviderDataCollection, ZetaEditPredictionProvider}; @@ -59,25 +57,20 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { cx.on_action(clear_zeta_edit_history); let mut provider = all_language_settings(None, cx).edit_predictions.provider; - cx.spawn({ - let user_store = user_store.clone(); + cx.subscribe(&user_store, { let editors = editors.clone(); let client = client.clone(); - - async move |cx| { - let mut status = client.status(); - while let Some(_status) = status.next().await { - cx.update(|cx| { - assign_edit_prediction_providers( - &editors, - provider, - &client, - user_store.clone(), - cx, - ); - }) - .log_err(); + move |user_store, event, cx| match event { + client::user::Event::PrivateUserInfoUpdated => { + assign_edit_prediction_providers( + &editors, + provider, + &client, + user_store.clone(), + cx, + ); } + _ => {} } }) .detach(); diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index 9f1d02b790..ee76308ff3 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -26,6 +26,7 @@ collections.workspace = true command_palette_hooks.workspace = true copilot.workspace = true db.workspace = true +edit_prediction.workspace = true editor.workspace = true feature_flags.workspace = true fs.workspace = true @@ -33,13 +34,13 @@ futures.workspace = true gpui.workspace = true http_client.workspace = true indoc.workspace = true -edit_prediction.workspace = true language.workspace = true language_model.workspace = true log.workspace = true menu.workspace = true postage.workspace = true project.workspace = true +rand.workspace = true regex.workspace = true release_channel.workspace = true serde.workspace = true diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index b1bd737dbf..c711b2734c 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -432,6 +432,7 @@ impl Zeta { body, editable_range, } = gather_task.await?; + let done_gathering_context_at = Instant::now(); log::debug!( "Events:\n{}\nExcerpt:\n{:?}", @@ -484,6 +485,7 @@ impl Zeta { } }; + let received_response_at = Instant::now(); log::debug!("completion response: {}", &response.output_excerpt); if let Some(usage) = usage { @@ -495,7 +497,7 @@ impl Zeta { .ok(); } - Self::process_completion_response( + let edit_prediction = Self::process_completion_response( response, buffer, &snapshot, @@ -508,7 +510,25 @@ impl Zeta { buffer_snapshotted_at, &cx, ) - .await + .await; + + let finished_at = Instant::now(); + + // record latency for ~1% of requests + if rand::random::() <= 2 { + telemetry::event!( + "Edit Prediction Request", + context_latency = done_gathering_context_at + 
.duration_since(buffer_snapshotted_at) .as_millis(), request_latency = received_response_at .duration_since(done_gathering_context_at) .as_millis(), process_latency = finished_at.duration_since(received_response_at).as_millis() ); } + + edit_prediction }) } diff --git a/script/bundle-mac b/script/bundle-mac index b2be573235..f2a5bf313d 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -207,7 +207,7 @@ function prepare_binaries() { rm -f target/${architecture}/${target_dir}/Zed.dwarf.gz echo "Gzipping dSYMs for $architecture" - gzip -f target/${architecture}/${target_dir}/Zed.dwarf + gzip -kf target/${architecture}/${target_dir}/Zed.dwarf echo "Uploading dSYMs${architecture} for $architecture to by-uuid/${uuid}.dwarf.gz" upload_to_blob_store_public \ @@ -367,19 +367,25 @@ else gzip -f --stdout --best target/aarch64-apple-darwin/release/remote_server > target/zed-remote-server-macos-aarch64.gz fi -# Upload debug info to sentry.io -if ! command -v sentry-cli >/dev/null 2>&1; then - echo "sentry-cli not found. skipping sentry upload." - echo "install with: 'curl -sL https://sentry.io/get-cli | bash'" -else +function upload_debug_info() { architecture=$1 if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then echo "Uploading zed debug symbols to sentry..." # note: this uploads the unstripped binary which is needed because it contains # .eh_frame data for stack unwinding. see https://github.com/getsentry/symbolic/issues/783 sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \ - "target/x86_64-apple-darwin/${target_dir}/" \ - "target/aarch64-apple-darwin/${target_dir}/" + "target/${architecture}/${target_dir}/zed" \ + "target/${architecture}/${target_dir}/remote_server" \ + "target/${architecture}/${target_dir}/zed.dwarf" else echo "missing SENTRY_AUTH_TOKEN. skipping sentry upload." fi +} + +if command -v sentry-cli >/dev/null 2>&1; then + upload_debug_info "aarch64-apple-darwin" + upload_debug_info "x86_64-apple-darwin" +else + echo "sentry-cli not found. skipping sentry upload."
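The latency event in zeta.rs is sampled rather than sent on every request. Assuming the stripped type parameter is `u8` (consistent with the "~1%" comment in the hunk), the gate accepts the values 0, 1, and 2 out of 256, i.e. roughly 1.2% of requests:

```rust
fn should_record_latency() -> bool {
    // 3 of 256 possible byte values pass: ~1.2%.
    rand::random::<u8>() <= 2
}
```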
+ echo "install with: 'curl -sL https://sentry.io/get-cli | bash'" fi diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index 5678e46236..338985ed95 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -305,7 +305,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -334,7 +334,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -362,7 +362,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -391,7 +391,7 @@ scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -429,7 +429,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = 
false, features = ["serde"] } @@ -468,7 +468,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -509,7 +509,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -548,7 +548,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -568,7 +568,7 @@ ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -592,7 +592,7 @@ ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } tower = { version = "0.5", default-features = false, features = ["timeout", "util"] } @@ -636,7 +636,7 @@ rustix-dff4ba8e3ae991db = { package = "rustix", version = 
"1", features = ["fs", scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] } @@ -675,7 +675,7 @@ rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["ev rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", features = ["fs", "net", "process", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } -tokio-rustls = { version = "0.26", default-features = false, features = ["ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } tokio-socks = { version = "0.5", features = ["futures-io"] } tokio-stream = { version = "0.1", features = ["fs"] } toml_datetime = { version = "0.6", default-features = false, features = ["serde"] }